From fab4879e280418600eb66b3f444c95f3d413267c Mon Sep 17 00:00:00 2001
From: Google APIs
Date: Wed, 13 Mar 2019 00:37:11 +0000
Subject: [PATCH] Autogenerated update (2019-03-13)

Update:
- cloudasset_v1beta1
- cloudbuild_v1
- cloudbuild_v1alpha1
- cloudiot_v1
- cloudprivatecatalogproducer_v1beta1
- containeranalysis_v1beta1
- content_v2
- content_v2_1
- dialogflow_v2
- genomics_v1
- iap_v1
- iap_v1beta1
- language_v1
- language_v1beta1
- language_v1beta2
- poly_v1
- tpu_v1
- tpu_v1alpha1
- videointelligence_v1beta2
- videointelligence_v1p1beta1
- videointelligence_v1p2beta1
- vision_v1
- vision_v1p1beta1
- vision_v1p2beta1
---
 api_names_out.yaml | 990 ++++
 generated/google/apis/cloudasset_v1beta1.rb | 2 +-
 .../google/apis/cloudasset_v1beta1/classes.rb | 20 +-
 generated/google/apis/cloudbuild_v1.rb | 2 +-
 .../google/apis/cloudbuild_v1/classes.rb | 6 +
 .../apis/cloudbuild_v1/representations.rb | 2 +
 generated/google/apis/cloudbuild_v1alpha1.rb | 2 +-
 .../apis/cloudbuild_v1alpha1/classes.rb | 6 +
 .../cloudbuild_v1alpha1/representations.rb | 2 +
 generated/google/apis/cloudiot_v1.rb | 2 +-
 generated/google/apis/cloudiot_v1/classes.rb | 22 +-
 .../cloudprivatecatalogproducer_v1beta1.rb | 2 +-
 .../classes.rb | 20 +-
 .../google/apis/containeranalysis_v1beta1.rb | 2 +-
 .../apis/containeranalysis_v1beta1/classes.rb | 20 +-
 generated/google/apis/content_v2.rb | 2 +-
 generated/google/apis/content_v2/classes.rb | 75 +-
 generated/google/apis/content_v2/service.rb | 4 +-
 generated/google/apis/content_v2_1.rb | 2 +-
 generated/google/apis/content_v2_1/classes.rb | 24 +-
 .../apis/content_v2_1/representations.rb | 3 -
 generated/google/apis/content_v2_1/service.rb | 4 +-
 generated/google/apis/dialogflow_v2.rb | 2 +-
 .../google/apis/dialogflow_v2/classes.rb | 30 +-
 generated/google/apis/genomics_v1.rb | 11 +-
 generated/google/apis/genomics_v1/classes.rb | 3091 -----
 .../apis/genomics_v1/representations.rb | 1111 ----
 generated/google/apis/genomics_v1/service.rb | 1906 -------
 generated/google/apis/iap_v1.rb | 2 +-
 generated/google/apis/iap_v1/classes.rb | 2 +-
 generated/google/apis/iap_v1beta1.rb | 2 +-
 generated/google/apis/iap_v1beta1/classes.rb | 2 +-
 generated/google/apis/language_v1.rb | 2 +-
 generated/google/apis/language_v1/classes.rb | 10 +-
 generated/google/apis/language_v1beta1.rb | 2 +-
 .../google/apis/language_v1beta1/classes.rb | 10 +-
 generated/google/apis/language_v1beta2.rb | 2 +-
 .../google/apis/language_v1beta2/classes.rb | 10 +-
 generated/google/apis/poly_v1.rb | 2 +-
 generated/google/apis/poly_v1/classes.rb | 9 +-
 generated/google/apis/tpu_v1.rb | 2 +-
 generated/google/apis/tpu_v1/classes.rb | 20 +-
 generated/google/apis/tpu_v1alpha1.rb | 2 +-
 generated/google/apis/tpu_v1alpha1/classes.rb | 20 +-
 .../google/apis/videointelligence_v1beta2.rb | 2 +-
 .../apis/videointelligence_v1beta2/classes.rb | 214 +-
 .../representations.rb | 162 +-
 .../apis/videointelligence_v1p1beta1.rb | 2 +-
 .../videointelligence_v1p1beta1/classes.rb | 214 +-
 .../representations.rb | 162 +-
 .../apis/videointelligence_v1p2beta1.rb | 2 +-
 .../videointelligence_v1p2beta1/classes.rb | 214 +-
 .../representations.rb | 162 +-
 .../apis/videointelligence_v1p3beta1.rb | 36 +
 .../videointelligence_v1p3beta1/classes.rb | 4687 +++++++++++++++++
 .../representations.rb | 2005 +++++++
 .../videointelligence_v1p3beta1/service.rb | 94 +
 generated/google/apis/vision_v1.rb | 2 +-
 generated/google/apis/vision_v1/classes.rb | 116 +-
 generated/google/apis/vision_v1p1beta1.rb | 2 +-
 .../google/apis/vision_v1p1beta1/classes.rb | 106 +-
 generated/google/apis/vision_v1p2beta1.rb | 2 +-
 .../google/apis/vision_v1p2beta1/classes.rb | 106 +-
 63 files changed, 8793 insertions(+), 6961 deletions(-)
 create mode 100644 generated/google/apis/videointelligence_v1p3beta1.rb
 create mode 100644 generated/google/apis/videointelligence_v1p3beta1/classes.rb
 create mode 100644 generated/google/apis/videointelligence_v1p3beta1/representations.rb
 create mode 100644 generated/google/apis/videointelligence_v1p3beta1/service.rb

diff --git a/api_names_out.yaml b/api_names_out.yaml
index 3875a48b8..64fc8f0dc 100644
--- a/api_names_out.yaml
+++ b/api_names_out.yaml
@@ -15315,6 +15315,7 @@
 "/cloudbuild:v1/RepoSource/tagName": tag_name
 "/cloudbuild:v1/Results": results
 "/cloudbuild:v1/Results/artifactManifest": artifact_manifest
+"/cloudbuild:v1/Results/artifactTiming": artifact_timing
 "/cloudbuild:v1/Results/buildStepImages": build_step_images
 "/cloudbuild:v1/Results/buildStepImages/build_step_image": build_step_image
 "/cloudbuild:v1/Results/buildStepOutputs": build_step_outputs
@@ -15501,6 +15502,7 @@
 "/cloudbuild:v1alpha1/RepoSource/tagName": tag_name
 "/cloudbuild:v1alpha1/Results": results
 "/cloudbuild:v1alpha1/Results/artifactManifest": artifact_manifest
+"/cloudbuild:v1alpha1/Results/artifactTiming": artifact_timing
 "/cloudbuild:v1alpha1/Results/buildStepImages": build_step_images
 "/cloudbuild:v1alpha1/Results/buildStepImages/build_step_image": build_step_image
 "/cloudbuild:v1alpha1/Results/buildStepOutputs": build_step_outputs
@@ -103601,9 +103603,11 @@
 "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/segments": segments
 "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/segments/segment": segment
 "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelDetectionConfig": google_cloud_videointelligence_v1beta2_label_detection_config
+"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelDetectionConfig/frameConfidenceThreshold": frame_confidence_threshold
 "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelDetectionConfig/labelDetectionMode": label_detection_mode
 "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelDetectionConfig/model": model
 "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelDetectionConfig/stationaryCamera": stationary_camera
+"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelDetectionConfig/videoConfidenceThreshold": video_confidence_threshold
 "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelFrame": google_cloud_videointelligence_v1beta2_label_frame
 "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelFrame/confidence": confidence
 "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelFrame/timeOffset": time_offset
@@ -103954,6 +103958,128 @@
 "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p2beta1_WordInfo/speakerTag": speaker_tag
 "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p2beta1_WordInfo/startTime": start_time
 "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p2beta1_WordInfo/word": word
+"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoProgress": google_cloud_videointelligence_v1p3beta1_annotate_video_progress
+"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoProgress/annotationProgress": annotation_progress
+"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse": google_cloud_videointelligence_v1p3beta1_annotate_video_response +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_Entity": google_cloud_videointelligence_v1p3beta1_entity +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_Entity/description": description +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_Entity/entityId": entity_id +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_Entity/languageCode": language_code +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation": google_cloud_videointelligence_v1p3beta1_explicit_content_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation/frames": frames +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation/frames/frame": frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame": google_cloud_videointelligence_v1p3beta1_explicit_content_frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame/pornographyLikelihood": pornography_likelihood +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame/timeOffset": time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation": google_cloud_videointelligence_v1p3beta1_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/categoryEntities": category_entities +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/categoryEntities/category_entity": category_entity +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/entity": entity +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/frames": frames +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/frames/frame": frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/segments": segments +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/segments/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_LabelFrame": google_cloud_videointelligence_v1p3beta1_label_frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_LabelFrame/confidence": confidence +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_LabelFrame/timeOffset": time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_LabelSegment": google_cloud_videointelligence_v1p3beta1_label_segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_LabelSegment/confidence": confidence +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_LabelSegment/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox": google_cloud_videointelligence_v1p3beta1_normalized_bounding_box 
+"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/bottom": bottom +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/left": left +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/right": right +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/top": top +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingPoly": google_cloud_videointelligence_v1p3beta1_normalized_bounding_poly +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingPoly/vertices": vertices +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingPoly/vertices/vertex": vertex +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_NormalizedVertex": google_cloud_videointelligence_v1p3beta1_normalized_vertex +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_NormalizedVertex/x": x +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_NormalizedVertex/y": y +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation": google_cloud_videointelligence_v1p3beta1_object_tracking_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/confidence": confidence +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/entity": entity +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/frames": frames +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/frames/frame": frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/trackId": track_id +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingFrame": google_cloud_videointelligence_v1p3beta1_object_tracking_frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingFrame/normalizedBoundingBox": normalized_bounding_box +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingFrame/timeOffset": time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative": google_cloud_videointelligence_v1p3beta1_speech_recognition_alternative +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/confidence": confidence +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/transcript": transcript +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/words": words +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/words/word": word +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription": google_cloud_videointelligence_v1p3beta1_speech_transcription +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription/alternatives": alternatives +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription/alternatives/alternative": alternative +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription/languageCode": language_code 
+"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse": google_cloud_videointelligence_v1p3beta1_streaming_annotate_video_response +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse/annotationResultsUri": annotation_results_uri +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse/error": error +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults": google_cloud_videointelligence_v1p3beta1_streaming_video_annotation_results +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/labelAnnotations": label_annotations +? "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/labelAnnotations/label_annotation" +: label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/objectAnnotations": object_annotations +? "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/objectAnnotations/object_annotation" +: object_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation": google_cloud_videointelligence_v1p3beta1_text_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation/segments": segments +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation/segments/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation/text": text +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_TextFrame": google_cloud_videointelligence_v1p3beta1_text_frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_TextFrame/rotatedBoundingBox": rotated_bounding_box +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_TextFrame/timeOffset": time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_TextSegment": google_cloud_videointelligence_v1p3beta1_text_segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_TextSegment/confidence": confidence +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_TextSegment/frames": frames +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_TextSegment/frames/frame": frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_TextSegment/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress": google_cloud_videointelligence_v1p3beta1_video_annotation_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/progressPercent": progress_percent 
+"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults": google_cloud_videointelligence_v1p3beta1_video_annotation_results +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/error": error +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/frameLabelAnnotations": frame_label_annotations +? "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/frameLabelAnnotations/frame_label_annotation" +: frame_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/objectAnnotations": object_annotations +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/objectAnnotations/object_annotation": object_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/segmentLabelAnnotations": segment_label_annotations +? "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/segmentLabelAnnotations/segment_label_annotation" +: segment_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotLabelAnnotations": shot_label_annotations +? "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotLabelAnnotations/shot_label_annotation" +: shot_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/speechTranscriptions": speech_transcriptions +? 
"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/speechTranscriptions/speech_transcription" +: speech_transcription +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/textAnnotations": text_annotations +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/textAnnotations/text_annotation": text_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoSegment": google_cloud_videointelligence_v1p3beta1_video_segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_WordInfo": google_cloud_videointelligence_v1p3beta1_word_info +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_WordInfo/confidence": confidence +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_WordInfo/endTime": end_time +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_WordInfo/speakerTag": speaker_tag +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_WordInfo/startTime": start_time +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p3beta1_WordInfo/word": word "/videointelligence:v1beta2/GoogleCloudVideointelligenceV2beta1_AnnotateVideoProgress": google_cloud_videointelligence_v2beta1_annotate_video_progress "/videointelligence:v1beta2/GoogleCloudVideointelligenceV2beta1_AnnotateVideoProgress/annotationProgress": annotation_progress "/videointelligence:v1beta2/GoogleCloudVideointelligenceV2beta1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress @@ -104339,9 +104465,11 @@ "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/segments": segments "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/segments/segment": segment "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p1beta1_LabelDetectionConfig": google_cloud_videointelligence_v1p1beta1_label_detection_config +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p1beta1_LabelDetectionConfig/frameConfidenceThreshold": frame_confidence_threshold "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p1beta1_LabelDetectionConfig/labelDetectionMode": label_detection_mode "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p1beta1_LabelDetectionConfig/model": model "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p1beta1_LabelDetectionConfig/stationaryCamera": stationary_camera +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p1beta1_LabelDetectionConfig/videoConfidenceThreshold": video_confidence_threshold "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p1beta1_LabelFrame": google_cloud_videointelligence_v1p1beta1_label_frame "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p1beta1_LabelFrame/confidence": confidence "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p1beta1_LabelFrame/timeOffset": time_offset @@ -104568,6 +104696,129 @@ "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p2beta1_WordInfo/speakerTag": speaker_tag "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p2beta1_WordInfo/startTime": start_time "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p2beta1_WordInfo/word": word 
+"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoProgress": google_cloud_videointelligence_v1p3beta1_annotate_video_progress +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoProgress/annotationProgress": annotation_progress +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse": google_cloud_videointelligence_v1p3beta1_annotate_video_response +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_Entity": google_cloud_videointelligence_v1p3beta1_entity +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_Entity/description": description +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_Entity/entityId": entity_id +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_Entity/languageCode": language_code +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation": google_cloud_videointelligence_v1p3beta1_explicit_content_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation/frames": frames +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation/frames/frame": frame +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame": google_cloud_videointelligence_v1p3beta1_explicit_content_frame +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame/pornographyLikelihood": pornography_likelihood +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame/timeOffset": time_offset +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation": google_cloud_videointelligence_v1p3beta1_label_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/categoryEntities": category_entities +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/categoryEntities/category_entity": category_entity +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/entity": entity +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/frames": frames +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/frames/frame": frame +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/segments": segments +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/segments/segment": segment +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_LabelFrame": google_cloud_videointelligence_v1p3beta1_label_frame +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_LabelFrame/confidence": confidence +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_LabelFrame/timeOffset": time_offset +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_LabelSegment": google_cloud_videointelligence_v1p3beta1_label_segment 
+"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_LabelSegment/confidence": confidence +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_LabelSegment/segment": segment +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox": google_cloud_videointelligence_v1p3beta1_normalized_bounding_box +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/bottom": bottom +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/left": left +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/right": right +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/top": top +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingPoly": google_cloud_videointelligence_v1p3beta1_normalized_bounding_poly +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingPoly/vertices": vertices +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingPoly/vertices/vertex": vertex +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedVertex": google_cloud_videointelligence_v1p3beta1_normalized_vertex +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedVertex/x": x +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedVertex/y": y +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation": google_cloud_videointelligence_v1p3beta1_object_tracking_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/confidence": confidence +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/entity": entity +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/frames": frames +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/frames/frame": frame +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/segment": segment +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/trackId": track_id +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingFrame": google_cloud_videointelligence_v1p3beta1_object_tracking_frame +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingFrame/normalizedBoundingBox": normalized_bounding_box +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingFrame/timeOffset": time_offset +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative": google_cloud_videointelligence_v1p3beta1_speech_recognition_alternative +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/confidence": confidence +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/transcript": transcript +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/words": words +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/words/word": word +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription": google_cloud_videointelligence_v1p3beta1_speech_transcription 
+"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription/alternatives": alternatives +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription/alternatives/alternative": alternative +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription/languageCode": language_code +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse": google_cloud_videointelligence_v1p3beta1_streaming_annotate_video_response +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse/annotationResultsUri": annotation_results_uri +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse/error": error +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults": google_cloud_videointelligence_v1p3beta1_streaming_video_annotation_results +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/labelAnnotations": label_annotations +? "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/labelAnnotations/label_annotation" +: label_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/objectAnnotations": object_annotations +? "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/objectAnnotations/object_annotation" +: object_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/shotAnnotations": shot_annotations +? 
"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/shotAnnotations/shot_annotation" +: shot_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation": google_cloud_videointelligence_v1p3beta1_text_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation/segments": segments +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation/segments/segment": segment +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation/text": text +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_TextFrame": google_cloud_videointelligence_v1p3beta1_text_frame +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_TextFrame/rotatedBoundingBox": rotated_bounding_box +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_TextFrame/timeOffset": time_offset +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment": google_cloud_videointelligence_v1p3beta1_text_segment +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment/confidence": confidence +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment/frames": frames +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment/frames/frame": frame +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment/segment": segment +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress": google_cloud_videointelligence_v1p3beta1_video_annotation_progress +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/progressPercent": progress_percent +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults": google_cloud_videointelligence_v1p3beta1_video_annotation_results +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/error": error +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/frameLabelAnnotations": frame_label_annotations +? "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/frameLabelAnnotations/frame_label_annotation" +: frame_label_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/objectAnnotations": object_annotations +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/objectAnnotations/object_annotation": object_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/segmentLabelAnnotations": segment_label_annotations +? 
"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/segmentLabelAnnotations/segment_label_annotation" +: segment_label_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotLabelAnnotations": shot_label_annotations +? "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotLabelAnnotations/shot_label_annotation" +: shot_label_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/speechTranscriptions": speech_transcriptions +? "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/speechTranscriptions/speech_transcription" +: speech_transcription +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/textAnnotations": text_annotations +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/textAnnotations/text_annotation": text_annotation +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoSegment": google_cloud_videointelligence_v1p3beta1_video_segment +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo": google_cloud_videointelligence_v1p3beta1_word_info +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/confidence": confidence +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/endTime": end_time +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/speakerTag": speaker_tag +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/startTime": start_time +"/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/word": word "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV2beta1_AnnotateVideoProgress": google_cloud_videointelligence_v2beta1_annotate_video_progress "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV2beta1_AnnotateVideoProgress/annotationProgress": annotation_progress "/videointelligence:v1p1beta1/GoogleCloudVideointelligenceV2beta1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress @@ -105064,9 +105315,11 @@ "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p2beta1_LabelAnnotation/segments": segments "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p2beta1_LabelAnnotation/segments/segment": segment "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p2beta1_LabelDetectionConfig": google_cloud_videointelligence_v1p2beta1_label_detection_config +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p2beta1_LabelDetectionConfig/frameConfidenceThreshold": frame_confidence_threshold "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p2beta1_LabelDetectionConfig/labelDetectionMode": label_detection_mode "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p2beta1_LabelDetectionConfig/model": model 
"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p2beta1_LabelDetectionConfig/stationaryCamera": stationary_camera +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p2beta1_LabelDetectionConfig/videoConfidenceThreshold": video_confidence_threshold "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p2beta1_LabelFrame": google_cloud_videointelligence_v1p2beta1_label_frame "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p2beta1_LabelFrame/confidence": confidence "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p2beta1_LabelFrame/timeOffset": time_offset @@ -105179,6 +105432,129 @@ "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p2beta1_WordInfo/speakerTag": speaker_tag "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p2beta1_WordInfo/startTime": start_time "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p2beta1_WordInfo/word": word +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoProgress": google_cloud_videointelligence_v1p3beta1_annotate_video_progress +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoProgress/annotationProgress": annotation_progress +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse": google_cloud_videointelligence_v1p3beta1_annotate_video_response +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_Entity": google_cloud_videointelligence_v1p3beta1_entity +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_Entity/description": description +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_Entity/entityId": entity_id +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_Entity/languageCode": language_code +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation": google_cloud_videointelligence_v1p3beta1_explicit_content_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation/frames": frames +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation/frames/frame": frame +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame": google_cloud_videointelligence_v1p3beta1_explicit_content_frame +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame/pornographyLikelihood": pornography_likelihood +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame/timeOffset": time_offset +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation": google_cloud_videointelligence_v1p3beta1_label_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/categoryEntities": category_entities +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/categoryEntities/category_entity": category_entity +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/entity": entity 
+"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/frames": frames +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/frames/frame": frame +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/segments": segments +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/segments/segment": segment +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_LabelFrame": google_cloud_videointelligence_v1p3beta1_label_frame +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_LabelFrame/confidence": confidence +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_LabelFrame/timeOffset": time_offset +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_LabelSegment": google_cloud_videointelligence_v1p3beta1_label_segment +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_LabelSegment/confidence": confidence +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_LabelSegment/segment": segment +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox": google_cloud_videointelligence_v1p3beta1_normalized_bounding_box +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/bottom": bottom +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/left": left +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/right": right +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/top": top +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingPoly": google_cloud_videointelligence_v1p3beta1_normalized_bounding_poly +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingPoly/vertices": vertices +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingPoly/vertices/vertex": vertex +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedVertex": google_cloud_videointelligence_v1p3beta1_normalized_vertex +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedVertex/x": x +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedVertex/y": y +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation": google_cloud_videointelligence_v1p3beta1_object_tracking_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/confidence": confidence +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/entity": entity +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/frames": frames +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/frames/frame": frame +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/segment": segment +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/trackId": track_id +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingFrame": google_cloud_videointelligence_v1p3beta1_object_tracking_frame +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingFrame/normalizedBoundingBox": normalized_bounding_box 
+"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingFrame/timeOffset": time_offset +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative": google_cloud_videointelligence_v1p3beta1_speech_recognition_alternative +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/confidence": confidence +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/transcript": transcript +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/words": words +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/words/word": word +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription": google_cloud_videointelligence_v1p3beta1_speech_transcription +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription/alternatives": alternatives +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription/alternatives/alternative": alternative +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription/languageCode": language_code +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse": google_cloud_videointelligence_v1p3beta1_streaming_annotate_video_response +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse/annotationResultsUri": annotation_results_uri +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse/error": error +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults": google_cloud_videointelligence_v1p3beta1_streaming_video_annotation_results +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/labelAnnotations": label_annotations +? "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/labelAnnotations/label_annotation" +: label_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/objectAnnotations": object_annotations +? "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/objectAnnotations/object_annotation" +: object_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/shotAnnotations": shot_annotations +? 
"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/shotAnnotations/shot_annotation" +: shot_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation": google_cloud_videointelligence_v1p3beta1_text_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation/segments": segments +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation/segments/segment": segment +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation/text": text +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_TextFrame": google_cloud_videointelligence_v1p3beta1_text_frame +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_TextFrame/rotatedBoundingBox": rotated_bounding_box +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_TextFrame/timeOffset": time_offset +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment": google_cloud_videointelligence_v1p3beta1_text_segment +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment/confidence": confidence +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment/frames": frames +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment/frames/frame": frame +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment/segment": segment +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress": google_cloud_videointelligence_v1p3beta1_video_annotation_progress +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/progressPercent": progress_percent +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults": google_cloud_videointelligence_v1p3beta1_video_annotation_results +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/error": error +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/frameLabelAnnotations": frame_label_annotations +? "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/frameLabelAnnotations/frame_label_annotation" +: frame_label_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/objectAnnotations": object_annotations +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/objectAnnotations/object_annotation": object_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/segmentLabelAnnotations": segment_label_annotations +? 
"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/segmentLabelAnnotations/segment_label_annotation" +: segment_label_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotLabelAnnotations": shot_label_annotations +? "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotLabelAnnotations/shot_label_annotation" +: shot_label_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/speechTranscriptions": speech_transcriptions +? "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/speechTranscriptions/speech_transcription" +: speech_transcription +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/textAnnotations": text_annotations +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/textAnnotations/text_annotation": text_annotation +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoSegment": google_cloud_videointelligence_v1p3beta1_video_segment +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo": google_cloud_videointelligence_v1p3beta1_word_info +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/confidence": confidence +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/endTime": end_time +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/speakerTag": speaker_tag +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/startTime": start_time +"/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/word": word "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV2beta1_AnnotateVideoProgress": google_cloud_videointelligence_v2beta1_annotate_video_progress "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV2beta1_AnnotateVideoProgress/annotationProgress": annotation_progress "/videointelligence:v1p2beta1/GoogleCloudVideointelligenceV2beta1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress @@ -105319,6 +105695,620 @@ "/videointelligence:v1p2beta1/key": key "/videointelligence:v1p2beta1/quotaUser": quota_user "/videointelligence:v1p2beta1/videointelligence.videos.annotate": annotate_video +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_AnnotateVideoProgress": google_cloud_videointelligence_v1_annotate_video_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_AnnotateVideoProgress/annotationProgress": annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_AnnotateVideoResponse": google_cloud_videointelligence_v1_annotate_video_response 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_Entity": google_cloud_videointelligence_v1_entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_Entity/description": description +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_Entity/entityId": entity_id +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_Entity/languageCode": language_code +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ExplicitContentAnnotation": google_cloud_videointelligence_v1_explicit_content_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ExplicitContentAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ExplicitContentAnnotation/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ExplicitContentFrame": google_cloud_videointelligence_v1_explicit_content_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ExplicitContentFrame/pornographyLikelihood": pornography_likelihood +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ExplicitContentFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_LabelAnnotation": google_cloud_videointelligence_v1_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_LabelAnnotation/categoryEntities": category_entities +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_LabelAnnotation/categoryEntities/category_entity": category_entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_LabelAnnotation/entity": entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_LabelAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_LabelAnnotation/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_LabelAnnotation/segments": segments +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_LabelAnnotation/segments/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_LabelFrame": google_cloud_videointelligence_v1_label_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_LabelFrame/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_LabelFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_LabelSegment": google_cloud_videointelligence_v1_label_segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_LabelSegment/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_LabelSegment/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_NormalizedBoundingBox": google_cloud_videointelligence_v1_normalized_bounding_box +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_NormalizedBoundingBox/bottom": bottom +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_NormalizedBoundingBox/left": left +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_NormalizedBoundingBox/right": right +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_NormalizedBoundingBox/top": top +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_NormalizedBoundingPoly": 
google_cloud_videointelligence_v1_normalized_bounding_poly +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_NormalizedBoundingPoly/vertices": vertices +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_NormalizedBoundingPoly/vertices/vertex": vertex +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_NormalizedVertex": google_cloud_videointelligence_v1_normalized_vertex +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_NormalizedVertex/x": x +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_NormalizedVertex/y": y +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ObjectTrackingAnnotation": google_cloud_videointelligence_v1_object_tracking_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ObjectTrackingAnnotation/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ObjectTrackingAnnotation/entity": entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ObjectTrackingAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ObjectTrackingAnnotation/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ObjectTrackingAnnotation/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ObjectTrackingAnnotation/trackId": track_id +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ObjectTrackingFrame": google_cloud_videointelligence_v1_object_tracking_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ObjectTrackingFrame/normalizedBoundingBox": normalized_bounding_box +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_ObjectTrackingFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_SpeechRecognitionAlternative": google_cloud_videointelligence_v1_speech_recognition_alternative +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_SpeechRecognitionAlternative/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_SpeechRecognitionAlternative/transcript": transcript +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_SpeechRecognitionAlternative/words": words +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_SpeechRecognitionAlternative/words/word": word +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_SpeechTranscription": google_cloud_videointelligence_v1_speech_transcription +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_SpeechTranscription/alternatives": alternatives +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_SpeechTranscription/alternatives/alternative": alternative +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_SpeechTranscription/languageCode": language_code +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_TextAnnotation": google_cloud_videointelligence_v1_text_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_TextAnnotation/segments": segments +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_TextAnnotation/segments/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_TextAnnotation/text": text +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_TextFrame": google_cloud_videointelligence_v1_text_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_TextFrame/rotatedBoundingBox": rotated_bounding_box 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_TextFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_TextSegment": google_cloud_videointelligence_v1_text_segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_TextSegment/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_TextSegment/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_TextSegment/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_TextSegment/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationProgress": google_cloud_videointelligence_v1_video_annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationProgress/progressPercent": progress_percent +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults": google_cloud_videointelligence_v1_video_annotation_results +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/error": error +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/frameLabelAnnotations": frame_label_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/frameLabelAnnotations/frame_label_annotation": frame_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/objectAnnotations": object_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/objectAnnotations/object_annotation": object_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/segmentLabelAnnotations": segment_label_annotations +? 
"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/segmentLabelAnnotations/segment_label_annotation" +: segment_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/shotLabelAnnotations": shot_label_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/shotLabelAnnotations/shot_label_annotation": shot_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/speechTranscriptions": speech_transcriptions +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/speechTranscriptions/speech_transcription": speech_transcription +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/textAnnotations": text_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/textAnnotations/text_annotation": text_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoSegment": google_cloud_videointelligence_v1_video_segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_WordInfo": google_cloud_videointelligence_v1_word_info +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_WordInfo/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_WordInfo/endTime": end_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_WordInfo/speakerTag": speaker_tag +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_WordInfo/startTime": start_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1_WordInfo/word": word +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_AnnotateVideoProgress": google_cloud_videointelligence_v1beta2_annotate_video_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_AnnotateVideoProgress/annotationProgress": annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_AnnotateVideoResponse": google_cloud_videointelligence_v1beta2_annotate_video_response +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_Entity": google_cloud_videointelligence_v1beta2_entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_Entity/description": description +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_Entity/entityId": entity_id +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_Entity/languageCode": language_code +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ExplicitContentAnnotation": 
google_cloud_videointelligence_v1beta2_explicit_content_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ExplicitContentAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ExplicitContentAnnotation/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ExplicitContentFrame": google_cloud_videointelligence_v1beta2_explicit_content_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ExplicitContentFrame/pornographyLikelihood": pornography_likelihood +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ExplicitContentFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation": google_cloud_videointelligence_v1beta2_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/categoryEntities": category_entities +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/categoryEntities/category_entity": category_entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/entity": entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/segments": segments +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/segments/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_LabelFrame": google_cloud_videointelligence_v1beta2_label_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_LabelFrame/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_LabelFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_LabelSegment": google_cloud_videointelligence_v1beta2_label_segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_LabelSegment/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_LabelSegment/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_NormalizedBoundingBox": google_cloud_videointelligence_v1beta2_normalized_bounding_box +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_NormalizedBoundingBox/bottom": bottom +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_NormalizedBoundingBox/left": left +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_NormalizedBoundingBox/right": right +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_NormalizedBoundingBox/top": top +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_NormalizedBoundingPoly": google_cloud_videointelligence_v1beta2_normalized_bounding_poly +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_NormalizedBoundingPoly/vertices": vertices +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_NormalizedBoundingPoly/vertices/vertex": vertex +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_NormalizedVertex": google_cloud_videointelligence_v1beta2_normalized_vertex +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_NormalizedVertex/x": x +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_NormalizedVertex/y": y 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ObjectTrackingAnnotation": google_cloud_videointelligence_v1beta2_object_tracking_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ObjectTrackingAnnotation/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ObjectTrackingAnnotation/entity": entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ObjectTrackingAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ObjectTrackingAnnotation/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ObjectTrackingAnnotation/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ObjectTrackingAnnotation/trackId": track_id +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ObjectTrackingFrame": google_cloud_videointelligence_v1beta2_object_tracking_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ObjectTrackingFrame/normalizedBoundingBox": normalized_bounding_box +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_ObjectTrackingFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_SpeechRecognitionAlternative": google_cloud_videointelligence_v1beta2_speech_recognition_alternative +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_SpeechRecognitionAlternative/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_SpeechRecognitionAlternative/transcript": transcript +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_SpeechRecognitionAlternative/words": words +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_SpeechRecognitionAlternative/words/word": word +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_SpeechTranscription": google_cloud_videointelligence_v1beta2_speech_transcription +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_SpeechTranscription/alternatives": alternatives +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_SpeechTranscription/alternatives/alternative": alternative +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_SpeechTranscription/languageCode": language_code +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_TextAnnotation": google_cloud_videointelligence_v1beta2_text_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_TextAnnotation/segments": segments +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_TextAnnotation/segments/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_TextAnnotation/text": text +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_TextFrame": google_cloud_videointelligence_v1beta2_text_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_TextFrame/rotatedBoundingBox": rotated_bounding_box +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_TextFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_TextSegment": google_cloud_videointelligence_v1beta2_text_segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_TextSegment/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_TextSegment/frames": frames 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_TextSegment/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_TextSegment/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress": google_cloud_videointelligence_v1beta2_video_annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress/progressPercent": progress_percent +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults": google_cloud_videointelligence_v1beta2_video_annotation_results +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/error": error +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/frameLabelAnnotations": frame_label_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/frameLabelAnnotations/frame_label_annotation" +: frame_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/objectAnnotations": object_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/objectAnnotations/object_annotation": object_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/segmentLabelAnnotations": segment_label_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/segmentLabelAnnotations/segment_label_annotation" +: segment_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/shotLabelAnnotations": shot_label_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/shotLabelAnnotations/shot_label_annotation" +: shot_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/speechTranscriptions": speech_transcriptions +? 
"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/speechTranscriptions/speech_transcription" +: speech_transcription +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/textAnnotations": text_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/textAnnotations/text_annotation": text_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoSegment": google_cloud_videointelligence_v1beta2_video_segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_WordInfo": google_cloud_videointelligence_v1beta2_word_info +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_WordInfo/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_WordInfo/endTime": end_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_WordInfo/speakerTag": speaker_tag +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_WordInfo/startTime": start_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1beta2_WordInfo/word": word +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoProgress": google_cloud_videointelligence_v1p1beta1_annotate_video_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoProgress/annotationProgress": annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoResponse": google_cloud_videointelligence_v1p1beta1_annotate_video_response +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_Entity": google_cloud_videointelligence_v1p1beta1_entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_Entity/description": description +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_Entity/entityId": entity_id +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_Entity/languageCode": language_code +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentAnnotation": google_cloud_videointelligence_v1p1beta1_explicit_content_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentAnnotation/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentFrame": google_cloud_videointelligence_v1p1beta1_explicit_content_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentFrame/pornographyLikelihood": pornography_likelihood +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentFrame/timeOffset": time_offset 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation": google_cloud_videointelligence_v1p1beta1_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/categoryEntities": category_entities +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/categoryEntities/category_entity": category_entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/entity": entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/segments": segments +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/segments/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_LabelFrame": google_cloud_videointelligence_v1p1beta1_label_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_LabelFrame/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_LabelFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_LabelSegment": google_cloud_videointelligence_v1p1beta1_label_segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_LabelSegment/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_LabelSegment/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox": google_cloud_videointelligence_v1p1beta1_normalized_bounding_box +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox/bottom": bottom +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox/left": left +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox/right": right +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox/top": top +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingPoly": google_cloud_videointelligence_v1p1beta1_normalized_bounding_poly +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingPoly/vertices": vertices +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingPoly/vertices/vertex": vertex +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_NormalizedVertex": google_cloud_videointelligence_v1p1beta1_normalized_vertex +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_NormalizedVertex/x": x +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_NormalizedVertex/y": y +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ObjectTrackingAnnotation": google_cloud_videointelligence_v1p1beta1_object_tracking_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ObjectTrackingAnnotation/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ObjectTrackingAnnotation/entity": entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ObjectTrackingAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ObjectTrackingAnnotation/frames/frame": frame 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ObjectTrackingAnnotation/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ObjectTrackingAnnotation/trackId": track_id +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ObjectTrackingFrame": google_cloud_videointelligence_v1p1beta1_object_tracking_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ObjectTrackingFrame/normalizedBoundingBox": normalized_bounding_box +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_ObjectTrackingFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative": google_cloud_videointelligence_v1p1beta1_speech_recognition_alternative +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative/transcript": transcript +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative/words": words +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative/words/word": word +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_SpeechTranscription": google_cloud_videointelligence_v1p1beta1_speech_transcription +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_SpeechTranscription/alternatives": alternatives +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_SpeechTranscription/alternatives/alternative": alternative +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_SpeechTranscription/languageCode": language_code +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_TextAnnotation": google_cloud_videointelligence_v1p1beta1_text_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_TextAnnotation/segments": segments +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_TextAnnotation/segments/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_TextAnnotation/text": text +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_TextFrame": google_cloud_videointelligence_v1p1beta1_text_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_TextFrame/rotatedBoundingBox": rotated_bounding_box +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_TextFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_TextSegment": google_cloud_videointelligence_v1p1beta1_text_segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_TextSegment/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_TextSegment/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_TextSegment/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_TextSegment/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress": google_cloud_videointelligence_v1p1beta1_video_annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress/progressPercent": progress_percent 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults": google_cloud_videointelligence_v1p1beta1_video_annotation_results +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/error": error +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/frameLabelAnnotations": frame_label_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/frameLabelAnnotations/frame_label_annotation" +: frame_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/objectAnnotations": object_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/objectAnnotations/object_annotation": object_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/segmentLabelAnnotations": segment_label_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/segmentLabelAnnotations/segment_label_annotation" +: segment_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/shotLabelAnnotations": shot_label_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/shotLabelAnnotations/shot_label_annotation" +: shot_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/speechTranscriptions": speech_transcriptions +? 
"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/speechTranscriptions/speech_transcription" +: speech_transcription +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/textAnnotations": text_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/textAnnotations/text_annotation": text_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoSegment": google_cloud_videointelligence_v1p1beta1_video_segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_WordInfo": google_cloud_videointelligence_v1p1beta1_word_info +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_WordInfo/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_WordInfo/endTime": end_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_WordInfo/speakerTag": speaker_tag +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_WordInfo/startTime": start_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p1beta1_WordInfo/word": word +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_AnnotateVideoProgress": google_cloud_videointelligence_v1p2beta1_annotate_video_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_AnnotateVideoProgress/annotationProgress": annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_AnnotateVideoResponse": google_cloud_videointelligence_v1p2beta1_annotate_video_response +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_Entity": google_cloud_videointelligence_v1p2beta1_entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_Entity/description": description +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_Entity/entityId": entity_id +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_Entity/languageCode": language_code +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ExplicitContentAnnotation": google_cloud_videointelligence_v1p2beta1_explicit_content_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ExplicitContentAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ExplicitContentAnnotation/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ExplicitContentFrame": google_cloud_videointelligence_v1p2beta1_explicit_content_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ExplicitContentFrame/pornographyLikelihood": pornography_likelihood +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ExplicitContentFrame/timeOffset": time_offset 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_LabelAnnotation": google_cloud_videointelligence_v1p2beta1_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_LabelAnnotation/categoryEntities": category_entities +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_LabelAnnotation/categoryEntities/category_entity": category_entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_LabelAnnotation/entity": entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_LabelAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_LabelAnnotation/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_LabelAnnotation/segments": segments +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_LabelAnnotation/segments/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_LabelFrame": google_cloud_videointelligence_v1p2beta1_label_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_LabelFrame/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_LabelFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_LabelSegment": google_cloud_videointelligence_v1p2beta1_label_segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_LabelSegment/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_LabelSegment/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_NormalizedBoundingBox": google_cloud_videointelligence_v1p2beta1_normalized_bounding_box +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_NormalizedBoundingBox/bottom": bottom +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_NormalizedBoundingBox/left": left +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_NormalizedBoundingBox/right": right +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_NormalizedBoundingBox/top": top +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_NormalizedBoundingPoly": google_cloud_videointelligence_v1p2beta1_normalized_bounding_poly +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_NormalizedBoundingPoly/vertices": vertices +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_NormalizedBoundingPoly/vertices/vertex": vertex +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_NormalizedVertex": google_cloud_videointelligence_v1p2beta1_normalized_vertex +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_NormalizedVertex/x": x +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_NormalizedVertex/y": y +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ObjectTrackingAnnotation": google_cloud_videointelligence_v1p2beta1_object_tracking_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ObjectTrackingAnnotation/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ObjectTrackingAnnotation/entity": entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ObjectTrackingAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ObjectTrackingAnnotation/frames/frame": frame 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ObjectTrackingAnnotation/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ObjectTrackingAnnotation/trackId": track_id +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ObjectTrackingFrame": google_cloud_videointelligence_v1p2beta1_object_tracking_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ObjectTrackingFrame/normalizedBoundingBox": normalized_bounding_box +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_ObjectTrackingFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_SpeechRecognitionAlternative": google_cloud_videointelligence_v1p2beta1_speech_recognition_alternative +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_SpeechRecognitionAlternative/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_SpeechRecognitionAlternative/transcript": transcript +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_SpeechRecognitionAlternative/words": words +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_SpeechRecognitionAlternative/words/word": word +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_SpeechTranscription": google_cloud_videointelligence_v1p2beta1_speech_transcription +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_SpeechTranscription/alternatives": alternatives +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_SpeechTranscription/alternatives/alternative": alternative +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_SpeechTranscription/languageCode": language_code +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_TextAnnotation": google_cloud_videointelligence_v1p2beta1_text_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_TextAnnotation/segments": segments +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_TextAnnotation/segments/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_TextAnnotation/text": text +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_TextFrame": google_cloud_videointelligence_v1p2beta1_text_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_TextFrame/rotatedBoundingBox": rotated_bounding_box +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_TextFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_TextSegment": google_cloud_videointelligence_v1p2beta1_text_segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_TextSegment/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_TextSegment/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_TextSegment/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_TextSegment/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationProgress": google_cloud_videointelligence_v1p2beta1_video_annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationProgress/progressPercent": progress_percent 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults": google_cloud_videointelligence_v1p2beta1_video_annotation_results +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/error": error +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/frameLabelAnnotations": frame_label_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/frameLabelAnnotations/frame_label_annotation" +: frame_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/objectAnnotations": object_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/objectAnnotations/object_annotation": object_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/segmentLabelAnnotations": segment_label_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/segmentLabelAnnotations/segment_label_annotation" +: segment_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/shotLabelAnnotations": shot_label_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/shotLabelAnnotations/shot_label_annotation" +: shot_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/speechTranscriptions": speech_transcriptions +? 
"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/speechTranscriptions/speech_transcription" +: speech_transcription +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/textAnnotations": text_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoAnnotationResults/textAnnotations/text_annotation": text_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoSegment": google_cloud_videointelligence_v1p2beta1_video_segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_WordInfo": google_cloud_videointelligence_v1p2beta1_word_info +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_WordInfo/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_WordInfo/endTime": end_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_WordInfo/speakerTag": speaker_tag +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_WordInfo/startTime": start_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p2beta1_WordInfo/word": word +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoProgress": google_cloud_videointelligence_v1p3beta1_annotate_video_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoProgress/annotationProgress": annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoRequest": google_cloud_videointelligence_v1p3beta1_annotate_video_request +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoRequest/features": features +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoRequest/features/feature": feature +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoRequest/inputContent": input_content +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoRequest/inputUri": input_uri +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoRequest/locationId": location_id +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoRequest/outputUri": output_uri +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoRequest/videoContext": video_context +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse": google_cloud_videointelligence_v1p3beta1_annotate_video_response +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_Entity": google_cloud_videointelligence_v1p3beta1_entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_Entity/description": description 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_Entity/entityId": entity_id +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_Entity/languageCode": language_code +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation": google_cloud_videointelligence_v1p3beta1_explicit_content_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentDetectionConfig": google_cloud_videointelligence_v1p3beta1_explicit_content_detection_config +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentDetectionConfig/model": model +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame": google_cloud_videointelligence_v1p3beta1_explicit_content_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame/pornographyLikelihood": pornography_likelihood +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation": google_cloud_videointelligence_v1p3beta1_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/categoryEntities": category_entities +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/categoryEntities/category_entity": category_entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/entity": entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/segments": segments +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelAnnotation/segments/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelDetectionConfig": google_cloud_videointelligence_v1p3beta1_label_detection_config +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelDetectionConfig/frameConfidenceThreshold": frame_confidence_threshold +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelDetectionConfig/labelDetectionMode": label_detection_mode +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelDetectionConfig/model": model +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelDetectionConfig/stationaryCamera": stationary_camera +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelDetectionConfig/videoConfidenceThreshold": video_confidence_threshold +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelFrame": google_cloud_videointelligence_v1p3beta1_label_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelFrame/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelSegment": google_cloud_videointelligence_v1p3beta1_label_segment 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelSegment/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_LabelSegment/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox": google_cloud_videointelligence_v1p3beta1_normalized_bounding_box +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/bottom": bottom +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/left": left +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/right": right +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingBox/top": top +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingPoly": google_cloud_videointelligence_v1p3beta1_normalized_bounding_poly +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingPoly/vertices": vertices +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedBoundingPoly/vertices/vertex": vertex +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedVertex": google_cloud_videointelligence_v1p3beta1_normalized_vertex +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedVertex/x": x +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_NormalizedVertex/y": y +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation": google_cloud_videointelligence_v1p3beta1_object_tracking_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/entity": entity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingAnnotation/trackId": track_id +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingFrame": google_cloud_videointelligence_v1p3beta1_object_tracking_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingFrame/normalizedBoundingBox": normalized_bounding_box +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ObjectTrackingFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ShotChangeDetectionConfig": google_cloud_videointelligence_v1p3beta1_shot_change_detection_config +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_ShotChangeDetectionConfig/model": model +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechContext": google_cloud_videointelligence_v1p3beta1_speech_context +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechContext/phrases": phrases +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechContext/phrases/phrase": phrase +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative": google_cloud_videointelligence_v1p3beta1_speech_recognition_alternative 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/transcript": transcript +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/words": words +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechRecognitionAlternative/words/word": word +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription": google_cloud_videointelligence_v1p3beta1_speech_transcription +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription/alternatives": alternatives +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription/alternatives/alternative": alternative +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscription/languageCode": language_code +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscriptionConfig": google_cloud_videointelligence_v1p3beta1_speech_transcription_config +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscriptionConfig/audioTracks": audio_tracks +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscriptionConfig/audioTracks/audio_track": audio_track +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscriptionConfig/diarizationSpeakerCount": diarization_speaker_count +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscriptionConfig/enableAutomaticPunctuation": enable_automatic_punctuation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscriptionConfig/enableSpeakerDiarization": enable_speaker_diarization +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscriptionConfig/enableWordConfidence": enable_word_confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscriptionConfig/filterProfanity": filter_profanity +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscriptionConfig/languageCode": language_code +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscriptionConfig/maxAlternatives": max_alternatives +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscriptionConfig/speechContexts": speech_contexts +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_SpeechTranscriptionConfig/speechContexts/speech_context": speech_context +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse": google_cloud_videointelligence_v1p3beta1_streaming_annotate_video_response +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse/annotationResultsUri": annotation_results_uri +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingAnnotateVideoResponse/error": error +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults": google_cloud_videointelligence_v1p3beta1_streaming_video_annotation_results +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/explicitAnnotation": explicit_annotation 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/labelAnnotations": label_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/labelAnnotations/label_annotation" +: label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/objectAnnotations": object_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/objectAnnotations/object_annotation" +: object_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/shotAnnotations": shot_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_StreamingVideoAnnotationResults/shotAnnotations/shot_annotation" +: shot_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation": google_cloud_videointelligence_v1p3beta1_text_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation/segments": segments +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation/segments/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextAnnotation/text": text +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextDetectionConfig": google_cloud_videointelligence_v1p3beta1_text_detection_config +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextDetectionConfig/languageHints": language_hints +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextDetectionConfig/languageHints/language_hint": language_hint +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextFrame": google_cloud_videointelligence_v1p3beta1_text_frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextFrame/rotatedBoundingBox": rotated_bounding_box +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextFrame/timeOffset": time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment": google_cloud_videointelligence_v1p3beta1_text_segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment/frames": frames +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment/frames/frame": frame +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_TextSegment/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress": google_cloud_videointelligence_v1p3beta1_video_annotation_progress +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/progressPercent": progress_percent +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults": google_cloud_videointelligence_v1p3beta1_video_annotation_results +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/error": error 
+"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/frameLabelAnnotations": frame_label_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/frameLabelAnnotations/frame_label_annotation" +: frame_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/objectAnnotations": object_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/objectAnnotations/object_annotation": object_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/segmentLabelAnnotations": segment_label_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/segmentLabelAnnotations/segment_label_annotation" +: segment_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotLabelAnnotations": shot_label_annotations +? "/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/shotLabelAnnotations/shot_label_annotation" +: shot_label_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/speechTranscriptions": speech_transcriptions +? 
"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/speechTranscriptions/speech_transcription" +: speech_transcription +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/textAnnotations": text_annotations +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoAnnotationResults/textAnnotations/text_annotation": text_annotation +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoContext": google_cloud_videointelligence_v1p3beta1_video_context +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoContext/explicitContentDetectionConfig": explicit_content_detection_config +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoContext/labelDetectionConfig": label_detection_config +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoContext/segments": segments +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoContext/segments/segment": segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoContext/shotChangeDetectionConfig": shot_change_detection_config +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoContext/speechTranscriptionConfig": speech_transcription_config +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoContext/textDetectionConfig": text_detection_config +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoSegment": google_cloud_videointelligence_v1p3beta1_video_segment +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo": google_cloud_videointelligence_v1p3beta1_word_info +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/confidence": confidence +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/endTime": end_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/speakerTag": speaker_tag +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/startTime": start_time +"/videointelligence:v1p3beta1/GoogleCloudVideointelligenceV1p3beta1_WordInfo/word": word +"/videointelligence:v1p3beta1/GoogleLongrunning_Operation": google_longrunning_operation +"/videointelligence:v1p3beta1/GoogleLongrunning_Operation/done": done +"/videointelligence:v1p3beta1/GoogleLongrunning_Operation/error": error +"/videointelligence:v1p3beta1/GoogleLongrunning_Operation/metadata": metadata +"/videointelligence:v1p3beta1/GoogleLongrunning_Operation/metadata/metadatum": metadatum +"/videointelligence:v1p3beta1/GoogleLongrunning_Operation/name": name +"/videointelligence:v1p3beta1/GoogleLongrunning_Operation/response": response +"/videointelligence:v1p3beta1/GoogleLongrunning_Operation/response/response": response +"/videointelligence:v1p3beta1/GoogleRpc_Status": google_rpc_status +"/videointelligence:v1p3beta1/GoogleRpc_Status/code": code +"/videointelligence:v1p3beta1/GoogleRpc_Status/details": details +"/videointelligence:v1p3beta1/GoogleRpc_Status/details/detail": detail +"/videointelligence:v1p3beta1/GoogleRpc_Status/details/detail/detail": detail +"/videointelligence:v1p3beta1/GoogleRpc_Status/message": message +"/videointelligence:v1p3beta1/fields": fields 
+"/videointelligence:v1p3beta1/key": key +"/videointelligence:v1p3beta1/quotaUser": quota_user +"/videointelligence:v1p3beta1/videointelligence.videos.annotate": annotate_video "/vision:v1/AddProductToProductSetRequest": add_product_to_product_set_request "/vision:v1/AddProductToProductSetRequest/product": product "/vision:v1/AnnotateFileResponse": annotate_file_response diff --git a/generated/google/apis/cloudasset_v1beta1.rb b/generated/google/apis/cloudasset_v1beta1.rb index c0f1e3ede..3e996c0d0 100644 --- a/generated/google/apis/cloudasset_v1beta1.rb +++ b/generated/google/apis/cloudasset_v1beta1.rb @@ -25,7 +25,7 @@ module Google # @see https://console.cloud.google.com/apis/api/cloudasset.googleapis.com/overview module CloudassetV1beta1 VERSION = 'V1beta1' - REVISION = '20190302' + REVISION = '20190311' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/cloudasset_v1beta1/classes.rb b/generated/google/apis/cloudasset_v1beta1/classes.rb index bf6588509..e944b01a1 100644 --- a/generated/google/apis/cloudasset_v1beta1/classes.rb +++ b/generated/google/apis/cloudasset_v1beta1/classes.rb @@ -414,14 +414,14 @@ module Google attr_accessor :done alias_method :done?, :done - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -668,14 +668,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. 
If a localized user-facing diff --git a/generated/google/apis/cloudbuild_v1.rb b/generated/google/apis/cloudbuild_v1.rb index 9f8cdefa8..b111cd2ab 100644 --- a/generated/google/apis/cloudbuild_v1.rb +++ b/generated/google/apis/cloudbuild_v1.rb @@ -25,7 +25,7 @@ module Google # @see https://cloud.google.com/cloud-build/docs/ module CloudbuildV1 VERSION = 'V1' - REVISION = '20190306' + REVISION = '20190312' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/cloudbuild_v1/classes.rb b/generated/google/apis/cloudbuild_v1/classes.rb index 83961b5a1..28e63ecb3 100644 --- a/generated/google/apis/cloudbuild_v1/classes.rb +++ b/generated/google/apis/cloudbuild_v1/classes.rb @@ -1155,6 +1155,11 @@ module Google # @return [String] attr_accessor :artifact_manifest + # Start and end times for a build execution phase. + # Corresponds to the JSON property `artifactTiming` + # @return [Google::Apis::CloudbuildV1::TimeSpan] + attr_accessor :artifact_timing + # List of build step digests, in the order corresponding to build step # indices. # Corresponds to the JSON property `buildStepImages` @@ -1187,6 +1192,7 @@ module Google # Update properties of this object def update!(**args) @artifact_manifest = args[:artifact_manifest] if args.key?(:artifact_manifest) + @artifact_timing = args[:artifact_timing] if args.key?(:artifact_timing) @build_step_images = args[:build_step_images] if args.key?(:build_step_images) @build_step_outputs = args[:build_step_outputs] if args.key?(:build_step_outputs) @images = args[:images] if args.key?(:images) diff --git a/generated/google/apis/cloudbuild_v1/representations.rb b/generated/google/apis/cloudbuild_v1/representations.rb index 000ee4e9f..5f9b60003 100644 --- a/generated/google/apis/cloudbuild_v1/representations.rb +++ b/generated/google/apis/cloudbuild_v1/representations.rb @@ -481,6 +481,8 @@ module Google # @private class Representation < Google::Apis::Core::JsonRepresentation property :artifact_manifest, as: 'artifactManifest' + property :artifact_timing, as: 'artifactTiming', class: Google::Apis::CloudbuildV1::TimeSpan, decorator: Google::Apis::CloudbuildV1::TimeSpan::Representation + collection :build_step_images, as: 'buildStepImages' collection :build_step_outputs, as: 'buildStepOutputs' collection :images, as: 'images', class: Google::Apis::CloudbuildV1::BuiltImage, decorator: Google::Apis::CloudbuildV1::BuiltImage::Representation diff --git a/generated/google/apis/cloudbuild_v1alpha1.rb b/generated/google/apis/cloudbuild_v1alpha1.rb index 2738f8ae0..38f91d723 100644 --- a/generated/google/apis/cloudbuild_v1alpha1.rb +++ b/generated/google/apis/cloudbuild_v1alpha1.rb @@ -25,7 +25,7 @@ module Google # @see https://cloud.google.com/cloud-build/docs/ module CloudbuildV1alpha1 VERSION = 'V1alpha1' - REVISION = '20190306' + REVISION = '20190312' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/cloudbuild_v1alpha1/classes.rb b/generated/google/apis/cloudbuild_v1alpha1/classes.rb index c4f8d2b74..06d019c3d 100644 --- a/generated/google/apis/cloudbuild_v1alpha1/classes.rb +++ b/generated/google/apis/cloudbuild_v1alpha1/classes.rb @@ -785,6 +785,11 @@ module Google # @return [String] attr_accessor :artifact_manifest + # Start and end times for a build execution phase. 
+ # Corresponds to the JSON property `artifactTiming` + # @return [Google::Apis::CloudbuildV1alpha1::TimeSpan] + attr_accessor :artifact_timing + # List of build step digests, in the order corresponding to build step # indices. # Corresponds to the JSON property `buildStepImages` @@ -817,6 +822,7 @@ module Google # Update properties of this object def update!(**args) @artifact_manifest = args[:artifact_manifest] if args.key?(:artifact_manifest) + @artifact_timing = args[:artifact_timing] if args.key?(:artifact_timing) @build_step_images = args[:build_step_images] if args.key?(:build_step_images) @build_step_outputs = args[:build_step_outputs] if args.key?(:build_step_outputs) @images = args[:images] if args.key?(:images) diff --git a/generated/google/apis/cloudbuild_v1alpha1/representations.rb b/generated/google/apis/cloudbuild_v1alpha1/representations.rb index 4e0796dab..880d09aec 100644 --- a/generated/google/apis/cloudbuild_v1alpha1/representations.rb +++ b/generated/google/apis/cloudbuild_v1alpha1/representations.rb @@ -337,6 +337,8 @@ module Google # @private class Representation < Google::Apis::Core::JsonRepresentation property :artifact_manifest, as: 'artifactManifest' + property :artifact_timing, as: 'artifactTiming', class: Google::Apis::CloudbuildV1alpha1::TimeSpan, decorator: Google::Apis::CloudbuildV1alpha1::TimeSpan::Representation + collection :build_step_images, as: 'buildStepImages' collection :build_step_outputs, as: 'buildStepOutputs' collection :images, as: 'images', class: Google::Apis::CloudbuildV1alpha1::BuiltImage, decorator: Google::Apis::CloudbuildV1alpha1::BuiltImage::Representation diff --git a/generated/google/apis/cloudiot_v1.rb b/generated/google/apis/cloudiot_v1.rb index cc0ccd431..8d3cf0fad 100644 --- a/generated/google/apis/cloudiot_v1.rb +++ b/generated/google/apis/cloudiot_v1.rb @@ -26,7 +26,7 @@ module Google # @see https://cloud.google.com/iot module CloudiotV1 VERSION = 'V1' - REVISION = '20181120' + REVISION = '20190306' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/cloudiot_v1/classes.rb b/generated/google/apis/cloudiot_v1/classes.rb index eb8e98cb3..d296efea8 100644 --- a/generated/google/apis/cloudiot_v1/classes.rb +++ b/generated/google/apis/cloudiot_v1/classes.rb @@ -87,7 +87,7 @@ module Google # account. For example, `my-other-app@appspot.gserviceaccount.com`. # * `group:`emailid``: An email address that represents a Google group. # For example, `admins@example.com`. - # * `domain:`domain``: A Google Apps domain name that represents all the + # * `domain:`domain``: The G Suite domain (primary) that represents all the # users of that domain. For example, `google.com` or `example.com`. # Corresponds to the JSON property `members` # @return [Array] @@ -162,14 +162,14 @@ module Google # @return [String] attr_accessor :last_config_send_time - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). 
The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -1066,14 +1066,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing diff --git a/generated/google/apis/cloudprivatecatalogproducer_v1beta1.rb b/generated/google/apis/cloudprivatecatalogproducer_v1beta1.rb index c5ba77a9a..ca5f6bde2 100644 --- a/generated/google/apis/cloudprivatecatalogproducer_v1beta1.rb +++ b/generated/google/apis/cloudprivatecatalogproducer_v1beta1.rb @@ -26,7 +26,7 @@ module Google # @see https://sites.google.com/corp/google.com/cloudprivatecatalog module CloudprivatecatalogproducerV1beta1 VERSION = 'V1beta1' - REVISION = '20190304' + REVISION = '20190309' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/cloudprivatecatalogproducer_v1beta1/classes.rb b/generated/google/apis/cloudprivatecatalogproducer_v1beta1/classes.rb index a55f89ab2..b1240c2a4 100644 --- a/generated/google/apis/cloudprivatecatalogproducer_v1beta1/classes.rb +++ b/generated/google/apis/cloudprivatecatalogproducer_v1beta1/classes.rb @@ -986,14 +986,14 @@ module Google attr_accessor :done alias_method :done?, :done - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. 
The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -1089,14 +1089,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing diff --git a/generated/google/apis/containeranalysis_v1beta1.rb b/generated/google/apis/containeranalysis_v1beta1.rb index 16f5d8926..4bd790994 100644 --- a/generated/google/apis/containeranalysis_v1beta1.rb +++ b/generated/google/apis/containeranalysis_v1beta1.rb @@ -26,7 +26,7 @@ module Google # @see https://cloud.google.com/container-analysis/api/reference/rest/ module ContaineranalysisV1beta1 VERSION = 'V1beta1' - REVISION = '20190222' + REVISION = '20190308' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/containeranalysis_v1beta1/classes.rb b/generated/google/apis/containeranalysis_v1beta1/classes.rb index 1ab5a8cee..2fad36601 100644 --- a/generated/google/apis/containeranalysis_v1beta1/classes.rb +++ b/generated/google/apis/containeranalysis_v1beta1/classes.rb @@ -900,14 +900,14 @@ module Google # @return [String] attr_accessor :analysis_status - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. 
The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -2495,14 +2495,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing diff --git a/generated/google/apis/content_v2.rb b/generated/google/apis/content_v2.rb index 834cd38fe..e5a44c856 100644 --- a/generated/google/apis/content_v2.rb +++ b/generated/google/apis/content_v2.rb @@ -26,7 +26,7 @@ module Google # @see https://developers.google.com/shopping-content module ContentV2 VERSION = 'V2' - REVISION = '20190304' + REVISION = '20190308' # Manage your product listings and accounts for Google Shopping AUTH_CONTENT = 'https://www.googleapis.com/auth/content' diff --git a/generated/google/apis/content_v2/classes.rb b/generated/google/apis/content_v2/classes.rb index 015a2bcb9..26945d1de 100644 --- a/generated/google/apis/content_v2/classes.rb +++ b/generated/google/apis/content_v2/classes.rb @@ -329,7 +329,7 @@ module Google # @return [Array] attr_accessor :account_level_issues - # A list of data quality issues. + # DEPRECATED - never populated. # Corresponds to the JSON property `dataQualityIssues` # @return [Array] attr_accessor :data_quality_issues @@ -426,57 +426,57 @@ module Google class AccountStatusDataQualityIssue include Google::Apis::Core::Hashable - # Country for which this issue is reported. + # # Corresponds to the JSON property `country` # @return [String] attr_accessor :country - # The destination the issue applies to. + # # Corresponds to the JSON property `destination` # @return [String] attr_accessor :destination - # A more detailed description of the issue. + # # Corresponds to the JSON property `detail` # @return [String] attr_accessor :detail - # Actual value displayed on the landing page. + # # Corresponds to the JSON property `displayedValue` # @return [String] attr_accessor :displayed_value - # Example items featuring the issue. + # # Corresponds to the JSON property `exampleItems` # @return [Array] attr_accessor :example_items - # Issue identifier. + # # Corresponds to the JSON property `id` # @return [String] attr_accessor :id - # Last time the account was checked for this issue. + # # Corresponds to the JSON property `lastChecked` # @return [String] attr_accessor :last_checked - # The attribute name that is relevant for the issue. 
+ # # Corresponds to the JSON property `location` # @return [String] attr_accessor :location - # Number of items in the account found to have the said issue. + # # Corresponds to the JSON property `numItems` # @return [Fixnum] attr_accessor :num_items - # Severity of the problem. + # # Corresponds to the JSON property `severity` # @return [String] attr_accessor :severity - # Submitted value that causes the issue. + # # Corresponds to the JSON property `submittedValue` # @return [String] attr_accessor :submitted_value @@ -501,32 +501,31 @@ module Google end end - # An example of an item that has poor data quality. An item value on the landing - # page differs from what is submitted, or conflicts with a policy. + # class AccountStatusExampleItem include Google::Apis::Core::Hashable - # Unique item ID as specified in the uploaded product data. + # # Corresponds to the JSON property `itemId` # @return [String] attr_accessor :item_id - # Landing page of the item. + # # Corresponds to the JSON property `link` # @return [String] attr_accessor :link - # The item value that was submitted. + # # Corresponds to the JSON property `submittedValue` # @return [String] attr_accessor :submitted_value - # Title of the item. + # # Corresponds to the JSON property `title` # @return [String] attr_accessor :title - # The actual value on the landing page. + # # Corresponds to the JSON property `valueOnLandingPage` # @return [String] attr_accessor :value_on_landing_page @@ -4011,7 +4010,7 @@ module Google attr_accessor :acknowledged alias_method :acknowledged?, :acknowledged - # The channel type of the order: "purchaseOnGoogle" or "googleExpress". + # Deprecated. # Corresponds to the JSON property `channelType` # @return [String] attr_accessor :channel_type @@ -4074,9 +4073,8 @@ module Google # @return [String] attr_accessor :placed_date - # Deprecated. Ignored if provided for createTestOrder. The details of the - # merchant provided promotions applied to the order. More details about the - # program are here. + # The details of the merchant provided promotions applied to the order. More + # details about the program are here. # Corresponds to the JSON property `promotions` # @return [Array] attr_accessor :promotions @@ -4316,7 +4314,7 @@ module Google # Email address that can be used for marketing purposes. The field may be empty # even if explicitMarketingPreference is 'granted'. This happens when retrieving - # an old order from the customer who deleted his account. + # an old order from the customer who deleted their account. # Corresponds to the JSON property `marketingEmailAddress` # @return [String] attr_accessor :marketing_email_address @@ -9199,7 +9197,7 @@ module Google end # The status of a product, i.e., information about a product computed - # asynchronously by the data quality analysis. + # asynchronously. class ProductStatus include Google::Apis::Core::Hashable @@ -9208,7 +9206,7 @@ module Google # @return [String] attr_accessor :creation_date - # A list of data quality issues associated with the product. + # DEPRECATED - never populated # Corresponds to the JSON property `dataQualityIssues` # @return [Array] attr_accessor :data_quality_issues @@ -9284,47 +9282,47 @@ module Google class ProductStatusDataQualityIssue include Google::Apis::Core::Hashable - # The destination the issue applies to. + # # Corresponds to the JSON property `destination` # @return [String] attr_accessor :destination - # A more detailed error string. 
+ # # Corresponds to the JSON property `detail` # @return [String] attr_accessor :detail - # The fetch status for landing_page_errors. + # # Corresponds to the JSON property `fetchStatus` # @return [String] attr_accessor :fetch_status - # The ID of the data quality issue. + # # Corresponds to the JSON property `id` # @return [String] attr_accessor :id - # The attribute name that is relevant for the issue. + # # Corresponds to the JSON property `location` # @return [String] attr_accessor :location - # The severity of the data quality issue. + # # Corresponds to the JSON property `severity` # @return [String] attr_accessor :severity - # The time stamp of the data quality issue. + # # Corresponds to the JSON property `timestamp` # @return [String] attr_accessor :timestamp - # The value of that attribute that was found on the landing page + # # Corresponds to the JSON property `valueOnLandingPage` # @return [String] attr_accessor :value_on_landing_page - # The value the attribute had at time of evaluation. + # # Corresponds to the JSON property `valueProvided` # @return [String] attr_accessor :value_provided @@ -9827,7 +9825,7 @@ module Google attr_accessor :kind # The status of a product, i.e., information about a product computed - # asynchronously by the data quality analysis. + # asynchronously. # Corresponds to the JSON property `productStatus` # @return [Google::Apis::ContentV2::ProductStatus] attr_accessor :product_status @@ -10550,8 +10548,7 @@ module Google # @return [String] attr_accessor :predefined_delivery_address - # Deprecated. The details of the merchant provided promotions applied to the - # order. More details about the program are here. + # Deprecated. Ignored if provided. # Corresponds to the JSON property `promotions` # @return [Array] attr_accessor :promotions @@ -10710,7 +10707,7 @@ module Google # @return [String] attr_accessor :brand - # The item's channel. + # Deprecated. # Corresponds to the JSON property `channel` # @return [String] attr_accessor :channel diff --git a/generated/google/apis/content_v2/service.rb b/generated/google/apis/content_v2/service.rb index 00fca5aec..3030ce0f9 100644 --- a/generated/google/apis/content_v2/service.rb +++ b/generated/google/apis/content_v2/service.rb @@ -487,8 +487,8 @@ module Google execute_or_queue_command(command, &block) end - # Retrieves the status of a Merchant Center account. Multi-client accounts can - # only call this method for sub-accounts. + # Retrieves the status of a Merchant Center account. No itemLevelIssues are + # returned for multi-client accounts. # @param [Fixnum] merchant_id # The ID of the managing account. 
If this parameter is not the same as accountId, # then this account must be a multi-client account and accountId must be the ID diff --git a/generated/google/apis/content_v2_1.rb b/generated/google/apis/content_v2_1.rb index 129fc7018..e44c225f2 100644 --- a/generated/google/apis/content_v2_1.rb +++ b/generated/google/apis/content_v2_1.rb @@ -26,7 +26,7 @@ module Google # @see https://developers.google.com/shopping-content module ContentV2_1 VERSION = 'V2_1' - REVISION = '20190304' + REVISION = '20190308' # Manage your product listings and accounts for Google Shopping AUTH_CONTENT = 'https://www.googleapis.com/auth/content' diff --git a/generated/google/apis/content_v2_1/classes.rb b/generated/google/apis/content_v2_1/classes.rb index 817bfdf54..c7a8db5d6 100644 --- a/generated/google/apis/content_v2_1/classes.rb +++ b/generated/google/apis/content_v2_1/classes.rb @@ -3391,11 +3391,6 @@ module Google # @return [Google::Apis::ContentV2_1::OrderAddress] attr_accessor :billing_address - # The channel type of the order: "purchaseOnGoogle" or "googleExpress". - # Corresponds to the JSON property `channelType` - # @return [String] - attr_accessor :channel_type - # The details of the customer who placed the order. # Corresponds to the JSON property `customer` # @return [Google::Apis::ContentV2_1::OrderCustomer] @@ -3499,7 +3494,6 @@ module Google def update!(**args) @acknowledged = args[:acknowledged] if args.key?(:acknowledged) @billing_address = args[:billing_address] if args.key?(:billing_address) - @channel_type = args[:channel_type] if args.key?(:channel_type) @customer = args[:customer] if args.key?(:customer) @delivery_details = args[:delivery_details] if args.key?(:delivery_details) @id = args[:id] if args.key?(:id) @@ -3678,7 +3672,7 @@ module Google # Email address that can be used for marketing purposes. The field may be empty # even if explicitMarketingPreference is 'granted'. This happens when retrieving - # an old order from the customer who deleted his account. + # an old order from the customer who deleted their account. # Corresponds to the JSON property `marketingEmailAddress` # @return [String] attr_accessor :marketing_email_address @@ -3836,11 +3830,6 @@ module Google # @return [String] attr_accessor :brand - # The item's channel (online or local). - # Corresponds to the JSON property `channel` - # @return [String] - attr_accessor :channel - # Condition or state of the item. # Corresponds to the JSON property `condition` # @return [String] @@ -3920,7 +3909,6 @@ module Google # Update properties of this object def update!(**args) @brand = args[:brand] if args.key?(:brand) - @channel = args[:channel] if args.key?(:channel) @condition = args[:condition] if args.key?(:condition) @content_language = args[:content_language] if args.key?(:content_language) @fees = args[:fees] if args.key?(:fees) @@ -7288,7 +7276,7 @@ module Google end # The status of a product, i.e., information about a product computed - # asynchronously by the data quality analysis. + # asynchronously. class ProductStatus include Google::Apis::Core::Hashable @@ -7823,7 +7811,7 @@ module Google attr_accessor :kind # The status of a product, i.e., information about a product computed - # asynchronously by the data quality analysis. + # asynchronously. # Corresponds to the JSON property `productStatus` # @return [Google::Apis::ContentV2_1::ProductStatus] attr_accessor :product_status @@ -8790,11 +8778,6 @@ module Google # @return [String] attr_accessor :brand - # The item's channel. 
- # Corresponds to the JSON property `channel` - # @return [String] - attr_accessor :channel - # Condition or state of the item. # Corresponds to the JSON property `condition` # @return [String] @@ -8858,7 +8841,6 @@ module Google # Update properties of this object def update!(**args) @brand = args[:brand] if args.key?(:brand) - @channel = args[:channel] if args.key?(:channel) @condition = args[:condition] if args.key?(:condition) @content_language = args[:content_language] if args.key?(:content_language) @gtin = args[:gtin] if args.key?(:gtin) diff --git a/generated/google/apis/content_v2_1/representations.rb b/generated/google/apis/content_v2_1/representations.rb index 165d1d2bd..2065629a0 100644 --- a/generated/google/apis/content_v2_1/representations.rb +++ b/generated/google/apis/content_v2_1/representations.rb @@ -2361,7 +2361,6 @@ module Google property :acknowledged, as: 'acknowledged' property :billing_address, as: 'billingAddress', class: Google::Apis::ContentV2_1::OrderAddress, decorator: Google::Apis::ContentV2_1::OrderAddress::Representation - property :channel_type, as: 'channelType' property :customer, as: 'customer', class: Google::Apis::ContentV2_1::OrderCustomer, decorator: Google::Apis::ContentV2_1::OrderCustomer::Representation property :delivery_details, as: 'deliveryDetails', class: Google::Apis::ContentV2_1::OrderDeliveryDetails, decorator: Google::Apis::ContentV2_1::OrderDeliveryDetails::Representation @@ -2478,7 +2477,6 @@ module Google # @private class Representation < Google::Apis::Core::JsonRepresentation property :brand, as: 'brand' - property :channel, as: 'channel' property :condition, as: 'condition' property :content_language, as: 'contentLanguage' collection :fees, as: 'fees', class: Google::Apis::ContentV2_1::OrderLineItemProductFee, decorator: Google::Apis::ContentV2_1::OrderLineItemProductFee::Representation @@ -3852,7 +3850,6 @@ module Google # @private class Representation < Google::Apis::Core::JsonRepresentation property :brand, as: 'brand' - property :channel, as: 'channel' property :condition, as: 'condition' property :content_language, as: 'contentLanguage' property :gtin, as: 'gtin' diff --git a/generated/google/apis/content_v2_1/service.rb b/generated/google/apis/content_v2_1/service.rb index 23ede75bc..bd568c6ad 100644 --- a/generated/google/apis/content_v2_1/service.rb +++ b/generated/google/apis/content_v2_1/service.rb @@ -426,8 +426,8 @@ module Google execute_or_queue_command(command, &block) end - # Retrieves the status of a Merchant Center account. Multi-client accounts can - # only call this method for sub-accounts. + # Retrieves the status of a Merchant Center account. No itemLevelIssues are + # returned for multi-client accounts. # @param [Fixnum] merchant_id # The ID of the managing account. 
If this parameter is not the same as accountId, # then this account must be a multi-client account and accountId must be the ID diff --git a/generated/google/apis/dialogflow_v2.rb b/generated/google/apis/dialogflow_v2.rb index 223d065e4..704c53baa 100644 --- a/generated/google/apis/dialogflow_v2.rb +++ b/generated/google/apis/dialogflow_v2.rb @@ -26,7 +26,7 @@ module Google # @see https://cloud.google.com/dialogflow-enterprise/ module DialogflowV2 VERSION = 'V2' - REVISION = '20190219' + REVISION = '20190311' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/dialogflow_v2/classes.rb b/generated/google/apis/dialogflow_v2/classes.rb index 44b2fed53..5e19bfb15 100644 --- a/generated/google/apis/dialogflow_v2/classes.rb +++ b/generated/google/apis/dialogflow_v2/classes.rb @@ -470,14 +470,14 @@ module Google # @return [String] attr_accessor :response_id - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -4222,14 +4222,14 @@ module Google attr_accessor :done alias_method :done?, :done - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -4325,14 +4325,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). 
The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing diff --git a/generated/google/apis/genomics_v1.rb b/generated/google/apis/genomics_v1.rb index 4ffcc79cd..df7a49dd1 100644 --- a/generated/google/apis/genomics_v1.rb +++ b/generated/google/apis/genomics_v1.rb @@ -25,22 +25,13 @@ module Google # @see https://cloud.google.com/genomics module GenomicsV1 VERSION = 'V1' - REVISION = '20190309' - - # View and manage your data in Google BigQuery - AUTH_BIGQUERY = 'https://www.googleapis.com/auth/bigquery' + REVISION = '20190312' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' - # Manage your data in Google Cloud Storage - AUTH_DEVSTORAGE_READ_WRITE = 'https://www.googleapis.com/auth/devstorage.read_write' - # View and manage Genomics data AUTH_GENOMICS = 'https://www.googleapis.com/auth/genomics' - - # View Genomics data - AUTH_GENOMICS_READONLY = 'https://www.googleapis.com/auth/genomics.readonly' end end end diff --git a/generated/google/apis/genomics_v1/classes.rb b/generated/google/apis/genomics_v1/classes.rb index 500678d7a..484098bcf 100644 --- a/generated/google/apis/genomics_v1/classes.rb +++ b/generated/google/apis/genomics_v1/classes.rb @@ -22,322 +22,6 @@ module Google module Apis module GenomicsV1 - # An annotation describes a region of reference genome. The value of an - # annotation may be one of several canonical types, supplemented by arbitrary - # info tags. An annotation is not inherently associated with a specific - # sample or individual (though a client could choose to use annotations in - # this way). Example canonical annotation types are `GENE` and - # `VARIANT`. - class Annotation - include Google::Apis::Core::Hashable - - # The annotation set to which this annotation belongs. - # Corresponds to the JSON property `annotationSetId` - # @return [String] - attr_accessor :annotation_set_id - - # The end position of the range on the reference, 0-based exclusive. - # Corresponds to the JSON property `end` - # @return [Fixnum] - attr_accessor :end - - # The server-generated annotation ID, unique across all annotations. - # Corresponds to the JSON property `id` - # @return [String] - attr_accessor :id - - # A map of additional read alignment information. This must be of the form - # map (string key mapping to a list of string values). - # Corresponds to the JSON property `info` - # @return [Hash>] - attr_accessor :info - - # The display name of this annotation. - # Corresponds to the JSON property `name` - # @return [String] - attr_accessor :name - - # The ID of the Google Genomics reference associated with this range. 
- # Corresponds to the JSON property `referenceId` - # @return [String] - attr_accessor :reference_id - - # The display name corresponding to the reference specified by - # `referenceId`, for example `chr1`, `1`, or `chrX`. - # Corresponds to the JSON property `referenceName` - # @return [String] - attr_accessor :reference_name - - # Whether this range refers to the reverse strand, as opposed to the forward - # strand. Note that regardless of this field, the start/end position of the - # range always refer to the forward strand. - # Corresponds to the JSON property `reverseStrand` - # @return [Boolean] - attr_accessor :reverse_strand - alias_method :reverse_strand?, :reverse_strand - - # The start position of the range on the reference, 0-based inclusive. - # Corresponds to the JSON property `start` - # @return [Fixnum] - attr_accessor :start - - # A transcript represents the assertion that a particular region of the - # reference genome may be transcribed as RNA. - # Corresponds to the JSON property `transcript` - # @return [Google::Apis::GenomicsV1::Transcript] - attr_accessor :transcript - - # The data type for this annotation. Must match the containing annotation - # set's type. - # Corresponds to the JSON property `type` - # @return [String] - attr_accessor :type - - # A variant annotation, which describes the effect of a variant on the - # genome, the coding sequence, and/or higher level consequences at the - # organism level e.g. pathogenicity. This field is only set for annotations - # of type `VARIANT`. - # Corresponds to the JSON property `variant` - # @return [Google::Apis::GenomicsV1::VariantAnnotation] - attr_accessor :variant - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @annotation_set_id = args[:annotation_set_id] if args.key?(:annotation_set_id) - @end = args[:end] if args.key?(:end) - @id = args[:id] if args.key?(:id) - @info = args[:info] if args.key?(:info) - @name = args[:name] if args.key?(:name) - @reference_id = args[:reference_id] if args.key?(:reference_id) - @reference_name = args[:reference_name] if args.key?(:reference_name) - @reverse_strand = args[:reverse_strand] if args.key?(:reverse_strand) - @start = args[:start] if args.key?(:start) - @transcript = args[:transcript] if args.key?(:transcript) - @type = args[:type] if args.key?(:type) - @variant = args[:variant] if args.key?(:variant) - end - end - - # An annotation set is a logical grouping of annotations that share consistent - # type information and provenance. Examples of annotation sets include 'all - # genes from refseq', and 'all variant annotations from ClinVar'. - class AnnotationSet - include Google::Apis::Core::Hashable - - # The dataset to which this annotation set belongs. - # Corresponds to the JSON property `datasetId` - # @return [String] - attr_accessor :dataset_id - - # The server-generated annotation set ID, unique across all annotation sets. - # Corresponds to the JSON property `id` - # @return [String] - attr_accessor :id - - # A map of additional read alignment information. This must be of the form - # map (string key mapping to a list of string values). - # Corresponds to the JSON property `info` - # @return [Hash>] - attr_accessor :info - - # The display name for this annotation set. - # Corresponds to the JSON property `name` - # @return [String] - attr_accessor :name - - # The ID of the reference set that defines the coordinate space for this - # set's annotations. 
- # Corresponds to the JSON property `referenceSetId` - # @return [String] - attr_accessor :reference_set_id - - # The source URI describing the file from which this annotation set was - # generated, if any. - # Corresponds to the JSON property `sourceUri` - # @return [String] - attr_accessor :source_uri - - # The type of annotations contained within this set. - # Corresponds to the JSON property `type` - # @return [String] - attr_accessor :type - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @dataset_id = args[:dataset_id] if args.key?(:dataset_id) - @id = args[:id] if args.key?(:id) - @info = args[:info] if args.key?(:info) - @name = args[:name] if args.key?(:name) - @reference_set_id = args[:reference_set_id] if args.key?(:reference_set_id) - @source_uri = args[:source_uri] if args.key?(:source_uri) - @type = args[:type] if args.key?(:type) - end - end - - # - class BatchCreateAnnotationsRequest - include Google::Apis::Core::Hashable - - # The annotations to be created. At most 4096 can be specified in a single - # request. - # Corresponds to the JSON property `annotations` - # @return [Array] - attr_accessor :annotations - - # A unique request ID which enables the server to detect duplicated requests. - # If provided, duplicated requests will result in the same response; if not - # provided, duplicated requests may result in duplicated data. For a given - # annotation set, callers should not reuse `request_id`s when writing - # different batches of annotations - behavior in this case is undefined. - # A common approach is to use a UUID. For batch jobs where worker crashes are - # a possibility, consider using some unique variant of a worker or run ID. - # Corresponds to the JSON property `requestId` - # @return [String] - attr_accessor :request_id - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @annotations = args[:annotations] if args.key?(:annotations) - @request_id = args[:request_id] if args.key?(:request_id) - end - end - - # - class BatchCreateAnnotationsResponse - include Google::Apis::Core::Hashable - - # The resulting per-annotation entries, ordered consistently with the - # original request. - # Corresponds to the JSON property `entries` - # @return [Array] - attr_accessor :entries - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @entries = args[:entries] if args.key?(:entries) - end - end - - # Associates `members` with a `role`. - class Binding - include Google::Apis::Core::Hashable - - # Represents an expression text. Example: - # title: "User account presence" - # description: "Determines whether the request has a user account" - # expression: "size(request.user) > 0" - # Corresponds to the JSON property `condition` - # @return [Google::Apis::GenomicsV1::Expr] - attr_accessor :condition - - # Specifies the identities requesting access for a Cloud Platform resource. - # `members` can have the following values: - # * `allUsers`: A special identifier that represents anyone who is - # on the internet; with or without a Google account. - # * `allAuthenticatedUsers`: A special identifier that represents anyone - # who is authenticated with a Google account or a service account. - # * `user:`emailid``: An email address that represents a specific Google - # account. For example, `alice@gmail.com` . - # * `serviceAccount:`emailid``: An email address that represents a service - # account. 
For example, `my-other-app@appspot.gserviceaccount.com`. - # * `group:`emailid``: An email address that represents a Google group. - # For example, `admins@example.com`. - # * `domain:`domain``: The G Suite domain (primary) that represents all the - # users of that domain. For example, `google.com` or `example.com`. - # Corresponds to the JSON property `members` - # @return [Array] - attr_accessor :members - - # Role that is assigned to `members`. - # For example, `roles/viewer`, `roles/editor`, or `roles/owner`. - # Corresponds to the JSON property `role` - # @return [String] - attr_accessor :role - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @condition = args[:condition] if args.key?(:condition) - @members = args[:members] if args.key?(:members) - @role = args[:role] if args.key?(:role) - end - end - - # A call set is a collection of variant calls, typically for one sample. It - # belongs to a variant set. - class CallSet - include Google::Apis::Core::Hashable - - # The date this call set was created in milliseconds from the epoch. - # Corresponds to the JSON property `created` - # @return [Fixnum] - attr_accessor :created - - # The server-generated call set ID, unique across all call sets. - # Corresponds to the JSON property `id` - # @return [String] - attr_accessor :id - - # A map of additional call set information. This must be of the form - # map (string key mapping to a list of string values). - # Corresponds to the JSON property `info` - # @return [Hash>] - attr_accessor :info - - # The call set name. - # Corresponds to the JSON property `name` - # @return [String] - attr_accessor :name - - # The sample ID this call set corresponds to. - # Corresponds to the JSON property `sampleId` - # @return [String] - attr_accessor :sample_id - - # The IDs of the variant sets this call set belongs to. This field must - # have exactly length one, as a call set belongs to a single variant set. - # This field is repeated for compatibility with the - # [GA4GH 0.5.1 - # API](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/ - # variants.avdl#L76). - # Corresponds to the JSON property `variantSetIds` - # @return [Array] - attr_accessor :variant_set_ids - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @created = args[:created] if args.key?(:created) - @id = args[:id] if args.key?(:id) - @info = args[:info] if args.key?(:info) - @name = args[:name] if args.key?(:name) - @sample_id = args[:sample_id] if args.key?(:sample_id) - @variant_set_ids = args[:variant_set_ids] if args.key?(:variant_set_ids) - end - end - # The request message for Operations.CancelOperation. class CancelOperationRequest include Google::Apis::Core::Hashable @@ -351,108 +35,6 @@ module Google end end - # A single CIGAR operation. - class CigarUnit - include Google::Apis::Core::Hashable - - # - # Corresponds to the JSON property `operation` - # @return [String] - attr_accessor :operation - - # The number of genomic bases that the operation runs for. Required. - # Corresponds to the JSON property `operationLength` - # @return [Fixnum] - attr_accessor :operation_length - - # `referenceSequence` is only used at mismatches - # (`SEQUENCE_MISMATCH`) and deletions (`DELETE`). - # Filling this field replaces SAM's MD tag. If the relevant information is - # not available, this field is unset. 
- # Corresponds to the JSON property `referenceSequence` - # @return [String] - attr_accessor :reference_sequence - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @operation = args[:operation] if args.key?(:operation) - @operation_length = args[:operation_length] if args.key?(:operation_length) - @reference_sequence = args[:reference_sequence] if args.key?(:reference_sequence) - end - end - - # - class ClinicalCondition - include Google::Apis::Core::Hashable - - # The MedGen concept id associated with this gene. - # Search for these IDs at http://www.ncbi.nlm.nih.gov/medgen/ - # Corresponds to the JSON property `conceptId` - # @return [String] - attr_accessor :concept_id - - # The set of external IDs for this condition. - # Corresponds to the JSON property `externalIds` - # @return [Array] - attr_accessor :external_ids - - # A set of names for the condition. - # Corresponds to the JSON property `names` - # @return [Array] - attr_accessor :names - - # The OMIM id for this condition. - # Search for these IDs at http://omim.org/ - # Corresponds to the JSON property `omimId` - # @return [String] - attr_accessor :omim_id - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @concept_id = args[:concept_id] if args.key?(:concept_id) - @external_ids = args[:external_ids] if args.key?(:external_ids) - @names = args[:names] if args.key?(:names) - @omim_id = args[:omim_id] if args.key?(:omim_id) - end - end - - # - class CodingSequence - include Google::Apis::Core::Hashable - - # The end of the coding sequence on this annotation's reference sequence, - # 0-based exclusive. Note that this position is relative to the reference - # start, and *not* the containing annotation start. - # Corresponds to the JSON property `end` - # @return [Fixnum] - attr_accessor :end - - # The start of the coding sequence on this annotation's reference sequence, - # 0-based inclusive. Note that this position is relative to the reference - # start, and *not* the containing annotation start. - # Corresponds to the JSON property `start` - # @return [Fixnum] - attr_accessor :start - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @end = args[:end] if args.key?(:end) - @start = args[:start] if args.key?(:start) - end - end - # Describes a Compute Engine resource that is being managed by a running # pipeline. class ComputeEngine @@ -585,70 +167,6 @@ module Google end end - # A bucket over which read coverage has been precomputed. A bucket corresponds - # to a specific range of the reference sequence. - class CoverageBucket - include Google::Apis::Core::Hashable - - # The average number of reads which are aligned to each individual - # reference base in this bucket. - # Corresponds to the JSON property `meanCoverage` - # @return [Float] - attr_accessor :mean_coverage - - # A 0-based half-open genomic coordinate range for search requests. - # Corresponds to the JSON property `range` - # @return [Google::Apis::GenomicsV1::Range] - attr_accessor :range - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @mean_coverage = args[:mean_coverage] if args.key?(:mean_coverage) - @range = args[:range] if args.key?(:range) - end - end - - # A Dataset is a collection of genomic data. 
- class Dataset - include Google::Apis::Core::Hashable - - # The time this dataset was created, in seconds from the epoch. - # Corresponds to the JSON property `createTime` - # @return [String] - attr_accessor :create_time - - # The server-generated dataset ID, unique across all datasets. - # Corresponds to the JSON property `id` - # @return [String] - attr_accessor :id - - # The dataset name. - # Corresponds to the JSON property `name` - # @return [String] - attr_accessor :name - - # The Google Cloud project ID that this dataset belongs to. - # Corresponds to the JSON property `projectId` - # @return [String] - attr_accessor :project_id - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @create_time = args[:create_time] if args.key?(:create_time) - @id = args[:id] if args.key?(:id) - @name = args[:name] if args.key?(:name) - @project_id = args[:project_id] if args.key?(:project_id) - end - end - # An event generated whenever a resource limitation or transient error # delays execution of a pipeline that was otherwise ready to run. class DelayedEvent @@ -699,74 +217,6 @@ module Google end end - # - class Entry - include Google::Apis::Core::Hashable - - # An annotation describes a region of reference genome. The value of an - # annotation may be one of several canonical types, supplemented by arbitrary - # info tags. An annotation is not inherently associated with a specific - # sample or individual (though a client could choose to use annotations in - # this way). Example canonical annotation types are `GENE` and - # `VARIANT`. - # Corresponds to the JSON property `annotation` - # @return [Google::Apis::GenomicsV1::Annotation] - attr_accessor :annotation - - # The `Status` type defines a logical error model that is suitable for - # different programming environments, including REST APIs and RPC APIs. It is - # used by [gRPC](https://github.com/grpc). The error model is designed to be: - # - Simple to use and understand for most users - # - Flexible enough to meet unexpected needs - # # Overview - # The `Status` message contains three pieces of data: error code, error - # message, and error details. The error code should be an enum value of - # google.rpc.Code, but it may accept additional error codes if needed. The - # error message should be a developer-facing English message that helps - # developers *understand* and *resolve* the error. If a localized user-facing - # error message is needed, put the localized message in the error details or - # localize it in the client. The optional error details may contain arbitrary - # information about the error. There is a predefined set of error detail types - # in the package `google.rpc` that can be used for common error conditions. - # # Language mapping - # The `Status` message is the logical representation of the error model, but it - # is not necessarily the actual wire format. When the `Status` message is - # exposed in different client libraries and different wire protocols, it can be - # mapped differently. For example, it will likely be mapped to some exceptions - # in Java, but more likely mapped to some error codes in C. - # # Other uses - # The error model and the `Status` message can be used in a variety of - # environments, either with or without APIs, to provide a - # consistent developer experience across different environments. - # Example uses of this error model include: - # - Partial errors. 
If a service needs to return partial errors to the client, - # it may embed the `Status` in the normal response to indicate the partial - # errors. - # - Workflow errors. A typical workflow has multiple steps. Each step may - # have a `Status` message for error reporting. - # - Batch operations. If a client uses batch request and batch response, the - # `Status` message should be used directly inside batch response, one for - # each error sub-response. - # - Asynchronous operations. If an API call embeds asynchronous operation - # results in its response, the status of those operations should be - # represented directly using the `Status` message. - # - Logging. If some API errors are stored in logs, the message `Status` could - # be used directly after any stripping needed for security/privacy reasons. - # Corresponds to the JSON property `status` - # @return [Google::Apis::GenomicsV1::Status] - attr_accessor :status - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @annotation = args[:annotation] if args.key?(:annotation) - @status = args[:status] if args.key?(:status) - end - end - # Carries information about events that occur during pipeline execution. class Event include Google::Apis::Core::Hashable @@ -800,252 +250,6 @@ module Google end end - # - class Exon - include Google::Apis::Core::Hashable - - # The end position of the exon on this annotation's reference sequence, - # 0-based exclusive. Note that this is relative to the reference start, and - # *not* the containing annotation start. - # Corresponds to the JSON property `end` - # @return [Fixnum] - attr_accessor :end - - # The frame of this exon. Contains a value of 0, 1, or 2, which indicates - # the offset of the first coding base of the exon within the reading frame - # of the coding DNA sequence, if any. This field is dependent on the - # strandedness of this annotation (see - # Annotation.reverse_strand). - # For forward stranded annotations, this offset is relative to the - # exon.start. For reverse - # strand annotations, this offset is relative to the - # exon.end `- 1`. - # Unset if this exon does not intersect the coding sequence. Upon creation - # of a transcript, the frame must be populated for all or none of the - # coding exons. - # Corresponds to the JSON property `frame` - # @return [Fixnum] - attr_accessor :frame - - # The start position of the exon on this annotation's reference sequence, - # 0-based inclusive. Note that this is relative to the reference start, and - # **not** the containing annotation start. - # Corresponds to the JSON property `start` - # @return [Fixnum] - attr_accessor :start - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @end = args[:end] if args.key?(:end) - @frame = args[:frame] if args.key?(:frame) - @start = args[:start] if args.key?(:start) - end - end - - # - class Experiment - include Google::Apis::Core::Hashable - - # The instrument model used as part of this experiment. This maps to - # sequencing technology in the SAM spec. - # Corresponds to the JSON property `instrumentModel` - # @return [String] - attr_accessor :instrument_model - - # A client-supplied library identifier; a library is a collection of DNA - # fragments which have been prepared for sequencing from a sample. This - # field is important for quality control as error or bias can be introduced - # during sample preparation. 
- # Corresponds to the JSON property `libraryId` - # @return [String] - attr_accessor :library_id - - # The platform unit used as part of this experiment, for example - # flowcell-barcode.lane for Illumina or slide for SOLiD. Corresponds to the - # @RG PU field in the SAM spec. - # Corresponds to the JSON property `platformUnit` - # @return [String] - attr_accessor :platform_unit - - # The sequencing center used as part of this experiment. - # Corresponds to the JSON property `sequencingCenter` - # @return [String] - attr_accessor :sequencing_center - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @instrument_model = args[:instrument_model] if args.key?(:instrument_model) - @library_id = args[:library_id] if args.key?(:library_id) - @platform_unit = args[:platform_unit] if args.key?(:platform_unit) - @sequencing_center = args[:sequencing_center] if args.key?(:sequencing_center) - end - end - - # The read group set export request. - class ExportReadGroupSetRequest - include Google::Apis::Core::Hashable - - # Required. A Google Cloud Storage URI for the exported BAM file. - # The currently authenticated user must have write access to the new file. - # An error will be returned if the URI already contains data. - # Corresponds to the JSON property `exportUri` - # @return [String] - attr_accessor :export_uri - - # Required. The Google Cloud project ID that owns this - # export. The caller must have WRITE access to this project. - # Corresponds to the JSON property `projectId` - # @return [String] - attr_accessor :project_id - - # The reference names to export. If this is not specified, all reference - # sequences, including unmapped reads, are exported. - # Use `*` to export only unmapped reads. - # Corresponds to the JSON property `referenceNames` - # @return [Array] - attr_accessor :reference_names - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @export_uri = args[:export_uri] if args.key?(:export_uri) - @project_id = args[:project_id] if args.key?(:project_id) - @reference_names = args[:reference_names] if args.key?(:reference_names) - end - end - - # The variant data export request. - class ExportVariantSetRequest - include Google::Apis::Core::Hashable - - # Required. The BigQuery dataset to export data to. This dataset must already - # exist. Note that this is distinct from the Genomics concept of "dataset". - # Corresponds to the JSON property `bigqueryDataset` - # @return [String] - attr_accessor :bigquery_dataset - - # Required. The BigQuery table to export data to. - # If the table doesn't exist, it will be created. If it already exists, it - # will be overwritten. - # Corresponds to the JSON property `bigqueryTable` - # @return [String] - attr_accessor :bigquery_table - - # If provided, only variant call information from the specified call sets - # will be exported. By default all variant calls are exported. - # Corresponds to the JSON property `callSetIds` - # @return [Array] - attr_accessor :call_set_ids - - # The format for the exported data. - # Corresponds to the JSON property `format` - # @return [String] - attr_accessor :format - - # Required. The Google Cloud project ID that owns the destination - # BigQuery dataset. The caller must have WRITE access to this project. This - # project will also own the resulting export job. 
- # Corresponds to the JSON property `projectId` - # @return [String] - attr_accessor :project_id - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @bigquery_dataset = args[:bigquery_dataset] if args.key?(:bigquery_dataset) - @bigquery_table = args[:bigquery_table] if args.key?(:bigquery_table) - @call_set_ids = args[:call_set_ids] if args.key?(:call_set_ids) - @format = args[:format] if args.key?(:format) - @project_id = args[:project_id] if args.key?(:project_id) - end - end - - # Represents an expression text. Example: - # title: "User account presence" - # description: "Determines whether the request has a user account" - # expression: "size(request.user) > 0" - class Expr - include Google::Apis::Core::Hashable - - # An optional description of the expression. This is a longer text which - # describes the expression, e.g. when hovered over it in a UI. - # Corresponds to the JSON property `description` - # @return [String] - attr_accessor :description - - # Textual representation of an expression in - # Common Expression Language syntax. - # The application context of the containing message determines which - # well-known feature set of CEL is supported. - # Corresponds to the JSON property `expression` - # @return [String] - attr_accessor :expression - - # An optional string indicating the location of the expression for error - # reporting, e.g. a file name and a position in the file. - # Corresponds to the JSON property `location` - # @return [String] - attr_accessor :location - - # An optional title for the expression, i.e. a short string describing - # its purpose. This can be used e.g. in UIs which allow to enter the - # expression. - # Corresponds to the JSON property `title` - # @return [String] - attr_accessor :title - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @description = args[:description] if args.key?(:description) - @expression = args[:expression] if args.key?(:expression) - @location = args[:location] if args.key?(:location) - @title = args[:title] if args.key?(:title) - end - end - - # - class ExternalId - include Google::Apis::Core::Hashable - - # The id used by the source of this data. - # Corresponds to the JSON property `id` - # @return [String] - attr_accessor :id - - # The name of the source of this data. - # Corresponds to the JSON property `sourceName` - # @return [String] - attr_accessor :source_name - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @id = args[:id] if args.key?(:id) - @source_name = args[:source_name] if args.key?(:source_name) - end - end - # An event generated when the execution of a pipeline has failed. Note # that other events can continue to occur after this event. class FailedEvent @@ -1072,69 +276,6 @@ module Google end end - # Request message for `GetIamPolicy` method. - class GetIamPolicyRequest - include Google::Apis::Core::Hashable - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - end - end - - # The read group set import request. - class ImportReadGroupSetsRequest - include Google::Apis::Core::Hashable - - # Required. The ID of the dataset these read group sets will belong to. The - # caller must have WRITE permissions to this dataset. 
- # Corresponds to the JSON property `datasetId` - # @return [String] - attr_accessor :dataset_id - - # The partition strategy describes how read groups are partitioned into read - # group sets. - # Corresponds to the JSON property `partitionStrategy` - # @return [String] - attr_accessor :partition_strategy - - # The reference set to which the imported read group sets are aligned to, if - # any. The reference names of this reference set must be a superset of those - # found in the imported file headers. If no reference set id is provided, a - # best effort is made to associate with a matching reference set. - # Corresponds to the JSON property `referenceSetId` - # @return [String] - attr_accessor :reference_set_id - - # A list of URIs pointing at [BAM - # files](https://samtools.github.io/hts-specs/SAMv1.pdf) - # in Google Cloud Storage. - # Those URIs can include wildcards (*), but do not add or remove - # matching files before import has completed. - # Note that Google Cloud Storage object listing is only eventually - # consistent: files added may be not be immediately visible to - # everyone. Thus, if using a wildcard it is preferable not to start - # the import immediately after the files are created. - # Corresponds to the JSON property `sourceUris` - # @return [Array] - attr_accessor :source_uris - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @dataset_id = args[:dataset_id] if args.key?(:dataset_id) - @partition_strategy = args[:partition_strategy] if args.key?(:partition_strategy) - @reference_set_id = args[:reference_set_id] if args.key?(:reference_set_id) - @source_uris = args[:source_uris] if args.key?(:source_uris) - end - end - # The read group set import response. class ImportReadGroupSetsResponse include Google::Apis::Core::Hashable @@ -1154,63 +295,6 @@ module Google end end - # The variant data import request. - class ImportVariantsRequest - include Google::Apis::Core::Hashable - - # The format of the variant data being imported. If unspecified, defaults to - # to `VCF`. - # Corresponds to the JSON property `format` - # @return [String] - attr_accessor :format - - # A mapping between info field keys and the InfoMergeOperations to - # be performed on them. This is plumbed down to the MergeVariantRequests - # generated by the resulting import job. - # Corresponds to the JSON property `infoMergeConfig` - # @return [Hash] - attr_accessor :info_merge_config - - # Convert reference names to the canonical representation. - # hg19 haploytypes (those reference names containing "_hap") - # are not modified in any way. - # All other reference names are modified according to the following rules: - # The reference name is capitalized. - # The "chr" prefix is dropped for all autosomes and sex chromsomes. - # For example "chr17" becomes "17" and "chrX" becomes "X". - # All mitochondrial chromosomes ("chrM", "chrMT", etc) become "MT". - # Corresponds to the JSON property `normalizeReferenceNames` - # @return [Boolean] - attr_accessor :normalize_reference_names - alias_method :normalize_reference_names?, :normalize_reference_names - - # A list of URIs referencing variant files in Google Cloud Storage. URIs can - # include wildcards [as described - # here](https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames). - # Note that recursive wildcards ('**') are not supported. - # Corresponds to the JSON property `sourceUris` - # @return [Array] - attr_accessor :source_uris - - # Required. 
The variant set to which variant data should be imported. - # Corresponds to the JSON property `variantSetId` - # @return [String] - attr_accessor :variant_set_id - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @format = args[:format] if args.key?(:format) - @info_merge_config = args[:info_merge_config] if args.key?(:info_merge_config) - @normalize_reference_names = args[:normalize_reference_names] if args.key?(:normalize_reference_names) - @source_uris = args[:source_uris] if args.key?(:source_uris) - @variant_set_id = args[:variant_set_id] if args.key?(:variant_set_id) - end - end - # The variant data import response. class ImportVariantsResponse include Google::Apis::Core::Hashable @@ -1230,146 +314,6 @@ module Google end end - # A linear alignment can be represented by one CIGAR string. Describes the - # mapped position and local alignment of the read to the reference. - class LinearAlignment - include Google::Apis::Core::Hashable - - # Represents the local alignment of this sequence (alignment matches, indels, - # etc) against the reference. - # Corresponds to the JSON property `cigar` - # @return [Array] - attr_accessor :cigar - - # The mapping quality of this alignment. Represents how likely - # the read maps to this position as opposed to other locations. - # Specifically, this is -10 log10 Pr(mapping position is wrong), rounded to - # the nearest integer. - # Corresponds to the JSON property `mappingQuality` - # @return [Fixnum] - attr_accessor :mapping_quality - - # An abstraction for referring to a genomic position, in relation to some - # already known reference. For now, represents a genomic position as a - # reference name, a base number on that reference (0-based), and a - # determination of forward or reverse strand. - # Corresponds to the JSON property `position` - # @return [Google::Apis::GenomicsV1::Position] - attr_accessor :position - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @cigar = args[:cigar] if args.key?(:cigar) - @mapping_quality = args[:mapping_quality] if args.key?(:mapping_quality) - @position = args[:position] if args.key?(:position) - end - end - - # - class ListBasesResponse - include Google::Apis::Core::Hashable - - # The continuation token, which is used to page through large result sets. - # Provide this value in a subsequent request to return the next page of - # results. This field will be empty if there aren't any additional results. - # Corresponds to the JSON property `nextPageToken` - # @return [String] - attr_accessor :next_page_token - - # The offset position (0-based) of the given `sequence` from the - # start of this `Reference`. This value will differ for each page - # in a paginated request. - # Corresponds to the JSON property `offset` - # @return [Fixnum] - attr_accessor :offset - - # A substring of the bases that make up this reference. - # Corresponds to the JSON property `sequence` - # @return [String] - attr_accessor :sequence - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @next_page_token = args[:next_page_token] if args.key?(:next_page_token) - @offset = args[:offset] if args.key?(:offset) - @sequence = args[:sequence] if args.key?(:sequence) - end - end - - # - class ListCoverageBucketsResponse - include Google::Apis::Core::Hashable - - # The length of each coverage bucket in base pairs. 
Note that buckets at the - # end of a reference sequence may be shorter. This value is omitted if the - # bucket width is infinity (the default behaviour, with no range or - # `targetBucketWidth`). - # Corresponds to the JSON property `bucketWidth` - # @return [Fixnum] - attr_accessor :bucket_width - - # The coverage buckets. The list of buckets is sparse; a bucket with 0 - # overlapping reads is not returned. A bucket never crosses more than one - # reference sequence. Each bucket has width `bucketWidth`, unless - # its end is the end of the reference sequence. - # Corresponds to the JSON property `coverageBuckets` - # @return [Array] - attr_accessor :coverage_buckets - - # The continuation token, which is used to page through large result sets. - # Provide this value in a subsequent request to return the next page of - # results. This field will be empty if there aren't any additional results. - # Corresponds to the JSON property `nextPageToken` - # @return [String] - attr_accessor :next_page_token - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @bucket_width = args[:bucket_width] if args.key?(:bucket_width) - @coverage_buckets = args[:coverage_buckets] if args.key?(:coverage_buckets) - @next_page_token = args[:next_page_token] if args.key?(:next_page_token) - end - end - - # The dataset list response. - class ListDatasetsResponse - include Google::Apis::Core::Hashable - - # The list of matching Datasets. - # Corresponds to the JSON property `datasets` - # @return [Array] - attr_accessor :datasets - - # The continuation token, which is used to page through large result sets. - # Provide this value in a subsequent request to return the next page of - # results. This field will be empty if there aren't any additional results. - # Corresponds to the JSON property `nextPageToken` - # @return [String] - attr_accessor :next_page_token - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @datasets = args[:datasets] if args.key?(:datasets) - @next_page_token = args[:next_page_token] if args.key?(:next_page_token) - end - end - # The response message for Operations.ListOperations. class ListOperationsResponse include Google::Apis::Core::Hashable @@ -1395,38 +339,6 @@ module Google end end - # - class MergeVariantsRequest - include Google::Apis::Core::Hashable - - # A mapping between info field keys and the InfoMergeOperations to - # be performed on them. - # Corresponds to the JSON property `infoMergeConfig` - # @return [Hash] - attr_accessor :info_merge_config - - # The destination variant set. - # Corresponds to the JSON property `variantSetId` - # @return [String] - attr_accessor :variant_set_id - - # The variants to be merged with existing variants. - # Corresponds to the JSON property `variants` - # @return [Array] - attr_accessor :variants - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @info_merge_config = args[:info_merge_config] if args.key?(:info_merge_config) - @variant_set_id = args[:variant_set_id] if args.key?(:variant_set_id) - @variants = args[:variants] if args.key?(:variants) - end - end - # This resource represents a long-running operation that is the result of a # network API call. class Operation @@ -1622,164 +534,6 @@ module Google end end - # Defines an Identity and Access Management (IAM) policy. It is used to - # specify access control policies for Cloud Platform resources. 
- # A `Policy` consists of a list of `bindings`. A `binding` binds a list of - # `members` to a `role`, where the members can be user accounts, Google groups, - # Google domains, and service accounts. A `role` is a named list of permissions - # defined by IAM. - # **JSON Example** - # ` - # "bindings": [ - # ` - # "role": "roles/owner", - # "members": [ - # "user:mike@example.com", - # "group:admins@example.com", - # "domain:google.com", - # "serviceAccount:my-other-app@appspot.gserviceaccount.com" - # ] - # `, - # ` - # "role": "roles/viewer", - # "members": ["user:sean@example.com"] - # ` - # ] - # ` - # **YAML Example** - # bindings: - # - members: - # - user:mike@example.com - # - group:admins@example.com - # - domain:google.com - # - serviceAccount:my-other-app@appspot.gserviceaccount.com - # role: roles/owner - # - members: - # - user:sean@example.com - # role: roles/viewer - # For a description of IAM and its features, see the - # [IAM developer's guide](https://cloud.google.com/iam/docs). - class Policy - include Google::Apis::Core::Hashable - - # Associates a list of `members` to a `role`. - # `bindings` with no members will result in an error. - # Corresponds to the JSON property `bindings` - # @return [Array] - attr_accessor :bindings - - # `etag` is used for optimistic concurrency control as a way to help - # prevent simultaneous updates of a policy from overwriting each other. - # It is strongly suggested that systems make use of the `etag` in the - # read-modify-write cycle to perform policy updates in order to avoid race - # conditions: An `etag` is returned in the response to `getIamPolicy`, and - # systems are expected to put that etag in the request to `setIamPolicy` to - # ensure that their change will be applied to the same version of the policy. - # If no `etag` is provided in the call to `setIamPolicy`, then the existing - # policy is overwritten blindly. - # Corresponds to the JSON property `etag` - # NOTE: Values are automatically base64 encoded/decoded in the client library. - # @return [String] - attr_accessor :etag - - # Deprecated. - # Corresponds to the JSON property `version` - # @return [Fixnum] - attr_accessor :version - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @bindings = args[:bindings] if args.key?(:bindings) - @etag = args[:etag] if args.key?(:etag) - @version = args[:version] if args.key?(:version) - end - end - - # An abstraction for referring to a genomic position, in relation to some - # already known reference. For now, represents a genomic position as a - # reference name, a base number on that reference (0-based), and a - # determination of forward or reverse strand. - class Position - include Google::Apis::Core::Hashable - - # The 0-based offset from the start of the forward strand for that reference. - # Corresponds to the JSON property `position` - # @return [Fixnum] - attr_accessor :position - - # The name of the reference in whatever reference set is being used. - # Corresponds to the JSON property `referenceName` - # @return [String] - attr_accessor :reference_name - - # Whether this position is on the reverse strand, as opposed to the forward - # strand. 
- # Corresponds to the JSON property `reverseStrand` - # @return [Boolean] - attr_accessor :reverse_strand - alias_method :reverse_strand?, :reverse_strand - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @position = args[:position] if args.key?(:position) - @reference_name = args[:reference_name] if args.key?(:reference_name) - @reverse_strand = args[:reverse_strand] if args.key?(:reverse_strand) - end - end - - # - class Program - include Google::Apis::Core::Hashable - - # The command line used to run this program. - # Corresponds to the JSON property `commandLine` - # @return [String] - attr_accessor :command_line - - # The user specified locally unique ID of the program. Used along with - # `prevProgramId` to define an ordering between programs. - # Corresponds to the JSON property `id` - # @return [String] - attr_accessor :id - - # The display name of the program. This is typically the colloquial name of - # the tool used, for example 'bwa' or 'picard'. - # Corresponds to the JSON property `name` - # @return [String] - attr_accessor :name - - # The ID of the program run before this one. - # Corresponds to the JSON property `prevProgramId` - # @return [String] - attr_accessor :prev_program_id - - # The version of the program run. - # Corresponds to the JSON property `version` - # @return [String] - attr_accessor :version - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @command_line = args[:command_line] if args.key?(:command_line) - @id = args[:id] if args.key?(:id) - @name = args[:name] if args.key?(:name) - @prev_program_id = args[:prev_program_id] if args.key?(:prev_program_id) - @version = args[:version] if args.key?(:version) - end - end - # An event generated when the worker starts pulling an image. class PullStartedEvent include Google::Apis::Core::Hashable @@ -1818,574 +572,6 @@ module Google end end - # A 0-based half-open genomic coordinate range for search requests. - class Range - include Google::Apis::Core::Hashable - - # The end position of the range on the reference, 0-based exclusive. - # Corresponds to the JSON property `end` - # @return [Fixnum] - attr_accessor :end - - # The reference sequence name, for example `chr1`, - # `1`, or `chrX`. - # Corresponds to the JSON property `referenceName` - # @return [String] - attr_accessor :reference_name - - # The start position of the range on the reference, 0-based inclusive. - # Corresponds to the JSON property `start` - # @return [Fixnum] - attr_accessor :start - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @end = args[:end] if args.key?(:end) - @reference_name = args[:reference_name] if args.key?(:reference_name) - @start = args[:start] if args.key?(:start) - end - end - - # A read alignment describes a linear alignment of a string of DNA to a - # reference sequence, in addition to metadata - # about the fragment (the molecule of DNA sequenced) and the read (the bases - # which were read by the sequencer). A read is equivalent to a line in a SAM - # file. A read belongs to exactly one read group and exactly one - # read group set. - # ### Reverse-stranded reads - # Mapped reads (reads having a non-null `alignment`) can be aligned to either - # the forward or the reverse strand of their associated reference. Strandedness - # of a mapped read is encoded by `alignment.position.reverseStrand`. 
- # If we consider the reference to be a forward-stranded coordinate space of - # `[0, reference.length)` with `0` as the left-most position and - # `reference.length` as the right-most position, reads are always aligned left - # to right. That is, `alignment.position.position` always refers to the - # left-most reference coordinate and `alignment.cigar` describes the alignment - # of this read to the reference from left to right. All per-base fields such as - # `alignedSequence` and `alignedQuality` share this same left-to-right - # orientation; this is true of reads which are aligned to either strand. For - # reverse-stranded reads, this means that `alignedSequence` is the reverse - # complement of the bases that were originally reported by the sequencing - # machine. - # ### Generating a reference-aligned sequence string - # When interacting with mapped reads, it's often useful to produce a string - # representing the local alignment of the read to reference. The following - # pseudocode demonstrates one way of doing this: - # out = "" - # offset = 0 - # for c in read.alignment.cigar ` - # switch c.operation ` - # case "ALIGNMENT_MATCH", "SEQUENCE_MATCH", "SEQUENCE_MISMATCH": - # out += read.alignedSequence[offset:offset+c.operationLength] - # offset += c.operationLength - # break - # case "CLIP_SOFT", "INSERT": - # offset += c.operationLength - # break - # case "PAD": - # out += repeat("*", c.operationLength) - # break - # case "DELETE": - # out += repeat("-", c.operationLength) - # break - # case "SKIP": - # out += repeat(" ", c.operationLength) - # break - # case "CLIP_HARD": - # break - # ` - # ` - # return out - # ### Converting to SAM's CIGAR string - # The following pseudocode generates a SAM CIGAR string from the - # `cigar` field. Note that this is a lossy conversion - # (`cigar.referenceSequence` is lost). - # cigarMap = ` - # "ALIGNMENT_MATCH": "M", - # "INSERT": "I", - # "DELETE": "D", - # "SKIP": "N", - # "CLIP_SOFT": "S", - # "CLIP_HARD": "H", - # "PAD": "P", - # "SEQUENCE_MATCH": "=", - # "SEQUENCE_MISMATCH": "X", - # ` - # cigarStr = "" - # for c in read.alignment.cigar ` - # cigarStr += c.operationLength + cigarMap[c.operation] - # ` - # return cigarStr - class Read - include Google::Apis::Core::Hashable - - # The quality of the read sequence contained in this alignment record - # (equivalent to QUAL in SAM). - # `alignedSequence` and `alignedQuality` may be shorter than the full read - # sequence and quality. This will occur if the alignment is part of a - # chimeric alignment, or if the read was trimmed. When this occurs, the CIGAR - # for this read will begin/end with a hard clip operator that will indicate - # the length of the excised sequence. - # Corresponds to the JSON property `alignedQuality` - # @return [Array] - attr_accessor :aligned_quality - - # The bases of the read sequence contained in this alignment record, - # **without CIGAR operations applied** (equivalent to SEQ in SAM). - # `alignedSequence` and `alignedQuality` may be - # shorter than the full read sequence and quality. This will occur if the - # alignment is part of a chimeric alignment, or if the read was trimmed. When - # this occurs, the CIGAR for this read will begin/end with a hard clip - # operator that will indicate the length of the excised sequence. - # Corresponds to the JSON property `alignedSequence` - # @return [String] - attr_accessor :aligned_sequence - - # A linear alignment can be represented by one CIGAR string. 
Describes the - # mapped position and local alignment of the read to the reference. - # Corresponds to the JSON property `alignment` - # @return [Google::Apis::GenomicsV1::LinearAlignment] - attr_accessor :alignment - - # The fragment is a PCR or optical duplicate (SAM flag 0x400). - # Corresponds to the JSON property `duplicateFragment` - # @return [Boolean] - attr_accessor :duplicate_fragment - alias_method :duplicate_fragment?, :duplicate_fragment - - # Whether this read did not pass filters, such as platform or vendor quality - # controls (SAM flag 0x200). - # Corresponds to the JSON property `failedVendorQualityChecks` - # @return [Boolean] - attr_accessor :failed_vendor_quality_checks - alias_method :failed_vendor_quality_checks?, :failed_vendor_quality_checks - - # The observed length of the fragment, equivalent to TLEN in SAM. - # Corresponds to the JSON property `fragmentLength` - # @return [Fixnum] - attr_accessor :fragment_length - - # The fragment name. Equivalent to QNAME (query template name) in SAM. - # Corresponds to the JSON property `fragmentName` - # @return [String] - attr_accessor :fragment_name - - # The server-generated read ID, unique across all reads. This is different - # from the `fragmentName`. - # Corresponds to the JSON property `id` - # @return [String] - attr_accessor :id - - # A map of additional read alignment information. This must be of the form - # map (string key mapping to a list of string values). - # Corresponds to the JSON property `info` - # @return [Hash>] - attr_accessor :info - - # An abstraction for referring to a genomic position, in relation to some - # already known reference. For now, represents a genomic position as a - # reference name, a base number on that reference (0-based), and a - # determination of forward or reverse strand. - # Corresponds to the JSON property `nextMatePosition` - # @return [Google::Apis::GenomicsV1::Position] - attr_accessor :next_mate_position - - # The number of reads in the fragment (extension to SAM flag 0x1). - # Corresponds to the JSON property `numberReads` - # @return [Fixnum] - attr_accessor :number_reads - - # The orientation and the distance between reads from the fragment are - # consistent with the sequencing protocol (SAM flag 0x2). - # Corresponds to the JSON property `properPlacement` - # @return [Boolean] - attr_accessor :proper_placement - alias_method :proper_placement?, :proper_placement - - # The ID of the read group this read belongs to. A read belongs to exactly - # one read group. This is a server-generated ID which is distinct from SAM's - # RG tag (for that value, see - # ReadGroup.name). - # Corresponds to the JSON property `readGroupId` - # @return [String] - attr_accessor :read_group_id - - # The ID of the read group set this read belongs to. A read belongs to - # exactly one read group set. - # Corresponds to the JSON property `readGroupSetId` - # @return [String] - attr_accessor :read_group_set_id - - # The read number in sequencing. 0-based and less than numberReads. This - # field replaces SAM flag 0x40 and 0x80. - # Corresponds to the JSON property `readNumber` - # @return [Fixnum] - attr_accessor :read_number - - # Whether this alignment is secondary. Equivalent to SAM flag 0x100. - # A secondary alignment represents an alternative to the primary alignment - # for this read. Aligners may return secondary alignments if a read can map - # ambiguously to multiple coordinates in the genome. 
By convention, each read - # has one and only one alignment where both `secondaryAlignment` - # and `supplementaryAlignment` are false. - # Corresponds to the JSON property `secondaryAlignment` - # @return [Boolean] - attr_accessor :secondary_alignment - alias_method :secondary_alignment?, :secondary_alignment - - # Whether this alignment is supplementary. Equivalent to SAM flag 0x800. - # Supplementary alignments are used in the representation of a chimeric - # alignment. In a chimeric alignment, a read is split into multiple - # linear alignments that map to different reference contigs. The first - # linear alignment in the read will be designated as the representative - # alignment; the remaining linear alignments will be designated as - # supplementary alignments. These alignments may have different mapping - # quality scores. In each linear alignment in a chimeric alignment, the read - # will be hard clipped. The `alignedSequence` and - # `alignedQuality` fields in the alignment record will only - # represent the bases for its respective linear alignment. - # Corresponds to the JSON property `supplementaryAlignment` - # @return [Boolean] - attr_accessor :supplementary_alignment - alias_method :supplementary_alignment?, :supplementary_alignment - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @aligned_quality = args[:aligned_quality] if args.key?(:aligned_quality) - @aligned_sequence = args[:aligned_sequence] if args.key?(:aligned_sequence) - @alignment = args[:alignment] if args.key?(:alignment) - @duplicate_fragment = args[:duplicate_fragment] if args.key?(:duplicate_fragment) - @failed_vendor_quality_checks = args[:failed_vendor_quality_checks] if args.key?(:failed_vendor_quality_checks) - @fragment_length = args[:fragment_length] if args.key?(:fragment_length) - @fragment_name = args[:fragment_name] if args.key?(:fragment_name) - @id = args[:id] if args.key?(:id) - @info = args[:info] if args.key?(:info) - @next_mate_position = args[:next_mate_position] if args.key?(:next_mate_position) - @number_reads = args[:number_reads] if args.key?(:number_reads) - @proper_placement = args[:proper_placement] if args.key?(:proper_placement) - @read_group_id = args[:read_group_id] if args.key?(:read_group_id) - @read_group_set_id = args[:read_group_set_id] if args.key?(:read_group_set_id) - @read_number = args[:read_number] if args.key?(:read_number) - @secondary_alignment = args[:secondary_alignment] if args.key?(:secondary_alignment) - @supplementary_alignment = args[:supplementary_alignment] if args.key?(:supplementary_alignment) - end - end - - # A read group is all the data that's processed the same way by the sequencer. - class ReadGroup - include Google::Apis::Core::Hashable - - # The dataset to which this read group belongs. - # Corresponds to the JSON property `datasetId` - # @return [String] - attr_accessor :dataset_id - - # A free-form text description of this read group. - # Corresponds to the JSON property `description` - # @return [String] - attr_accessor :description - - # The experiment used to generate this read group. - # Corresponds to the JSON property `experiment` - # @return [Google::Apis::GenomicsV1::Experiment] - attr_accessor :experiment - - # The server-generated read group ID, unique for all read groups. - # Note: This is different than the @RG ID field in the SAM spec. For that - # value, see name. 
- # Corresponds to the JSON property `id` - # @return [String] - attr_accessor :id - - # A map of additional read group information. This must be of the form - # map (string key mapping to a list of string values). - # Corresponds to the JSON property `info` - # @return [Hash>] - attr_accessor :info - - # The read group name. This corresponds to the @RG ID field in the SAM spec. - # Corresponds to the JSON property `name` - # @return [String] - attr_accessor :name - - # The predicted insert size of this read group. The insert size is the length - # the sequenced DNA fragment from end-to-end, not including the adapters. - # Corresponds to the JSON property `predictedInsertSize` - # @return [Fixnum] - attr_accessor :predicted_insert_size - - # The programs used to generate this read group. Programs are always - # identical for all read groups within a read group set. For this reason, - # only the first read group in a returned set will have this field - # populated. - # Corresponds to the JSON property `programs` - # @return [Array] - attr_accessor :programs - - # The reference set the reads in this read group are aligned to. - # Corresponds to the JSON property `referenceSetId` - # @return [String] - attr_accessor :reference_set_id - - # A client-supplied sample identifier for the reads in this read group. - # Corresponds to the JSON property `sampleId` - # @return [String] - attr_accessor :sample_id - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @dataset_id = args[:dataset_id] if args.key?(:dataset_id) - @description = args[:description] if args.key?(:description) - @experiment = args[:experiment] if args.key?(:experiment) - @id = args[:id] if args.key?(:id) - @info = args[:info] if args.key?(:info) - @name = args[:name] if args.key?(:name) - @predicted_insert_size = args[:predicted_insert_size] if args.key?(:predicted_insert_size) - @programs = args[:programs] if args.key?(:programs) - @reference_set_id = args[:reference_set_id] if args.key?(:reference_set_id) - @sample_id = args[:sample_id] if args.key?(:sample_id) - end - end - - # A read group set is a logical collection of read groups, which are - # collections of reads produced by a sequencer. A read group set typically - # models reads corresponding to one sample, sequenced one way, and aligned one - # way. - # * A read group set belongs to one dataset. - # * A read group belongs to one read group set. - # * A read belongs to one read group. - class ReadGroupSet - include Google::Apis::Core::Hashable - - # The dataset to which this read group set belongs. - # Corresponds to the JSON property `datasetId` - # @return [String] - attr_accessor :dataset_id - - # The filename of the original source file for this read group set, if any. - # Corresponds to the JSON property `filename` - # @return [String] - attr_accessor :filename - - # The server-generated read group set ID, unique for all read group sets. - # Corresponds to the JSON property `id` - # @return [String] - attr_accessor :id - - # A map of additional read group set information. - # Corresponds to the JSON property `info` - # @return [Hash>] - attr_accessor :info - - # The read group set name. By default this will be initialized to the sample - # name of the sequenced data contained in this set. - # Corresponds to the JSON property `name` - # @return [String] - attr_accessor :name - - # The read groups in this set. There are typically 1-10 read groups in a read - # group set. 
- # Corresponds to the JSON property `readGroups` - # @return [Array] - attr_accessor :read_groups - - # The reference set to which the reads in this read group set are aligned. - # Corresponds to the JSON property `referenceSetId` - # @return [String] - attr_accessor :reference_set_id - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @dataset_id = args[:dataset_id] if args.key?(:dataset_id) - @filename = args[:filename] if args.key?(:filename) - @id = args[:id] if args.key?(:id) - @info = args[:info] if args.key?(:info) - @name = args[:name] if args.key?(:name) - @read_groups = args[:read_groups] if args.key?(:read_groups) - @reference_set_id = args[:reference_set_id] if args.key?(:reference_set_id) - end - end - - # A reference is a canonical assembled DNA sequence, intended to act as a - # reference coordinate space for other genomic annotations. A single reference - # might represent the human chromosome 1 or mitochandrial DNA, for instance. A - # reference belongs to one or more reference sets. - class Reference - include Google::Apis::Core::Hashable - - # The server-generated reference ID, unique across all references. - # Corresponds to the JSON property `id` - # @return [String] - attr_accessor :id - - # The length of this reference's sequence. - # Corresponds to the JSON property `length` - # @return [Fixnum] - attr_accessor :length - - # MD5 of the upper-case sequence excluding all whitespace characters (this - # is equivalent to SQ:M5 in SAM). This value is represented in lower case - # hexadecimal format. - # Corresponds to the JSON property `md5checksum` - # @return [String] - attr_accessor :md5checksum - - # The name of this reference, for example `22`. - # Corresponds to the JSON property `name` - # @return [String] - attr_accessor :name - - # ID from http://www.ncbi.nlm.nih.gov/taxonomy. For example, 9606 for human. - # Corresponds to the JSON property `ncbiTaxonId` - # @return [Fixnum] - attr_accessor :ncbi_taxon_id - - # All known corresponding accession IDs in INSDC (GenBank/ENA/DDBJ) ideally - # with a version number, for example `GCF_000001405.26`. - # Corresponds to the JSON property `sourceAccessions` - # @return [Array] - attr_accessor :source_accessions - - # The URI from which the sequence was obtained. Typically specifies a FASTA - # format file. - # Corresponds to the JSON property `sourceUri` - # @return [String] - attr_accessor :source_uri - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @id = args[:id] if args.key?(:id) - @length = args[:length] if args.key?(:length) - @md5checksum = args[:md5checksum] if args.key?(:md5checksum) - @name = args[:name] if args.key?(:name) - @ncbi_taxon_id = args[:ncbi_taxon_id] if args.key?(:ncbi_taxon_id) - @source_accessions = args[:source_accessions] if args.key?(:source_accessions) - @source_uri = args[:source_uri] if args.key?(:source_uri) - end - end - - # ReferenceBound records an upper bound for the starting coordinate of - # variants in a particular reference. - class ReferenceBound - include Google::Apis::Core::Hashable - - # The name of the reference associated with this reference bound. - # Corresponds to the JSON property `referenceName` - # @return [String] - attr_accessor :reference_name - - # An upper bound (inclusive) on the starting coordinate of any - # variant in the reference sequence. 
- # Corresponds to the JSON property `upperBound` - # @return [Fixnum] - attr_accessor :upper_bound - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @reference_name = args[:reference_name] if args.key?(:reference_name) - @upper_bound = args[:upper_bound] if args.key?(:upper_bound) - end - end - - # A reference set is a set of references which typically comprise a reference - # assembly for a species, such as `GRCh38` which is representative - # of the human genome. A reference set defines a common coordinate space for - # comparing reference-aligned experimental data. A reference set contains 1 or - # more references. - class ReferenceSet - include Google::Apis::Core::Hashable - - # Public id of this reference set, such as `GRCh37`. - # Corresponds to the JSON property `assemblyId` - # @return [String] - attr_accessor :assembly_id - - # Free text description of this reference set. - # Corresponds to the JSON property `description` - # @return [String] - attr_accessor :description - - # The server-generated reference set ID, unique across all reference sets. - # Corresponds to the JSON property `id` - # @return [String] - attr_accessor :id - - # Order-independent MD5 checksum which identifies this reference set. The - # checksum is computed by sorting all lower case hexidecimal string - # `reference.md5checksum` (for all reference in this set) in - # ascending lexicographic order, concatenating, and taking the MD5 of that - # value. The resulting value is represented in lower case hexadecimal format. - # Corresponds to the JSON property `md5checksum` - # @return [String] - attr_accessor :md5checksum - - # ID from http://www.ncbi.nlm.nih.gov/taxonomy (for example, 9606 for human) - # indicating the species which this reference set is intended to model. Note - # that contained references may specify a different `ncbiTaxonId`, as - # assemblies may contain reference sequences which do not belong to the - # modeled species, for example EBV in a human reference genome. - # Corresponds to the JSON property `ncbiTaxonId` - # @return [Fixnum] - attr_accessor :ncbi_taxon_id - - # The IDs of the reference objects that are part of this set. - # `Reference.md5checksum` must be unique within this set. - # Corresponds to the JSON property `referenceIds` - # @return [Array] - attr_accessor :reference_ids - - # All known corresponding accession IDs in INSDC (GenBank/ENA/DDBJ) ideally - # with a version number, for example `NC_000001.11`. - # Corresponds to the JSON property `sourceAccessions` - # @return [Array] - attr_accessor :source_accessions - - # The URI from which the references were obtained. - # Corresponds to the JSON property `sourceUri` - # @return [String] - attr_accessor :source_uri - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @assembly_id = args[:assembly_id] if args.key?(:assembly_id) - @description = args[:description] if args.key?(:description) - @id = args[:id] if args.key?(:id) - @md5checksum = args[:md5checksum] if args.key?(:md5checksum) - @ncbi_taxon_id = args[:ncbi_taxon_id] if args.key?(:ncbi_taxon_id) - @reference_ids = args[:reference_ids] if args.key?(:reference_ids) - @source_accessions = args[:source_accessions] if args.key?(:source_accessions) - @source_uri = args[:source_uri] if args.key?(:source_uri) - end - end - # The response to the RunPipeline method, returned in the operation's result # field on success. 
class RunPipelineResponse @@ -2422,797 +608,6 @@ module Google end end - # - class SearchAnnotationSetsRequest - include Google::Apis::Core::Hashable - - # Required. The dataset IDs to search within. Caller must have `READ` access - # to these datasets. - # Corresponds to the JSON property `datasetIds` - # @return [Array] - attr_accessor :dataset_ids - - # Only return annotations sets for which a substring of the name matches this - # string (case insensitive). - # Corresponds to the JSON property `name` - # @return [String] - attr_accessor :name - - # The maximum number of results to return in a single page. If unspecified, - # defaults to 128. The maximum value is 1024. - # Corresponds to the JSON property `pageSize` - # @return [Fixnum] - attr_accessor :page_size - - # The continuation token, which is used to page through large result sets. - # To get the next page of results, set this parameter to the value of - # `nextPageToken` from the previous response. - # Corresponds to the JSON property `pageToken` - # @return [String] - attr_accessor :page_token - - # If specified, only annotation sets associated with the given reference set - # are returned. - # Corresponds to the JSON property `referenceSetId` - # @return [String] - attr_accessor :reference_set_id - - # If specified, only annotation sets that have any of these types are - # returned. - # Corresponds to the JSON property `types` - # @return [Array] - attr_accessor :types - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @dataset_ids = args[:dataset_ids] if args.key?(:dataset_ids) - @name = args[:name] if args.key?(:name) - @page_size = args[:page_size] if args.key?(:page_size) - @page_token = args[:page_token] if args.key?(:page_token) - @reference_set_id = args[:reference_set_id] if args.key?(:reference_set_id) - @types = args[:types] if args.key?(:types) - end - end - - # - class SearchAnnotationSetsResponse - include Google::Apis::Core::Hashable - - # The matching annotation sets. - # Corresponds to the JSON property `annotationSets` - # @return [Array] - attr_accessor :annotation_sets - - # The continuation token, which is used to page through large result sets. - # Provide this value in a subsequent request to return the next page of - # results. This field will be empty if there aren't any additional results. - # Corresponds to the JSON property `nextPageToken` - # @return [String] - attr_accessor :next_page_token - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @annotation_sets = args[:annotation_sets] if args.key?(:annotation_sets) - @next_page_token = args[:next_page_token] if args.key?(:next_page_token) - end - end - - # - class SearchAnnotationsRequest - include Google::Apis::Core::Hashable - - # Required. The annotation sets to search within. The caller must have - # `READ` access to these annotation sets. - # All queried annotation sets must have the same type. - # Corresponds to the JSON property `annotationSetIds` - # @return [Array] - attr_accessor :annotation_set_ids - - # The end position of the range on the reference, 0-based exclusive. If - # referenceId or - # referenceName - # must be specified, Defaults to the length of the reference. - # Corresponds to the JSON property `end` - # @return [Fixnum] - attr_accessor :end - - # The maximum number of results to return in a single page. If unspecified, - # defaults to 256. The maximum value is 2048. 
- # Corresponds to the JSON property `pageSize` - # @return [Fixnum] - attr_accessor :page_size - - # The continuation token, which is used to page through large result sets. - # To get the next page of results, set this parameter to the value of - # `nextPageToken` from the previous response. - # Corresponds to the JSON property `pageToken` - # @return [String] - attr_accessor :page_token - - # The ID of the reference to query. - # Corresponds to the JSON property `referenceId` - # @return [String] - attr_accessor :reference_id - - # The name of the reference to query, within the reference set associated - # with this query. - # Corresponds to the JSON property `referenceName` - # @return [String] - attr_accessor :reference_name - - # The start position of the range on the reference, 0-based inclusive. If - # specified, - # referenceId or - # referenceName - # must be specified. Defaults to 0. - # Corresponds to the JSON property `start` - # @return [Fixnum] - attr_accessor :start - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @annotation_set_ids = args[:annotation_set_ids] if args.key?(:annotation_set_ids) - @end = args[:end] if args.key?(:end) - @page_size = args[:page_size] if args.key?(:page_size) - @page_token = args[:page_token] if args.key?(:page_token) - @reference_id = args[:reference_id] if args.key?(:reference_id) - @reference_name = args[:reference_name] if args.key?(:reference_name) - @start = args[:start] if args.key?(:start) - end - end - - # - class SearchAnnotationsResponse - include Google::Apis::Core::Hashable - - # The matching annotations. - # Corresponds to the JSON property `annotations` - # @return [Array] - attr_accessor :annotations - - # The continuation token, which is used to page through large result sets. - # Provide this value in a subsequent request to return the next page of - # results. This field will be empty if there aren't any additional results. - # Corresponds to the JSON property `nextPageToken` - # @return [String] - attr_accessor :next_page_token - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @annotations = args[:annotations] if args.key?(:annotations) - @next_page_token = args[:next_page_token] if args.key?(:next_page_token) - end - end - - # The call set search request. - class SearchCallSetsRequest - include Google::Apis::Core::Hashable - - # Only return call sets for which a substring of the name matches this - # string. - # Corresponds to the JSON property `name` - # @return [String] - attr_accessor :name - - # The maximum number of results to return in a single page. If unspecified, - # defaults to 1024. - # Corresponds to the JSON property `pageSize` - # @return [Fixnum] - attr_accessor :page_size - - # The continuation token, which is used to page through large result sets. - # To get the next page of results, set this parameter to the value of - # `nextPageToken` from the previous response. - # Corresponds to the JSON property `pageToken` - # @return [String] - attr_accessor :page_token - - # Restrict the query to call sets within the given variant sets. At least one - # ID must be provided. 
- # Corresponds to the JSON property `variantSetIds` - # @return [Array] - attr_accessor :variant_set_ids - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @name = args[:name] if args.key?(:name) - @page_size = args[:page_size] if args.key?(:page_size) - @page_token = args[:page_token] if args.key?(:page_token) - @variant_set_ids = args[:variant_set_ids] if args.key?(:variant_set_ids) - end - end - - # The call set search response. - class SearchCallSetsResponse - include Google::Apis::Core::Hashable - - # The list of matching call sets. - # Corresponds to the JSON property `callSets` - # @return [Array] - attr_accessor :call_sets - - # The continuation token, which is used to page through large result sets. - # Provide this value in a subsequent request to return the next page of - # results. This field will be empty if there aren't any additional results. - # Corresponds to the JSON property `nextPageToken` - # @return [String] - attr_accessor :next_page_token - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @call_sets = args[:call_sets] if args.key?(:call_sets) - @next_page_token = args[:next_page_token] if args.key?(:next_page_token) - end - end - - # The read group set search request. - class SearchReadGroupSetsRequest - include Google::Apis::Core::Hashable - - # Restricts this query to read group sets within the given datasets. At least - # one ID must be provided. - # Corresponds to the JSON property `datasetIds` - # @return [Array] - attr_accessor :dataset_ids - - # Only return read group sets for which a substring of the name matches this - # string. - # Corresponds to the JSON property `name` - # @return [String] - attr_accessor :name - - # The maximum number of results to return in a single page. If unspecified, - # defaults to 256. The maximum value is 1024. - # Corresponds to the JSON property `pageSize` - # @return [Fixnum] - attr_accessor :page_size - - # The continuation token, which is used to page through large result sets. - # To get the next page of results, set this parameter to the value of - # `nextPageToken` from the previous response. - # Corresponds to the JSON property `pageToken` - # @return [String] - attr_accessor :page_token - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @dataset_ids = args[:dataset_ids] if args.key?(:dataset_ids) - @name = args[:name] if args.key?(:name) - @page_size = args[:page_size] if args.key?(:page_size) - @page_token = args[:page_token] if args.key?(:page_token) - end - end - - # The read group set search response. - class SearchReadGroupSetsResponse - include Google::Apis::Core::Hashable - - # The continuation token, which is used to page through large result sets. - # Provide this value in a subsequent request to return the next page of - # results. This field will be empty if there aren't any additional results. - # Corresponds to the JSON property `nextPageToken` - # @return [String] - attr_accessor :next_page_token - - # The list of matching read group sets. 
- # Corresponds to the JSON property `readGroupSets` - # @return [Array] - attr_accessor :read_group_sets - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @next_page_token = args[:next_page_token] if args.key?(:next_page_token) - @read_group_sets = args[:read_group_sets] if args.key?(:read_group_sets) - end - end - - # The read search request. - class SearchReadsRequest - include Google::Apis::Core::Hashable - - # The end position of the range on the reference, 0-based exclusive. If - # specified, `referenceName` must also be specified. - # Corresponds to the JSON property `end` - # @return [Fixnum] - attr_accessor :end - - # The maximum number of results to return in a single page. If unspecified, - # defaults to 256. The maximum value is 2048. - # Corresponds to the JSON property `pageSize` - # @return [Fixnum] - attr_accessor :page_size - - # The continuation token, which is used to page through large result sets. - # To get the next page of results, set this parameter to the value of - # `nextPageToken` from the previous response. - # Corresponds to the JSON property `pageToken` - # @return [String] - attr_accessor :page_token - - # The IDs of the read groups within which to search for reads. All specified - # read groups must belong to the same read group sets. Must specify one of - # `readGroupSetIds` or `readGroupIds`. - # Corresponds to the JSON property `readGroupIds` - # @return [Array] - attr_accessor :read_group_ids - - # The IDs of the read groups sets within which to search for reads. All - # specified read group sets must be aligned against a common set of reference - # sequences; this defines the genomic coordinates for the query. Must specify - # one of `readGroupSetIds` or `readGroupIds`. - # Corresponds to the JSON property `readGroupSetIds` - # @return [Array] - attr_accessor :read_group_set_ids - - # The reference sequence name, for example `chr1`, `1`, or `chrX`. If set to - # `*`, only unmapped reads are returned. If unspecified, all reads (mapped - # and unmapped) are returned. - # Corresponds to the JSON property `referenceName` - # @return [String] - attr_accessor :reference_name - - # The start position of the range on the reference, 0-based inclusive. If - # specified, `referenceName` must also be specified. - # Corresponds to the JSON property `start` - # @return [Fixnum] - attr_accessor :start - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @end = args[:end] if args.key?(:end) - @page_size = args[:page_size] if args.key?(:page_size) - @page_token = args[:page_token] if args.key?(:page_token) - @read_group_ids = args[:read_group_ids] if args.key?(:read_group_ids) - @read_group_set_ids = args[:read_group_set_ids] if args.key?(:read_group_set_ids) - @reference_name = args[:reference_name] if args.key?(:reference_name) - @start = args[:start] if args.key?(:start) - end - end - - # The read search response. - class SearchReadsResponse - include Google::Apis::Core::Hashable - - # The list of matching alignments sorted by mapped genomic coordinate, - # if any, ascending in position within the same reference. Unmapped reads, - # which have no position, are returned contiguously and are sorted in - # ascending lexicographic order by fragment name. - # Corresponds to the JSON property `alignments` - # @return [Array] - attr_accessor :alignments - - # The continuation token, which is used to page through large result sets. 
- # Provide this value in a subsequent request to return the next page of - # results. This field will be empty if there aren't any additional results. - # Corresponds to the JSON property `nextPageToken` - # @return [String] - attr_accessor :next_page_token - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @alignments = args[:alignments] if args.key?(:alignments) - @next_page_token = args[:next_page_token] if args.key?(:next_page_token) - end - end - - # - class SearchReferenceSetsRequest - include Google::Apis::Core::Hashable - - # If present, return reference sets for which a prefix of any of - # sourceAccessions - # match any of these strings. Accession numbers typically have a main number - # and a version, for example `NC_000001.11`. - # Corresponds to the JSON property `accessions` - # @return [Array] - attr_accessor :accessions - - # If present, return reference sets for which a substring of their - # `assemblyId` matches this string (case insensitive). - # Corresponds to the JSON property `assemblyId` - # @return [String] - attr_accessor :assembly_id - - # If present, return reference sets for which the - # md5checksum matches exactly. - # Corresponds to the JSON property `md5checksums` - # @return [Array] - attr_accessor :md5checksums - - # The maximum number of results to return in a single page. If unspecified, - # defaults to 1024. The maximum value is 4096. - # Corresponds to the JSON property `pageSize` - # @return [Fixnum] - attr_accessor :page_size - - # The continuation token, which is used to page through large result sets. - # To get the next page of results, set this parameter to the value of - # `nextPageToken` from the previous response. - # Corresponds to the JSON property `pageToken` - # @return [String] - attr_accessor :page_token - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @accessions = args[:accessions] if args.key?(:accessions) - @assembly_id = args[:assembly_id] if args.key?(:assembly_id) - @md5checksums = args[:md5checksums] if args.key?(:md5checksums) - @page_size = args[:page_size] if args.key?(:page_size) - @page_token = args[:page_token] if args.key?(:page_token) - end - end - - # - class SearchReferenceSetsResponse - include Google::Apis::Core::Hashable - - # The continuation token, which is used to page through large result sets. - # Provide this value in a subsequent request to return the next page of - # results. This field will be empty if there aren't any additional results. - # Corresponds to the JSON property `nextPageToken` - # @return [String] - attr_accessor :next_page_token - - # The matching references sets. - # Corresponds to the JSON property `referenceSets` - # @return [Array] - attr_accessor :reference_sets - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @next_page_token = args[:next_page_token] if args.key?(:next_page_token) - @reference_sets = args[:reference_sets] if args.key?(:reference_sets) - end - end - - # - class SearchReferencesRequest - include Google::Apis::Core::Hashable - - # If present, return references for which a prefix of any of - # sourceAccessions match - # any of these strings. Accession numbers typically have a main number and a - # version, for example `GCF_000001405.26`. 
- # Corresponds to the JSON property `accessions` - # @return [Array] - attr_accessor :accessions - - # If present, return references for which the - # md5checksum matches exactly. - # Corresponds to the JSON property `md5checksums` - # @return [Array] - attr_accessor :md5checksums - - # The maximum number of results to return in a single page. If unspecified, - # defaults to 1024. The maximum value is 4096. - # Corresponds to the JSON property `pageSize` - # @return [Fixnum] - attr_accessor :page_size - - # The continuation token, which is used to page through large result sets. - # To get the next page of results, set this parameter to the value of - # `nextPageToken` from the previous response. - # Corresponds to the JSON property `pageToken` - # @return [String] - attr_accessor :page_token - - # If present, return only references which belong to this reference set. - # Corresponds to the JSON property `referenceSetId` - # @return [String] - attr_accessor :reference_set_id - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @accessions = args[:accessions] if args.key?(:accessions) - @md5checksums = args[:md5checksums] if args.key?(:md5checksums) - @page_size = args[:page_size] if args.key?(:page_size) - @page_token = args[:page_token] if args.key?(:page_token) - @reference_set_id = args[:reference_set_id] if args.key?(:reference_set_id) - end - end - - # - class SearchReferencesResponse - include Google::Apis::Core::Hashable - - # The continuation token, which is used to page through large result sets. - # Provide this value in a subsequent request to return the next page of - # results. This field will be empty if there aren't any additional results. - # Corresponds to the JSON property `nextPageToken` - # @return [String] - attr_accessor :next_page_token - - # The matching references. - # Corresponds to the JSON property `references` - # @return [Array] - attr_accessor :references - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @next_page_token = args[:next_page_token] if args.key?(:next_page_token) - @references = args[:references] if args.key?(:references) - end - end - - # The search variant sets request. - class SearchVariantSetsRequest - include Google::Apis::Core::Hashable - - # Exactly one dataset ID must be provided here. Only variant sets which - # belong to this dataset will be returned. - # Corresponds to the JSON property `datasetIds` - # @return [Array] - attr_accessor :dataset_ids - - # The maximum number of results to return in a single page. If unspecified, - # defaults to 1024. - # Corresponds to the JSON property `pageSize` - # @return [Fixnum] - attr_accessor :page_size - - # The continuation token, which is used to page through large result sets. - # To get the next page of results, set this parameter to the value of - # `nextPageToken` from the previous response. - # Corresponds to the JSON property `pageToken` - # @return [String] - attr_accessor :page_token - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @dataset_ids = args[:dataset_ids] if args.key?(:dataset_ids) - @page_size = args[:page_size] if args.key?(:page_size) - @page_token = args[:page_token] if args.key?(:page_token) - end - end - - # The search variant sets response. 
- class SearchVariantSetsResponse - include Google::Apis::Core::Hashable - - # The continuation token, which is used to page through large result sets. - # Provide this value in a subsequent request to return the next page of - # results. This field will be empty if there aren't any additional results. - # Corresponds to the JSON property `nextPageToken` - # @return [String] - attr_accessor :next_page_token - - # The variant sets belonging to the requested dataset. - # Corresponds to the JSON property `variantSets` - # @return [Array] - attr_accessor :variant_sets - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @next_page_token = args[:next_page_token] if args.key?(:next_page_token) - @variant_sets = args[:variant_sets] if args.key?(:variant_sets) - end - end - - # The variant search request. - class SearchVariantsRequest - include Google::Apis::Core::Hashable - - # Only return variant calls which belong to call sets with these ids. - # Leaving this blank returns all variant calls. If a variant has no - # calls belonging to any of these call sets, it won't be returned at all. - # Corresponds to the JSON property `callSetIds` - # @return [Array] - attr_accessor :call_set_ids - - # The end of the window, 0-based exclusive. If unspecified or 0, defaults to - # the length of the reference. - # Corresponds to the JSON property `end` - # @return [Fixnum] - attr_accessor :end - - # The maximum number of calls to return in a single page. Note that this - # limit may be exceeded in the event that a matching variant contains more - # calls than the requested maximum. If unspecified, defaults to 5000. The - # maximum value is 10000. - # Corresponds to the JSON property `maxCalls` - # @return [Fixnum] - attr_accessor :max_calls - - # The maximum number of variants to return in a single page. If unspecified, - # defaults to 5000. The maximum value is 10000. - # Corresponds to the JSON property `pageSize` - # @return [Fixnum] - attr_accessor :page_size - - # The continuation token, which is used to page through large result sets. - # To get the next page of results, set this parameter to the value of - # `nextPageToken` from the previous response. - # Corresponds to the JSON property `pageToken` - # @return [String] - attr_accessor :page_token - - # Required. Only return variants in this reference sequence. - # Corresponds to the JSON property `referenceName` - # @return [String] - attr_accessor :reference_name - - # The beginning of the window (0-based, inclusive) for which - # overlapping variants should be returned. If unspecified, defaults to 0. - # Corresponds to the JSON property `start` - # @return [Fixnum] - attr_accessor :start - - # Only return variants which have exactly this name. - # Corresponds to the JSON property `variantName` - # @return [String] - attr_accessor :variant_name - - # At most one variant set ID must be provided. Only variants from this - # variant set will be returned. If omitted, a call set id must be included in - # the request. 
- # Corresponds to the JSON property `variantSetIds` - # @return [Array] - attr_accessor :variant_set_ids - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @call_set_ids = args[:call_set_ids] if args.key?(:call_set_ids) - @end = args[:end] if args.key?(:end) - @max_calls = args[:max_calls] if args.key?(:max_calls) - @page_size = args[:page_size] if args.key?(:page_size) - @page_token = args[:page_token] if args.key?(:page_token) - @reference_name = args[:reference_name] if args.key?(:reference_name) - @start = args[:start] if args.key?(:start) - @variant_name = args[:variant_name] if args.key?(:variant_name) - @variant_set_ids = args[:variant_set_ids] if args.key?(:variant_set_ids) - end - end - - # The variant search response. - class SearchVariantsResponse - include Google::Apis::Core::Hashable - - # The continuation token, which is used to page through large result sets. - # Provide this value in a subsequent request to return the next page of - # results. This field will be empty if there aren't any additional results. - # Corresponds to the JSON property `nextPageToken` - # @return [String] - attr_accessor :next_page_token - - # The list of matching Variants. - # Corresponds to the JSON property `variants` - # @return [Array] - attr_accessor :variants - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @next_page_token = args[:next_page_token] if args.key?(:next_page_token) - @variants = args[:variants] if args.key?(:variants) - end - end - - # Request message for `SetIamPolicy` method. - class SetIamPolicyRequest - include Google::Apis::Core::Hashable - - # Defines an Identity and Access Management (IAM) policy. It is used to - # specify access control policies for Cloud Platform resources. - # A `Policy` consists of a list of `bindings`. A `binding` binds a list of - # `members` to a `role`, where the members can be user accounts, Google groups, - # Google domains, and service accounts. A `role` is a named list of permissions - # defined by IAM. - # **JSON Example** - # ` - # "bindings": [ - # ` - # "role": "roles/owner", - # "members": [ - # "user:mike@example.com", - # "group:admins@example.com", - # "domain:google.com", - # "serviceAccount:my-other-app@appspot.gserviceaccount.com" - # ] - # `, - # ` - # "role": "roles/viewer", - # "members": ["user:sean@example.com"] - # ` - # ] - # ` - # **YAML Example** - # bindings: - # - members: - # - user:mike@example.com - # - group:admins@example.com - # - domain:google.com - # - serviceAccount:my-other-app@appspot.gserviceaccount.com - # role: roles/owner - # - members: - # - user:sean@example.com - # role: roles/viewer - # For a description of IAM and its features, see the - # [IAM developer's guide](https://cloud.google.com/iam/docs). - # Corresponds to the JSON property `policy` - # @return [Google::Apis::GenomicsV1::Policy] - attr_accessor :policy - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @policy = args[:policy] if args.key?(:policy) - end - end - # The `Status` type defines a logical error model that is suitable for # different programming environments, including REST APIs and RPC APIs. It is # used by [gRPC](https://github.com/grpc). The error model is designed to be: @@ -3285,120 +680,6 @@ module Google end end - # Request message for `TestIamPermissions` method. 
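# [Editor's illustration, not part of the generated patch] A hedged sketch of the
# pageToken/nextPageToken paging pattern documented on SearchVariantsRequest and
# SearchVariantsResponse above. It assumes the pre-removal generated service
# exposed GenomicsService#search_variants taking the request object (this patch
# removes the variants surface, so the sketch targets an older client release);
# the variant set ID and credentials are placeholders.
require 'google/apis/genomics_v1'

genomics = Google::Apis::GenomicsV1::GenomicsService.new
# genomics.authorization = <credentials>  # required before making real calls

request = Google::Apis::GenomicsV1::SearchVariantsRequest.new(
  variant_set_ids: ['VARIANT_SET_ID'], # placeholder
  reference_name:  '17',
  start:           41_196_311,
  end:             41_277_499,
  page_size:       1_000
)

loop do
  response = genomics.search_variants(request)
  (response.variants || []).each do |v|
    puts "#{v.reference_name}:#{v.start} #{v.reference_bases}>#{(v.alternate_bases || []).join(',')}"
  end
  token = response.next_page_token
  break if token.nil? || token.empty?
  request.page_token = token # feed nextPageToken back in as pageToken, per the docs above
end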
- class TestIamPermissionsRequest - include Google::Apis::Core::Hashable - - # REQUIRED: The set of permissions to check for the 'resource'. - # Permissions with wildcards (such as '*' or 'storage.*') are not allowed. - # Allowed permissions are: - # * `genomics.datasets.create` - # * `genomics.datasets.delete` - # * `genomics.datasets.get` - # * `genomics.datasets.list` - # * `genomics.datasets.update` - # * `genomics.datasets.getIamPolicy` - # * `genomics.datasets.setIamPolicy` - # Corresponds to the JSON property `permissions` - # @return [Array] - attr_accessor :permissions - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @permissions = args[:permissions] if args.key?(:permissions) - end - end - - # Response message for `TestIamPermissions` method. - class TestIamPermissionsResponse - include Google::Apis::Core::Hashable - - # A subset of `TestPermissionsRequest.permissions` that the caller is - # allowed. - # Corresponds to the JSON property `permissions` - # @return [Array] - attr_accessor :permissions - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @permissions = args[:permissions] if args.key?(:permissions) - end - end - - # A transcript represents the assertion that a particular region of the - # reference genome may be transcribed as RNA. - class Transcript - include Google::Apis::Core::Hashable - - # The range of the coding sequence for this transcript, if any. To determine - # the exact ranges of coding sequence, intersect this range with those of the - # exons, if any. If there are any - # exons, the - # codingSequence must start - # and end within them. - # Note that in some cases, the reference genome will not exactly match the - # observed mRNA transcript e.g. due to variance in the source genome from - # reference. In these cases, - # exon.frame will not necessarily - # match the expected reference reading frame and coding exon reference bases - # cannot necessarily be concatenated to produce the original transcript mRNA. - # Corresponds to the JSON property `codingSequence` - # @return [Google::Apis::GenomicsV1::CodingSequence] - attr_accessor :coding_sequence - - # The exons that compose - # this transcript. This field should be unset for genomes where transcript - # splicing does not occur, for example prokaryotes. - # Introns are regions of the transcript that are not included in the - # spliced RNA product. Though not explicitly modeled here, intron ranges can - # be deduced; all regions of this transcript that are not exons are introns. - # Exonic sequences do not necessarily code for a translational product - # (amino acids). Only the regions of exons bounded by the - # codingSequence correspond - # to coding DNA sequence. - # Exons are ordered by start position and may not overlap. - # Corresponds to the JSON property `exons` - # @return [Array] - attr_accessor :exons - - # The annotation ID of the gene from which this transcript is transcribed. 
- # Corresponds to the JSON property `geneId` - # @return [String] - attr_accessor :gene_id - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @coding_sequence = args[:coding_sequence] if args.key?(:coding_sequence) - @exons = args[:exons] if args.key?(:exons) - @gene_id = args[:gene_id] if args.key?(:gene_id) - end - end - - # - class UndeleteDatasetRequest - include Google::Apis::Core::Hashable - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - end - end - # An event generated when the execution of a container results in a # non-zero exit status that was not otherwise ignored. Execution will # continue, but only actions that are flagged as `ALWAYS_RUN` will be @@ -3427,378 +708,6 @@ module Google end end - # A variant represents a change in DNA sequence relative to a reference - # sequence. For example, a variant could represent a SNP or an insertion. - # Variants belong to a variant set. - # Each of the calls on a variant represent a determination of genotype with - # respect to that variant. For example, a call might assign probability of 0.32 - # to the occurrence of a SNP named rs1234 in a sample named NA12345. A call - # belongs to a call set, which contains related calls typically from one - # sample. - class Variant - include Google::Apis::Core::Hashable - - # The bases that appear instead of the reference bases. - # Corresponds to the JSON property `alternateBases` - # @return [Array] - attr_accessor :alternate_bases - - # The variant calls for this particular variant. Each one represents the - # determination of genotype with respect to this variant. - # Corresponds to the JSON property `calls` - # @return [Array] - attr_accessor :calls - - # The date this variant was created, in milliseconds from the epoch. - # Corresponds to the JSON property `created` - # @return [Fixnum] - attr_accessor :created - - # The end position (0-based) of this variant. This corresponds to the first - # base after the last base in the reference allele. So, the length of - # the reference allele is (end - start). This is useful for variants - # that don't explicitly give alternate bases, for example large deletions. - # Corresponds to the JSON property `end` - # @return [Fixnum] - attr_accessor :end - - # A list of filters (normally quality filters) this variant has failed. - # `PASS` indicates this variant has passed all filters. - # Corresponds to the JSON property `filter` - # @return [Array] - attr_accessor :filter - - # The server-generated variant ID, unique across all variants. - # Corresponds to the JSON property `id` - # @return [String] - attr_accessor :id - - # A map of additional variant information. This must be of the form - # map (string key mapping to a list of string values). - # Corresponds to the JSON property `info` - # @return [Hash>] - attr_accessor :info - - # Names for the variant, for example a RefSNP ID. - # Corresponds to the JSON property `names` - # @return [Array] - attr_accessor :names - - # A measure of how likely this variant is to be real. - # A higher value is better. - # Corresponds to the JSON property `quality` - # @return [Float] - attr_accessor :quality - - # The reference bases for this variant. They start at the given - # position. - # Corresponds to the JSON property `referenceBases` - # @return [String] - attr_accessor :reference_bases - - # The reference on which this variant occurs. 
- # (such as `chr20` or `X`) - # Corresponds to the JSON property `referenceName` - # @return [String] - attr_accessor :reference_name - - # The position at which this variant occurs (0-based). - # This corresponds to the first base of the string of reference bases. - # Corresponds to the JSON property `start` - # @return [Fixnum] - attr_accessor :start - - # The ID of the variant set this variant belongs to. - # Corresponds to the JSON property `variantSetId` - # @return [String] - attr_accessor :variant_set_id - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @alternate_bases = args[:alternate_bases] if args.key?(:alternate_bases) - @calls = args[:calls] if args.key?(:calls) - @created = args[:created] if args.key?(:created) - @end = args[:end] if args.key?(:end) - @filter = args[:filter] if args.key?(:filter) - @id = args[:id] if args.key?(:id) - @info = args[:info] if args.key?(:info) - @names = args[:names] if args.key?(:names) - @quality = args[:quality] if args.key?(:quality) - @reference_bases = args[:reference_bases] if args.key?(:reference_bases) - @reference_name = args[:reference_name] if args.key?(:reference_name) - @start = args[:start] if args.key?(:start) - @variant_set_id = args[:variant_set_id] if args.key?(:variant_set_id) - end - end - - # - class VariantAnnotation - include Google::Apis::Core::Hashable - - # The alternate allele for this variant. If multiple alternate alleles - # exist at this location, create a separate variant for each one, as they - # may represent distinct conditions. - # Corresponds to the JSON property `alternateBases` - # @return [String] - attr_accessor :alternate_bases - - # Describes the clinical significance of a variant. - # It is adapted from the ClinVar controlled vocabulary for clinical - # significance described at: - # http://www.ncbi.nlm.nih.gov/clinvar/docs/clinsig/ - # Corresponds to the JSON property `clinicalSignificance` - # @return [String] - attr_accessor :clinical_significance - - # The set of conditions associated with this variant. - # A condition describes the way a variant influences human health. - # Corresponds to the JSON property `conditions` - # @return [Array] - attr_accessor :conditions - - # Effect of the variant on the coding sequence. - # Corresponds to the JSON property `effect` - # @return [String] - attr_accessor :effect - - # Google annotation ID of the gene affected by this variant. This should - # be provided when the variant is created. - # Corresponds to the JSON property `geneId` - # @return [String] - attr_accessor :gene_id - - # Google annotation IDs of the transcripts affected by this variant. These - # should be provided when the variant is created. - # Corresponds to the JSON property `transcriptIds` - # @return [Array] - attr_accessor :transcript_ids - - # Type has been adapted from ClinVar's list of variant types. 
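# [Editor's illustration, not part of the generated patch] A small sketch of the
# coordinate convention documented on Variant above: `start` is 0-based and `end`
# points at the first base after the reference allele, so the reference allele
# length is simply (end - start). Field values are made up, and the sketch uses
# the Variant class this patch removes, so it only runs against an older
# google-api-client release.
require 'google/apis/genomics_v1'

snp = Google::Apis::GenomicsV1::Variant.new(
  reference_name:  '20',
  start:           61_790,  # 0-based position of the first reference base
  end:             61_791,  # first base after the reference allele
  reference_bases: 'A',
  alternate_bases: ['G']
)

ref_allele_length = snp.end - snp.start
puts ref_allele_length == snp.reference_bases.length # => true: end - start equals the allele length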
- # Corresponds to the JSON property `type` - # @return [String] - attr_accessor :type - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @alternate_bases = args[:alternate_bases] if args.key?(:alternate_bases) - @clinical_significance = args[:clinical_significance] if args.key?(:clinical_significance) - @conditions = args[:conditions] if args.key?(:conditions) - @effect = args[:effect] if args.key?(:effect) - @gene_id = args[:gene_id] if args.key?(:gene_id) - @transcript_ids = args[:transcript_ids] if args.key?(:transcript_ids) - @type = args[:type] if args.key?(:type) - end - end - - # A call represents the determination of genotype with respect to a particular - # variant. It may include associated information such as quality and phasing. - # For example, a call might assign a probability of 0.32 to the occurrence of - # a SNP named rs1234 in a call set with the name NA12345. - class VariantCall - include Google::Apis::Core::Hashable - - # The ID of the call set this variant call belongs to. - # Corresponds to the JSON property `callSetId` - # @return [String] - attr_accessor :call_set_id - - # The name of the call set this variant call belongs to. - # Corresponds to the JSON property `callSetName` - # @return [String] - attr_accessor :call_set_name - - # The genotype of this variant call. Each value represents either the value - # of the `referenceBases` field or a 1-based index into - # `alternateBases`. If a variant had a `referenceBases` - # value of `T` and an `alternateBases` - # value of `["A", "C"]`, and the `genotype` was - # `[2, 1]`, that would mean the call - # represented the heterozygous value `CA` for this variant. - # If the `genotype` was instead `[0, 1]`, the - # represented value would be `TA`. Ordering of the - # genotype values is important if the `phaseset` is present. - # If a genotype is not called (that is, a `.` is present in the - # GT string) -1 is returned. - # Corresponds to the JSON property `genotype` - # @return [Array] - attr_accessor :genotype - - # The genotype likelihoods for this variant call. Each array entry - # represents how likely a specific genotype is for this call. The value - # ordering is defined by the GL tag in the VCF spec. - # If Phred-scaled genotype likelihood scores (PL) are available and - # log10(P) genotype likelihood scores (GL) are not, PL scores are converted - # to GL scores. If both are available, PL scores are stored in `info`. - # Corresponds to the JSON property `genotypeLikelihood` - # @return [Array] - attr_accessor :genotype_likelihood - - # A map of additional variant call information. This must be of the form - # map (string key mapping to a list of string values). - # Corresponds to the JSON property `info` - # @return [Hash>] - attr_accessor :info - - # If this field is present, this variant call's genotype ordering implies - # the phase of the bases and is consistent with any other variant calls in - # the same reference sequence which have the same phaseset value. - # When importing data from VCF, if the genotype data was phased but no - # phase set was specified this field will be set to `*`. 
- # Corresponds to the JSON property `phaseset` - # @return [String] - attr_accessor :phaseset - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @call_set_id = args[:call_set_id] if args.key?(:call_set_id) - @call_set_name = args[:call_set_name] if args.key?(:call_set_name) - @genotype = args[:genotype] if args.key?(:genotype) - @genotype_likelihood = args[:genotype_likelihood] if args.key?(:genotype_likelihood) - @info = args[:info] if args.key?(:info) - @phaseset = args[:phaseset] if args.key?(:phaseset) - end - end - - # A variant set is a collection of call sets and variants. It contains summary - # statistics of those contents. A variant set belongs to a dataset. - class VariantSet - include Google::Apis::Core::Hashable - - # The dataset to which this variant set belongs. - # Corresponds to the JSON property `datasetId` - # @return [String] - attr_accessor :dataset_id - - # A textual description of this variant set. - # Corresponds to the JSON property `description` - # @return [String] - attr_accessor :description - - # The server-generated variant set ID, unique across all variant sets. - # Corresponds to the JSON property `id` - # @return [String] - attr_accessor :id - - # The metadata associated with this variant set. - # Corresponds to the JSON property `metadata` - # @return [Array] - attr_accessor :metadata - - # User-specified, mutable name. - # Corresponds to the JSON property `name` - # @return [String] - attr_accessor :name - - # A list of all references used by the variants in a variant set - # with associated coordinate upper bounds for each one. - # Corresponds to the JSON property `referenceBounds` - # @return [Array] - attr_accessor :reference_bounds - - # The reference set to which the variant set is mapped. The reference set - # describes the alignment provenance of the variant set, while the - # `referenceBounds` describe the shape of the actual variant data. The - # reference set's reference names are a superset of those found in the - # `referenceBounds`. - # For example, given a variant set that is mapped to the GRCh38 reference set - # and contains a single variant on reference 'X', `referenceBounds` would - # contain only an entry for 'X', while the associated reference set - # enumerates all possible references: '1', '2', 'X', 'Y', 'MT', etc. - # Corresponds to the JSON property `referenceSetId` - # @return [String] - attr_accessor :reference_set_id - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @dataset_id = args[:dataset_id] if args.key?(:dataset_id) - @description = args[:description] if args.key?(:description) - @id = args[:id] if args.key?(:id) - @metadata = args[:metadata] if args.key?(:metadata) - @name = args[:name] if args.key?(:name) - @reference_bounds = args[:reference_bounds] if args.key?(:reference_bounds) - @reference_set_id = args[:reference_set_id] if args.key?(:reference_set_id) - end - end - - # Metadata describes a single piece of variant call metadata. - # These data include a top level key and either a single value string (value) - # or a list of key-value pairs (info.) - # Value and info are mutually exclusive. - class VariantSetMetadata - include Google::Apis::Core::Hashable - - # A textual description of this metadata. - # Corresponds to the JSON property `description` - # @return [String] - attr_accessor :description - - # User-provided ID field, not enforced by this API. 
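# [Editor's illustration, not part of the generated patch] A plain-Ruby sketch of
# the genotype encoding documented on VariantCall above: each genotype value is
# either 0 (the referenceBases allele), a 1-based index into alternateBases, or
# -1 for an uncalled genotype. The alleles mirror the example in that comment.
reference_bases = 'T'
alternate_bases = ['A', 'C']
alleles = [reference_bases] + alternate_bases # index 0 = reference, 1.. = alternates

decode_genotype = lambda do |genotype|
  genotype.map { |g| g == -1 ? '.' : alleles[g] }.join
end

puts decode_genotype.call([2, 1])  # => "CA" (heterozygous, as described in the docs)
puts decode_genotype.call([0, 1])  # => "TA"
puts decode_genotype.call([-1, 0]) # => ".T" (-1 marks a genotype that was not called)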
- # Two or more pieces of structured metadata with identical - # id and key fields are considered equivalent. - # Corresponds to the JSON property `id` - # @return [String] - attr_accessor :id - - # Remaining structured metadata key-value pairs. This must be of the form - # map (string key mapping to a list of string values). - # Corresponds to the JSON property `info` - # @return [Hash>] - attr_accessor :info - - # The top-level key. - # Corresponds to the JSON property `key` - # @return [String] - attr_accessor :key - - # The number of values that can be included in a field described by this - # metadata. - # Corresponds to the JSON property `number` - # @return [String] - attr_accessor :number - - # The type of data. Possible types include: Integer, Float, - # Flag, Character, and String. - # Corresponds to the JSON property `type` - # @return [String] - attr_accessor :type - - # The value field for simple metadata - # Corresponds to the JSON property `value` - # @return [String] - attr_accessor :value - - def initialize(**args) - update!(**args) - end - - # Update properties of this object - def update!(**args) - @description = args[:description] if args.key?(:description) - @id = args[:id] if args.key?(:id) - @info = args[:info] if args.key?(:info) - @key = args[:key] if args.key?(:key) - @number = args[:number] if args.key?(:number) - @type = args[:type] if args.key?(:type) - @value = args[:value] if args.key?(:value) - end - end - # An event generated after a worker VM has been assigned to run the # pipeline. class WorkerAssignedEvent diff --git a/generated/google/apis/genomics_v1/representations.rb b/generated/google/apis/genomics_v1/representations.rb index 7eb50af79..634c5b2ce 100644 --- a/generated/google/apis/genomics_v1/representations.rb +++ b/generated/google/apis/genomics_v1/representations.rb @@ -22,66 +22,12 @@ module Google module Apis module GenomicsV1 - class Annotation - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class AnnotationSet - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class BatchCreateAnnotationsRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class BatchCreateAnnotationsResponse - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class Binding - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class CallSet - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - class CancelOperationRequest class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class CigarUnit - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class ClinicalCondition - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class CodingSequence - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - class ComputeEngine class Representation < Google::Apis::Core::JsonRepresentation; end @@ -106,18 +52,6 @@ module Google include 
Google::Apis::Core::JsonObjectSupport end - class CoverageBucket - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class Dataset - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - class DelayedEvent class Representation < Google::Apis::Core::JsonRepresentation; end @@ -130,126 +64,36 @@ module Google include Google::Apis::Core::JsonObjectSupport end - class Entry - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - class Event class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class Exon - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class Experiment - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class ExportReadGroupSetRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class ExportVariantSetRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class Expr - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class ExternalId - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - class FailedEvent class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GetIamPolicyRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class ImportReadGroupSetsRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - class ImportReadGroupSetsResponse class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class ImportVariantsRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - class ImportVariantsResponse class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class LinearAlignment - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class ListBasesResponse - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class ListCoverageBucketsResponse - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class ListDatasetsResponse - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - class ListOperationsResponse class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class MergeVariantsRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - class Operation class Representation < 
Google::Apis::Core::JsonRepresentation; end @@ -268,24 +112,6 @@ module Google include Google::Apis::Core::JsonObjectSupport end - class Policy - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class Position - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class Program - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - class PullStartedEvent class Representation < Google::Apis::Core::JsonRepresentation; end @@ -298,48 +124,6 @@ module Google include Google::Apis::Core::JsonObjectSupport end - class Range - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class Read - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class ReadGroup - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class ReadGroupSet - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class Reference - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class ReferenceBound - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class ReferenceSet - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - class RunPipelineResponse class Representation < Google::Apis::Core::JsonRepresentation; end @@ -352,186 +136,18 @@ module Google include Google::Apis::Core::JsonObjectSupport end - class SearchAnnotationSetsRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchAnnotationSetsResponse - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchAnnotationsRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchAnnotationsResponse - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchCallSetsRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchCallSetsResponse - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchReadGroupSetsRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchReadGroupSetsResponse - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchReadsRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchReadsResponse - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class 
SearchReferenceSetsRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchReferenceSetsResponse - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchReferencesRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchReferencesResponse - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchVariantSetsRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchVariantSetsResponse - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchVariantsRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SearchVariantsResponse - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class SetIamPolicyRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - class Status class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class TestIamPermissionsRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class TestIamPermissionsResponse - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class Transcript - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class UndeleteDatasetRequest - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - class UnexpectedExitStatusEvent class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class Variant - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class VariantAnnotation - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class VariantCall - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class VariantSet - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - - class VariantSetMetadata - class Representation < Google::Apis::Core::JsonRepresentation; end - - include Google::Apis::Core::JsonObjectSupport - end - class WorkerAssignedEvent class Representation < Google::Apis::Core::JsonRepresentation; end @@ -544,124 +160,12 @@ module Google include Google::Apis::Core::JsonObjectSupport end - class Annotation - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :annotation_set_id, as: 'annotationSetId' - property :end, :numeric_string => true, as: 'end' - property :id, as: 'id' - hash :info, as: 'info', :class => Array do - include 
Representable::JSON::Collection - items - end - - property :name, as: 'name' - property :reference_id, as: 'referenceId' - property :reference_name, as: 'referenceName' - property :reverse_strand, as: 'reverseStrand' - property :start, :numeric_string => true, as: 'start' - property :transcript, as: 'transcript', class: Google::Apis::GenomicsV1::Transcript, decorator: Google::Apis::GenomicsV1::Transcript::Representation - - property :type, as: 'type' - property :variant, as: 'variant', class: Google::Apis::GenomicsV1::VariantAnnotation, decorator: Google::Apis::GenomicsV1::VariantAnnotation::Representation - - end - end - - class AnnotationSet - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :dataset_id, as: 'datasetId' - property :id, as: 'id' - hash :info, as: 'info', :class => Array do - include Representable::JSON::Collection - items - end - - property :name, as: 'name' - property :reference_set_id, as: 'referenceSetId' - property :source_uri, as: 'sourceUri' - property :type, as: 'type' - end - end - - class BatchCreateAnnotationsRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :annotations, as: 'annotations', class: Google::Apis::GenomicsV1::Annotation, decorator: Google::Apis::GenomicsV1::Annotation::Representation - - property :request_id, as: 'requestId' - end - end - - class BatchCreateAnnotationsResponse - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :entries, as: 'entries', class: Google::Apis::GenomicsV1::Entry, decorator: Google::Apis::GenomicsV1::Entry::Representation - - end - end - - class Binding - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :condition, as: 'condition', class: Google::Apis::GenomicsV1::Expr, decorator: Google::Apis::GenomicsV1::Expr::Representation - - collection :members, as: 'members' - property :role, as: 'role' - end - end - - class CallSet - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :created, :numeric_string => true, as: 'created' - property :id, as: 'id' - hash :info, as: 'info', :class => Array do - include Representable::JSON::Collection - items - end - - property :name, as: 'name' - property :sample_id, as: 'sampleId' - collection :variant_set_ids, as: 'variantSetIds' - end - end - class CancelOperationRequest # @private class Representation < Google::Apis::Core::JsonRepresentation end end - class CigarUnit - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :operation, as: 'operation' - property :operation_length, :numeric_string => true, as: 'operationLength' - property :reference_sequence, as: 'referenceSequence' - end - end - - class ClinicalCondition - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :concept_id, as: 'conceptId' - collection :external_ids, as: 'externalIds', class: Google::Apis::GenomicsV1::ExternalId, decorator: Google::Apis::GenomicsV1::ExternalId::Representation - - collection :names, as: 'names' - property :omim_id, as: 'omimId' - end - end - - class CodingSequence - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :end, :numeric_string => true, as: 'end' - property :start, :numeric_string => true, as: 'start' - end - end - class ComputeEngine # @private class Representation < Google::Apis::Core::JsonRepresentation @@ -697,25 +201,6 @@ module Google end end - class CoverageBucket - # @private - class 
Representation < Google::Apis::Core::JsonRepresentation - property :mean_coverage, as: 'meanCoverage' - property :range, as: 'range', class: Google::Apis::GenomicsV1::Range, decorator: Google::Apis::GenomicsV1::Range::Representation - - end - end - - class Dataset - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :create_time, as: 'createTime' - property :id, as: 'id' - property :name, as: 'name' - property :project_id, as: 'projectId' - end - end - class DelayedEvent # @private class Representation < Google::Apis::Core::JsonRepresentation @@ -730,16 +215,6 @@ module Google end end - class Entry - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :annotation, as: 'annotation', class: Google::Apis::GenomicsV1::Annotation, decorator: Google::Apis::GenomicsV1::Annotation::Representation - - property :status, as: 'status', class: Google::Apis::GenomicsV1::Status, decorator: Google::Apis::GenomicsV1::Status::Representation - - end - end - class Event # @private class Representation < Google::Apis::Core::JsonRepresentation @@ -749,63 +224,6 @@ module Google end end - class Exon - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :end, :numeric_string => true, as: 'end' - property :frame, as: 'frame' - property :start, :numeric_string => true, as: 'start' - end - end - - class Experiment - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :instrument_model, as: 'instrumentModel' - property :library_id, as: 'libraryId' - property :platform_unit, as: 'platformUnit' - property :sequencing_center, as: 'sequencingCenter' - end - end - - class ExportReadGroupSetRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :export_uri, as: 'exportUri' - property :project_id, as: 'projectId' - collection :reference_names, as: 'referenceNames' - end - end - - class ExportVariantSetRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :bigquery_dataset, as: 'bigqueryDataset' - property :bigquery_table, as: 'bigqueryTable' - collection :call_set_ids, as: 'callSetIds' - property :format, as: 'format' - property :project_id, as: 'projectId' - end - end - - class Expr - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :description, as: 'description' - property :expression, as: 'expression' - property :location, as: 'location' - property :title, as: 'title' - end - end - - class ExternalId - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :id, as: 'id' - property :source_name, as: 'sourceName' - end - end - class FailedEvent # @private class Representation < Google::Apis::Core::JsonRepresentation @@ -814,22 +232,6 @@ module Google end end - class GetIamPolicyRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - end - end - - class ImportReadGroupSetsRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :dataset_id, as: 'datasetId' - property :partition_strategy, as: 'partitionStrategy' - property :reference_set_id, as: 'referenceSetId' - collection :source_uris, as: 'sourceUris' - end - end - class ImportReadGroupSetsResponse # @private class Representation < Google::Apis::Core::JsonRepresentation @@ -837,17 +239,6 @@ module Google end end - class ImportVariantsRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - 
property :format, as: 'format' - hash :info_merge_config, as: 'infoMergeConfig' - property :normalize_reference_names, as: 'normalizeReferenceNames' - collection :source_uris, as: 'sourceUris' - property :variant_set_id, as: 'variantSetId' - end - end - class ImportVariantsResponse # @private class Representation < Google::Apis::Core::JsonRepresentation @@ -855,45 +246,6 @@ module Google end end - class LinearAlignment - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :cigar, as: 'cigar', class: Google::Apis::GenomicsV1::CigarUnit, decorator: Google::Apis::GenomicsV1::CigarUnit::Representation - - property :mapping_quality, as: 'mappingQuality' - property :position, as: 'position', class: Google::Apis::GenomicsV1::Position, decorator: Google::Apis::GenomicsV1::Position::Representation - - end - end - - class ListBasesResponse - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :next_page_token, as: 'nextPageToken' - property :offset, :numeric_string => true, as: 'offset' - property :sequence, as: 'sequence' - end - end - - class ListCoverageBucketsResponse - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :bucket_width, :numeric_string => true, as: 'bucketWidth' - collection :coverage_buckets, as: 'coverageBuckets', class: Google::Apis::GenomicsV1::CoverageBucket, decorator: Google::Apis::GenomicsV1::CoverageBucket::Representation - - property :next_page_token, as: 'nextPageToken' - end - end - - class ListDatasetsResponse - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :datasets, as: 'datasets', class: Google::Apis::GenomicsV1::Dataset, decorator: Google::Apis::GenomicsV1::Dataset::Representation - - property :next_page_token, as: 'nextPageToken' - end - end - class ListOperationsResponse # @private class Representation < Google::Apis::Core::JsonRepresentation @@ -903,16 +255,6 @@ module Google end end - class MergeVariantsRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - hash :info_merge_config, as: 'infoMergeConfig' - property :variant_set_id, as: 'variantSetId' - collection :variants, as: 'variants', class: Google::Apis::GenomicsV1::Variant, decorator: Google::Apis::GenomicsV1::Variant::Representation - - end - end - class Operation # @private class Representation < Google::Apis::Core::JsonRepresentation @@ -950,36 +292,6 @@ module Google end end - class Policy - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :bindings, as: 'bindings', class: Google::Apis::GenomicsV1::Binding, decorator: Google::Apis::GenomicsV1::Binding::Representation - - property :etag, :base64 => true, as: 'etag' - property :version, as: 'version' - end - end - - class Position - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :position, :numeric_string => true, as: 'position' - property :reference_name, as: 'referenceName' - property :reverse_strand, as: 'reverseStrand' - end - end - - class Program - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :command_line, as: 'commandLine' - property :id, as: 'id' - property :name, as: 'name' - property :prev_program_id, as: 'prevProgramId' - property :version, as: 'version' - end - end - class PullStartedEvent # @private class Representation < Google::Apis::Core::JsonRepresentation @@ -994,119 +306,6 @@ module Google end end - class Range - # @private - class 
Representation < Google::Apis::Core::JsonRepresentation - property :end, :numeric_string => true, as: 'end' - property :reference_name, as: 'referenceName' - property :start, :numeric_string => true, as: 'start' - end - end - - class Read - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :aligned_quality, as: 'alignedQuality' - property :aligned_sequence, as: 'alignedSequence' - property :alignment, as: 'alignment', class: Google::Apis::GenomicsV1::LinearAlignment, decorator: Google::Apis::GenomicsV1::LinearAlignment::Representation - - property :duplicate_fragment, as: 'duplicateFragment' - property :failed_vendor_quality_checks, as: 'failedVendorQualityChecks' - property :fragment_length, as: 'fragmentLength' - property :fragment_name, as: 'fragmentName' - property :id, as: 'id' - hash :info, as: 'info', :class => Array do - include Representable::JSON::Collection - items - end - - property :next_mate_position, as: 'nextMatePosition', class: Google::Apis::GenomicsV1::Position, decorator: Google::Apis::GenomicsV1::Position::Representation - - property :number_reads, as: 'numberReads' - property :proper_placement, as: 'properPlacement' - property :read_group_id, as: 'readGroupId' - property :read_group_set_id, as: 'readGroupSetId' - property :read_number, as: 'readNumber' - property :secondary_alignment, as: 'secondaryAlignment' - property :supplementary_alignment, as: 'supplementaryAlignment' - end - end - - class ReadGroup - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :dataset_id, as: 'datasetId' - property :description, as: 'description' - property :experiment, as: 'experiment', class: Google::Apis::GenomicsV1::Experiment, decorator: Google::Apis::GenomicsV1::Experiment::Representation - - property :id, as: 'id' - hash :info, as: 'info', :class => Array do - include Representable::JSON::Collection - items - end - - property :name, as: 'name' - property :predicted_insert_size, as: 'predictedInsertSize' - collection :programs, as: 'programs', class: Google::Apis::GenomicsV1::Program, decorator: Google::Apis::GenomicsV1::Program::Representation - - property :reference_set_id, as: 'referenceSetId' - property :sample_id, as: 'sampleId' - end - end - - class ReadGroupSet - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :dataset_id, as: 'datasetId' - property :filename, as: 'filename' - property :id, as: 'id' - hash :info, as: 'info', :class => Array do - include Representable::JSON::Collection - items - end - - property :name, as: 'name' - collection :read_groups, as: 'readGroups', class: Google::Apis::GenomicsV1::ReadGroup, decorator: Google::Apis::GenomicsV1::ReadGroup::Representation - - property :reference_set_id, as: 'referenceSetId' - end - end - - class Reference - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :id, as: 'id' - property :length, :numeric_string => true, as: 'length' - property :md5checksum, as: 'md5checksum' - property :name, as: 'name' - property :ncbi_taxon_id, as: 'ncbiTaxonId' - collection :source_accessions, as: 'sourceAccessions' - property :source_uri, as: 'sourceUri' - end - end - - class ReferenceBound - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :reference_name, as: 'referenceName' - property :upper_bound, :numeric_string => true, as: 'upperBound' - end - end - - class ReferenceSet - # @private - class Representation < Google::Apis::Core::JsonRepresentation - 
property :assembly_id, as: 'assemblyId' - property :description, as: 'description' - property :id, as: 'id' - property :md5checksum, as: 'md5checksum' - property :ncbi_taxon_id, as: 'ncbiTaxonId' - collection :reference_ids, as: 'referenceIds' - collection :source_accessions, as: 'sourceAccessions' - property :source_uri, as: 'sourceUri' - end - end - class RunPipelineResponse # @private class Representation < Google::Apis::Core::JsonRepresentation @@ -1121,199 +320,6 @@ module Google end end - class SearchAnnotationSetsRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :dataset_ids, as: 'datasetIds' - property :name, as: 'name' - property :page_size, as: 'pageSize' - property :page_token, as: 'pageToken' - property :reference_set_id, as: 'referenceSetId' - collection :types, as: 'types' - end - end - - class SearchAnnotationSetsResponse - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :annotation_sets, as: 'annotationSets', class: Google::Apis::GenomicsV1::AnnotationSet, decorator: Google::Apis::GenomicsV1::AnnotationSet::Representation - - property :next_page_token, as: 'nextPageToken' - end - end - - class SearchAnnotationsRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :annotation_set_ids, as: 'annotationSetIds' - property :end, :numeric_string => true, as: 'end' - property :page_size, as: 'pageSize' - property :page_token, as: 'pageToken' - property :reference_id, as: 'referenceId' - property :reference_name, as: 'referenceName' - property :start, :numeric_string => true, as: 'start' - end - end - - class SearchAnnotationsResponse - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :annotations, as: 'annotations', class: Google::Apis::GenomicsV1::Annotation, decorator: Google::Apis::GenomicsV1::Annotation::Representation - - property :next_page_token, as: 'nextPageToken' - end - end - - class SearchCallSetsRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :name, as: 'name' - property :page_size, as: 'pageSize' - property :page_token, as: 'pageToken' - collection :variant_set_ids, as: 'variantSetIds' - end - end - - class SearchCallSetsResponse - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :call_sets, as: 'callSets', class: Google::Apis::GenomicsV1::CallSet, decorator: Google::Apis::GenomicsV1::CallSet::Representation - - property :next_page_token, as: 'nextPageToken' - end - end - - class SearchReadGroupSetsRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :dataset_ids, as: 'datasetIds' - property :name, as: 'name' - property :page_size, as: 'pageSize' - property :page_token, as: 'pageToken' - end - end - - class SearchReadGroupSetsResponse - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :next_page_token, as: 'nextPageToken' - collection :read_group_sets, as: 'readGroupSets', class: Google::Apis::GenomicsV1::ReadGroupSet, decorator: Google::Apis::GenomicsV1::ReadGroupSet::Representation - - end - end - - class SearchReadsRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :end, :numeric_string => true, as: 'end' - property :page_size, as: 'pageSize' - property :page_token, as: 'pageToken' - collection :read_group_ids, as: 'readGroupIds' - collection :read_group_set_ids, as: 'readGroupSetIds' - 
property :reference_name, as: 'referenceName' - property :start, :numeric_string => true, as: 'start' - end - end - - class SearchReadsResponse - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :alignments, as: 'alignments', class: Google::Apis::GenomicsV1::Read, decorator: Google::Apis::GenomicsV1::Read::Representation - - property :next_page_token, as: 'nextPageToken' - end - end - - class SearchReferenceSetsRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :accessions, as: 'accessions' - property :assembly_id, as: 'assemblyId' - collection :md5checksums, as: 'md5checksums' - property :page_size, as: 'pageSize' - property :page_token, as: 'pageToken' - end - end - - class SearchReferenceSetsResponse - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :next_page_token, as: 'nextPageToken' - collection :reference_sets, as: 'referenceSets', class: Google::Apis::GenomicsV1::ReferenceSet, decorator: Google::Apis::GenomicsV1::ReferenceSet::Representation - - end - end - - class SearchReferencesRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :accessions, as: 'accessions' - collection :md5checksums, as: 'md5checksums' - property :page_size, as: 'pageSize' - property :page_token, as: 'pageToken' - property :reference_set_id, as: 'referenceSetId' - end - end - - class SearchReferencesResponse - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :next_page_token, as: 'nextPageToken' - collection :references, as: 'references', class: Google::Apis::GenomicsV1::Reference, decorator: Google::Apis::GenomicsV1::Reference::Representation - - end - end - - class SearchVariantSetsRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :dataset_ids, as: 'datasetIds' - property :page_size, as: 'pageSize' - property :page_token, as: 'pageToken' - end - end - - class SearchVariantSetsResponse - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :next_page_token, as: 'nextPageToken' - collection :variant_sets, as: 'variantSets', class: Google::Apis::GenomicsV1::VariantSet, decorator: Google::Apis::GenomicsV1::VariantSet::Representation - - end - end - - class SearchVariantsRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :call_set_ids, as: 'callSetIds' - property :end, :numeric_string => true, as: 'end' - property :max_calls, as: 'maxCalls' - property :page_size, as: 'pageSize' - property :page_token, as: 'pageToken' - property :reference_name, as: 'referenceName' - property :start, :numeric_string => true, as: 'start' - property :variant_name, as: 'variantName' - collection :variant_set_ids, as: 'variantSetIds' - end - end - - class SearchVariantsResponse - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :next_page_token, as: 'nextPageToken' - collection :variants, as: 'variants', class: Google::Apis::GenomicsV1::Variant, decorator: Google::Apis::GenomicsV1::Variant::Representation - - end - end - - class SetIamPolicyRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :policy, as: 'policy', class: Google::Apis::GenomicsV1::Policy, decorator: Google::Apis::GenomicsV1::Policy::Representation - - end - end - class Status # @private class Representation < Google::Apis::Core::JsonRepresentation @@ -1323,37 
+329,6 @@ module Google end end - class TestIamPermissionsRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :permissions, as: 'permissions' - end - end - - class TestIamPermissionsResponse - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :permissions, as: 'permissions' - end - end - - class Transcript - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :coding_sequence, as: 'codingSequence', class: Google::Apis::GenomicsV1::CodingSequence, decorator: Google::Apis::GenomicsV1::CodingSequence::Representation - - collection :exons, as: 'exons', class: Google::Apis::GenomicsV1::Exon, decorator: Google::Apis::GenomicsV1::Exon::Representation - - property :gene_id, as: 'geneId' - end - end - - class UndeleteDatasetRequest - # @private - class Representation < Google::Apis::Core::JsonRepresentation - end - end - class UnexpectedExitStatusEvent # @private class Representation < Google::Apis::Core::JsonRepresentation @@ -1362,92 +337,6 @@ module Google end end - class Variant - # @private - class Representation < Google::Apis::Core::JsonRepresentation - collection :alternate_bases, as: 'alternateBases' - collection :calls, as: 'calls', class: Google::Apis::GenomicsV1::VariantCall, decorator: Google::Apis::GenomicsV1::VariantCall::Representation - - property :created, :numeric_string => true, as: 'created' - property :end, :numeric_string => true, as: 'end' - collection :filter, as: 'filter' - property :id, as: 'id' - hash :info, as: 'info', :class => Array do - include Representable::JSON::Collection - items - end - - collection :names, as: 'names' - property :quality, as: 'quality' - property :reference_bases, as: 'referenceBases' - property :reference_name, as: 'referenceName' - property :start, :numeric_string => true, as: 'start' - property :variant_set_id, as: 'variantSetId' - end - end - - class VariantAnnotation - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :alternate_bases, as: 'alternateBases' - property :clinical_significance, as: 'clinicalSignificance' - collection :conditions, as: 'conditions', class: Google::Apis::GenomicsV1::ClinicalCondition, decorator: Google::Apis::GenomicsV1::ClinicalCondition::Representation - - property :effect, as: 'effect' - property :gene_id, as: 'geneId' - collection :transcript_ids, as: 'transcriptIds' - property :type, as: 'type' - end - end - - class VariantCall - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :call_set_id, as: 'callSetId' - property :call_set_name, as: 'callSetName' - collection :genotype, as: 'genotype' - collection :genotype_likelihood, as: 'genotypeLikelihood' - hash :info, as: 'info', :class => Array do - include Representable::JSON::Collection - items - end - - property :phaseset, as: 'phaseset' - end - end - - class VariantSet - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :dataset_id, as: 'datasetId' - property :description, as: 'description' - property :id, as: 'id' - collection :metadata, as: 'metadata', class: Google::Apis::GenomicsV1::VariantSetMetadata, decorator: Google::Apis::GenomicsV1::VariantSetMetadata::Representation - - property :name, as: 'name' - collection :reference_bounds, as: 'referenceBounds', class: Google::Apis::GenomicsV1::ReferenceBound, decorator: Google::Apis::GenomicsV1::ReferenceBound::Representation - - property :reference_set_id, as: 'referenceSetId' - end - 
end - - class VariantSetMetadata - # @private - class Representation < Google::Apis::Core::JsonRepresentation - property :description, as: 'description' - property :id, as: 'id' - hash :info, as: 'info', :class => Array do - include Representable::JSON::Collection - items - end - - property :key, as: 'key' - property :number, as: 'number' - property :type, as: 'type' - property :value, as: 'value' - end - end - class WorkerAssignedEvent # @private class Representation < Google::Apis::Core::JsonRepresentation diff --git a/generated/google/apis/genomics_v1/service.rb b/generated/google/apis/genomics_v1/service.rb index fc19fe77d..647e9541c 100644 --- a/generated/google/apis/genomics_v1/service.rb +++ b/generated/google/apis/genomics_v1/service.rb @@ -47,887 +47,6 @@ module Google @batch_path = 'batch' end - # Creates one or more new annotations atomically. All annotations must - # belong to the same annotation set. Caller must have WRITE - # permission for this annotation set. For optimal performance, batch - # positionally adjacent annotations together. - # If the request has a systemic issue, such as an attempt to write to - # an inaccessible annotation set, the entire RPC will fail accordingly. For - # lesser data issues, when possible an error will be isolated to the - # corresponding batch entry in the response; the remaining well formed - # annotations will be created normally. - # For details on the requirements for each individual annotation resource, - # see - # CreateAnnotation. - # @param [Google::Apis::GenomicsV1::BatchCreateAnnotationsRequest] batch_create_annotations_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::BatchCreateAnnotationsResponse] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::BatchCreateAnnotationsResponse] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def batch_create_annotations(batch_create_annotations_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/annotations:batchCreate', options) - command.request_representation = Google::Apis::GenomicsV1::BatchCreateAnnotationsRequest::Representation - command.request_object = batch_create_annotations_request_object - command.response_representation = Google::Apis::GenomicsV1::BatchCreateAnnotationsResponse::Representation - command.response_class = Google::Apis::GenomicsV1::BatchCreateAnnotationsResponse - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Creates a new annotation. Caller must have WRITE permission - # for the associated annotation set. 
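# --- Editorial aside (not part of the generated patch) ---
# A minimal usage sketch of the batch-create call removed above, assuming an
# authorized GenomicsService client with application-default credentials and
# the genomics OAuth scope. The annotation-set ID and coordinates are
# placeholders; field names follow the removed Annotation and
# BatchCreateAnnotationsRequest classes.
require 'google/apis/genomics_v1'
require 'googleauth'

genomics = Google::Apis::GenomicsV1::GenomicsService.new
genomics.authorization = Google::Auth.get_application_default(
  ['https://www.googleapis.com/auth/genomics'])

request = Google::Apis::GenomicsV1::BatchCreateAnnotationsRequest.new(
  annotations: [
    Google::Apis::GenomicsV1::Annotation.new(
      annotation_set_id: 'example-annotation-set',   # placeholder ID
      reference_name:    'chr1',
      start:             1_000_000,
      end:               1_000_100,
      type:              'GENERIC')
  ])
response = genomics.batch_create_annotations(request)
response.entries.each do |entry|
  # Per the docs above, per-entry failures are isolated on entry.status.
  puts(entry.annotation ? "created #{entry.annotation.id}" : entry.status.message)
end
# --- end editorial aside ---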
- # The following fields are required: - # * annotationSetId - # * referenceName or - # referenceId - # ### Transcripts - # For annotations of type TRANSCRIPT, the following fields of - # transcript must be provided: - # * exons.start - # * exons.end - # All other fields may be optionally specified, unless documented as being - # server-generated (for example, the `id` field). The annotated - # range must be no longer than 100Mbp (mega base pairs). See the - # Annotation resource - # for additional restrictions on each field. - # @param [Google::Apis::GenomicsV1::Annotation] annotation_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Annotation] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Annotation] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def create_annotation(annotation_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/annotations', options) - command.request_representation = Google::Apis::GenomicsV1::Annotation::Representation - command.request_object = annotation_object - command.response_representation = Google::Apis::GenomicsV1::Annotation::Representation - command.response_class = Google::Apis::GenomicsV1::Annotation - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Deletes an annotation. Caller must have WRITE permission for - # the associated annotation set. - # @param [String] annotation_id - # The ID of the annotation to be deleted. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. 
- # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Empty] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Empty] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def delete_annotation(annotation_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:delete, 'v1/annotations/{annotationId}', options) - command.response_representation = Google::Apis::GenomicsV1::Empty::Representation - command.response_class = Google::Apis::GenomicsV1::Empty - command.params['annotationId'] = annotation_id unless annotation_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Gets an annotation. Caller must have READ permission - # for the associated annotation set. - # @param [String] annotation_id - # The ID of the annotation to be retrieved. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Annotation] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Annotation] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def get_annotation(annotation_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:get, 'v1/annotations/{annotationId}', options) - command.response_representation = Google::Apis::GenomicsV1::Annotation::Representation - command.response_class = Google::Apis::GenomicsV1::Annotation - command.params['annotationId'] = annotation_id unless annotation_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Searches for annotations that match the given criteria. Results are - # ordered by genomic coordinate (by reference sequence, then position). - # Annotations with equivalent genomic coordinates are returned in an - # unspecified order. This order is consistent, such that two queries for the - # same content (regardless of page size) yield annotations in the same order - # across their respective streams of paginated responses. Caller must have - # READ permission for the queried annotation sets. - # @param [Google::Apis::GenomicsV1::SearchAnnotationsRequest] search_annotations_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. 
- # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::SearchAnnotationsResponse] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::SearchAnnotationsResponse] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def search_annotations(search_annotations_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/annotations/search', options) - command.request_representation = Google::Apis::GenomicsV1::SearchAnnotationsRequest::Representation - command.request_object = search_annotations_request_object - command.response_representation = Google::Apis::GenomicsV1::SearchAnnotationsResponse::Representation - command.response_class = Google::Apis::GenomicsV1::SearchAnnotationsResponse - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Updates an annotation. Caller must have - # WRITE permission for the associated dataset. - # @param [String] annotation_id - # The ID of the annotation to be updated. - # @param [Google::Apis::GenomicsV1::Annotation] annotation_object - # @param [String] update_mask - # An optional mask specifying which fields to update. Mutable fields are - # name, - # variant, - # transcript, and - # info. If unspecified, all mutable - # fields will be updated. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Annotation] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Annotation] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def update_annotation(annotation_id, annotation_object = nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:put, 'v1/annotations/{annotationId}', options) - command.request_representation = Google::Apis::GenomicsV1::Annotation::Representation - command.request_object = annotation_object - command.response_representation = Google::Apis::GenomicsV1::Annotation::Representation - command.response_class = Google::Apis::GenomicsV1::Annotation - command.params['annotationId'] = annotation_id unless annotation_id.nil? 
- command.query['updateMask'] = update_mask unless update_mask.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Creates a new annotation set. Caller must have WRITE permission for the - # associated dataset. - # The following fields are required: - # * datasetId - # * referenceSetId - # All other fields may be optionally specified, unless documented as being - # server-generated (for example, the `id` field). - # @param [Google::Apis::GenomicsV1::AnnotationSet] annotation_set_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::AnnotationSet] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::AnnotationSet] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def create_annotation_set(annotation_set_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/annotationsets', options) - command.request_representation = Google::Apis::GenomicsV1::AnnotationSet::Representation - command.request_object = annotation_set_object - command.response_representation = Google::Apis::GenomicsV1::AnnotationSet::Representation - command.response_class = Google::Apis::GenomicsV1::AnnotationSet - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Deletes an annotation set. Caller must have WRITE permission - # for the associated annotation set. - # @param [String] annotation_set_id - # The ID of the annotation set to be deleted. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. 
- # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Empty] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Empty] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def delete_annotationset(annotation_set_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:delete, 'v1/annotationsets/{annotationSetId}', options) - command.response_representation = Google::Apis::GenomicsV1::Empty::Representation - command.response_class = Google::Apis::GenomicsV1::Empty - command.params['annotationSetId'] = annotation_set_id unless annotation_set_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Gets an annotation set. Caller must have READ permission for - # the associated dataset. - # @param [String] annotation_set_id - # The ID of the annotation set to be retrieved. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::AnnotationSet] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::AnnotationSet] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def get_annotation_set(annotation_set_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:get, 'v1/annotationsets/{annotationSetId}', options) - command.response_representation = Google::Apis::GenomicsV1::AnnotationSet::Representation - command.response_class = Google::Apis::GenomicsV1::AnnotationSet - command.params['annotationSetId'] = annotation_set_id unless annotation_set_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Searches for annotation sets that match the given criteria. Annotation sets - # are returned in an unspecified order. This order is consistent, such that - # two queries for the same content (regardless of page size) yield annotation - # sets in the same order across their respective streams of paginated - # responses. Caller must have READ permission for the queried datasets. - # @param [Google::Apis::GenomicsV1::SearchAnnotationSetsRequest] search_annotation_sets_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. 
- # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::SearchAnnotationSetsResponse] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::SearchAnnotationSetsResponse] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def search_annotationset_annotation_sets(search_annotation_sets_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/annotationsets/search', options) - command.request_representation = Google::Apis::GenomicsV1::SearchAnnotationSetsRequest::Representation - command.request_object = search_annotation_sets_request_object - command.response_representation = Google::Apis::GenomicsV1::SearchAnnotationSetsResponse::Representation - command.response_class = Google::Apis::GenomicsV1::SearchAnnotationSetsResponse - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Updates an annotation set. The update must respect all mutability - # restrictions and other invariants described on the annotation set resource. - # Caller must have WRITE permission for the associated dataset. - # @param [String] annotation_set_id - # The ID of the annotation set to be updated. - # @param [Google::Apis::GenomicsV1::AnnotationSet] annotation_set_object - # @param [String] update_mask - # An optional mask specifying which fields to update. Mutable fields are - # name, - # source_uri, and - # info. If unspecified, all - # mutable fields will be updated. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. 
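# --- Editorial aside (not part of the generated patch) ---
# Hypothetical paging loop over the annotation-set search removed above,
# reusing the `genomics` client from the earlier sketch; the dataset ID is a
# placeholder. Note the generated method name is
# search_annotationset_annotation_sets.
page_token = nil
loop do
  request = Google::Apis::GenomicsV1::SearchAnnotationSetsRequest.new(
    dataset_ids: ['example-dataset'],   # placeholder ID
    page_size:   100,
    page_token:  page_token)
  response = genomics.search_annotationset_annotation_sets(request)
  (response.annotation_sets || []).each { |set| puts "#{set.id}\t#{set.name}" }
  page_token = response.next_page_token
  break if page_token.nil? || page_token.empty?
end
# --- end editorial aside ---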
- # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::AnnotationSet] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::AnnotationSet] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def update_annotationset(annotation_set_id, annotation_set_object = nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:put, 'v1/annotationsets/{annotationSetId}', options) - command.request_representation = Google::Apis::GenomicsV1::AnnotationSet::Representation - command.request_object = annotation_set_object - command.response_representation = Google::Apis::GenomicsV1::AnnotationSet::Representation - command.response_class = Google::Apis::GenomicsV1::AnnotationSet - command.params['annotationSetId'] = annotation_set_id unless annotation_set_id.nil? - command.query['updateMask'] = update_mask unless update_mask.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Creates a new call set. - # @param [Google::Apis::GenomicsV1::CallSet] call_set_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::CallSet] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::CallSet] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def create_call_set(call_set_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/callsets', options) - command.request_representation = Google::Apis::GenomicsV1::CallSet::Representation - command.request_object = call_set_object - command.response_representation = Google::Apis::GenomicsV1::CallSet::Representation - command.response_class = Google::Apis::GenomicsV1::CallSet - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Deletes a call set. - # @param [String] call_set_id - # The ID of the call set to be deleted. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. 
- # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Empty] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Empty] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def delete_call_set(call_set_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:delete, 'v1/callsets/{callSetId}', options) - command.response_representation = Google::Apis::GenomicsV1::Empty::Representation - command.response_class = Google::Apis::GenomicsV1::Empty - command.params['callSetId'] = call_set_id unless call_set_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Gets a call set by ID. - # @param [String] call_set_id - # The ID of the call set. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::CallSet] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::CallSet] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def get_call_set(call_set_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:get, 'v1/callsets/{callSetId}', options) - command.response_representation = Google::Apis::GenomicsV1::CallSet::Representation - command.response_class = Google::Apis::GenomicsV1::CallSet - command.params['callSetId'] = call_set_id unless call_set_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Updates a call set. - # This method supports patch semantics. - # @param [String] call_set_id - # The ID of the call set to be updated. - # @param [Google::Apis::GenomicsV1::CallSet] call_set_object - # @param [String] update_mask - # An optional mask specifying which fields to update. At this time, the only - # mutable field is name. The only - # acceptable value is "name". If unspecified, all mutable fields will be - # updated. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. 
- # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::CallSet] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::CallSet] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def patch_call_set(call_set_id, call_set_object = nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:patch, 'v1/callsets/{callSetId}', options) - command.request_representation = Google::Apis::GenomicsV1::CallSet::Representation - command.request_object = call_set_object - command.response_representation = Google::Apis::GenomicsV1::CallSet::Representation - command.response_class = Google::Apis::GenomicsV1::CallSet - command.params['callSetId'] = call_set_id unless call_set_id.nil? - command.query['updateMask'] = update_mask unless update_mask.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Gets a list of call sets matching the criteria. - # Implements - # [GlobalAllianceApi.searchCallSets](https://github.com/ga4gh/schemas/blob/v0.5. - # 1/src/main/resources/avro/variantmethods.avdl#L178). - # @param [Google::Apis::GenomicsV1::SearchCallSetsRequest] search_call_sets_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::SearchCallSetsResponse] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::SearchCallSetsResponse] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def search_call_sets(search_call_sets_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/callsets/search', options) - command.request_representation = Google::Apis::GenomicsV1::SearchCallSetsRequest::Representation - command.request_object = search_call_sets_request_object - command.response_representation = Google::Apis::GenomicsV1::SearchCallSetsResponse::Representation - command.response_class = Google::Apis::GenomicsV1::SearchCallSetsResponse - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Creates a new dataset. - # @param [Google::Apis::GenomicsV1::Dataset] dataset_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. 
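# --- Editorial aside (not part of the generated patch) ---
# Hypothetical call-set lookup against a variant set via the search_call_sets
# method removed above; the variant-set ID is a placeholder and `genomics` is
# the client constructed in the first sketch.
request = Google::Apis::GenomicsV1::SearchCallSetsRequest.new(
  variant_set_ids: ['example-variant-set'],   # placeholder ID
  page_size:       25)
response = genomics.search_call_sets(request)
(response.call_sets || []).each { |cs| puts "#{cs.id}\t#{cs.name} (sample #{cs.sample_id})" }
# --- end editorial aside ---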
- # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Dataset] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Dataset] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def create_dataset(dataset_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/datasets', options) - command.request_representation = Google::Apis::GenomicsV1::Dataset::Representation - command.request_object = dataset_object - command.response_representation = Google::Apis::GenomicsV1::Dataset::Representation - command.response_class = Google::Apis::GenomicsV1::Dataset - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Deletes a dataset and all of its contents (all read group sets, - # reference sets, variant sets, call sets, annotation sets, etc.) - # This is reversible (up to one week after the deletion) via - # the - # datasets.undelete - # operation. - # @param [String] dataset_id - # The ID of the dataset to be deleted. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Empty] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Empty] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def delete_dataset(dataset_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:delete, 'v1/datasets/{datasetId}', options) - command.response_representation = Google::Apis::GenomicsV1::Empty::Representation - command.response_class = Google::Apis::GenomicsV1::Empty - command.params['datasetId'] = dataset_id unless dataset_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Gets a dataset by ID. - # @param [String] dataset_id - # The ID of the dataset. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. 
Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Dataset] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Dataset] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def get_dataset(dataset_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:get, 'v1/datasets/{datasetId}', options) - command.response_representation = Google::Apis::GenomicsV1::Dataset::Representation - command.response_class = Google::Apis::GenomicsV1::Dataset - command.params['datasetId'] = dataset_id unless dataset_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Gets the access control policy for the dataset. This is empty if the - # policy or resource does not exist. - # See Getting a - # Policy for more information. - # @param [String] resource - # REQUIRED: The resource for which policy is being specified. Format is - # `datasets/`. - # @param [Google::Apis::GenomicsV1::GetIamPolicyRequest] get_iam_policy_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Policy] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Policy] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def get_dataset_iam_policy(resource, get_iam_policy_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/{+resource}:getIamPolicy', options) - command.request_representation = Google::Apis::GenomicsV1::GetIamPolicyRequest::Representation - command.request_object = get_iam_policy_request_object - command.response_representation = Google::Apis::GenomicsV1::Policy::Representation - command.response_class = Google::Apis::GenomicsV1::Policy - command.params['resource'] = resource unless resource.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Lists datasets within a project. - # @param [Fixnum] page_size - # The maximum number of results to return in a single page. If unspecified, - # defaults to 50. The maximum value is 1024. 
- # @param [String] page_token - # The continuation token, which is used to page through large result sets. - # To get the next page of results, set this parameter to the value of - # `nextPageToken` from the previous response. - # @param [String] project_id - # Required. The Google Cloud project ID to list datasets for. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::ListDatasetsResponse] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::ListDatasetsResponse] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def list_datasets(page_size: nil, page_token: nil, project_id: nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:get, 'v1/datasets', options) - command.response_representation = Google::Apis::GenomicsV1::ListDatasetsResponse::Representation - command.response_class = Google::Apis::GenomicsV1::ListDatasetsResponse - command.query['pageSize'] = page_size unless page_size.nil? - command.query['pageToken'] = page_token unless page_token.nil? - command.query['projectId'] = project_id unless project_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Updates a dataset. - # This method supports patch semantics. - # @param [String] dataset_id - # The ID of the dataset to be updated. - # @param [Google::Apis::GenomicsV1::Dataset] dataset_object - # @param [String] update_mask - # An optional mask specifying which fields to update. At this time, the only - # mutable field is name. The only - # acceptable value is "name". If unspecified, all mutable fields will be - # updated. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. 
- # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Dataset] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Dataset] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def patch_dataset(dataset_id, dataset_object = nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:patch, 'v1/datasets/{datasetId}', options) - command.request_representation = Google::Apis::GenomicsV1::Dataset::Representation - command.request_object = dataset_object - command.response_representation = Google::Apis::GenomicsV1::Dataset::Representation - command.response_class = Google::Apis::GenomicsV1::Dataset - command.params['datasetId'] = dataset_id unless dataset_id.nil? - command.query['updateMask'] = update_mask unless update_mask.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Sets the access control policy on the specified dataset. Replaces any - # existing policy. - # See Setting a - # Policy for more information. - # @param [String] resource - # REQUIRED: The resource for which policy is being specified. Format is - # `datasets/`. - # @param [Google::Apis::GenomicsV1::SetIamPolicyRequest] set_iam_policy_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Policy] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Policy] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def set_dataset_iam_policy(resource, set_iam_policy_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/{+resource}:setIamPolicy', options) - command.request_representation = Google::Apis::GenomicsV1::SetIamPolicyRequest::Representation - command.request_object = set_iam_policy_request_object - command.response_representation = Google::Apis::GenomicsV1::Policy::Representation - command.response_class = Google::Apis::GenomicsV1::Policy - command.params['resource'] = resource unless resource.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Returns permissions that a caller has on the specified resource. - # See Testing - # Permissions for more information. 
- # @param [String] resource - # REQUIRED: The resource for which policy is being specified. Format is - # `datasets/`. - # @param [Google::Apis::GenomicsV1::TestIamPermissionsRequest] test_iam_permissions_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::TestIamPermissionsResponse] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::TestIamPermissionsResponse] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def test_dataset_iam_permissions(resource, test_iam_permissions_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/{+resource}:testIamPermissions', options) - command.request_representation = Google::Apis::GenomicsV1::TestIamPermissionsRequest::Representation - command.request_object = test_iam_permissions_request_object - command.response_representation = Google::Apis::GenomicsV1::TestIamPermissionsResponse::Representation - command.response_class = Google::Apis::GenomicsV1::TestIamPermissionsResponse - command.params['resource'] = resource unless resource.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Undeletes a dataset by restoring a dataset which was deleted via this API. - # This operation is only possible for a week after the deletion occurred. - # @param [String] dataset_id - # The ID of the dataset to be undeleted. - # @param [Google::Apis::GenomicsV1::UndeleteDatasetRequest] undelete_dataset_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. 
- # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Dataset] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Dataset] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def undelete_dataset(dataset_id, undelete_dataset_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/datasets/{datasetId}:undelete', options) - command.request_representation = Google::Apis::GenomicsV1::UndeleteDatasetRequest::Representation - command.request_object = undelete_dataset_request_object - command.response_representation = Google::Apis::GenomicsV1::Dataset::Representation - command.response_class = Google::Apis::GenomicsV1::Dataset - command.params['datasetId'] = dataset_id unless dataset_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - # Starts asynchronous cancellation on a long-running operation. # The server makes a best effort to cancel the operation, but success is not # guaranteed. Clients may use Operations.GetOperation @@ -1070,1031 +189,6 @@ module Google command.query['quotaUser'] = quota_user unless quota_user.nil? execute_or_queue_command(command, &block) end - - # Deletes a read group set. - # @param [String] read_group_set_id - # The ID of the read group set to be deleted. The caller must have WRITE - # permissions to the dataset associated with this read group set. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Empty] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Empty] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def delete_read_group_set(read_group_set_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:delete, 'v1/readgroupsets/{readGroupSetId}', options) - command.response_representation = Google::Apis::GenomicsV1::Empty::Representation - command.response_class = Google::Apis::GenomicsV1::Empty - command.params['readGroupSetId'] = read_group_set_id unless read_group_set_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Exports a read group set to a BAM file in Google Cloud Storage. 
- # Note that currently there may be some differences between exported BAM - # files and the original BAM file at the time of import. See - # ImportReadGroupSets - # for caveats. - # @param [String] read_group_set_id - # Required. The ID of the read group set to export. The caller must have - # READ access to this read group set. - # @param [Google::Apis::GenomicsV1::ExportReadGroupSetRequest] export_read_group_set_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Operation] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Operation] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def export_read_group_sets(read_group_set_id, export_read_group_set_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/readgroupsets/{readGroupSetId}:export', options) - command.request_representation = Google::Apis::GenomicsV1::ExportReadGroupSetRequest::Representation - command.request_object = export_read_group_set_request_object - command.response_representation = Google::Apis::GenomicsV1::Operation::Representation - command.response_class = Google::Apis::GenomicsV1::Operation - command.params['readGroupSetId'] = read_group_set_id unless read_group_set_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Gets a read group set by ID. - # @param [String] read_group_set_id - # The ID of the read group set. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. 
- # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::ReadGroupSet] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::ReadGroupSet] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def get_read_group_set(read_group_set_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:get, 'v1/readgroupsets/{readGroupSetId}', options) - command.response_representation = Google::Apis::GenomicsV1::ReadGroupSet::Representation - command.response_class = Google::Apis::GenomicsV1::ReadGroupSet - command.params['readGroupSetId'] = read_group_set_id unless read_group_set_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Creates read group sets by asynchronously importing the provided - # information. - # The caller must have WRITE permissions to the dataset. - # ## Notes on [BAM](https://samtools.github.io/hts-specs/SAMv1.pdf) import - # - Tags will be converted to strings - tag types are not preserved - # - Comments (`@CO`) in the input file header will not be preserved - # - Original header order of references (`@SQ`) will not be preserved - # - Any reverse stranded unmapped reads will be reverse complemented, and - # their qualities (also the "BQ" and "OQ" tags, if any) will be reversed - # - Unmapped reads will be stripped of positional information (reference name - # and position) - # @param [Google::Apis::GenomicsV1::ImportReadGroupSetsRequest] import_read_group_sets_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Operation] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Operation] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def import_read_group_sets(import_read_group_sets_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/readgroupsets:import', options) - command.request_representation = Google::Apis::GenomicsV1::ImportReadGroupSetsRequest::Representation - command.request_object = import_read_group_sets_request_object - command.response_representation = Google::Apis::GenomicsV1::Operation::Representation - command.response_class = Google::Apis::GenomicsV1::Operation - command.query['fields'] = fields unless fields.nil? 
- command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Updates a read group set. - # This method supports patch semantics. - # @param [String] read_group_set_id - # The ID of the read group set to be updated. The caller must have WRITE - # permissions to the dataset associated with this read group set. - # @param [Google::Apis::GenomicsV1::ReadGroupSet] read_group_set_object - # @param [String] update_mask - # An optional mask specifying which fields to update. Supported fields: - # * name. - # * referenceSetId. - # Leaving `updateMask` unset is equivalent to specifying all mutable - # fields. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::ReadGroupSet] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::ReadGroupSet] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def patch_read_group_set(read_group_set_id, read_group_set_object = nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:patch, 'v1/readgroupsets/{readGroupSetId}', options) - command.request_representation = Google::Apis::GenomicsV1::ReadGroupSet::Representation - command.request_object = read_group_set_object - command.response_representation = Google::Apis::GenomicsV1::ReadGroupSet::Representation - command.response_class = Google::Apis::GenomicsV1::ReadGroupSet - command.params['readGroupSetId'] = read_group_set_id unless read_group_set_id.nil? - command.query['updateMask'] = update_mask unless update_mask.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Searches for read group sets matching the criteria. - # Implements - # [GlobalAllianceApi.searchReadGroupSets](https://github.com/ga4gh/schemas/blob/ - # v0.5.1/src/main/resources/avro/readmethods.avdl#L135). - # @param [Google::Apis::GenomicsV1::SearchReadGroupSetsRequest] search_read_group_sets_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. 
- # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::SearchReadGroupSetsResponse] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::SearchReadGroupSetsResponse] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def search_read_group_sets(search_read_group_sets_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/readgroupsets/search', options) - command.request_representation = Google::Apis::GenomicsV1::SearchReadGroupSetsRequest::Representation - command.request_object = search_read_group_sets_request_object - command.response_representation = Google::Apis::GenomicsV1::SearchReadGroupSetsResponse::Representation - command.response_class = Google::Apis::GenomicsV1::SearchReadGroupSetsResponse - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Lists fixed width coverage buckets for a read group set, each of which - # correspond to a range of a reference sequence. Each bucket summarizes - # coverage information across its corresponding genomic range. - # Coverage is defined as the number of reads which are aligned to a given - # base in the reference sequence. Coverage buckets are available at several - # precomputed bucket widths, enabling retrieval of various coverage 'zoom - # levels'. The caller must have READ permissions for the target read group - # set. - # @param [String] read_group_set_id - # Required. The ID of the read group set over which coverage is requested. - # @param [Fixnum] end_ - # The end position of the range on the reference, 0-based exclusive. If - # specified, `referenceName` must also be specified. If unset or 0, defaults - # to the length of the reference. - # @param [Fixnum] page_size - # The maximum number of results to return in a single page. If unspecified, - # defaults to 1024. The maximum value is 2048. - # @param [String] page_token - # The continuation token, which is used to page through large result sets. - # To get the next page of results, set this parameter to the value of - # `nextPageToken` from the previous response. - # @param [String] reference_name - # The name of the reference to query, within the reference set associated - # with this query. Optional. - # @param [Fixnum] start - # The start position of the range on the reference, 0-based inclusive. If - # specified, `referenceName` must also be specified. Defaults to 0. - # @param [Fixnum] target_bucket_width - # The desired width of each reported coverage bucket in base pairs. This - # will be rounded down to the nearest precomputed bucket width; the value - # of which is returned as `bucketWidth` in the response. Defaults - # to infinity (each bucket spans an entire reference sequence) or the length - # of the target range, if specified. The smallest precomputed - # `bucketWidth` is currently 2048 base pairs; this is subject to - # change. - # @param [String] fields - # Selector specifying which fields to include in a partial response. 
- # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::ListCoverageBucketsResponse] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::ListCoverageBucketsResponse] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def list_coverage_buckets(read_group_set_id, end_: nil, page_size: nil, page_token: nil, reference_name: nil, start: nil, target_bucket_width: nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:get, 'v1/readgroupsets/{readGroupSetId}/coveragebuckets', options) - command.response_representation = Google::Apis::GenomicsV1::ListCoverageBucketsResponse::Representation - command.response_class = Google::Apis::GenomicsV1::ListCoverageBucketsResponse - command.params['readGroupSetId'] = read_group_set_id unless read_group_set_id.nil? - command.query['end'] = end_ unless end_.nil? - command.query['pageSize'] = page_size unless page_size.nil? - command.query['pageToken'] = page_token unless page_token.nil? - command.query['referenceName'] = reference_name unless reference_name.nil? - command.query['start'] = start unless start.nil? - command.query['targetBucketWidth'] = target_bucket_width unless target_bucket_width.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Gets a list of reads for one or more read group sets. - # Reads search operates over a genomic coordinate space of reference sequence - # & position defined over the reference sequences to which the requested - # read group sets are aligned. - # If a target positional range is specified, search returns all reads whose - # alignment to the reference genome overlap the range. A query which - # specifies only read group set IDs yields all reads in those read group - # sets, including unmapped reads. - # All reads returned (including reads on subsequent pages) are ordered by - # genomic coordinate (by reference sequence, then position). Reads with - # equivalent genomic coordinates are returned in an unspecified order. This - # order is consistent, such that two queries for the same content (regardless - # of page size) yield reads in the same order across their respective streams - # of paginated responses. - # Implements - # [GlobalAllianceApi.searchReads](https://github.com/ga4gh/schemas/blob/v0.5.1/ - # src/main/resources/avro/readmethods.avdl#L85). - # @param [Google::Apis::GenomicsV1::SearchReadsRequest] search_reads_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. 
- # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::SearchReadsResponse] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::SearchReadsResponse] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def search_reads(search_reads_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/reads/search', options) - command.request_representation = Google::Apis::GenomicsV1::SearchReadsRequest::Representation - command.request_object = search_reads_request_object - command.response_representation = Google::Apis::GenomicsV1::SearchReadsResponse::Representation - command.response_class = Google::Apis::GenomicsV1::SearchReadsResponse - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Gets a reference. - # Implements - # [GlobalAllianceApi.getReference](https://github.com/ga4gh/schemas/blob/v0.5.1/ - # src/main/resources/avro/referencemethods.avdl#L158). - # @param [String] reference_id - # The ID of the reference. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Reference] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Reference] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def get_reference(reference_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:get, 'v1/references/{referenceId}', options) - command.response_representation = Google::Apis::GenomicsV1::Reference::Representation - command.response_class = Google::Apis::GenomicsV1::Reference - command.params['referenceId'] = reference_id unless reference_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Searches for references which match the given criteria. - # Implements - # [GlobalAllianceApi.searchReferences](https://github.com/ga4gh/schemas/blob/v0. - # 5.1/src/main/resources/avro/referencemethods.avdl#L146). - # @param [Google::Apis::GenomicsV1::SearchReferencesRequest] search_references_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. 
Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::SearchReferencesResponse] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::SearchReferencesResponse] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def search_references(search_references_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/references/search', options) - command.request_representation = Google::Apis::GenomicsV1::SearchReferencesRequest::Representation - command.request_object = search_references_request_object - command.response_representation = Google::Apis::GenomicsV1::SearchReferencesResponse::Representation - command.response_class = Google::Apis::GenomicsV1::SearchReferencesResponse - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Lists the bases in a reference, optionally restricted to a range. - # Implements - # [GlobalAllianceApi.getReferenceBases](https://github.com/ga4gh/schemas/blob/v0. - # 5.1/src/main/resources/avro/referencemethods.avdl#L221). - # @param [String] reference_id - # The ID of the reference. - # @param [Fixnum] end_position - # The end position (0-based, exclusive) of this query. Defaults to the length - # of this reference. - # @param [Fixnum] page_size - # The maximum number of bases to return in a single page. If unspecified, - # defaults to 200Kbp (kilo base pairs). The maximum value is 10Mbp (mega base - # pairs). - # @param [String] page_token - # The continuation token, which is used to page through large result sets. - # To get the next page of results, set this parameter to the value of - # `nextPageToken` from the previous response. - # @param [Fixnum] start_position - # The start position (0-based) of this query. Defaults to 0. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. 
- # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::ListBasesResponse] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::ListBasesResponse] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def list_reference_bases(reference_id, end_position: nil, page_size: nil, page_token: nil, start_position: nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:get, 'v1/references/{referenceId}/bases', options) - command.response_representation = Google::Apis::GenomicsV1::ListBasesResponse::Representation - command.response_class = Google::Apis::GenomicsV1::ListBasesResponse - command.params['referenceId'] = reference_id unless reference_id.nil? - command.query['end'] = end_position unless end_position.nil? - command.query['pageSize'] = page_size unless page_size.nil? - command.query['pageToken'] = page_token unless page_token.nil? - command.query['start'] = start_position unless start_position.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Gets a reference set. - # Implements - # [GlobalAllianceApi.getReferenceSet](https://github.com/ga4gh/schemas/blob/v0.5. - # 1/src/main/resources/avro/referencemethods.avdl#L83). - # @param [String] reference_set_id - # The ID of the reference set. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::ReferenceSet] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::ReferenceSet] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def get_reference_set(reference_set_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:get, 'v1/referencesets/{referenceSetId}', options) - command.response_representation = Google::Apis::GenomicsV1::ReferenceSet::Representation - command.response_class = Google::Apis::GenomicsV1::ReferenceSet - command.params['referenceSetId'] = reference_set_id unless reference_set_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Searches for reference sets which match the given criteria. 
- # Implements - # [GlobalAllianceApi.searchReferenceSets](https://github.com/ga4gh/schemas/blob/ - # v0.5.1/src/main/resources/avro/referencemethods.avdl#L71) - # @param [Google::Apis::GenomicsV1::SearchReferenceSetsRequest] search_reference_sets_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::SearchReferenceSetsResponse] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::SearchReferenceSetsResponse] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def search_reference_sets(search_reference_sets_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/referencesets/search', options) - command.request_representation = Google::Apis::GenomicsV1::SearchReferenceSetsRequest::Representation - command.request_object = search_reference_sets_request_object - command.response_representation = Google::Apis::GenomicsV1::SearchReferenceSetsResponse::Representation - command.response_class = Google::Apis::GenomicsV1::SearchReferenceSetsResponse - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Creates a new variant. - # @param [Google::Apis::GenomicsV1::Variant] variant_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Variant] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Variant] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def create_variant(variant_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/variants', options) - command.request_representation = Google::Apis::GenomicsV1::Variant::Representation - command.request_object = variant_object - command.response_representation = Google::Apis::GenomicsV1::Variant::Representation - command.response_class = Google::Apis::GenomicsV1::Variant - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? 
- execute_or_queue_command(command, &block) - end - - # Deletes a variant. - # @param [String] variant_id - # The ID of the variant to be deleted. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Empty] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Empty] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def delete_variant(variant_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:delete, 'v1/variants/{variantId}', options) - command.response_representation = Google::Apis::GenomicsV1::Empty::Representation - command.response_class = Google::Apis::GenomicsV1::Empty - command.params['variantId'] = variant_id unless variant_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Gets a variant by ID. - # @param [String] variant_id - # The ID of the variant. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Variant] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Variant] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def get_variant(variant_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:get, 'v1/variants/{variantId}', options) - command.response_representation = Google::Apis::GenomicsV1::Variant::Representation - command.response_class = Google::Apis::GenomicsV1::Variant - command.params['variantId'] = variant_id unless variant_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Creates variant data by asynchronously importing the provided information. - # The variants for import will be merged with any existing variant that - # matches its reference sequence, start, end, reference bases, and - # alternative bases. If no such variant exists, a new one will be created. 
- # When variants are merged, the call information from the new variant - # is added to the existing variant, and Variant info fields are merged - # as specified in - # infoMergeConfig. - # As a special case, for single-sample VCF files, QUAL and FILTER fields will - # be moved to the call level; these are sometimes interpreted in a - # call-specific context. - # Imported VCF headers are appended to the metadata already in a variant set. - # @param [Google::Apis::GenomicsV1::ImportVariantsRequest] import_variants_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Operation] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Operation] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def import_variants(import_variants_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/variants:import', options) - command.request_representation = Google::Apis::GenomicsV1::ImportVariantsRequest::Representation - command.request_object = import_variants_request_object - command.response_representation = Google::Apis::GenomicsV1::Operation::Representation - command.response_class = Google::Apis::GenomicsV1::Operation - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Merges the given variants with existing variants. - # Each variant will be - # merged with an existing variant that matches its reference sequence, - # start, end, reference bases, and alternative bases. If no such variant - # exists, a new one will be created. - # When variants are merged, the call information from the new variant - # is added to the existing variant. Variant info fields are merged as - # specified in the - # infoMergeConfig - # field of the MergeVariantsRequest. - # Please exercise caution when using this method! It is easy to introduce - # mistakes in existing variants and difficult to back out of them. For - # example, - # suppose you were trying to merge a new variant with an existing one and - # both - # variants contain calls that belong to callsets with the same callset ID. 
- # // Existing variant - irrelevant fields trimmed for clarity - # ` - # "variantSetId": "10473108253681171589", - # "referenceName": "1", - # "start": "10582", - # "referenceBases": "G", - # "alternateBases": [ - # "A" - # ], - # "calls": [ - # ` - # "callSetId": "10473108253681171589-0", - # "callSetName": "CALLSET0", - # "genotype": [ - # 0, - # 1 - # ], - # ` - # ] - # ` - # // New variant with conflicting call information - # ` - # "variantSetId": "10473108253681171589", - # "referenceName": "1", - # "start": "10582", - # "referenceBases": "G", - # "alternateBases": [ - # "A" - # ], - # "calls": [ - # ` - # "callSetId": "10473108253681171589-0", - # "callSetName": "CALLSET0", - # "genotype": [ - # 1, - # 1 - # ], - # ` - # ] - # ` - # The resulting merged variant would overwrite the existing calls with those - # from the new variant: - # ` - # "variantSetId": "10473108253681171589", - # "referenceName": "1", - # "start": "10582", - # "referenceBases": "G", - # "alternateBases": [ - # "A" - # ], - # "calls": [ - # ` - # "callSetId": "10473108253681171589-0", - # "callSetName": "CALLSET0", - # "genotype": [ - # 1, - # 1 - # ], - # ` - # ] - # ` - # This may be the desired outcome, but it is up to the user to determine if - # if that is indeed the case. - # @param [Google::Apis::GenomicsV1::MergeVariantsRequest] merge_variants_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Empty] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Empty] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def merge_variants(merge_variants_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/variants:merge', options) - command.request_representation = Google::Apis::GenomicsV1::MergeVariantsRequest::Representation - command.request_object = merge_variants_request_object - command.response_representation = Google::Apis::GenomicsV1::Empty::Representation - command.response_class = Google::Apis::GenomicsV1::Empty - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Updates a variant. - # This method supports patch semantics. Returns the modified variant without - # its calls. - # @param [String] variant_id - # The ID of the variant to be updated. - # @param [Google::Apis::GenomicsV1::Variant] variant_object - # @param [String] update_mask - # An optional mask specifying which fields to update. At this time, mutable - # fields are names and - # info. Acceptable values are "names" and - # "info". If unspecified, all mutable fields will be updated. - # @param [String] fields - # Selector specifying which fields to include in a partial response. 
- # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Variant] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Variant] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def patch_variant(variant_id, variant_object = nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:patch, 'v1/variants/{variantId}', options) - command.request_representation = Google::Apis::GenomicsV1::Variant::Representation - command.request_object = variant_object - command.response_representation = Google::Apis::GenomicsV1::Variant::Representation - command.response_class = Google::Apis::GenomicsV1::Variant - command.params['variantId'] = variant_id unless variant_id.nil? - command.query['updateMask'] = update_mask unless update_mask.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Gets a list of variants matching the criteria. - # Implements - # [GlobalAllianceApi.searchVariants](https://github.com/ga4gh/schemas/blob/v0.5. - # 1/src/main/resources/avro/variantmethods.avdl#L126). - # @param [Google::Apis::GenomicsV1::SearchVariantsRequest] search_variants_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::SearchVariantsResponse] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::SearchVariantsResponse] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def search_variants(search_variants_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/variants/search', options) - command.request_representation = Google::Apis::GenomicsV1::SearchVariantsRequest::Representation - command.request_object = search_variants_request_object - command.response_representation = Google::Apis::GenomicsV1::SearchVariantsResponse::Representation - command.response_class = Google::Apis::GenomicsV1::SearchVariantsResponse - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? 
- execute_or_queue_command(command, &block) - end - - # Creates a new variant set. - # The provided variant set must have a valid `datasetId` set - all other - # fields are optional. Note that the `id` field will be ignored, as this is - # assigned by the server. - # @param [Google::Apis::GenomicsV1::VariantSet] variant_set_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::VariantSet] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::VariantSet] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def create_variantset(variant_set_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/variantsets', options) - command.request_representation = Google::Apis::GenomicsV1::VariantSet::Representation - command.request_object = variant_set_object - command.response_representation = Google::Apis::GenomicsV1::VariantSet::Representation - command.response_class = Google::Apis::GenomicsV1::VariantSet - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Deletes a variant set including all variants, call sets, and calls within. - # This is not reversible. - # @param [String] variant_set_id - # The ID of the variant set to be deleted. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Empty] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Empty] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def delete_variantset(variant_set_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:delete, 'v1/variantsets/{variantSetId}', options) - command.response_representation = Google::Apis::GenomicsV1::Empty::Representation - command.response_class = Google::Apis::GenomicsV1::Empty - command.params['variantSetId'] = variant_set_id unless variant_set_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? 
- execute_or_queue_command(command, &block) - end - - # Exports variant set data to an external destination. - # @param [String] variant_set_id - # Required. The ID of the variant set that contains variant data which - # should be exported. The caller must have READ access to this variant set. - # @param [Google::Apis::GenomicsV1::ExportVariantSetRequest] export_variant_set_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::Operation] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::Operation] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def export_variant_set(variant_set_id, export_variant_set_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/variantsets/{variantSetId}:export', options) - command.request_representation = Google::Apis::GenomicsV1::ExportVariantSetRequest::Representation - command.request_object = export_variant_set_request_object - command.response_representation = Google::Apis::GenomicsV1::Operation::Representation - command.response_class = Google::Apis::GenomicsV1::Operation - command.params['variantSetId'] = variant_set_id unless variant_set_id.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Gets a variant set by ID. - # @param [String] variant_set_id - # Required. The ID of the variant set. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::VariantSet] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::VariantSet] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def get_variantset(variant_set_id, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:get, 'v1/variantsets/{variantSetId}', options) - command.response_representation = Google::Apis::GenomicsV1::VariantSet::Representation - command.response_class = Google::Apis::GenomicsV1::VariantSet - command.params['variantSetId'] = variant_set_id unless variant_set_id.nil? 
- command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Updates a variant set using patch semantics. - # @param [String] variant_set_id - # The ID of the variant to be updated (must already exist). - # @param [Google::Apis::GenomicsV1::VariantSet] variant_set_object - # @param [String] update_mask - # An optional mask specifying which fields to update. Supported fields: - # * metadata. - # * name. - # * description. - # Leaving `updateMask` unset is equivalent to specifying all mutable - # fields. - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. - # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::VariantSet] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::VariantSet] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def patch_variantset(variant_set_id, variant_set_object = nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:patch, 'v1/variantsets/{variantSetId}', options) - command.request_representation = Google::Apis::GenomicsV1::VariantSet::Representation - command.request_object = variant_set_object - command.response_representation = Google::Apis::GenomicsV1::VariantSet::Representation - command.response_class = Google::Apis::GenomicsV1::VariantSet - command.params['variantSetId'] = variant_set_id unless variant_set_id.nil? - command.query['updateMask'] = update_mask unless update_mask.nil? - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end - - # Returns a list of all variant sets matching search criteria. - # Implements - # [GlobalAllianceApi.searchVariantSets](https://github.com/ga4gh/schemas/blob/v0. - # 5.1/src/main/resources/avro/variantmethods.avdl#L49). - # @param [Google::Apis::GenomicsV1::SearchVariantSetsRequest] search_variant_sets_request_object - # @param [String] fields - # Selector specifying which fields to include in a partial response. - # @param [String] quota_user - # Available to use for quota purposes for server-side applications. Can be any - # arbitrary string assigned to a user, but should not exceed 40 characters. 
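A hedged sketch of the variant-set lifecycle methods documented above (create, patch with `update_mask`, export, delete), again reusing the `genomics` client from the first sketch. The `dataset_id` requirement, the mutable fields, and the method names come from this file; the `ExportVariantSetRequest` attribute names are assumptions.

  # Create: only dataset_id is required; the server assigns the id.
  vs = genomics.create_variantset(
    Google::Apis::GenomicsV1::VariantSet.new(dataset_id: 'my-dataset-id'))

  # Patch: update_mask limits the write to the listed mutable fields.
  genomics.patch_variantset(
    vs.id,
    Google::Apis::GenomicsV1::VariantSet.new(name: 'cohort-a',
                                             description: 'Phase 1 call set'),
    update_mask: 'name,description')

  # Export (ExportVariantSetRequest attribute names are assumptions).
  export_op = genomics.export_variant_set(
    vs.id,
    Google::Apis::GenomicsV1::ExportVariantSetRequest.new(
      project_id:       'my-project',
      bigquery_dataset: 'genomics_exports',
      bigquery_table:   'variants'))

  genomics.delete_variantset(vs.id)   # irreversible, per the documentation above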
- # @param [Google::Apis::RequestOptions] options - # Request-specific options - # - # @yield [result, err] Result & error if block supplied - # @yieldparam result [Google::Apis::GenomicsV1::SearchVariantSetsResponse] parsed result object - # @yieldparam err [StandardError] error object if request failed - # - # @return [Google::Apis::GenomicsV1::SearchVariantSetsResponse] - # - # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried - # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification - # @raise [Google::Apis::AuthorizationError] Authorization is required - def search_variant_sets(search_variant_sets_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) - command = make_simple_command(:post, 'v1/variantsets/search', options) - command.request_representation = Google::Apis::GenomicsV1::SearchVariantSetsRequest::Representation - command.request_object = search_variant_sets_request_object - command.response_representation = Google::Apis::GenomicsV1::SearchVariantSetsResponse::Representation - command.response_class = Google::Apis::GenomicsV1::SearchVariantSetsResponse - command.query['fields'] = fields unless fields.nil? - command.query['quotaUser'] = quota_user unless quota_user.nil? - execute_or_queue_command(command, &block) - end protected diff --git a/generated/google/apis/iap_v1.rb b/generated/google/apis/iap_v1.rb index 109b9458a..f6931a058 100644 --- a/generated/google/apis/iap_v1.rb +++ b/generated/google/apis/iap_v1.rb @@ -25,7 +25,7 @@ module Google # @see https://cloud.google.com/iap module IapV1 VERSION = 'V1' - REVISION = '20190109' + REVISION = '20190306' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/iap_v1/classes.rb b/generated/google/apis/iap_v1/classes.rb index 0846a5ba8..fbdf57a56 100644 --- a/generated/google/apis/iap_v1/classes.rb +++ b/generated/google/apis/iap_v1/classes.rb @@ -46,7 +46,7 @@ module Google # account. For example, `my-other-app@appspot.gserviceaccount.com`. # * `group:`emailid``: An email address that represents a Google group. # For example, `admins@example.com`. - # * `domain:`domain``: A Google Apps domain name that represents all the + # * `domain:`domain``: The G Suite domain (primary) that represents all the # users of that domain. For example, `google.com` or `example.com`. # Corresponds to the JSON property `members` # @return [Array] diff --git a/generated/google/apis/iap_v1beta1.rb b/generated/google/apis/iap_v1beta1.rb index f27967785..c37d3c030 100644 --- a/generated/google/apis/iap_v1beta1.rb +++ b/generated/google/apis/iap_v1beta1.rb @@ -25,7 +25,7 @@ module Google # @see https://cloud.google.com/iap module IapV1beta1 VERSION = 'V1beta1' - REVISION = '20190109' + REVISION = '20190306' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/iap_v1beta1/classes.rb b/generated/google/apis/iap_v1beta1/classes.rb index 0c098f9de..b6edacda7 100644 --- a/generated/google/apis/iap_v1beta1/classes.rb +++ b/generated/google/apis/iap_v1beta1/classes.rb @@ -46,7 +46,7 @@ module Google # account. For example, `my-other-app@appspot.gserviceaccount.com`. # * `group:`emailid``: An email address that represents a Google group. # For example, `admins@example.com`. 
- # * `domain:`domain``: A Google Apps domain name that represents all the + # * `domain:`domain``: The G Suite domain (primary) that represents all the # users of that domain. For example, `google.com` or `example.com`. # Corresponds to the JSON property `members` # @return [Array] diff --git a/generated/google/apis/language_v1.rb b/generated/google/apis/language_v1.rb index 8188a9557..0868d99e7 100644 --- a/generated/google/apis/language_v1.rb +++ b/generated/google/apis/language_v1.rb @@ -27,7 +27,7 @@ module Google # @see https://cloud.google.com/natural-language/ module LanguageV1 VERSION = 'V1' - REVISION = '20181022' + REVISION = '20190308' # Apply machine learning models to reveal the structure and meaning of text AUTH_CLOUD_LANGUAGE = 'https://www.googleapis.com/auth/cloud-language' diff --git a/generated/google/apis/language_v1/classes.rb b/generated/google/apis/language_v1/classes.rb index 67e3ff741..1c1a3618b 100644 --- a/generated/google/apis/language_v1/classes.rb +++ b/generated/google/apis/language_v1/classes.rb @@ -768,14 +768,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing diff --git a/generated/google/apis/language_v1beta1.rb b/generated/google/apis/language_v1beta1.rb index 135a990f8..83901ba09 100644 --- a/generated/google/apis/language_v1beta1.rb +++ b/generated/google/apis/language_v1beta1.rb @@ -27,7 +27,7 @@ module Google # @see https://cloud.google.com/natural-language/ module LanguageV1beta1 VERSION = 'V1beta1' - REVISION = '20181022' + REVISION = '20190308' # Apply machine learning models to reveal the structure and meaning of text AUTH_CLOUD_LANGUAGE = 'https://www.googleapis.com/auth/cloud-language' diff --git a/generated/google/apis/language_v1beta1/classes.rb b/generated/google/apis/language_v1beta1/classes.rb index 4dadf21c7..1861fc3bc 100644 --- a/generated/google/apis/language_v1beta1/classes.rb +++ b/generated/google/apis/language_v1beta1/classes.rb @@ -620,14 +620,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). 
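A minimal sketch of an IAM binding whose members include the G Suite (primary) domain form described in the updated IAP documentation above. The `Binding`, `Policy`, and `SetIamPolicyRequest` classes are standard IAM messages in the IapV1 namespace; the role value and the final set_iam_policy wiring are illustrative assumptions.

  require 'google/apis/iap_v1'

  binding = Google::Apis::IapV1::Binding.new(
    role:    'roles/iap.httpsResourceAccessor',   # illustrative role
    members: [
      'user:alice@example.com',
      'group:admins@example.com',
      'domain:example.com'   # the (primary) G Suite domain member form
    ])

  policy  = Google::Apis::IapV1::Policy.new(bindings: [binding])
  request = Google::Apis::IapV1::SetIamPolicyRequest.new(policy: policy)
  # `request` is then passed to the generated set_iam_policy helper for the
  # IAP-protected resource (that method is not part of these hunks).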
The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing diff --git a/generated/google/apis/language_v1beta2.rb b/generated/google/apis/language_v1beta2.rb index b0c017c87..e57085e5a 100644 --- a/generated/google/apis/language_v1beta2.rb +++ b/generated/google/apis/language_v1beta2.rb @@ -27,7 +27,7 @@ module Google # @see https://cloud.google.com/natural-language/ module LanguageV1beta2 VERSION = 'V1beta2' - REVISION = '20181022' + REVISION = '20190308' # Apply machine learning models to reveal the structure and meaning of text AUTH_CLOUD_LANGUAGE = 'https://www.googleapis.com/auth/cloud-language' diff --git a/generated/google/apis/language_v1beta2/classes.rb b/generated/google/apis/language_v1beta2/classes.rb index f0481f12c..e7f15a467 100644 --- a/generated/google/apis/language_v1beta2/classes.rb +++ b/generated/google/apis/language_v1beta2/classes.rb @@ -775,14 +775,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing diff --git a/generated/google/apis/poly_v1.rb b/generated/google/apis/poly_v1.rb index bbf52988b..7a8b324d4 100644 --- a/generated/google/apis/poly_v1.rb +++ b/generated/google/apis/poly_v1.rb @@ -27,7 +27,7 @@ module Google # @see https://developers.google.com/poly/ module PolyV1 VERSION = 'V1' - REVISION = '20180808' + REVISION = '20190309' end end end diff --git a/generated/google/apis/poly_v1/classes.rb b/generated/google/apis/poly_v1/classes.rb index 0dbd7fc0d..653aa7a42 100644 --- a/generated/google/apis/poly_v1/classes.rb +++ b/generated/google/apis/poly_v1/classes.rb @@ -180,14 +180,15 @@ module Google # The MIME content-type, such as `image/png`. # For more information, see - # [MIME types](//developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/ - # MIME_types). + # [MIME + # types](//developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types). 
# Corresponds to the JSON property `contentType` # @return [String] attr_accessor :content_type - # The path of the resource file relative to the root file. - # For root or thumbnail files, this is just the filename. + # The path of the resource file relative to the + # root file. For root or thumbnail files, + # this is just the filename. # Corresponds to the JSON property `relativePath` # @return [String] attr_accessor :relative_path diff --git a/generated/google/apis/tpu_v1.rb b/generated/google/apis/tpu_v1.rb index 59ec592b7..3dca297dc 100644 --- a/generated/google/apis/tpu_v1.rb +++ b/generated/google/apis/tpu_v1.rb @@ -25,7 +25,7 @@ module Google # @see https://cloud.google.com/tpu/ module TpuV1 VERSION = 'V1' - REVISION = '20190228' + REVISION = '20190312' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/tpu_v1/classes.rb b/generated/google/apis/tpu_v1/classes.rb index 0f42c387b..d55d8e138 100644 --- a/generated/google/apis/tpu_v1/classes.rb +++ b/generated/google/apis/tpu_v1/classes.rb @@ -420,14 +420,14 @@ module Google attr_accessor :done alias_method :done?, :done - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -615,14 +615,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. 
If a localized user-facing diff --git a/generated/google/apis/tpu_v1alpha1.rb b/generated/google/apis/tpu_v1alpha1.rb index 2bbbb1f47..e8a3c206b 100644 --- a/generated/google/apis/tpu_v1alpha1.rb +++ b/generated/google/apis/tpu_v1alpha1.rb @@ -25,7 +25,7 @@ module Google # @see https://cloud.google.com/tpu/ module TpuV1alpha1 VERSION = 'V1alpha1' - REVISION = '20190228' + REVISION = '20190312' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/tpu_v1alpha1/classes.rb b/generated/google/apis/tpu_v1alpha1/classes.rb index 33bebc23b..7a3ebd702 100644 --- a/generated/google/apis/tpu_v1alpha1/classes.rb +++ b/generated/google/apis/tpu_v1alpha1/classes.rb @@ -420,14 +420,14 @@ module Google attr_accessor :done alias_method :done?, :done - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -615,14 +615,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. 
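The `Status`/`@raise` documentation repeated throughout this patch draws a consistent line: `Google::Apis::ServerError` may be retried, while `Google::Apis::ClientError` should not be retried without modifying the request. An illustrative retry wrapper following that contract, usable around any call on a generated service:

  # Sketch only: retries server-side failures with exponential backoff and
  # re-raises client-side or authorization failures immediately.
  def call_with_retry(attempts: 3)
    tries = 0
    begin
      yield
    rescue Google::Apis::ServerError
      tries += 1
      raise if tries >= attempts
      sleep(2**tries)   # simple exponential backoff
      retry
    rescue Google::Apis::ClientError, Google::Apis::AuthorizationError
      raise             # do not retry without fixing the request or credentials
    end
  end

  # e.g. call_with_retry { service.some_generated_method(args) }  # placeholder call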
If a localized user-facing diff --git a/generated/google/apis/videointelligence_v1beta2.rb b/generated/google/apis/videointelligence_v1beta2.rb index 7d29f4f5e..c826d7c46 100644 --- a/generated/google/apis/videointelligence_v1beta2.rb +++ b/generated/google/apis/videointelligence_v1beta2.rb @@ -27,7 +27,7 @@ module Google # @see https://cloud.google.com/video-intelligence/docs/ module VideointelligenceV1beta2 VERSION = 'V1beta2' - REVISION = '20190220' + REVISION = '20190308' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/videointelligence_v1beta2/classes.rb b/generated/google/apis/videointelligence_v1beta2/classes.rb index 15ec38f9e..1862e61e5 100644 --- a/generated/google/apis/videointelligence_v1beta2/classes.rb +++ b/generated/google/apis/videointelligence_v1beta2/classes.rb @@ -623,14 +623,14 @@ module Google class GoogleCloudVideointelligenceV1VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -1079,6 +1079,16 @@ module Google class GoogleCloudVideointelligenceV1beta2LabelDetectionConfig include Google::Apis::Core::Hashable + # The confidence threshold we perform filtering on the labels from + # frame-level detection. If not set, it is set to 0.4 by default. The valid + # range for this threshold is [0.1, 0.9]. Any value set outside of this + # range will be clipped. + # Note: for best results please follow the default threshold. We will update + # the default threshold everytime when we release a new model. + # Corresponds to the JSON property `frameConfidenceThreshold` + # @return [Float] + attr_accessor :frame_confidence_threshold + # What labels should be detected with LABEL_DETECTION, in addition to # video-level labels or segment-level labels. # If unspecified, defaults to `SHOT_MODE`. @@ -1101,15 +1111,27 @@ module Google attr_accessor :stationary_camera alias_method :stationary_camera?, :stationary_camera + # The confidence threshold we perform filtering on the labels from + # video-level and shot-level detections. If not set, it is set to 0.3 by + # default. The valid range for this threshold is [0.1, 0.9]. Any value set + # outside of this range will be clipped. + # Note: for best results please follow the default threshold. We will update + # the default threshold everytime when we release a new model. 
+ # Corresponds to the JSON property `videoConfidenceThreshold` + # @return [Float] + attr_accessor :video_confidence_threshold + def initialize(**args) update!(**args) end # Update properties of this object def update!(**args) + @frame_confidence_threshold = args[:frame_confidence_threshold] if args.key?(:frame_confidence_threshold) @label_detection_mode = args[:label_detection_mode] if args.key?(:label_detection_mode) @model = args[:model] if args.key?(:model) @stationary_camera = args[:stationary_camera] if args.key?(:stationary_camera) + @video_confidence_threshold = args[:video_confidence_threshold] if args.key?(:video_confidence_threshold) end end @@ -1719,14 +1741,14 @@ module Google class GoogleCloudVideointelligenceV1beta2VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -2572,14 +2594,14 @@ module Google class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -3374,14 +3396,14 @@ module Google class GoogleCloudVideointelligenceV1p2beta1VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). 
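A hedged sketch of using the two new LabelDetectionConfig thresholds added above. The field names, documented defaults (0.4 frame-level, 0.3 video/shot-level), and valid range [0.1, 0.9] come from the additions in this hunk; the `CloudVideoIntelligenceService` class name, the `annotate_video` call, and the `AnnotateVideoRequest`/`VideoContext` wiring are assumptions about the surrounding generated v1beta2 client.

  require 'google/apis/videointelligence_v1beta2'
  require 'googleauth'

  vi = Google::Apis::VideointelligenceV1beta2        # namespace shorthand

  video = vi::CloudVideoIntelligenceService.new      # assumed service class name
  video.authorization = Google::Auth.get_application_default(
    ['https://www.googleapis.com/auth/cloud-platform'])

  label_config = vi::GoogleCloudVideointelligenceV1beta2LabelDetectionConfig.new(
    label_detection_mode:       'SHOT_AND_FRAME_MODE',
    frame_confidence_threshold: 0.4,   # documented default; valid range [0.1, 0.9]
    video_confidence_threshold: 0.3)   # documented default for video/shot labels

  request = vi::GoogleCloudVideointelligenceV1beta2AnnotateVideoRequest.new(
    input_uri:     'gs://my-bucket/my-video.mp4',
    features:      ['LABEL_DETECTION'],
    video_context: vi::GoogleCloudVideointelligenceV1beta2VideoContext.new(
      label_detection_config: label_config))

  operation = video.annotate_video(request)   # long-running operation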
The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -3578,12 +3600,12 @@ module Google # Video annotation progress. Included in the `metadata` # field of the `Operation` returned by the `GetOperation` # call of the `google::longrunning::Operations` service. - class GoogleCloudVideointelligenceV2beta1AnnotateVideoProgress + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoProgress include Google::Apis::Core::Hashable # Progress metadata for all videos specified in `AnnotateVideoRequest`. # Corresponds to the JSON property `annotationProgress` - # @return [Array] + # @return [Array] attr_accessor :annotation_progress def initialize(**args) @@ -3599,12 +3621,12 @@ module Google # Video annotation response. Included in the `response` # field of the `Operation` returned by the `GetOperation` # call of the `google::longrunning::Operations` service. - class GoogleCloudVideointelligenceV2beta1AnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoResponse include Google::Apis::Core::Hashable # Annotation results for all videos specified in `AnnotateVideoRequest`. # Corresponds to the JSON property `annotationResults` - # @return [Array] + # @return [Array] attr_accessor :annotation_results def initialize(**args) @@ -3618,7 +3640,7 @@ module Google end # Detected entity from video analysis. - class GoogleCloudVideointelligenceV2beta1Entity + class GoogleCloudVideointelligenceV1p3beta1Entity include Google::Apis::Core::Hashable # Textual description, e.g. `Fixed-gear bicycle`. @@ -3653,12 +3675,12 @@ module Google # Explicit content annotation (based on per-frame visual signals only). # If no explicit content has been detected in a frame, no annotations are # present for that frame. - class GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation include Google::Apis::Core::Hashable # All video frames where explicit content was detected. # Corresponds to the JSON property `frames` - # @return [Array] + # @return [Array] attr_accessor :frames def initialize(**args) @@ -3672,7 +3694,7 @@ module Google end # Video frame level annotation results for explicit content. - class GoogleCloudVideointelligenceV2beta1ExplicitContentFrame + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame include Google::Apis::Core::Hashable # Likelihood of the pornography content.. @@ -3698,7 +3720,7 @@ module Google end # Label annotation. - class GoogleCloudVideointelligenceV2beta1LabelAnnotation + class GoogleCloudVideointelligenceV1p3beta1LabelAnnotation include Google::Apis::Core::Hashable # Common categories for the detected entity. 
@@ -3706,22 +3728,22 @@ module Google # cases there might be more than one categories e.g. `Terrier` could also be # a `pet`. # Corresponds to the JSON property `categoryEntities` - # @return [Array] + # @return [Array] attr_accessor :category_entities # Detected entity from video analysis. # Corresponds to the JSON property `entity` - # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1Entity] + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1Entity] attr_accessor :entity # All video frames where a label was detected. # Corresponds to the JSON property `frames` - # @return [Array] + # @return [Array] attr_accessor :frames # All video segments where a label was detected. # Corresponds to the JSON property `segments` - # @return [Array] + # @return [Array] attr_accessor :segments def initialize(**args) @@ -3738,7 +3760,7 @@ module Google end # Video frame level annotation results for label detection. - class GoogleCloudVideointelligenceV2beta1LabelFrame + class GoogleCloudVideointelligenceV1p3beta1LabelFrame include Google::Apis::Core::Hashable # Confidence that the label is accurate. Range: [0, 1]. @@ -3764,7 +3786,7 @@ module Google end # Video segment level annotation results for label detection. - class GoogleCloudVideointelligenceV2beta1LabelSegment + class GoogleCloudVideointelligenceV1p3beta1LabelSegment include Google::Apis::Core::Hashable # Confidence that the label is accurate. Range: [0, 1]. @@ -3774,7 +3796,7 @@ module Google # Video segment. # Corresponds to the JSON property `segment` - # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoSegment] + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoSegment] attr_accessor :segment def initialize(**args) @@ -3791,7 +3813,7 @@ module Google # Normalized bounding box. # The normalized vertex coordinates are relative to the original image. # Range: [0, 1]. - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox include Google::Apis::Core::Hashable # Bottom Y coordinate. @@ -3842,12 +3864,12 @@ module Google # and the vertex order will still be (0, 1, 2, 3). Note that values can be less # than 0, or greater than 1 due to trignometric calculations for location of # the box. - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly include Google::Apis::Core::Hashable # Normalized vertices of the bounding polygon. # Corresponds to the JSON property `vertices` - # @return [Array] + # @return [Array] attr_accessor :vertices def initialize(**args) @@ -3863,7 +3885,7 @@ module Google # A vertex represents a 2D point in the image. # NOTE: the normalized vertex coordinates are relative to the original image # and range from 0 to 1. - class GoogleCloudVideointelligenceV2beta1NormalizedVertex + class GoogleCloudVideointelligenceV1p3beta1NormalizedVertex include Google::Apis::Core::Hashable # X coordinate. @@ -3888,7 +3910,7 @@ module Google end # Annotations corresponding to one tracked object. - class GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation include Google::Apis::Core::Hashable # Object category's labeling confidence of this track. @@ -3898,7 +3920,7 @@ module Google # Detected entity from video analysis. 
# Corresponds to the JSON property `entity` - # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1Entity] + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1Entity] attr_accessor :entity # Information corresponding to all frames where this object track appears. @@ -3906,12 +3928,12 @@ module Google # messages in frames. # Streaming mode: it can only be one ObjectTrackingFrame message in frames. # Corresponds to the JSON property `frames` - # @return [Array] + # @return [Array] attr_accessor :frames # Video segment. # Corresponds to the JSON property `segment` - # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoSegment] + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoSegment] attr_accessor :segment # Streaming mode ONLY. @@ -3940,14 +3962,14 @@ module Google # Video frame level annotations for object detection and tracking. This field # stores per frame location, time offset, and confidence. - class GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame include Google::Apis::Core::Hashable # Normalized bounding box. # The normalized vertex coordinates are relative to the original image. # Range: [0, 1]. # Corresponds to the JSON property `normalizedBoundingBox` - # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox] + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox] attr_accessor :normalized_bounding_box # The timestamp of the frame in microseconds. @@ -3967,7 +3989,7 @@ module Google end # Alternative hypotheses (a.k.a. n-best list). - class GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative + class GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative include Google::Apis::Core::Hashable # The confidence estimate between 0.0 and 1.0. A higher number @@ -3987,7 +4009,7 @@ module Google # A list of word-specific information for each recognized word. # Corresponds to the JSON property `words` - # @return [Array] + # @return [Array] attr_accessor :words def initialize(**args) @@ -4003,7 +4025,7 @@ module Google end # A speech recognition result corresponding to a portion of the audio. - class GoogleCloudVideointelligenceV2beta1SpeechTranscription + class GoogleCloudVideointelligenceV1p3beta1SpeechTranscription include Google::Apis::Core::Hashable # May contain one or more recognition hypotheses (up to the maximum specified @@ -4011,7 +4033,7 @@ module Google # accuracy, with the top (first) alternative being the most probable, as # ranked by the recognizer. # Corresponds to the JSON property `alternatives` - # @return [Array] + # @return [Array] attr_accessor :alternatives # Output only. The @@ -4036,13 +4058,13 @@ module Google # `StreamingAnnotateVideoResponse` is the only message returned to the client # by `StreamingAnnotateVideo`. A series of zero or more # `StreamingAnnotateVideoResponse` messages are streamed back to the client. - class GoogleCloudVideointelligenceV2beta1StreamingAnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse include Google::Apis::Core::Hashable # Streaming annotation results corresponding to a portion of the video # that is currently being processed. 
# Corresponds to the JSON property `annotationResults` - # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults] + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults] attr_accessor :annotation_results # GCS URI that stores annotation results of one streaming session. @@ -4053,14 +4075,14 @@ module Google # @return [String] attr_accessor :annotation_results_uri - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -4110,29 +4132,29 @@ module Google # Streaming annotation results corresponding to a portion of the video # that is currently being processed. - class GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults include Google::Apis::Core::Hashable # Explicit content annotation (based on per-frame visual signals only). # If no explicit content has been detected in a frame, no annotations are # present for that frame. # Corresponds to the JSON property `explicitAnnotation` - # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation] + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation] attr_accessor :explicit_annotation # Label annotation results. # Corresponds to the JSON property `labelAnnotations` - # @return [Array] + # @return [Array] attr_accessor :label_annotations # Object tracking results. # Corresponds to the JSON property `objectAnnotations` - # @return [Array] + # @return [Array] attr_accessor :object_annotations # Shot annotation results. Each shot is represented as a video segment. # Corresponds to the JSON property `shotAnnotations` - # @return [Array] + # @return [Array] attr_accessor :shot_annotations def initialize(**args) @@ -4151,12 +4173,12 @@ module Google # Annotations related to one detected OCR text snippet. This will contain the # corresponding text, confidence value, and frame level information for each # detection. - class GoogleCloudVideointelligenceV2beta1TextAnnotation + class GoogleCloudVideointelligenceV1p3beta1TextAnnotation include Google::Apis::Core::Hashable # All video segments where OCR detected text appears. # Corresponds to the JSON property `segments` - # @return [Array] + # @return [Array] attr_accessor :segments # The detected text. 
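A sketch of walking the renamed v1p3beta1 streaming result messages defined above. The top-level attribute names (`label_annotations`, `object_annotations`, `shot_annotations`, `entity`, `frames`, `confidence`, `normalized_bounding_box`) come from the class definitions in this hunk; the bounding-box corner names and the segment offset names are assumptions based on the same message shapes.

  # `results` is assumed to be an already-parsed
  # GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults,
  # e.g. taken from a StreamingAnnotateVideoResponse#annotation_results.
  results.label_annotations.to_a.each do |label|
    puts "label: #{label.entity.description} (#{label.frames.to_a.size} frames)"
  end

  results.object_annotations.to_a.each do |object|
    box = object.frames.to_a.first&.normalized_bounding_box
    next unless box
    # left/top/right/bottom attribute names are assumptions.
    puts format('object: %s conf=%.2f box=[%.2f, %.2f, %.2f, %.2f]',
                object.entity.description, object.confidence.to_f,
                box.left.to_f, box.top.to_f, box.right.to_f, box.bottom.to_f)
  end

  results.shot_annotations.to_a.each do |shot|
    # start_time_offset / end_time_offset attribute names are assumptions.
    puts "shot: #{shot.start_time_offset} -> #{shot.end_time_offset}"
  end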
@@ -4178,7 +4200,7 @@ module Google # Video frame level annotation results for text annotation (OCR). # Contains information regarding timestamp and bounding box locations for the # frames containing detected OCR text snippets. - class GoogleCloudVideointelligenceV2beta1TextFrame + class GoogleCloudVideointelligenceV1p3beta1TextFrame include Google::Apis::Core::Hashable # Normalized bounding polygon for text (that might not be aligned with axis). @@ -4197,7 +4219,7 @@ module Google # than 0, or greater than 1 due to trignometric calculations for location of # the box. # Corresponds to the JSON property `rotatedBoundingBox` - # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly] + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly] attr_accessor :rotated_bounding_box # Timestamp of this frame. @@ -4217,7 +4239,7 @@ module Google end # Video segment level annotation results for text detection. - class GoogleCloudVideointelligenceV2beta1TextSegment + class GoogleCloudVideointelligenceV1p3beta1TextSegment include Google::Apis::Core::Hashable # Confidence for the track of detected text. It is calculated as the highest @@ -4228,12 +4250,12 @@ module Google # Information related to the frames where OCR detected text appears. # Corresponds to the JSON property `frames` - # @return [Array] + # @return [Array] attr_accessor :frames # Video segment. # Corresponds to the JSON property `segment` - # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoSegment] + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoSegment] attr_accessor :segment def initialize(**args) @@ -4249,7 +4271,7 @@ module Google end # Annotation progress for a single video. - class GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress include Google::Apis::Core::Hashable # Video file location in @@ -4288,17 +4310,17 @@ module Google end # Annotation results for a single video. - class GoogleCloudVideointelligenceV2beta1VideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -4338,13 +4360,13 @@ module Google # If no explicit content has been detected in a frame, no annotations are # present for that frame. 
# Corresponds to the JSON property `explicitAnnotation` - # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation] + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation] attr_accessor :explicit_annotation # Label annotations on frame level. # There is exactly one element for each unique label. # Corresponds to the JSON property `frameLabelAnnotations` - # @return [Array] + # @return [Array] attr_accessor :frame_label_annotations # Video file location in @@ -4355,36 +4377,36 @@ module Google # Annotations for list of objects detected and tracked in video. # Corresponds to the JSON property `objectAnnotations` - # @return [Array] + # @return [Array] attr_accessor :object_annotations # Label annotations on video level or user specified segment level. # There is exactly one element for each unique label. # Corresponds to the JSON property `segmentLabelAnnotations` - # @return [Array] + # @return [Array] attr_accessor :segment_label_annotations # Shot annotations. Each shot is represented as a video segment. # Corresponds to the JSON property `shotAnnotations` - # @return [Array] + # @return [Array] attr_accessor :shot_annotations # Label annotations on shot level. # There is exactly one element for each unique label. # Corresponds to the JSON property `shotLabelAnnotations` - # @return [Array] + # @return [Array] attr_accessor :shot_label_annotations # Speech transcription. # Corresponds to the JSON property `speechTranscriptions` - # @return [Array] + # @return [Array] attr_accessor :speech_transcriptions # OCR text detection and tracking. # Annotations for list of detected text snippets. Each will have list of # frame information associated with it. # Corresponds to the JSON property `textAnnotations` - # @return [Array] + # @return [Array] attr_accessor :text_annotations def initialize(**args) @@ -4407,7 +4429,7 @@ module Google end # Video segment. - class GoogleCloudVideointelligenceV2beta1VideoSegment + class GoogleCloudVideointelligenceV1p3beta1VideoSegment include Google::Apis::Core::Hashable # Time-offset, relative to the beginning of the video, @@ -4436,7 +4458,7 @@ module Google # Word-specific information for recognized words. Word information is only # included in the response when certain request parameters are set, such # as `enable_word_time_offsets`. - class GoogleCloudVideointelligenceV2beta1WordInfo + class GoogleCloudVideointelligenceV1p3beta1WordInfo include Google::Apis::Core::Hashable # Output only. The confidence estimate between 0.0 and 1.0. A higher number @@ -4505,14 +4527,14 @@ module Google attr_accessor :done alias_method :done?, :done - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. 
The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -4589,14 +4611,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing diff --git a/generated/google/apis/videointelligence_v1beta2/representations.rb b/generated/google/apis/videointelligence_v1beta2/representations.rb index 50f288997..a099d069e 100644 --- a/generated/google/apis/videointelligence_v1beta2/representations.rb +++ b/generated/google/apis/videointelligence_v1beta2/representations.rb @@ -598,145 +598,145 @@ module Google include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1AnnotateVideoProgress + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoProgress class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1AnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoResponse class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1Entity + class GoogleCloudVideointelligenceV1p3beta1Entity class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1ExplicitContentFrame + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1LabelAnnotation + class GoogleCloudVideointelligenceV1p3beta1LabelAnnotation class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1LabelFrame + class GoogleCloudVideointelligenceV1p3beta1LabelFrame class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class 
GoogleCloudVideointelligenceV2beta1LabelSegment + class GoogleCloudVideointelligenceV1p3beta1LabelSegment class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1NormalizedVertex + class GoogleCloudVideointelligenceV1p3beta1NormalizedVertex class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative + class GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1SpeechTranscription + class GoogleCloudVideointelligenceV1p3beta1SpeechTranscription class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1StreamingAnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1TextAnnotation + class GoogleCloudVideointelligenceV1p3beta1TextAnnotation class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1TextFrame + class GoogleCloudVideointelligenceV1p3beta1TextFrame class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1TextSegment + class GoogleCloudVideointelligenceV1p3beta1TextSegment class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1VideoAnnotationResults + class 
GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1VideoSegment + class GoogleCloudVideointelligenceV1p3beta1VideoSegment class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1WordInfo + class GoogleCloudVideointelligenceV1p3beta1WordInfo class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport @@ -1055,9 +1055,11 @@ module Google class GoogleCloudVideointelligenceV1beta2LabelDetectionConfig # @private class Representation < Google::Apis::Core::JsonRepresentation + property :frame_confidence_threshold, as: 'frameConfidenceThreshold' property :label_detection_mode, as: 'labelDetectionMode' property :model, as: 'model' property :stationary_camera, as: 'stationaryCamera' + property :video_confidence_threshold, as: 'videoConfidenceThreshold' end end @@ -1730,23 +1732,23 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1AnnotateVideoProgress + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoProgress # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress::Representation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress::Representation end end - class GoogleCloudVideointelligenceV2beta1AnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoResponse # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoAnnotationResults::Representation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults::Representation end end - class GoogleCloudVideointelligenceV2beta1Entity + class GoogleCloudVideointelligenceV1p3beta1Entity # @private class Representation < Google::Apis::Core::JsonRepresentation property :description, as: 'description' @@ -1755,15 +1757,15 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1ExplicitContentFrame::Representation + collection :frames, as: 'frames', class: 
Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame::Representation end end - class GoogleCloudVideointelligenceV2beta1ExplicitContentFrame + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame # @private class Representation < Google::Apis::Core::JsonRepresentation property :pornography_likelihood, as: 'pornographyLikelihood' @@ -1771,21 +1773,21 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1LabelAnnotation + class GoogleCloudVideointelligenceV1p3beta1LabelAnnotation # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1Entity, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1Entity::Representation + collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1Entity, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1Entity::Representation - property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1Entity, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1Entity::Representation + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1Entity, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1Entity::Representation - collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1LabelFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1LabelFrame::Representation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1LabelFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1LabelFrame::Representation - collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1LabelSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1LabelSegment::Representation + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1LabelSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1LabelSegment::Representation end end - class GoogleCloudVideointelligenceV2beta1LabelFrame + class GoogleCloudVideointelligenceV1p3beta1LabelFrame # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' @@ -1793,16 +1795,16 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1LabelSegment + class GoogleCloudVideointelligenceV1p3beta1LabelSegment # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' - property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + property :segment, as: 'segment', class: 
Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation end end - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox # @private class Representation < Google::Apis::Core::JsonRepresentation property :bottom, as: 'bottom' @@ -1812,15 +1814,15 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :vertices, as: 'vertices', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1NormalizedVertex, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1NormalizedVertex::Representation + collection :vertices, as: 'vertices', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1NormalizedVertex, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1NormalizedVertex::Representation end end - class GoogleCloudVideointelligenceV2beta1NormalizedVertex + class GoogleCloudVideointelligenceV1p3beta1NormalizedVertex # @private class Representation < Google::Apis::Core::JsonRepresentation property :x, as: 'x' @@ -1828,52 +1830,52 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' - property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1Entity, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1Entity::Representation + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1Entity, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1Entity::Representation - collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame::Representation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame::Representation - property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation property :track_id, :numeric_string => true, as: 'trackId' end end - class GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame # @private class Representation < Google::Apis::Core::JsonRepresentation - property :normalized_bounding_box, as: 
'normalizedBoundingBox', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox::Representation + property :normalized_bounding_box, as: 'normalizedBoundingBox', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox::Representation property :time_offset, as: 'timeOffset' end end - class GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative + class GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' property :transcript, as: 'transcript' - collection :words, as: 'words', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1WordInfo, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1WordInfo::Representation + collection :words, as: 'words', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1WordInfo, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1WordInfo::Representation end end - class GoogleCloudVideointelligenceV2beta1SpeechTranscription + class GoogleCloudVideointelligenceV1p3beta1SpeechTranscription # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :alternatives, as: 'alternatives', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative::Representation + collection :alternatives, as: 'alternatives', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative::Representation property :language_code, as: 'languageCode' end end - class GoogleCloudVideointelligenceV2beta1StreamingAnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse # @private class Representation < Google::Apis::Core::JsonRepresentation - property :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults::Representation + property :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults::Representation property :annotation_results_uri, as: 'annotationResultsUri' property :error, as: 'error', class: Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus::Representation @@ -1881,50 +1883,50 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults # @private class Representation < 
Google::Apis::Core::JsonRepresentation - property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation::Representation + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation::Representation - collection :label_annotations, as: 'labelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1LabelAnnotation::Representation + collection :label_annotations, as: 'labelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation - collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation::Representation + collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation::Representation - collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation end end - class GoogleCloudVideointelligenceV2beta1TextAnnotation + class GoogleCloudVideointelligenceV1p3beta1TextAnnotation # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1TextSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1TextSegment::Representation + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1TextSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1TextSegment::Representation property :text, as: 'text' end end - class GoogleCloudVideointelligenceV2beta1TextFrame + class GoogleCloudVideointelligenceV1p3beta1TextFrame # @private class Representation < Google::Apis::Core::JsonRepresentation - property :rotated_bounding_box, as: 'rotatedBoundingBox', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly::Representation + property 
:rotated_bounding_box, as: 'rotatedBoundingBox', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly::Representation property :time_offset, as: 'timeOffset' end end - class GoogleCloudVideointelligenceV2beta1TextSegment + class GoogleCloudVideointelligenceV1p3beta1TextSegment # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' - collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1TextFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1TextFrame::Representation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1TextFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1TextFrame::Representation - property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation end end - class GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress # @private class Representation < Google::Apis::Core::JsonRepresentation property :input_uri, as: 'inputUri' @@ -1934,32 +1936,32 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1VideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults # @private class Representation < Google::Apis::Core::JsonRepresentation property :error, as: 'error', class: Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus::Representation - property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation::Representation + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation::Representation - collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1LabelAnnotation::Representation + collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation property :input_uri, as: 'inputUri' - collection :object_annotations, as: 'objectAnnotations', class: 
Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation::Representation + collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation::Representation - collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1LabelAnnotation::Representation + collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation - collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation - collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1LabelAnnotation::Representation + collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation - collection :speech_transcriptions, as: 'speechTranscriptions', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1SpeechTranscription, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1SpeechTranscription::Representation + collection :speech_transcriptions, as: 'speechTranscriptions', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1SpeechTranscription, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1SpeechTranscription::Representation - collection :text_annotations, as: 'textAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1TextAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV2beta1TextAnnotation::Representation + collection :text_annotations, as: 'textAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1TextAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p3beta1TextAnnotation::Representation end end - class GoogleCloudVideointelligenceV2beta1VideoSegment + class GoogleCloudVideointelligenceV1p3beta1VideoSegment # @private class Representation < Google::Apis::Core::JsonRepresentation property 
:end_time_offset, as: 'endTimeOffset' @@ -1967,7 +1969,7 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1WordInfo + class GoogleCloudVideointelligenceV1p3beta1WordInfo # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' diff --git a/generated/google/apis/videointelligence_v1p1beta1.rb b/generated/google/apis/videointelligence_v1p1beta1.rb index 1dd4a475e..0a26af8a3 100644 --- a/generated/google/apis/videointelligence_v1p1beta1.rb +++ b/generated/google/apis/videointelligence_v1p1beta1.rb @@ -27,7 +27,7 @@ module Google # @see https://cloud.google.com/video-intelligence/docs/ module VideointelligenceV1p1beta1 VERSION = 'V1p1beta1' - REVISION = '20190220' + REVISION = '20190308' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/videointelligence_v1p1beta1/classes.rb b/generated/google/apis/videointelligence_v1p1beta1/classes.rb index 59dd16592..ad6761818 100644 --- a/generated/google/apis/videointelligence_v1p1beta1/classes.rb +++ b/generated/google/apis/videointelligence_v1p1beta1/classes.rb @@ -623,14 +623,14 @@ module Google class GoogleCloudVideointelligenceV1VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -1425,14 +1425,14 @@ module Google class GoogleCloudVideointelligenceV1beta2VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. 
The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -1881,6 +1881,16 @@ module Google class GoogleCloudVideointelligenceV1p1beta1LabelDetectionConfig include Google::Apis::Core::Hashable + # The confidence threshold we perform filtering on the labels from + # frame-level detection. If not set, it is set to 0.4 by default. The valid + # range for this threshold is [0.1, 0.9]. Any value set outside of this + # range will be clipped. + # Note: for best results please follow the default threshold. We will update + # the default threshold everytime when we release a new model. + # Corresponds to the JSON property `frameConfidenceThreshold` + # @return [Float] + attr_accessor :frame_confidence_threshold + # What labels should be detected with LABEL_DETECTION, in addition to # video-level labels or segment-level labels. # If unspecified, defaults to `SHOT_MODE`. @@ -1903,15 +1913,27 @@ module Google attr_accessor :stationary_camera alias_method :stationary_camera?, :stationary_camera + # The confidence threshold we perform filtering on the labels from + # video-level and shot-level detections. If not set, it is set to 0.3 by + # default. The valid range for this threshold is [0.1, 0.9]. Any value set + # outside of this range will be clipped. + # Note: for best results please follow the default threshold. We will update + # the default threshold everytime when we release a new model. + # Corresponds to the JSON property `videoConfidenceThreshold` + # @return [Float] + attr_accessor :video_confidence_threshold + def initialize(**args) update!(**args) end # Update properties of this object def update!(**args) + @frame_confidence_threshold = args[:frame_confidence_threshold] if args.key?(:frame_confidence_threshold) @label_detection_mode = args[:label_detection_mode] if args.key?(:label_detection_mode) @model = args[:model] if args.key?(:model) @stationary_camera = args[:stationary_camera] if args.key?(:stationary_camera) + @video_confidence_threshold = args[:video_confidence_threshold] if args.key?(:video_confidence_threshold) end end @@ -2521,14 +2543,14 @@ module Google class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. 
If a localized user-facing @@ -3374,14 +3396,14 @@ module Google class GoogleCloudVideointelligenceV1p2beta1VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -3578,12 +3600,12 @@ module Google # Video annotation progress. Included in the `metadata` # field of the `Operation` returned by the `GetOperation` # call of the `google::longrunning::Operations` service. - class GoogleCloudVideointelligenceV2beta1AnnotateVideoProgress + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoProgress include Google::Apis::Core::Hashable # Progress metadata for all videos specified in `AnnotateVideoRequest`. # Corresponds to the JSON property `annotationProgress` - # @return [Array] + # @return [Array] attr_accessor :annotation_progress def initialize(**args) @@ -3599,12 +3621,12 @@ module Google # Video annotation response. Included in the `response` # field of the `Operation` returned by the `GetOperation` # call of the `google::longrunning::Operations` service. - class GoogleCloudVideointelligenceV2beta1AnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoResponse include Google::Apis::Core::Hashable # Annotation results for all videos specified in `AnnotateVideoRequest`. # Corresponds to the JSON property `annotationResults` - # @return [Array] + # @return [Array] attr_accessor :annotation_results def initialize(**args) @@ -3618,7 +3640,7 @@ module Google end # Detected entity from video analysis. - class GoogleCloudVideointelligenceV2beta1Entity + class GoogleCloudVideointelligenceV1p3beta1Entity include Google::Apis::Core::Hashable # Textual description, e.g. `Fixed-gear bicycle`. @@ -3653,12 +3675,12 @@ module Google # Explicit content annotation (based on per-frame visual signals only). # If no explicit content has been detected in a frame, no annotations are # present for that frame. - class GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation include Google::Apis::Core::Hashable # All video frames where explicit content was detected. # Corresponds to the JSON property `frames` - # @return [Array] + # @return [Array] attr_accessor :frames def initialize(**args) @@ -3672,7 +3694,7 @@ module Google end # Video frame level annotation results for explicit content. 
- class GoogleCloudVideointelligenceV2beta1ExplicitContentFrame + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame include Google::Apis::Core::Hashable # Likelihood of the pornography content.. @@ -3698,7 +3720,7 @@ module Google end # Label annotation. - class GoogleCloudVideointelligenceV2beta1LabelAnnotation + class GoogleCloudVideointelligenceV1p3beta1LabelAnnotation include Google::Apis::Core::Hashable # Common categories for the detected entity. @@ -3706,22 +3728,22 @@ module Google # cases there might be more than one categories e.g. `Terrier` could also be # a `pet`. # Corresponds to the JSON property `categoryEntities` - # @return [Array] + # @return [Array] attr_accessor :category_entities # Detected entity from video analysis. # Corresponds to the JSON property `entity` - # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1Entity] + # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1Entity] attr_accessor :entity # All video frames where a label was detected. # Corresponds to the JSON property `frames` - # @return [Array] + # @return [Array] attr_accessor :frames # All video segments where a label was detected. # Corresponds to the JSON property `segments` - # @return [Array] + # @return [Array] attr_accessor :segments def initialize(**args) @@ -3738,7 +3760,7 @@ module Google end # Video frame level annotation results for label detection. - class GoogleCloudVideointelligenceV2beta1LabelFrame + class GoogleCloudVideointelligenceV1p3beta1LabelFrame include Google::Apis::Core::Hashable # Confidence that the label is accurate. Range: [0, 1]. @@ -3764,7 +3786,7 @@ module Google end # Video segment level annotation results for label detection. - class GoogleCloudVideointelligenceV2beta1LabelSegment + class GoogleCloudVideointelligenceV1p3beta1LabelSegment include Google::Apis::Core::Hashable # Confidence that the label is accurate. Range: [0, 1]. @@ -3774,7 +3796,7 @@ module Google # Video segment. # Corresponds to the JSON property `segment` - # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoSegment] + # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment] attr_accessor :segment def initialize(**args) @@ -3791,7 +3813,7 @@ module Google # Normalized bounding box. # The normalized vertex coordinates are relative to the original image. # Range: [0, 1]. - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox include Google::Apis::Core::Hashable # Bottom Y coordinate. @@ -3842,12 +3864,12 @@ module Google # and the vertex order will still be (0, 1, 2, 3). Note that values can be less # than 0, or greater than 1 due to trignometric calculations for location of # the box. - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly include Google::Apis::Core::Hashable # Normalized vertices of the bounding polygon. # Corresponds to the JSON property `vertices` - # @return [Array] + # @return [Array] attr_accessor :vertices def initialize(**args) @@ -3863,7 +3885,7 @@ module Google # A vertex represents a 2D point in the image. # NOTE: the normalized vertex coordinates are relative to the original image # and range from 0 to 1. 
- class GoogleCloudVideointelligenceV2beta1NormalizedVertex + class GoogleCloudVideointelligenceV1p3beta1NormalizedVertex include Google::Apis::Core::Hashable # X coordinate. @@ -3888,7 +3910,7 @@ module Google end # Annotations corresponding to one tracked object. - class GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation include Google::Apis::Core::Hashable # Object category's labeling confidence of this track. @@ -3898,7 +3920,7 @@ module Google # Detected entity from video analysis. # Corresponds to the JSON property `entity` - # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1Entity] + # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1Entity] attr_accessor :entity # Information corresponding to all frames where this object track appears. @@ -3906,12 +3928,12 @@ module Google # messages in frames. # Streaming mode: it can only be one ObjectTrackingFrame message in frames. # Corresponds to the JSON property `frames` - # @return [Array] + # @return [Array] attr_accessor :frames # Video segment. # Corresponds to the JSON property `segment` - # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoSegment] + # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment] attr_accessor :segment # Streaming mode ONLY. @@ -3940,14 +3962,14 @@ module Google # Video frame level annotations for object detection and tracking. This field # stores per frame location, time offset, and confidence. - class GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame include Google::Apis::Core::Hashable # Normalized bounding box. # The normalized vertex coordinates are relative to the original image. # Range: [0, 1]. # Corresponds to the JSON property `normalizedBoundingBox` - # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox] + # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox] attr_accessor :normalized_bounding_box # The timestamp of the frame in microseconds. @@ -3967,7 +3989,7 @@ module Google end # Alternative hypotheses (a.k.a. n-best list). - class GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative + class GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative include Google::Apis::Core::Hashable # The confidence estimate between 0.0 and 1.0. A higher number @@ -3987,7 +4009,7 @@ module Google # A list of word-specific information for each recognized word. # Corresponds to the JSON property `words` - # @return [Array] + # @return [Array] attr_accessor :words def initialize(**args) @@ -4003,7 +4025,7 @@ module Google end # A speech recognition result corresponding to a portion of the audio. - class GoogleCloudVideointelligenceV2beta1SpeechTranscription + class GoogleCloudVideointelligenceV1p3beta1SpeechTranscription include Google::Apis::Core::Hashable # May contain one or more recognition hypotheses (up to the maximum specified @@ -4011,7 +4033,7 @@ module Google # accuracy, with the top (first) alternative being the most probable, as # ranked by the recognizer. # Corresponds to the JSON property `alternatives` - # @return [Array] + # @return [Array] attr_accessor :alternatives # Output only. 
The @@ -4036,13 +4058,13 @@ module Google # `StreamingAnnotateVideoResponse` is the only message returned to the client # by `StreamingAnnotateVideo`. A series of zero or more # `StreamingAnnotateVideoResponse` messages are streamed back to the client. - class GoogleCloudVideointelligenceV2beta1StreamingAnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse include Google::Apis::Core::Hashable # Streaming annotation results corresponding to a portion of the video # that is currently being processed. # Corresponds to the JSON property `annotationResults` - # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults] + # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults] attr_accessor :annotation_results # GCS URI that stores annotation results of one streaming session. @@ -4053,14 +4075,14 @@ module Google # @return [String] attr_accessor :annotation_results_uri - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -4110,29 +4132,29 @@ module Google # Streaming annotation results corresponding to a portion of the video # that is currently being processed. - class GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults include Google::Apis::Core::Hashable # Explicit content annotation (based on per-frame visual signals only). # If no explicit content has been detected in a frame, no annotations are # present for that frame. # Corresponds to the JSON property `explicitAnnotation` - # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation] + # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation] attr_accessor :explicit_annotation # Label annotation results. # Corresponds to the JSON property `labelAnnotations` - # @return [Array] + # @return [Array] attr_accessor :label_annotations # Object tracking results. # Corresponds to the JSON property `objectAnnotations` - # @return [Array] + # @return [Array] attr_accessor :object_annotations # Shot annotation results. Each shot is represented as a video segment. 
# Corresponds to the JSON property `shotAnnotations` - # @return [Array] + # @return [Array] attr_accessor :shot_annotations def initialize(**args) @@ -4151,12 +4173,12 @@ module Google # Annotations related to one detected OCR text snippet. This will contain the # corresponding text, confidence value, and frame level information for each # detection. - class GoogleCloudVideointelligenceV2beta1TextAnnotation + class GoogleCloudVideointelligenceV1p3beta1TextAnnotation include Google::Apis::Core::Hashable # All video segments where OCR detected text appears. # Corresponds to the JSON property `segments` - # @return [Array] + # @return [Array] attr_accessor :segments # The detected text. @@ -4178,7 +4200,7 @@ module Google # Video frame level annotation results for text annotation (OCR). # Contains information regarding timestamp and bounding box locations for the # frames containing detected OCR text snippets. - class GoogleCloudVideointelligenceV2beta1TextFrame + class GoogleCloudVideointelligenceV1p3beta1TextFrame include Google::Apis::Core::Hashable # Normalized bounding polygon for text (that might not be aligned with axis). @@ -4197,7 +4219,7 @@ module Google # than 0, or greater than 1 due to trignometric calculations for location of # the box. # Corresponds to the JSON property `rotatedBoundingBox` - # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly] + # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly] attr_accessor :rotated_bounding_box # Timestamp of this frame. @@ -4217,7 +4239,7 @@ module Google end # Video segment level annotation results for text detection. - class GoogleCloudVideointelligenceV2beta1TextSegment + class GoogleCloudVideointelligenceV1p3beta1TextSegment include Google::Apis::Core::Hashable # Confidence for the track of detected text. It is calculated as the highest @@ -4228,12 +4250,12 @@ module Google # Information related to the frames where OCR detected text appears. # Corresponds to the JSON property `frames` - # @return [Array] + # @return [Array] attr_accessor :frames # Video segment. # Corresponds to the JSON property `segment` - # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoSegment] + # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment] attr_accessor :segment def initialize(**args) @@ -4249,7 +4271,7 @@ module Google end # Annotation progress for a single video. - class GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress include Google::Apis::Core::Hashable # Video file location in @@ -4288,17 +4310,17 @@ module Google end # Annotation results for a single video. - class GoogleCloudVideointelligenceV2beta1VideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). 
The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -4338,13 +4360,13 @@ module Google # If no explicit content has been detected in a frame, no annotations are # present for that frame. # Corresponds to the JSON property `explicitAnnotation` - # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation] + # @return [Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation] attr_accessor :explicit_annotation # Label annotations on frame level. # There is exactly one element for each unique label. # Corresponds to the JSON property `frameLabelAnnotations` - # @return [Array] + # @return [Array] attr_accessor :frame_label_annotations # Video file location in @@ -4355,36 +4377,36 @@ module Google # Annotations for list of objects detected and tracked in video. # Corresponds to the JSON property `objectAnnotations` - # @return [Array] + # @return [Array] attr_accessor :object_annotations # Label annotations on video level or user specified segment level. # There is exactly one element for each unique label. # Corresponds to the JSON property `segmentLabelAnnotations` - # @return [Array] + # @return [Array] attr_accessor :segment_label_annotations # Shot annotations. Each shot is represented as a video segment. # Corresponds to the JSON property `shotAnnotations` - # @return [Array] + # @return [Array] attr_accessor :shot_annotations # Label annotations on shot level. # There is exactly one element for each unique label. # Corresponds to the JSON property `shotLabelAnnotations` - # @return [Array] + # @return [Array] attr_accessor :shot_label_annotations # Speech transcription. # Corresponds to the JSON property `speechTranscriptions` - # @return [Array] + # @return [Array] attr_accessor :speech_transcriptions # OCR text detection and tracking. # Annotations for list of detected text snippets. Each will have list of # frame information associated with it. # Corresponds to the JSON property `textAnnotations` - # @return [Array] + # @return [Array] attr_accessor :text_annotations def initialize(**args) @@ -4407,7 +4429,7 @@ module Google end # Video segment. - class GoogleCloudVideointelligenceV2beta1VideoSegment + class GoogleCloudVideointelligenceV1p3beta1VideoSegment include Google::Apis::Core::Hashable # Time-offset, relative to the beginning of the video, @@ -4436,7 +4458,7 @@ module Google # Word-specific information for recognized words. Word information is only # included in the response when certain request parameters are set, such # as `enable_word_time_offsets`. - class GoogleCloudVideointelligenceV2beta1WordInfo + class GoogleCloudVideointelligenceV1p3beta1WordInfo include Google::Apis::Core::Hashable # Output only. The confidence estimate between 0.0 and 1.0. 
A higher number @@ -4505,14 +4527,14 @@ module Google attr_accessor :done alias_method :done?, :done - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -4589,14 +4611,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. 
If a localized user-facing diff --git a/generated/google/apis/videointelligence_v1p1beta1/representations.rb b/generated/google/apis/videointelligence_v1p1beta1/representations.rb index a4414cefa..d791b96f3 100644 --- a/generated/google/apis/videointelligence_v1p1beta1/representations.rb +++ b/generated/google/apis/videointelligence_v1p1beta1/representations.rb @@ -598,145 +598,145 @@ module Google include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1AnnotateVideoProgress + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoProgress class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1AnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoResponse class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1Entity + class GoogleCloudVideointelligenceV1p3beta1Entity class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1ExplicitContentFrame + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1LabelAnnotation + class GoogleCloudVideointelligenceV1p3beta1LabelAnnotation class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1LabelFrame + class GoogleCloudVideointelligenceV1p3beta1LabelFrame class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1LabelSegment + class GoogleCloudVideointelligenceV1p3beta1LabelSegment class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1NormalizedVertex + class GoogleCloudVideointelligenceV1p3beta1NormalizedVertex class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame class Representation < Google::Apis::Core::JsonRepresentation; 
end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative + class GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1SpeechTranscription + class GoogleCloudVideointelligenceV1p3beta1SpeechTranscription class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1StreamingAnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1TextAnnotation + class GoogleCloudVideointelligenceV1p3beta1TextAnnotation class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1TextFrame + class GoogleCloudVideointelligenceV1p3beta1TextFrame class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1TextSegment + class GoogleCloudVideointelligenceV1p3beta1TextSegment class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1VideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1VideoSegment + class GoogleCloudVideointelligenceV1p3beta1VideoSegment class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1WordInfo + class GoogleCloudVideointelligenceV1p3beta1WordInfo class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport @@ -1278,9 +1278,11 @@ module Google class GoogleCloudVideointelligenceV1p1beta1LabelDetectionConfig # @private class Representation < Google::Apis::Core::JsonRepresentation + property :frame_confidence_threshold, as: 'frameConfidenceThreshold' property :label_detection_mode, as: 'labelDetectionMode' property :model, as: 'model' property :stationary_camera, as: 'stationaryCamera' + property :video_confidence_threshold, as: 'videoConfidenceThreshold' end end @@ -1730,23 +1732,23 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1AnnotateVideoProgress + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoProgress # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :annotation_progress, as: 'annotationProgress', class: 
Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress::Representation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress::Representation end end - class GoogleCloudVideointelligenceV2beta1AnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoResponse # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoAnnotationResults::Representation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults::Representation end end - class GoogleCloudVideointelligenceV2beta1Entity + class GoogleCloudVideointelligenceV1p3beta1Entity # @private class Representation < Google::Apis::Core::JsonRepresentation property :description, as: 'description' @@ -1755,15 +1757,15 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentFrame::Representation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame::Representation end end - class GoogleCloudVideointelligenceV2beta1ExplicitContentFrame + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame # @private class Representation < Google::Apis::Core::JsonRepresentation property :pornography_likelihood, as: 'pornographyLikelihood' @@ -1771,21 +1773,21 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1LabelAnnotation + class GoogleCloudVideointelligenceV1p3beta1LabelAnnotation # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1Entity, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1Entity::Representation + collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1Entity, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1Entity::Representation - property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1Entity, decorator: 
Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1Entity::Representation + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1Entity, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1Entity::Representation - collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1LabelFrame, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1LabelFrame::Representation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1LabelFrame, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1LabelFrame::Representation - collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1LabelSegment, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1LabelSegment::Representation + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1LabelSegment, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1LabelSegment::Representation end end - class GoogleCloudVideointelligenceV2beta1LabelFrame + class GoogleCloudVideointelligenceV1p3beta1LabelFrame # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' @@ -1793,16 +1795,16 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1LabelSegment + class GoogleCloudVideointelligenceV1p3beta1LabelSegment # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' - property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation end end - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox # @private class Representation < Google::Apis::Core::JsonRepresentation property :bottom, as: 'bottom' @@ -1812,15 +1814,15 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :vertices, as: 'vertices', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1NormalizedVertex, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1NormalizedVertex::Representation + collection :vertices, as: 'vertices', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedVertex, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedVertex::Representation end end - class GoogleCloudVideointelligenceV2beta1NormalizedVertex + class GoogleCloudVideointelligenceV1p3beta1NormalizedVertex # @private class Representation < 
Google::Apis::Core::JsonRepresentation property :x, as: 'x' @@ -1828,52 +1830,52 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' - property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1Entity, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1Entity::Representation + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1Entity, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1Entity::Representation - collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame::Representation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame::Representation - property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation property :track_id, :numeric_string => true, as: 'trackId' end end - class GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame # @private class Representation < Google::Apis::Core::JsonRepresentation - property :normalized_bounding_box, as: 'normalizedBoundingBox', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox::Representation + property :normalized_bounding_box, as: 'normalizedBoundingBox', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox::Representation property :time_offset, as: 'timeOffset' end end - class GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative + class GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' property :transcript, as: 'transcript' - collection :words, as: 'words', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1WordInfo, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1WordInfo::Representation + collection :words, as: 'words', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1WordInfo, decorator: 
Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1WordInfo::Representation end end - class GoogleCloudVideointelligenceV2beta1SpeechTranscription + class GoogleCloudVideointelligenceV1p3beta1SpeechTranscription # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :alternatives, as: 'alternatives', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative::Representation + collection :alternatives, as: 'alternatives', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative::Representation property :language_code, as: 'languageCode' end end - class GoogleCloudVideointelligenceV2beta1StreamingAnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse # @private class Representation < Google::Apis::Core::JsonRepresentation - property :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults::Representation + property :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults::Representation property :annotation_results_uri, as: 'annotationResultsUri' property :error, as: 'error', class: Google::Apis::VideointelligenceV1p1beta1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleRpcStatus::Representation @@ -1881,50 +1883,50 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults # @private class Representation < Google::Apis::Core::JsonRepresentation - property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation::Representation + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation::Representation - collection :label_annotations, as: 'labelAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation::Representation + collection :label_annotations, as: 'labelAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation - collection :object_annotations, as: 'objectAnnotations', class: 
Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation::Representation + collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation::Representation - collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation end end - class GoogleCloudVideointelligenceV2beta1TextAnnotation + class GoogleCloudVideointelligenceV1p3beta1TextAnnotation # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1TextSegment, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1TextSegment::Representation + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1TextSegment, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1TextSegment::Representation property :text, as: 'text' end end - class GoogleCloudVideointelligenceV2beta1TextFrame + class GoogleCloudVideointelligenceV1p3beta1TextFrame # @private class Representation < Google::Apis::Core::JsonRepresentation - property :rotated_bounding_box, as: 'rotatedBoundingBox', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly::Representation + property :rotated_bounding_box, as: 'rotatedBoundingBox', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly::Representation property :time_offset, as: 'timeOffset' end end - class GoogleCloudVideointelligenceV2beta1TextSegment + class GoogleCloudVideointelligenceV1p3beta1TextSegment # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' - collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1TextFrame, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1TextFrame::Representation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1TextFrame, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1TextFrame::Representation - property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: 
Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation end end - class GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress # @private class Representation < Google::Apis::Core::JsonRepresentation property :input_uri, as: 'inputUri' @@ -1934,32 +1936,32 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1VideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults # @private class Representation < Google::Apis::Core::JsonRepresentation property :error, as: 'error', class: Google::Apis::VideointelligenceV1p1beta1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleRpcStatus::Representation - property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation::Representation + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation::Representation - collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation::Representation + collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation property :input_uri, as: 'inputUri' - collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation::Representation + collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation::Representation - collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation::Representation + collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation - collection :shot_annotations, as: 'shotAnnotations', class: 
Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation - collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation::Representation + collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation - collection :speech_transcriptions, as: 'speechTranscriptions', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1SpeechTranscription, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1SpeechTranscription::Representation + collection :speech_transcriptions, as: 'speechTranscriptions', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1SpeechTranscription, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1SpeechTranscription::Representation - collection :text_annotations, as: 'textAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1TextAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV2beta1TextAnnotation::Representation + collection :text_annotations, as: 'textAnnotations', class: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1TextAnnotation, decorator: Google::Apis::VideointelligenceV1p1beta1::GoogleCloudVideointelligenceV1p3beta1TextAnnotation::Representation end end - class GoogleCloudVideointelligenceV2beta1VideoSegment + class GoogleCloudVideointelligenceV1p3beta1VideoSegment # @private class Representation < Google::Apis::Core::JsonRepresentation property :end_time_offset, as: 'endTimeOffset' @@ -1967,7 +1969,7 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1WordInfo + class GoogleCloudVideointelligenceV1p3beta1WordInfo # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' diff --git a/generated/google/apis/videointelligence_v1p2beta1.rb b/generated/google/apis/videointelligence_v1p2beta1.rb index 60f0adcc5..88c42dacd 100644 --- a/generated/google/apis/videointelligence_v1p2beta1.rb +++ b/generated/google/apis/videointelligence_v1p2beta1.rb @@ -27,7 +27,7 @@ module Google # @see https://cloud.google.com/video-intelligence/docs/ module VideointelligenceV1p2beta1 VERSION = 'V1p2beta1' - REVISION = '20190220' + REVISION = '20190308' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/videointelligence_v1p2beta1/classes.rb b/generated/google/apis/videointelligence_v1p2beta1/classes.rb index 15f2a4cde..ded2deddd 100644 --- a/generated/google/apis/videointelligence_v1p2beta1/classes.rb +++ 
b/generated/google/apis/videointelligence_v1p2beta1/classes.rb @@ -623,14 +623,14 @@ module Google class GoogleCloudVideointelligenceV1VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -1425,14 +1425,14 @@ module Google class GoogleCloudVideointelligenceV1beta2VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -2227,14 +2227,14 @@ module Google class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. 
The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -2683,6 +2683,16 @@ module Google class GoogleCloudVideointelligenceV1p2beta1LabelDetectionConfig include Google::Apis::Core::Hashable + # The confidence threshold we perform filtering on the labels from + # frame-level detection. If not set, it is set to 0.4 by default. The valid + # range for this threshold is [0.1, 0.9]. Any value set outside of this + # range will be clipped. + # Note: for best results please follow the default threshold. We will update + # the default threshold everytime when we release a new model. + # Corresponds to the JSON property `frameConfidenceThreshold` + # @return [Float] + attr_accessor :frame_confidence_threshold + # What labels should be detected with LABEL_DETECTION, in addition to # video-level labels or segment-level labels. # If unspecified, defaults to `SHOT_MODE`. @@ -2705,15 +2715,27 @@ module Google attr_accessor :stationary_camera alias_method :stationary_camera?, :stationary_camera + # The confidence threshold we perform filtering on the labels from + # video-level and shot-level detections. If not set, it is set to 0.3 by + # default. The valid range for this threshold is [0.1, 0.9]. Any value set + # outside of this range will be clipped. + # Note: for best results please follow the default threshold. We will update + # the default threshold everytime when we release a new model. + # Corresponds to the JSON property `videoConfidenceThreshold` + # @return [Float] + attr_accessor :video_confidence_threshold + def initialize(**args) update!(**args) end # Update properties of this object def update!(**args) + @frame_confidence_threshold = args[:frame_confidence_threshold] if args.key?(:frame_confidence_threshold) @label_detection_mode = args[:label_detection_mode] if args.key?(:label_detection_mode) @model = args[:model] if args.key?(:model) @stationary_camera = args[:stationary_camera] if args.key?(:stationary_camera) + @video_confidence_threshold = args[:video_confidence_threshold] if args.key?(:video_confidence_threshold) end end @@ -3323,14 +3345,14 @@ module Google class GoogleCloudVideointelligenceV1p2beta1VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. 
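Editorial aside (not part of the generated patch): the hunk above adds `frame_confidence_threshold` and `video_confidence_threshold` to `GoogleCloudVideointelligenceV1p2beta1LabelDetectionConfig`. A hedged sketch of setting them follows; the `SHOT_AND_FRAME_MODE` value and the step of attaching this config to a request's video context come from the wider API and are not shown in this hunk.

```ruby
require 'google/apis/videointelligence_v1p2beta1'

v1p2 = Google::Apis::VideointelligenceV1p2beta1

# Both thresholds are clipped by the service to the documented [0.1, 0.9] range.
config = v1p2::GoogleCloudVideointelligenceV1p2beta1LabelDetectionConfig.new(
  label_detection_mode: 'SHOT_AND_FRAME_MODE', # assumed mode; default is SHOT_MODE
  frame_confidence_threshold: 0.4,             # matches the documented default
  video_confidence_threshold: 0.3              # matches the documented default
)
# `config` would then be placed in the request's video context before calling
# the annotate method (that wiring is outside this hunk).
```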
If a localized user-facing @@ -3578,12 +3600,12 @@ module Google # Video annotation progress. Included in the `metadata` # field of the `Operation` returned by the `GetOperation` # call of the `google::longrunning::Operations` service. - class GoogleCloudVideointelligenceV2beta1AnnotateVideoProgress + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoProgress include Google::Apis::Core::Hashable # Progress metadata for all videos specified in `AnnotateVideoRequest`. # Corresponds to the JSON property `annotationProgress` - # @return [Array] + # @return [Array] attr_accessor :annotation_progress def initialize(**args) @@ -3599,12 +3621,12 @@ module Google # Video annotation response. Included in the `response` # field of the `Operation` returned by the `GetOperation` # call of the `google::longrunning::Operations` service. - class GoogleCloudVideointelligenceV2beta1AnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoResponse include Google::Apis::Core::Hashable # Annotation results for all videos specified in `AnnotateVideoRequest`. # Corresponds to the JSON property `annotationResults` - # @return [Array] + # @return [Array] attr_accessor :annotation_results def initialize(**args) @@ -3618,7 +3640,7 @@ module Google end # Detected entity from video analysis. - class GoogleCloudVideointelligenceV2beta1Entity + class GoogleCloudVideointelligenceV1p3beta1Entity include Google::Apis::Core::Hashable # Textual description, e.g. `Fixed-gear bicycle`. @@ -3653,12 +3675,12 @@ module Google # Explicit content annotation (based on per-frame visual signals only). # If no explicit content has been detected in a frame, no annotations are # present for that frame. - class GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation include Google::Apis::Core::Hashable # All video frames where explicit content was detected. # Corresponds to the JSON property `frames` - # @return [Array] + # @return [Array] attr_accessor :frames def initialize(**args) @@ -3672,7 +3694,7 @@ module Google end # Video frame level annotation results for explicit content. - class GoogleCloudVideointelligenceV2beta1ExplicitContentFrame + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame include Google::Apis::Core::Hashable # Likelihood of the pornography content.. @@ -3698,7 +3720,7 @@ module Google end # Label annotation. - class GoogleCloudVideointelligenceV2beta1LabelAnnotation + class GoogleCloudVideointelligenceV1p3beta1LabelAnnotation include Google::Apis::Core::Hashable # Common categories for the detected entity. @@ -3706,22 +3728,22 @@ module Google # cases there might be more than one categories e.g. `Terrier` could also be # a `pet`. # Corresponds to the JSON property `categoryEntities` - # @return [Array] + # @return [Array] attr_accessor :category_entities # Detected entity from video analysis. # Corresponds to the JSON property `entity` - # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1Entity] + # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1Entity] attr_accessor :entity # All video frames where a label was detected. # Corresponds to the JSON property `frames` - # @return [Array] + # @return [Array] attr_accessor :frames # All video segments where a label was detected. 
# Corresponds to the JSON property `segments` - # @return [Array] + # @return [Array] attr_accessor :segments def initialize(**args) @@ -3738,7 +3760,7 @@ module Google end # Video frame level annotation results for label detection. - class GoogleCloudVideointelligenceV2beta1LabelFrame + class GoogleCloudVideointelligenceV1p3beta1LabelFrame include Google::Apis::Core::Hashable # Confidence that the label is accurate. Range: [0, 1]. @@ -3764,7 +3786,7 @@ module Google end # Video segment level annotation results for label detection. - class GoogleCloudVideointelligenceV2beta1LabelSegment + class GoogleCloudVideointelligenceV1p3beta1LabelSegment include Google::Apis::Core::Hashable # Confidence that the label is accurate. Range: [0, 1]. @@ -3774,7 +3796,7 @@ module Google # Video segment. # Corresponds to the JSON property `segment` - # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoSegment] + # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment] attr_accessor :segment def initialize(**args) @@ -3791,7 +3813,7 @@ module Google # Normalized bounding box. # The normalized vertex coordinates are relative to the original image. # Range: [0, 1]. - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox include Google::Apis::Core::Hashable # Bottom Y coordinate. @@ -3842,12 +3864,12 @@ module Google # and the vertex order will still be (0, 1, 2, 3). Note that values can be less # than 0, or greater than 1 due to trignometric calculations for location of # the box. - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly include Google::Apis::Core::Hashable # Normalized vertices of the bounding polygon. # Corresponds to the JSON property `vertices` - # @return [Array] + # @return [Array] attr_accessor :vertices def initialize(**args) @@ -3863,7 +3885,7 @@ module Google # A vertex represents a 2D point in the image. # NOTE: the normalized vertex coordinates are relative to the original image # and range from 0 to 1. - class GoogleCloudVideointelligenceV2beta1NormalizedVertex + class GoogleCloudVideointelligenceV1p3beta1NormalizedVertex include Google::Apis::Core::Hashable # X coordinate. @@ -3888,7 +3910,7 @@ module Google end # Annotations corresponding to one tracked object. - class GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation include Google::Apis::Core::Hashable # Object category's labeling confidence of this track. @@ -3898,7 +3920,7 @@ module Google # Detected entity from video analysis. # Corresponds to the JSON property `entity` - # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1Entity] + # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1Entity] attr_accessor :entity # Information corresponding to all frames where this object track appears. @@ -3906,12 +3928,12 @@ module Google # messages in frames. # Streaming mode: it can only be one ObjectTrackingFrame message in frames. # Corresponds to the JSON property `frames` - # @return [Array] + # @return [Array] attr_accessor :frames # Video segment. 
# Corresponds to the JSON property `segment` - # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoSegment] + # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment] attr_accessor :segment # Streaming mode ONLY. @@ -3940,14 +3962,14 @@ module Google # Video frame level annotations for object detection and tracking. This field # stores per frame location, time offset, and confidence. - class GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame include Google::Apis::Core::Hashable # Normalized bounding box. # The normalized vertex coordinates are relative to the original image. # Range: [0, 1]. # Corresponds to the JSON property `normalizedBoundingBox` - # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox] + # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox] attr_accessor :normalized_bounding_box # The timestamp of the frame in microseconds. @@ -3967,7 +3989,7 @@ module Google end # Alternative hypotheses (a.k.a. n-best list). - class GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative + class GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative include Google::Apis::Core::Hashable # The confidence estimate between 0.0 and 1.0. A higher number @@ -3987,7 +4009,7 @@ module Google # A list of word-specific information for each recognized word. # Corresponds to the JSON property `words` - # @return [Array] + # @return [Array] attr_accessor :words def initialize(**args) @@ -4003,7 +4025,7 @@ module Google end # A speech recognition result corresponding to a portion of the audio. - class GoogleCloudVideointelligenceV2beta1SpeechTranscription + class GoogleCloudVideointelligenceV1p3beta1SpeechTranscription include Google::Apis::Core::Hashable # May contain one or more recognition hypotheses (up to the maximum specified @@ -4011,7 +4033,7 @@ module Google # accuracy, with the top (first) alternative being the most probable, as # ranked by the recognizer. # Corresponds to the JSON property `alternatives` - # @return [Array] + # @return [Array] attr_accessor :alternatives # Output only. The @@ -4036,13 +4058,13 @@ module Google # `StreamingAnnotateVideoResponse` is the only message returned to the client # by `StreamingAnnotateVideo`. A series of zero or more # `StreamingAnnotateVideoResponse` messages are streamed back to the client. - class GoogleCloudVideointelligenceV2beta1StreamingAnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse include Google::Apis::Core::Hashable # Streaming annotation results corresponding to a portion of the video # that is currently being processed. # Corresponds to the JSON property `annotationResults` - # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults] + # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults] attr_accessor :annotation_results # GCS URI that stores annotation results of one streaming session. @@ -4053,14 +4075,14 @@ module Google # @return [String] attr_accessor :annotation_results_uri - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. 
It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -4110,29 +4132,29 @@ module Google # Streaming annotation results corresponding to a portion of the video # that is currently being processed. - class GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults include Google::Apis::Core::Hashable # Explicit content annotation (based on per-frame visual signals only). # If no explicit content has been detected in a frame, no annotations are # present for that frame. # Corresponds to the JSON property `explicitAnnotation` - # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation] + # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation] attr_accessor :explicit_annotation # Label annotation results. # Corresponds to the JSON property `labelAnnotations` - # @return [Array] + # @return [Array] attr_accessor :label_annotations # Object tracking results. # Corresponds to the JSON property `objectAnnotations` - # @return [Array] + # @return [Array] attr_accessor :object_annotations # Shot annotation results. Each shot is represented as a video segment. # Corresponds to the JSON property `shotAnnotations` - # @return [Array] + # @return [Array] attr_accessor :shot_annotations def initialize(**args) @@ -4151,12 +4173,12 @@ module Google # Annotations related to one detected OCR text snippet. This will contain the # corresponding text, confidence value, and frame level information for each # detection. - class GoogleCloudVideointelligenceV2beta1TextAnnotation + class GoogleCloudVideointelligenceV1p3beta1TextAnnotation include Google::Apis::Core::Hashable # All video segments where OCR detected text appears. # Corresponds to the JSON property `segments` - # @return [Array] + # @return [Array] attr_accessor :segments # The detected text. @@ -4178,7 +4200,7 @@ module Google # Video frame level annotation results for text annotation (OCR). # Contains information regarding timestamp and bounding box locations for the # frames containing detected OCR text snippets. - class GoogleCloudVideointelligenceV2beta1TextFrame + class GoogleCloudVideointelligenceV1p3beta1TextFrame include Google::Apis::Core::Hashable # Normalized bounding polygon for text (that might not be aligned with axis). @@ -4197,7 +4219,7 @@ module Google # than 0, or greater than 1 due to trignometric calculations for location of # the box. 
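Editorial aside (not part of the generated patch): `GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse` above carries either an `error` or partial `annotation_results`. A rough sketch of consuming one such message follows; the response object is built by hand here, and the duration-string form of the time offsets is an assumption, not something shown in this patch.

```ruby
require 'google/apis/videointelligence_v1p2beta1'

v1p2 = Google::Apis::VideointelligenceV1p2beta1

# One hand-built message standing in for an element of the response stream.
response = v1p2::GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse.new(
  annotation_results: v1p2::GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults.new(
    shot_annotations: [
      v1p2::GoogleCloudVideointelligenceV1p3beta1VideoSegment.new(
        start_time_offset: '0s', end_time_offset: '4.2s'
      )
    ]
  )
)

# Each streamed message may carry an error instead of results.
raise response.error.message if response.error

(response.annotation_results&.shot_annotations || []).each do |shot|
  puts "shot: #{shot.start_time_offset} -> #{shot.end_time_offset}"
end
```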
# Corresponds to the JSON property `rotatedBoundingBox` - # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly] + # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly] attr_accessor :rotated_bounding_box # Timestamp of this frame. @@ -4217,7 +4239,7 @@ module Google end # Video segment level annotation results for text detection. - class GoogleCloudVideointelligenceV2beta1TextSegment + class GoogleCloudVideointelligenceV1p3beta1TextSegment include Google::Apis::Core::Hashable # Confidence for the track of detected text. It is calculated as the highest @@ -4228,12 +4250,12 @@ module Google # Information related to the frames where OCR detected text appears. # Corresponds to the JSON property `frames` - # @return [Array] + # @return [Array] attr_accessor :frames # Video segment. # Corresponds to the JSON property `segment` - # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoSegment] + # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment] attr_accessor :segment def initialize(**args) @@ -4249,7 +4271,7 @@ module Google end # Annotation progress for a single video. - class GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress include Google::Apis::Core::Hashable # Video file location in @@ -4288,17 +4310,17 @@ module Google end # Annotation results for a single video. - class GoogleCloudVideointelligenceV2beta1VideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults include Google::Apis::Core::Hashable - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -4338,13 +4360,13 @@ module Google # If no explicit content has been detected in a frame, no annotations are # present for that frame. # Corresponds to the JSON property `explicitAnnotation` - # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation] + # @return [Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation] attr_accessor :explicit_annotation # Label annotations on frame level. # There is exactly one element for each unique label. 
# Corresponds to the JSON property `frameLabelAnnotations` - # @return [Array] + # @return [Array] attr_accessor :frame_label_annotations # Video file location in @@ -4355,36 +4377,36 @@ module Google # Annotations for list of objects detected and tracked in video. # Corresponds to the JSON property `objectAnnotations` - # @return [Array] + # @return [Array] attr_accessor :object_annotations # Label annotations on video level or user specified segment level. # There is exactly one element for each unique label. # Corresponds to the JSON property `segmentLabelAnnotations` - # @return [Array] + # @return [Array] attr_accessor :segment_label_annotations # Shot annotations. Each shot is represented as a video segment. # Corresponds to the JSON property `shotAnnotations` - # @return [Array] + # @return [Array] attr_accessor :shot_annotations # Label annotations on shot level. # There is exactly one element for each unique label. # Corresponds to the JSON property `shotLabelAnnotations` - # @return [Array] + # @return [Array] attr_accessor :shot_label_annotations # Speech transcription. # Corresponds to the JSON property `speechTranscriptions` - # @return [Array] + # @return [Array] attr_accessor :speech_transcriptions # OCR text detection and tracking. # Annotations for list of detected text snippets. Each will have list of # frame information associated with it. # Corresponds to the JSON property `textAnnotations` - # @return [Array] + # @return [Array] attr_accessor :text_annotations def initialize(**args) @@ -4407,7 +4429,7 @@ module Google end # Video segment. - class GoogleCloudVideointelligenceV2beta1VideoSegment + class GoogleCloudVideointelligenceV1p3beta1VideoSegment include Google::Apis::Core::Hashable # Time-offset, relative to the beginning of the video, @@ -4436,7 +4458,7 @@ module Google # Word-specific information for recognized words. Word information is only # included in the response when certain request parameters are set, such # as `enable_word_time_offsets`. - class GoogleCloudVideointelligenceV2beta1WordInfo + class GoogleCloudVideointelligenceV1p3beta1WordInfo include Google::Apis::Core::Hashable # Output only. The confidence estimate between 0.0 and 1.0. A higher number @@ -4505,14 +4527,14 @@ module Google attr_accessor :done alias_method :done?, :done - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. 
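Editorial aside (not part of the generated patch): after the rename, batch results expose labels through `segment_label_annotations`, `shot_label_annotations`, and `frame_label_annotations` on `GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults`. A small sketch of reading segment-level labels follows; the objects are built by hand and the time-offset strings are illustrative assumptions.

```ruby
require 'google/apis/videointelligence_v1p2beta1'

v1p2 = Google::Apis::VideointelligenceV1p2beta1

# Hand-built stand-in for one element of AnnotateVideoResponse#annotation_results.
results = v1p2::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults.new(
  segment_label_annotations: [
    v1p2::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation.new(
      entity: v1p2::GoogleCloudVideointelligenceV1p3beta1Entity.new(description: 'bicycle'),
      segments: [
        v1p2::GoogleCloudVideointelligenceV1p3beta1LabelSegment.new(
          confidence: 0.92,
          segment: v1p2::GoogleCloudVideointelligenceV1p3beta1VideoSegment.new(
            start_time_offset: '0s', end_time_offset: '7s'
          )
        )
      ]
    )
  ]
)

# One line per (label, segment) pair, with the label's confidence.
results.segment_label_annotations.each do |label|
  label.segments.each do |seg|
    puts format('%s %.2f (%s..%s)',
                label.entity.description, seg.confidence,
                seg.segment.start_time_offset, seg.segment.end_time_offset)
  end
end
```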
If a localized user-facing @@ -4589,14 +4611,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing diff --git a/generated/google/apis/videointelligence_v1p2beta1/representations.rb b/generated/google/apis/videointelligence_v1p2beta1/representations.rb index a6563ae82..c7359ff33 100644 --- a/generated/google/apis/videointelligence_v1p2beta1/representations.rb +++ b/generated/google/apis/videointelligence_v1p2beta1/representations.rb @@ -598,145 +598,145 @@ module Google include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1AnnotateVideoProgress + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoProgress class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1AnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoResponse class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1Entity + class GoogleCloudVideointelligenceV1p3beta1Entity class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1ExplicitContentFrame + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1LabelAnnotation + class GoogleCloudVideointelligenceV1p3beta1LabelAnnotation class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1LabelFrame + class GoogleCloudVideointelligenceV1p3beta1LabelFrame class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1LabelSegment + class GoogleCloudVideointelligenceV1p3beta1LabelSegment class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class 
GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1NormalizedVertex + class GoogleCloudVideointelligenceV1p3beta1NormalizedVertex class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative + class GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1SpeechTranscription + class GoogleCloudVideointelligenceV1p3beta1SpeechTranscription class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1StreamingAnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1TextAnnotation + class GoogleCloudVideointelligenceV1p3beta1TextAnnotation class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1TextFrame + class GoogleCloudVideointelligenceV1p3beta1TextFrame class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1TextSegment + class GoogleCloudVideointelligenceV1p3beta1TextSegment class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1VideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1VideoSegment + class 
GoogleCloudVideointelligenceV1p3beta1VideoSegment class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport end - class GoogleCloudVideointelligenceV2beta1WordInfo + class GoogleCloudVideointelligenceV1p3beta1WordInfo class Representation < Google::Apis::Core::JsonRepresentation; end include Google::Apis::Core::JsonObjectSupport @@ -1501,9 +1501,11 @@ module Google class GoogleCloudVideointelligenceV1p2beta1LabelDetectionConfig # @private class Representation < Google::Apis::Core::JsonRepresentation + property :frame_confidence_threshold, as: 'frameConfidenceThreshold' property :label_detection_mode, as: 'labelDetectionMode' property :model, as: 'model' property :stationary_camera, as: 'stationaryCamera' + property :video_confidence_threshold, as: 'videoConfidenceThreshold' end end @@ -1730,23 +1732,23 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1AnnotateVideoProgress + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoProgress # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress::Representation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress::Representation end end - class GoogleCloudVideointelligenceV2beta1AnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoResponse # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoAnnotationResults::Representation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults::Representation end end - class GoogleCloudVideointelligenceV2beta1Entity + class GoogleCloudVideointelligenceV1p3beta1Entity # @private class Representation < Google::Apis::Core::JsonRepresentation property :description, as: 'description' @@ -1755,15 +1757,15 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentFrame::Representation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame::Representation end end - class 
GoogleCloudVideointelligenceV2beta1ExplicitContentFrame + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame # @private class Representation < Google::Apis::Core::JsonRepresentation property :pornography_likelihood, as: 'pornographyLikelihood' @@ -1771,21 +1773,21 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1LabelAnnotation + class GoogleCloudVideointelligenceV1p3beta1LabelAnnotation # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1Entity, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1Entity::Representation + collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1Entity, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1Entity::Representation - property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1Entity, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1Entity::Representation + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1Entity, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1Entity::Representation - collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1LabelFrame, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1LabelFrame::Representation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1LabelFrame, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1LabelFrame::Representation - collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1LabelSegment, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1LabelSegment::Representation + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1LabelSegment, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1LabelSegment::Representation end end - class GoogleCloudVideointelligenceV2beta1LabelFrame + class GoogleCloudVideointelligenceV1p3beta1LabelFrame # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' @@ -1793,16 +1795,16 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1LabelSegment + class GoogleCloudVideointelligenceV1p3beta1LabelSegment # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' - property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation end end - class 
GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox # @private class Representation < Google::Apis::Core::JsonRepresentation property :bottom, as: 'bottom' @@ -1812,15 +1814,15 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :vertices, as: 'vertices', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1NormalizedVertex, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1NormalizedVertex::Representation + collection :vertices, as: 'vertices', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedVertex, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedVertex::Representation end end - class GoogleCloudVideointelligenceV2beta1NormalizedVertex + class GoogleCloudVideointelligenceV1p3beta1NormalizedVertex # @private class Representation < Google::Apis::Core::JsonRepresentation property :x, as: 'x' @@ -1828,52 +1830,52 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' - property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1Entity, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1Entity::Representation + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1Entity, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1Entity::Representation - collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame::Representation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame::Representation - property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation property :track_id, :numeric_string => true, as: 'trackId' end end - class GoogleCloudVideointelligenceV2beta1ObjectTrackingFrame + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame # @private class Representation < Google::Apis::Core::JsonRepresentation - property :normalized_bounding_box, as: 'normalizedBoundingBox', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox, decorator: 
Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1NormalizedBoundingBox::Representation + property :normalized_bounding_box, as: 'normalizedBoundingBox', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox::Representation property :time_offset, as: 'timeOffset' end end - class GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative + class GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' property :transcript, as: 'transcript' - collection :words, as: 'words', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1WordInfo, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1WordInfo::Representation + collection :words, as: 'words', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1WordInfo, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1WordInfo::Representation end end - class GoogleCloudVideointelligenceV2beta1SpeechTranscription + class GoogleCloudVideointelligenceV1p3beta1SpeechTranscription # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :alternatives, as: 'alternatives', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1SpeechRecognitionAlternative::Representation + collection :alternatives, as: 'alternatives', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative::Representation property :language_code, as: 'languageCode' end end - class GoogleCloudVideointelligenceV2beta1StreamingAnnotateVideoResponse + class GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse # @private class Representation < Google::Apis::Core::JsonRepresentation - property :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults::Representation + property :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults::Representation property :annotation_results_uri, as: 'annotationResultsUri' property :error, as: 'error', class: Google::Apis::VideointelligenceV1p2beta1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleRpcStatus::Representation @@ -1881,50 +1883,50 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1StreamingVideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults # @private class Representation < Google::Apis::Core::JsonRepresentation - property :explicit_annotation, as: 'explicitAnnotation', class: 
Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation::Representation + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation::Representation - collection :label_annotations, as: 'labelAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation::Representation + collection :label_annotations, as: 'labelAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation - collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation::Representation + collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation::Representation - collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation end end - class GoogleCloudVideointelligenceV2beta1TextAnnotation + class GoogleCloudVideointelligenceV1p3beta1TextAnnotation # @private class Representation < Google::Apis::Core::JsonRepresentation - collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1TextSegment, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1TextSegment::Representation + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1TextSegment, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1TextSegment::Representation property :text, as: 'text' end end - class GoogleCloudVideointelligenceV2beta1TextFrame + class GoogleCloudVideointelligenceV1p3beta1TextFrame # @private class Representation < Google::Apis::Core::JsonRepresentation - property :rotated_bounding_box, as: 'rotatedBoundingBox', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1NormalizedBoundingPoly::Representation + property :rotated_bounding_box, as: 'rotatedBoundingBox', class: 
Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly::Representation property :time_offset, as: 'timeOffset' end end - class GoogleCloudVideointelligenceV2beta1TextSegment + class GoogleCloudVideointelligenceV1p3beta1TextSegment # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' - collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1TextFrame, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1TextFrame::Representation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1TextFrame, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1TextFrame::Representation - property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation end end - class GoogleCloudVideointelligenceV2beta1VideoAnnotationProgress + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress # @private class Representation < Google::Apis::Core::JsonRepresentation property :input_uri, as: 'inputUri' @@ -1934,32 +1936,32 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1VideoAnnotationResults + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults # @private class Representation < Google::Apis::Core::JsonRepresentation property :error, as: 'error', class: Google::Apis::VideointelligenceV1p2beta1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleRpcStatus::Representation - property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1ExplicitContentAnnotation::Representation + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation::Representation - collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation::Representation + collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation property :input_uri, as: 'inputUri' - collection :object_annotations, as: 'objectAnnotations', class: 
Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1ObjectTrackingAnnotation::Representation + collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation::Representation - collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation::Representation + collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation - collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1VideoSegment::Representation + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation - collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1LabelAnnotation::Representation + collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation - collection :speech_transcriptions, as: 'speechTranscriptions', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1SpeechTranscription, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1SpeechTranscription::Representation + collection :speech_transcriptions, as: 'speechTranscriptions', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1SpeechTranscription, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1SpeechTranscription::Representation - collection :text_annotations, as: 'textAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1TextAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV2beta1TextAnnotation::Representation + collection :text_annotations, as: 'textAnnotations', class: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1TextAnnotation, decorator: Google::Apis::VideointelligenceV1p2beta1::GoogleCloudVideointelligenceV1p3beta1TextAnnotation::Representation end end - class GoogleCloudVideointelligenceV2beta1VideoSegment + class GoogleCloudVideointelligenceV1p3beta1VideoSegment # @private class Representation < 
Google::Apis::Core::JsonRepresentation property :end_time_offset, as: 'endTimeOffset' @@ -1967,7 +1969,7 @@ module Google end end - class GoogleCloudVideointelligenceV2beta1WordInfo + class GoogleCloudVideointelligenceV1p3beta1WordInfo # @private class Representation < Google::Apis::Core::JsonRepresentation property :confidence, as: 'confidence' diff --git a/generated/google/apis/videointelligence_v1p3beta1.rb b/generated/google/apis/videointelligence_v1p3beta1.rb new file mode 100644 index 000000000..b74dfe7f2 --- /dev/null +++ b/generated/google/apis/videointelligence_v1p3beta1.rb @@ -0,0 +1,36 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'google/apis/videointelligence_v1p3beta1/service.rb' +require 'google/apis/videointelligence_v1p3beta1/classes.rb' +require 'google/apis/videointelligence_v1p3beta1/representations.rb' + +module Google + module Apis + # Cloud Video Intelligence API + # + # Detects objects, explicit content, and scene changes in videos. It also + # specifies the region for annotation and transcribes speech to text. Supports + # both asynchronous API and streaming API. + # + # @see https://cloud.google.com/video-intelligence/docs/ + module VideointelligenceV1p3beta1 + VERSION = 'V1p3beta1' + REVISION = '20190308' + + # View and manage your data across Google Cloud Platform services + AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' + end + end +end diff --git a/generated/google/apis/videointelligence_v1p3beta1/classes.rb b/generated/google/apis/videointelligence_v1p3beta1/classes.rb new file mode 100644 index 000000000..140e4bf80 --- /dev/null +++ b/generated/google/apis/videointelligence_v1p3beta1/classes.rb @@ -0,0 +1,4687 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'date' +require 'google/apis/core/base_service' +require 'google/apis/core/json_representation' +require 'google/apis/core/hashable' +require 'google/apis/errors' + +module Google + module Apis + module VideointelligenceV1p3beta1 + + # Video annotation progress. Included in the `metadata` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1AnnotateVideoProgress + include Google::Apis::Core::Hashable + + # Progress metadata for all videos specified in `AnnotateVideoRequest`. 
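The new videointelligence_v1p3beta1.rb above registers the API version, revision and OAuth scope. A small sketch of reading those constants from client code, using only names defined in the file shown.

require 'google/apis/videointelligence_v1p3beta1'

# Constants defined by the module registration above.
puts Google::Apis::VideointelligenceV1p3beta1::VERSION              # "V1p3beta1"
puts Google::Apis::VideointelligenceV1p3beta1::REVISION             # "20190308"
puts Google::Apis::VideointelligenceV1p3beta1::AUTH_CLOUD_PLATFORM  # cloud-platform scope URL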
+ # Corresponds to the JSON property `annotationProgress` + # @return [Array] + attr_accessor :annotation_progress + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress) + end + end + + # Video annotation response. Included in the `response` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1AnnotateVideoResponse + include Google::Apis::Core::Hashable + + # Annotation results for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationResults` + # @return [Array] + attr_accessor :annotation_results + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_results = args[:annotation_results] if args.key?(:annotation_results) + end + end + + # Detected entity from video analysis. + class GoogleCloudVideointelligenceV1Entity + include Google::Apis::Core::Hashable + + # Textual description, e.g. `Fixed-gear bicycle`. + # Corresponds to the JSON property `description` + # @return [String] + attr_accessor :description + + # Opaque entity ID. Some IDs may be available in + # [Google Knowledge Graph Search + # API](https://developers.google.com/knowledge-graph/). + # Corresponds to the JSON property `entityId` + # @return [String] + attr_accessor :entity_id + + # Language code for `description` in BCP-47 format. + # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @description = args[:description] if args.key?(:description) + @entity_id = args[:entity_id] if args.key?(:entity_id) + @language_code = args[:language_code] if args.key?(:language_code) + end + end + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + class GoogleCloudVideointelligenceV1ExplicitContentAnnotation + include Google::Apis::Core::Hashable + + # All video frames where explicit content was detected. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @frames = args[:frames] if args.key?(:frames) + end + end + + # Video frame level annotation results for explicit content. + class GoogleCloudVideointelligenceV1ExplicitContentFrame + include Google::Apis::Core::Hashable + + # Likelihood of the pornography content.. + # Corresponds to the JSON property `pornographyLikelihood` + # @return [String] + attr_accessor :pornography_likelihood + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @pornography_likelihood = args[:pornography_likelihood] if args.key?(:pornography_likelihood) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Label annotation. 
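The Entity, ExplicitContentAnnotation and ExplicitContentFrame classes above are plain Hashable value objects. A minimal reading sketch, assuming a `results` object of the GoogleCloudVideointelligenceV1VideoAnnotationResults type defined further down in this file.

require 'google/apis/videointelligence_v1p3beta1'

# Print every frame flagged by explicit content detection.
def print_explicit_frames(results)
  frames = results.explicit_annotation&.frames || []
  frames.each do |frame|
    # pornography_likelihood is a string enum; time_offset is a duration string.
    puts "#{frame.time_offset}: #{frame.pornography_likelihood}"
  end
end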
+ class GoogleCloudVideointelligenceV1LabelAnnotation + include Google::Apis::Core::Hashable + + # Common categories for the detected entity. + # E.g. when the label is `Terrier` the category is likely `dog`. And in some + # cases there might be more than one categories e.g. `Terrier` could also be + # a `pet`. + # Corresponds to the JSON property `categoryEntities` + # @return [Array] + attr_accessor :category_entities + + # Detected entity from video analysis. + # Corresponds to the JSON property `entity` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1Entity] + attr_accessor :entity + + # All video frames where a label was detected. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # All video segments where a label was detected. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @category_entities = args[:category_entities] if args.key?(:category_entities) + @entity = args[:entity] if args.key?(:entity) + @frames = args[:frames] if args.key?(:frames) + @segments = args[:segments] if args.key?(:segments) + end + end + + # Video frame level annotation results for label detection. + class GoogleCloudVideointelligenceV1LabelFrame + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for label detection. + class GoogleCloudVideointelligenceV1LabelSegment + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Normalized bounding box. + # The normalized vertex coordinates are relative to the original image. + # Range: [0, 1]. + class GoogleCloudVideointelligenceV1NormalizedBoundingBox + include Google::Apis::Core::Hashable + + # Bottom Y coordinate. + # Corresponds to the JSON property `bottom` + # @return [Float] + attr_accessor :bottom + + # Left X coordinate. + # Corresponds to the JSON property `left` + # @return [Float] + attr_accessor :left + + # Right X coordinate. + # Corresponds to the JSON property `right` + # @return [Float] + attr_accessor :right + + # Top Y coordinate. 
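A sketch of walking the label results built from the classes above: one LabelAnnotation per unique label, each carrying an Entity, optional category entities, and scored segments. `results` is assumed to be a GoogleCloudVideointelligenceV1VideoAnnotationResults, which appears further down in this file.

require 'google/apis/videointelligence_v1p3beta1'

def print_segment_labels(results)
  (results.segment_label_annotations || []).each do |label|
    categories = (label.category_entities || []).map(&:description).join(', ')
    puts "#{label.entity&.description} (categories: #{categories})"
    (label.segments || []).each do |seg|
      puts "  confidence #{seg.confidence} in " \
           "#{seg.segment&.start_time_offset}..#{seg.segment&.end_time_offset}"
    end
  end
end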
+ # Corresponds to the JSON property `top` + # @return [Float] + attr_accessor :top + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @bottom = args[:bottom] if args.key?(:bottom) + @left = args[:left] if args.key?(:left) + @right = args[:right] if args.key?(:right) + @top = args[:top] if args.key?(:top) + end + end + + # Normalized bounding polygon for text (that might not be aligned with axis). + # Contains list of the corner points in clockwise order starting from + # top-left corner. For example, for a rectangular bounding box: + # When the text is horizontal it might look like: + # 0----1 + # | | + # 3----2 + # When it's clockwise rotated 180 degrees around the top-left corner it + # becomes: + # 2----3 + # | | + # 1----0 + # and the vertex order will still be (0, 1, 2, 3). Note that values can be less + # than 0, or greater than 1 due to trignometric calculations for location of + # the box. + class GoogleCloudVideointelligenceV1NormalizedBoundingPoly + include Google::Apis::Core::Hashable + + # Normalized vertices of the bounding polygon. + # Corresponds to the JSON property `vertices` + # @return [Array] + attr_accessor :vertices + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @vertices = args[:vertices] if args.key?(:vertices) + end + end + + # A vertex represents a 2D point in the image. + # NOTE: the normalized vertex coordinates are relative to the original image + # and range from 0 to 1. + class GoogleCloudVideointelligenceV1NormalizedVertex + include Google::Apis::Core::Hashable + + # X coordinate. + # Corresponds to the JSON property `x` + # @return [Float] + attr_accessor :x + + # Y coordinate. + # Corresponds to the JSON property `y` + # @return [Float] + attr_accessor :y + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @x = args[:x] if args.key?(:x) + @y = args[:y] if args.key?(:y) + end + end + + # Annotations corresponding to one tracked object. + class GoogleCloudVideointelligenceV1ObjectTrackingAnnotation + include Google::Apis::Core::Hashable + + # Object category's labeling confidence of this track. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Detected entity from video analysis. + # Corresponds to the JSON property `entity` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1Entity] + attr_accessor :entity + + # Information corresponding to all frames where this object track appears. + # Non-streaming batch mode: it may be one or multiple ObjectTrackingFrame + # messages in frames. + # Streaming mode: it can only be one ObjectTrackingFrame message in frames. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment] + attr_accessor :segment + + # Streaming mode ONLY. + # In streaming mode, we do not know the end time of a tracked object + # before it is completed. Hence, there is no VideoSegment info returned. + # Instead, we provide a unique identifiable integer track_id so that + # the customers can correlate the results of the ongoing + # ObjectTrackAnnotation of the same track_id over time. 
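To illustrate the vertex-order convention documented above (clockwise from the top-left corner, coordinates normalized to [0, 1] but possibly slightly outside that range after rotation), here is a sketch that builds an axis-aligned box as a NormalizedBoundingPoly; the coordinate values are arbitrary.

require 'google/apis/videointelligence_v1p3beta1'

V1p3 = Google::Apis::VideointelligenceV1p3beta1

# Helper that wraps a coordinate pair in a NormalizedVertex.
vertex = ->(x, y) { V1p3::GoogleCloudVideointelligenceV1NormalizedVertex.new(x: x, y: y) }

# Four corners in clockwise order starting at the top-left corner.
poly = V1p3::GoogleCloudVideointelligenceV1NormalizedBoundingPoly.new(
  vertices: [vertex.call(0.1, 0.2), vertex.call(0.8, 0.2),
             vertex.call(0.8, 0.4), vertex.call(0.1, 0.4)]
)
puts poly.vertices.map { |v| [v.x, v.y] }.inspect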
+ # Corresponds to the JSON property `trackId` + # @return [Fixnum] + attr_accessor :track_id + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @entity = args[:entity] if args.key?(:entity) + @frames = args[:frames] if args.key?(:frames) + @segment = args[:segment] if args.key?(:segment) + @track_id = args[:track_id] if args.key?(:track_id) + end + end + + # Video frame level annotations for object detection and tracking. This field + # stores per frame location, time offset, and confidence. + class GoogleCloudVideointelligenceV1ObjectTrackingFrame + include Google::Apis::Core::Hashable + + # Normalized bounding box. + # The normalized vertex coordinates are relative to the original image. + # Range: [0, 1]. + # Corresponds to the JSON property `normalizedBoundingBox` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1NormalizedBoundingBox] + attr_accessor :normalized_bounding_box + + # The timestamp of the frame in microseconds. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @normalized_bounding_box = args[:normalized_bounding_box] if args.key?(:normalized_bounding_box) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Alternative hypotheses (a.k.a. n-best list). + class GoogleCloudVideointelligenceV1SpeechRecognitionAlternative + include Google::Apis::Core::Hashable + + # The confidence estimate between 0.0 and 1.0. A higher number + # indicates an estimated greater likelihood that the recognized words are + # correct. This field is typically provided only for the top hypothesis, and + # only for `is_final=true` results. Clients should not rely on the + # `confidence` field as it is not guaranteed to be accurate or consistent. + # The default of 0.0 is a sentinel value indicating `confidence` was not set. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Transcript text representing the words that the user spoke. + # Corresponds to the JSON property `transcript` + # @return [String] + attr_accessor :transcript + + # A list of word-specific information for each recognized word. + # Corresponds to the JSON property `words` + # @return [Array] + attr_accessor :words + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @transcript = args[:transcript] if args.key?(:transcript) + @words = args[:words] if args.key?(:words) + end + end + + # A speech recognition result corresponding to a portion of the audio. + class GoogleCloudVideointelligenceV1SpeechTranscription + include Google::Apis::Core::Hashable + + # May contain one or more recognition hypotheses (up to the maximum specified + # in `max_alternatives`). These alternatives are ordered in terms of + # accuracy, with the top (first) alternative being the most probable, as + # ranked by the recognizer. + # Corresponds to the JSON property `alternatives` + # @return [Array] + attr_accessor :alternatives + + # Output only. The + # [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the + # language in this result. 
This language code was detected to have the most + # likelihood of being spoken in the audio. + # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @alternatives = args[:alternatives] if args.key?(:alternatives) + @language_code = args[:language_code] if args.key?(:language_code) + end + end + + # Annotations related to one detected OCR text snippet. This will contain the + # corresponding text, confidence value, and frame level information for each + # detection. + class GoogleCloudVideointelligenceV1TextAnnotation + include Google::Apis::Core::Hashable + + # All video segments where OCR detected text appears. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + # The detected text. + # Corresponds to the JSON property `text` + # @return [String] + attr_accessor :text + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @segments = args[:segments] if args.key?(:segments) + @text = args[:text] if args.key?(:text) + end + end + + # Video frame level annotation results for text annotation (OCR). + # Contains information regarding timestamp and bounding box locations for the + # frames containing detected OCR text snippets. + class GoogleCloudVideointelligenceV1TextFrame + include Google::Apis::Core::Hashable + + # Normalized bounding polygon for text (that might not be aligned with axis). + # Contains list of the corner points in clockwise order starting from + # top-left corner. For example, for a rectangular bounding box: + # When the text is horizontal it might look like: + # 0----1 + # | | + # 3----2 + # When it's clockwise rotated 180 degrees around the top-left corner it + # becomes: + # 2----3 + # | | + # 1----0 + # and the vertex order will still be (0, 1, 2, 3). Note that values can be less + # than 0, or greater than 1 due to trignometric calculations for location of + # the box. + # Corresponds to the JSON property `rotatedBoundingBox` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1NormalizedBoundingPoly] + attr_accessor :rotated_bounding_box + + # Timestamp of this frame. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @rotated_bounding_box = args[:rotated_bounding_box] if args.key?(:rotated_bounding_box) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for text detection. + class GoogleCloudVideointelligenceV1TextSegment + include Google::Apis::Core::Hashable + + # Confidence for the track of detected text. It is calculated as the highest + # over all frames where OCR detected text appears. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Information related to the frames where OCR detected text appears. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # Video segment. 
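A reading sketch for the OCR classes above (TextAnnotation, TextSegment, TextFrame): each annotation carries the detected text plus per-track confidence and the frames it appears in. `results` is again assumed to be a GoogleCloudVideointelligenceV1VideoAnnotationResults from further down in this file.

require 'google/apis/videointelligence_v1p3beta1'

def print_text_annotations(results)
  (results.text_annotations || []).each do |annotation|
    puts annotation.text
    (annotation.segments || []).each do |segment|
      frame_times = (segment.frames || []).map(&:time_offset).join(', ')
      puts "  confidence #{segment.confidence}, frames at #{frame_times}"
    end
  end
end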
+ # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @frames = args[:frames] if args.key?(:frames) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Annotation progress for a single video. + class GoogleCloudVideointelligenceV1VideoAnnotationProgress + include Google::Apis::Core::Hashable + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Approximate percentage processed thus far. Guaranteed to be + # 100 when fully processed. + # Corresponds to the JSON property `progressPercent` + # @return [Fixnum] + attr_accessor :progress_percent + + # Time when the request was received. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Time of the most recent update. + # Corresponds to the JSON property `updateTime` + # @return [String] + attr_accessor :update_time + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @progress_percent = args[:progress_percent] if args.key?(:progress_percent) + @start_time = args[:start_time] if args.key?(:start_time) + @update_time = args[:update_time] if args.key?(:update_time) + end + end + + # Annotation results for a single video. + class GoogleCloudVideointelligenceV1VideoAnnotationResults + include Google::Apis::Core::Hashable + + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. 
If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus] + attr_accessor :error + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + # Corresponds to the JSON property `explicitAnnotation` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1ExplicitContentAnnotation] + attr_accessor :explicit_annotation + + # Label annotations on frame level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `frameLabelAnnotations` + # @return [Array] + attr_accessor :frame_label_annotations + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Annotations for list of objects detected and tracked in video. + # Corresponds to the JSON property `objectAnnotations` + # @return [Array] + attr_accessor :object_annotations + + # Label annotations on video level or user specified segment level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `segmentLabelAnnotations` + # @return [Array] + attr_accessor :segment_label_annotations + + # Shot annotations. Each shot is represented as a video segment. + # Corresponds to the JSON property `shotAnnotations` + # @return [Array] + attr_accessor :shot_annotations + + # Label annotations on shot level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `shotLabelAnnotations` + # @return [Array] + attr_accessor :shot_label_annotations + + # Speech transcription. + # Corresponds to the JSON property `speechTranscriptions` + # @return [Array] + attr_accessor :speech_transcriptions + + # OCR text detection and tracking. + # Annotations for list of detected text snippets. Each will have list of + # frame information associated with it. 
+ # Corresponds to the JSON property `textAnnotations` + # @return [Array] + attr_accessor :text_annotations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @error = args[:error] if args.key?(:error) + @explicit_annotation = args[:explicit_annotation] if args.key?(:explicit_annotation) + @frame_label_annotations = args[:frame_label_annotations] if args.key?(:frame_label_annotations) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @object_annotations = args[:object_annotations] if args.key?(:object_annotations) + @segment_label_annotations = args[:segment_label_annotations] if args.key?(:segment_label_annotations) + @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations) + @shot_label_annotations = args[:shot_label_annotations] if args.key?(:shot_label_annotations) + @speech_transcriptions = args[:speech_transcriptions] if args.key?(:speech_transcriptions) + @text_annotations = args[:text_annotations] if args.key?(:text_annotations) + end + end + + # Video segment. + class GoogleCloudVideointelligenceV1VideoSegment + include Google::Apis::Core::Hashable + + # Time-offset, relative to the beginning of the video, + # corresponding to the end of the segment (inclusive). + # Corresponds to the JSON property `endTimeOffset` + # @return [String] + attr_accessor :end_time_offset + + # Time-offset, relative to the beginning of the video, + # corresponding to the start of the segment (inclusive). + # Corresponds to the JSON property `startTimeOffset` + # @return [String] + attr_accessor :start_time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time_offset = args[:end_time_offset] if args.key?(:end_time_offset) + @start_time_offset = args[:start_time_offset] if args.key?(:start_time_offset) + end + end + + # Word-specific information for recognized words. Word information is only + # included in the response when certain request parameters are set, such + # as `enable_word_time_offsets`. + class GoogleCloudVideointelligenceV1WordInfo + include Google::Apis::Core::Hashable + + # Output only. The confidence estimate between 0.0 and 1.0. A higher number + # indicates an estimated greater likelihood that the recognized words are + # correct. This field is set only for the top alternative. + # This field is not guaranteed to be accurate and users should not rely on it + # to be always provided. + # The default of 0.0 is a sentinel value indicating `confidence` was not set. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time offset relative to the beginning of the audio, and + # corresponding to the end of the spoken word. This field is only set if + # `enable_word_time_offsets=true` and only in the top hypothesis. This is an + # experimental feature and the accuracy of the time offset can vary. + # Corresponds to the JSON property `endTime` + # @return [String] + attr_accessor :end_time + + # Output only. A distinct integer value is assigned for every speaker within + # the audio. This field specifies which one of those speakers was detected to + # have spoken this word. Value ranges from 1 up to diarization_speaker_count, + # and is only set if speaker diarization is enabled. 
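As a minimal sketch of how the word-level fields described above fit together (the `google/apis/videointelligence_v1p3beta1` require path is assumed from the gem's usual convention, and every value is invented for illustration), a single recognized word attributed to the second diarized speaker could be built and read like this:

    require 'google/apis/videointelligence_v1p3beta1'

    V1p3 = Google::Apis::VideointelligenceV1p3beta1

    # A hypothetical word attributed to speaker 2 by diarization.
    word = V1p3::GoogleCloudVideointelligenceV1WordInfo.new(
      word: 'hello',
      start_time: '1.400s',
      end_time: '1.900s',
      speaker_tag: 2,
      confidence: 0.92
    )
    puts "#{word.word} (speaker #{word.speaker_tag}): #{word.start_time}-#{word.end_time}"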
+ # Corresponds to the JSON property `speakerTag` + # @return [Fixnum] + attr_accessor :speaker_tag + + # Time offset relative to the beginning of the audio, and + # corresponding to the start of the spoken word. This field is only set if + # `enable_word_time_offsets=true` and only in the top hypothesis. This is an + # experimental feature and the accuracy of the time offset can vary. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # The word corresponding to this set of information. + # Corresponds to the JSON property `word` + # @return [String] + attr_accessor :word + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @end_time = args[:end_time] if args.key?(:end_time) + @speaker_tag = args[:speaker_tag] if args.key?(:speaker_tag) + @start_time = args[:start_time] if args.key?(:start_time) + @word = args[:word] if args.key?(:word) + end + end + + # Video annotation progress. Included in the `metadata` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1beta2AnnotateVideoProgress + include Google::Apis::Core::Hashable + + # Progress metadata for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationProgress` + # @return [Array] + attr_accessor :annotation_progress + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress) + end + end + + # Video annotation response. Included in the `response` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1beta2AnnotateVideoResponse + include Google::Apis::Core::Hashable + + # Annotation results for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationResults` + # @return [Array] + attr_accessor :annotation_results + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_results = args[:annotation_results] if args.key?(:annotation_results) + end + end + + # Detected entity from video analysis. + class GoogleCloudVideointelligenceV1beta2Entity + include Google::Apis::Core::Hashable + + # Textual description, e.g. `Fixed-gear bicycle`. + # Corresponds to the JSON property `description` + # @return [String] + attr_accessor :description + + # Opaque entity ID. Some IDs may be available in + # [Google Knowledge Graph Search + # API](https://developers.google.com/knowledge-graph/). + # Corresponds to the JSON property `entityId` + # @return [String] + attr_accessor :entity_id + + # Language code for `description` in BCP-47 format. + # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @description = args[:description] if args.key?(:description) + @entity_id = args[:entity_id] if args.key?(:entity_id) + @language_code = args[:language_code] if args.key?(:language_code) + end + end + + # Explicit content annotation (based on per-frame visual signals only). 
+ # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + class GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation + include Google::Apis::Core::Hashable + + # All video frames where explicit content was detected. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @frames = args[:frames] if args.key?(:frames) + end + end + + # Video frame level annotation results for explicit content. + class GoogleCloudVideointelligenceV1beta2ExplicitContentFrame + include Google::Apis::Core::Hashable + + # Likelihood of the pornography content.. + # Corresponds to the JSON property `pornographyLikelihood` + # @return [String] + attr_accessor :pornography_likelihood + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @pornography_likelihood = args[:pornography_likelihood] if args.key?(:pornography_likelihood) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Label annotation. + class GoogleCloudVideointelligenceV1beta2LabelAnnotation + include Google::Apis::Core::Hashable + + # Common categories for the detected entity. + # E.g. when the label is `Terrier` the category is likely `dog`. And in some + # cases there might be more than one categories e.g. `Terrier` could also be + # a `pet`. + # Corresponds to the JSON property `categoryEntities` + # @return [Array] + attr_accessor :category_entities + + # Detected entity from video analysis. + # Corresponds to the JSON property `entity` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2Entity] + attr_accessor :entity + + # All video frames where a label was detected. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # All video segments where a label was detected. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @category_entities = args[:category_entities] if args.key?(:category_entities) + @entity = args[:entity] if args.key?(:entity) + @frames = args[:frames] if args.key?(:frames) + @segments = args[:segments] if args.key?(:segments) + end + end + + # Video frame level annotation results for label detection. + class GoogleCloudVideointelligenceV1beta2LabelFrame + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for label detection. 
+ class GoogleCloudVideointelligenceV1beta2LabelSegment + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Normalized bounding box. + # The normalized vertex coordinates are relative to the original image. + # Range: [0, 1]. + class GoogleCloudVideointelligenceV1beta2NormalizedBoundingBox + include Google::Apis::Core::Hashable + + # Bottom Y coordinate. + # Corresponds to the JSON property `bottom` + # @return [Float] + attr_accessor :bottom + + # Left X coordinate. + # Corresponds to the JSON property `left` + # @return [Float] + attr_accessor :left + + # Right X coordinate. + # Corresponds to the JSON property `right` + # @return [Float] + attr_accessor :right + + # Top Y coordinate. + # Corresponds to the JSON property `top` + # @return [Float] + attr_accessor :top + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @bottom = args[:bottom] if args.key?(:bottom) + @left = args[:left] if args.key?(:left) + @right = args[:right] if args.key?(:right) + @top = args[:top] if args.key?(:top) + end + end + + # Normalized bounding polygon for text (that might not be aligned with axis). + # Contains list of the corner points in clockwise order starting from + # top-left corner. For example, for a rectangular bounding box: + # When the text is horizontal it might look like: + # 0----1 + # | | + # 3----2 + # When it's clockwise rotated 180 degrees around the top-left corner it + # becomes: + # 2----3 + # | | + # 1----0 + # and the vertex order will still be (0, 1, 2, 3). Note that values can be less + # than 0, or greater than 1 due to trignometric calculations for location of + # the box. + class GoogleCloudVideointelligenceV1beta2NormalizedBoundingPoly + include Google::Apis::Core::Hashable + + # Normalized vertices of the bounding polygon. + # Corresponds to the JSON property `vertices` + # @return [Array] + attr_accessor :vertices + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @vertices = args[:vertices] if args.key?(:vertices) + end + end + + # A vertex represents a 2D point in the image. + # NOTE: the normalized vertex coordinates are relative to the original image + # and range from 0 to 1. + class GoogleCloudVideointelligenceV1beta2NormalizedVertex + include Google::Apis::Core::Hashable + + # X coordinate. + # Corresponds to the JSON property `x` + # @return [Float] + attr_accessor :x + + # Y coordinate. + # Corresponds to the JSON property `y` + # @return [Float] + attr_accessor :y + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @x = args[:x] if args.key?(:x) + @y = args[:y] if args.key?(:y) + end + end + + # Annotations corresponding to one tracked object. + class GoogleCloudVideointelligenceV1beta2ObjectTrackingAnnotation + include Google::Apis::Core::Hashable + + # Object category's labeling confidence of this track. 
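To make the clockwise, top-left-first vertex order described for `GoogleCloudVideointelligenceV1beta2NormalizedBoundingPoly` above concrete, here is a small sketch with invented coordinates for a horizontal text box (require path assumed as in the earlier sketch):

    require 'google/apis/videointelligence_v1p3beta1'

    V1p3 = Google::Apis::VideointelligenceV1p3beta1

    # Corners 0..3, clockwise from the top-left corner, matching the
    # 0----1 / 3----2 diagram in the comment above.
    vertex = ->(x, y) { V1p3::GoogleCloudVideointelligenceV1beta2NormalizedVertex.new(x: x, y: y) }
    poly = V1p3::GoogleCloudVideointelligenceV1beta2NormalizedBoundingPoly.new(
      vertices: [
        vertex.call(0.10, 0.20), # 0: top-left
        vertex.call(0.45, 0.20), # 1: top-right
        vertex.call(0.45, 0.28), # 2: bottom-right
        vertex.call(0.10, 0.28)  # 3: bottom-left
      ]
    )
    poly.vertices.each_with_index { |v, i| puts "vertex #{i}: (#{v.x}, #{v.y})" }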
+ # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Detected entity from video analysis. + # Corresponds to the JSON property `entity` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2Entity] + attr_accessor :entity + + # Information corresponding to all frames where this object track appears. + # Non-streaming batch mode: it may be one or multiple ObjectTrackingFrame + # messages in frames. + # Streaming mode: it can only be one ObjectTrackingFrame message in frames. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment] + attr_accessor :segment + + # Streaming mode ONLY. + # In streaming mode, we do not know the end time of a tracked object + # before it is completed. Hence, there is no VideoSegment info returned. + # Instead, we provide a unique identifiable integer track_id so that + # the customers can correlate the results of the ongoing + # ObjectTrackAnnotation of the same track_id over time. + # Corresponds to the JSON property `trackId` + # @return [Fixnum] + attr_accessor :track_id + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @entity = args[:entity] if args.key?(:entity) + @frames = args[:frames] if args.key?(:frames) + @segment = args[:segment] if args.key?(:segment) + @track_id = args[:track_id] if args.key?(:track_id) + end + end + + # Video frame level annotations for object detection and tracking. This field + # stores per frame location, time offset, and confidence. + class GoogleCloudVideointelligenceV1beta2ObjectTrackingFrame + include Google::Apis::Core::Hashable + + # Normalized bounding box. + # The normalized vertex coordinates are relative to the original image. + # Range: [0, 1]. + # Corresponds to the JSON property `normalizedBoundingBox` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2NormalizedBoundingBox] + attr_accessor :normalized_bounding_box + + # The timestamp of the frame in microseconds. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @normalized_bounding_box = args[:normalized_bounding_box] if args.key?(:normalized_bounding_box) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Alternative hypotheses (a.k.a. n-best list). + class GoogleCloudVideointelligenceV1beta2SpeechRecognitionAlternative + include Google::Apis::Core::Hashable + + # The confidence estimate between 0.0 and 1.0. A higher number + # indicates an estimated greater likelihood that the recognized words are + # correct. This field is typically provided only for the top hypothesis, and + # only for `is_final=true` results. Clients should not rely on the + # `confidence` field as it is not guaranteed to be accurate or consistent. + # The default of 0.0 is a sentinel value indicating `confidence` was not set. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Transcript text representing the words that the user spoke. 
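As a sketch of the object track structure described above for non-streaming batch mode, where a track carries one `VideoSegment` plus frame-level boxes (all values invented, require path assumed as before):

    require 'google/apis/videointelligence_v1p3beta1'

    V1p3 = Google::Apis::VideointelligenceV1p3beta1

    box = V1p3::GoogleCloudVideointelligenceV1beta2NormalizedBoundingBox.new(
      left: 0.10, top: 0.30, right: 0.35, bottom: 0.70
    )
    track = V1p3::GoogleCloudVideointelligenceV1beta2ObjectTrackingAnnotation.new(
      entity: V1p3::GoogleCloudVideointelligenceV1beta2Entity.new(description: 'bicycle'),
      confidence: 0.78,
      segment: V1p3::GoogleCloudVideointelligenceV1beta2VideoSegment.new(
        start_time_offset: '2s', end_time_offset: '5.200s'
      ),
      frames: [
        V1p3::GoogleCloudVideointelligenceV1beta2ObjectTrackingFrame.new(
          normalized_bounding_box: box, time_offset: '2s'
        )
      ]
    )
    puts "#{track.entity.description}: #{track.frames.size} frame(s), " \
         "#{track.segment.start_time_offset}-#{track.segment.end_time_offset}"

In streaming mode, by contrast, the same class would carry only one frame message at a time plus a `track_id`, as the comments above note.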
+ # Corresponds to the JSON property `transcript` + # @return [String] + attr_accessor :transcript + + # A list of word-specific information for each recognized word. + # Corresponds to the JSON property `words` + # @return [Array] + attr_accessor :words + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @transcript = args[:transcript] if args.key?(:transcript) + @words = args[:words] if args.key?(:words) + end + end + + # A speech recognition result corresponding to a portion of the audio. + class GoogleCloudVideointelligenceV1beta2SpeechTranscription + include Google::Apis::Core::Hashable + + # May contain one or more recognition hypotheses (up to the maximum specified + # in `max_alternatives`). These alternatives are ordered in terms of + # accuracy, with the top (first) alternative being the most probable, as + # ranked by the recognizer. + # Corresponds to the JSON property `alternatives` + # @return [Array] + attr_accessor :alternatives + + # Output only. The + # [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the + # language in this result. This language code was detected to have the most + # likelihood of being spoken in the audio. + # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @alternatives = args[:alternatives] if args.key?(:alternatives) + @language_code = args[:language_code] if args.key?(:language_code) + end + end + + # Annotations related to one detected OCR text snippet. This will contain the + # corresponding text, confidence value, and frame level information for each + # detection. + class GoogleCloudVideointelligenceV1beta2TextAnnotation + include Google::Apis::Core::Hashable + + # All video segments where OCR detected text appears. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + # The detected text. + # Corresponds to the JSON property `text` + # @return [String] + attr_accessor :text + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @segments = args[:segments] if args.key?(:segments) + @text = args[:text] if args.key?(:text) + end + end + + # Video frame level annotation results for text annotation (OCR). + # Contains information regarding timestamp and bounding box locations for the + # frames containing detected OCR text snippets. + class GoogleCloudVideointelligenceV1beta2TextFrame + include Google::Apis::Core::Hashable + + # Normalized bounding polygon for text (that might not be aligned with axis). + # Contains list of the corner points in clockwise order starting from + # top-left corner. For example, for a rectangular bounding box: + # When the text is horizontal it might look like: + # 0----1 + # | | + # 3----2 + # When it's clockwise rotated 180 degrees around the top-left corner it + # becomes: + # 2----3 + # | | + # 1----0 + # and the vertex order will still be (0, 1, 2, 3). Note that values can be less + # than 0, or greater than 1 due to trignometric calculations for location of + # the box. + # Corresponds to the JSON property `rotatedBoundingBox` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2NormalizedBoundingPoly] + attr_accessor :rotated_bounding_box + + # Timestamp of this frame. 
+ # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @rotated_bounding_box = args[:rotated_bounding_box] if args.key?(:rotated_bounding_box) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for text detection. + class GoogleCloudVideointelligenceV1beta2TextSegment + include Google::Apis::Core::Hashable + + # Confidence for the track of detected text. It is calculated as the highest + # over all frames where OCR detected text appears. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Information related to the frames where OCR detected text appears. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @frames = args[:frames] if args.key?(:frames) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Annotation progress for a single video. + class GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress + include Google::Apis::Core::Hashable + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Approximate percentage processed thus far. Guaranteed to be + # 100 when fully processed. + # Corresponds to the JSON property `progressPercent` + # @return [Fixnum] + attr_accessor :progress_percent + + # Time when the request was received. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Time of the most recent update. + # Corresponds to the JSON property `updateTime` + # @return [String] + attr_accessor :update_time + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @progress_percent = args[:progress_percent] if args.key?(:progress_percent) + @start_time = args[:start_time] if args.key?(:start_time) + @update_time = args[:update_time] if args.key?(:update_time) + end + end + + # Annotation results for a single video. + class GoogleCloudVideointelligenceV1beta2VideoAnnotationResults + include Google::Apis::Core::Hashable + + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. 
If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus] + attr_accessor :error + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + # Corresponds to the JSON property `explicitAnnotation` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation] + attr_accessor :explicit_annotation + + # Label annotations on frame level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `frameLabelAnnotations` + # @return [Array] + attr_accessor :frame_label_annotations + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Annotations for list of objects detected and tracked in video. + # Corresponds to the JSON property `objectAnnotations` + # @return [Array] + attr_accessor :object_annotations + + # Label annotations on video level or user specified segment level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `segmentLabelAnnotations` + # @return [Array] + attr_accessor :segment_label_annotations + + # Shot annotations. Each shot is represented as a video segment. + # Corresponds to the JSON property `shotAnnotations` + # @return [Array] + attr_accessor :shot_annotations + + # Label annotations on shot level. + # There is exactly one element for each unique label. 
+ # Corresponds to the JSON property `shotLabelAnnotations` + # @return [Array] + attr_accessor :shot_label_annotations + + # Speech transcription. + # Corresponds to the JSON property `speechTranscriptions` + # @return [Array] + attr_accessor :speech_transcriptions + + # OCR text detection and tracking. + # Annotations for list of detected text snippets. Each will have list of + # frame information associated with it. + # Corresponds to the JSON property `textAnnotations` + # @return [Array] + attr_accessor :text_annotations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @error = args[:error] if args.key?(:error) + @explicit_annotation = args[:explicit_annotation] if args.key?(:explicit_annotation) + @frame_label_annotations = args[:frame_label_annotations] if args.key?(:frame_label_annotations) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @object_annotations = args[:object_annotations] if args.key?(:object_annotations) + @segment_label_annotations = args[:segment_label_annotations] if args.key?(:segment_label_annotations) + @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations) + @shot_label_annotations = args[:shot_label_annotations] if args.key?(:shot_label_annotations) + @speech_transcriptions = args[:speech_transcriptions] if args.key?(:speech_transcriptions) + @text_annotations = args[:text_annotations] if args.key?(:text_annotations) + end + end + + # Video segment. + class GoogleCloudVideointelligenceV1beta2VideoSegment + include Google::Apis::Core::Hashable + + # Time-offset, relative to the beginning of the video, + # corresponding to the end of the segment (inclusive). + # Corresponds to the JSON property `endTimeOffset` + # @return [String] + attr_accessor :end_time_offset + + # Time-offset, relative to the beginning of the video, + # corresponding to the start of the segment (inclusive). + # Corresponds to the JSON property `startTimeOffset` + # @return [String] + attr_accessor :start_time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time_offset = args[:end_time_offset] if args.key?(:end_time_offset) + @start_time_offset = args[:start_time_offset] if args.key?(:start_time_offset) + end + end + + # Word-specific information for recognized words. Word information is only + # included in the response when certain request parameters are set, such + # as `enable_word_time_offsets`. + class GoogleCloudVideointelligenceV1beta2WordInfo + include Google::Apis::Core::Hashable + + # Output only. The confidence estimate between 0.0 and 1.0. A higher number + # indicates an estimated greater likelihood that the recognized words are + # correct. This field is set only for the top alternative. + # This field is not guaranteed to be accurate and users should not rely on it + # to be always provided. + # The default of 0.0 is a sentinel value indicating `confidence` was not set. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time offset relative to the beginning of the audio, and + # corresponding to the end of the spoken word. This field is only set if + # `enable_word_time_offsets=true` and only in the top hypothesis. This is an + # experimental feature and the accuracy of the time offset can vary. + # Corresponds to the JSON property `endTime` + # @return [String] + attr_accessor :end_time + + # Output only. 
A distinct integer value is assigned for every speaker within + # the audio. This field specifies which one of those speakers was detected to + # have spoken this word. Value ranges from 1 up to diarization_speaker_count, + # and is only set if speaker diarization is enabled. + # Corresponds to the JSON property `speakerTag` + # @return [Fixnum] + attr_accessor :speaker_tag + + # Time offset relative to the beginning of the audio, and + # corresponding to the start of the spoken word. This field is only set if + # `enable_word_time_offsets=true` and only in the top hypothesis. This is an + # experimental feature and the accuracy of the time offset can vary. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # The word corresponding to this set of information. + # Corresponds to the JSON property `word` + # @return [String] + attr_accessor :word + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @end_time = args[:end_time] if args.key?(:end_time) + @speaker_tag = args[:speaker_tag] if args.key?(:speaker_tag) + @start_time = args[:start_time] if args.key?(:start_time) + @word = args[:word] if args.key?(:word) + end + end + + # Video annotation progress. Included in the `metadata` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoProgress + include Google::Apis::Core::Hashable + + # Progress metadata for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationProgress` + # @return [Array] + attr_accessor :annotation_progress + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress) + end + end + + # Video annotation response. Included in the `response` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoResponse + include Google::Apis::Core::Hashable + + # Annotation results for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationResults` + # @return [Array] + attr_accessor :annotation_results + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_results = args[:annotation_results] if args.key?(:annotation_results) + end + end + + # Detected entity from video analysis. + class GoogleCloudVideointelligenceV1p1beta1Entity + include Google::Apis::Core::Hashable + + # Textual description, e.g. `Fixed-gear bicycle`. + # Corresponds to the JSON property `description` + # @return [String] + attr_accessor :description + + # Opaque entity ID. Some IDs may be available in + # [Google Knowledge Graph Search + # API](https://developers.google.com/knowledge-graph/). + # Corresponds to the JSON property `entityId` + # @return [String] + attr_accessor :entity_id + + # Language code for `description` in BCP-47 format. 
+ # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @description = args[:description] if args.key?(:description) + @entity_id = args[:entity_id] if args.key?(:entity_id) + @language_code = args[:language_code] if args.key?(:language_code) + end + end + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation + include Google::Apis::Core::Hashable + + # All video frames where explicit content was detected. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @frames = args[:frames] if args.key?(:frames) + end + end + + # Video frame level annotation results for explicit content. + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame + include Google::Apis::Core::Hashable + + # Likelihood of the pornography content.. + # Corresponds to the JSON property `pornographyLikelihood` + # @return [String] + attr_accessor :pornography_likelihood + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @pornography_likelihood = args[:pornography_likelihood] if args.key?(:pornography_likelihood) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Label annotation. + class GoogleCloudVideointelligenceV1p1beta1LabelAnnotation + include Google::Apis::Core::Hashable + + # Common categories for the detected entity. + # E.g. when the label is `Terrier` the category is likely `dog`. And in some + # cases there might be more than one categories e.g. `Terrier` could also be + # a `pet`. + # Corresponds to the JSON property `categoryEntities` + # @return [Array] + attr_accessor :category_entities + + # Detected entity from video analysis. + # Corresponds to the JSON property `entity` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1Entity] + attr_accessor :entity + + # All video frames where a label was detected. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # All video segments where a label was detected. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @category_entities = args[:category_entities] if args.key?(:category_entities) + @entity = args[:entity] if args.key?(:entity) + @frames = args[:frames] if args.key?(:frames) + @segments = args[:segments] if args.key?(:segments) + end + end + + # Video frame level annotation results for label detection. + class GoogleCloudVideointelligenceV1p1beta1LabelFrame + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. 
+ # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for label detection. + class GoogleCloudVideointelligenceV1p1beta1LabelSegment + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Normalized bounding box. + # The normalized vertex coordinates are relative to the original image. + # Range: [0, 1]. + class GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox + include Google::Apis::Core::Hashable + + # Bottom Y coordinate. + # Corresponds to the JSON property `bottom` + # @return [Float] + attr_accessor :bottom + + # Left X coordinate. + # Corresponds to the JSON property `left` + # @return [Float] + attr_accessor :left + + # Right X coordinate. + # Corresponds to the JSON property `right` + # @return [Float] + attr_accessor :right + + # Top Y coordinate. + # Corresponds to the JSON property `top` + # @return [Float] + attr_accessor :top + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @bottom = args[:bottom] if args.key?(:bottom) + @left = args[:left] if args.key?(:left) + @right = args[:right] if args.key?(:right) + @top = args[:top] if args.key?(:top) + end + end + + # Normalized bounding polygon for text (that might not be aligned with axis). + # Contains list of the corner points in clockwise order starting from + # top-left corner. For example, for a rectangular bounding box: + # When the text is horizontal it might look like: + # 0----1 + # | | + # 3----2 + # When it's clockwise rotated 180 degrees around the top-left corner it + # becomes: + # 2----3 + # | | + # 1----0 + # and the vertex order will still be (0, 1, 2, 3). Note that values can be less + # than 0, or greater than 1 due to trignometric calculations for location of + # the box. + class GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingPoly + include Google::Apis::Core::Hashable + + # Normalized vertices of the bounding polygon. + # Corresponds to the JSON property `vertices` + # @return [Array] + attr_accessor :vertices + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @vertices = args[:vertices] if args.key?(:vertices) + end + end + + # A vertex represents a 2D point in the image. + # NOTE: the normalized vertex coordinates are relative to the original image + # and range from 0 to 1. 
+ class GoogleCloudVideointelligenceV1p1beta1NormalizedVertex + include Google::Apis::Core::Hashable + + # X coordinate. + # Corresponds to the JSON property `x` + # @return [Float] + attr_accessor :x + + # Y coordinate. + # Corresponds to the JSON property `y` + # @return [Float] + attr_accessor :y + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @x = args[:x] if args.key?(:x) + @y = args[:y] if args.key?(:y) + end + end + + # Annotations corresponding to one tracked object. + class GoogleCloudVideointelligenceV1p1beta1ObjectTrackingAnnotation + include Google::Apis::Core::Hashable + + # Object category's labeling confidence of this track. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Detected entity from video analysis. + # Corresponds to the JSON property `entity` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1Entity] + attr_accessor :entity + + # Information corresponding to all frames where this object track appears. + # Non-streaming batch mode: it may be one or multiple ObjectTrackingFrame + # messages in frames. + # Streaming mode: it can only be one ObjectTrackingFrame message in frames. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment] + attr_accessor :segment + + # Streaming mode ONLY. + # In streaming mode, we do not know the end time of a tracked object + # before it is completed. Hence, there is no VideoSegment info returned. + # Instead, we provide a unique identifiable integer track_id so that + # the customers can correlate the results of the ongoing + # ObjectTrackAnnotation of the same track_id over time. + # Corresponds to the JSON property `trackId` + # @return [Fixnum] + attr_accessor :track_id + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @entity = args[:entity] if args.key?(:entity) + @frames = args[:frames] if args.key?(:frames) + @segment = args[:segment] if args.key?(:segment) + @track_id = args[:track_id] if args.key?(:track_id) + end + end + + # Video frame level annotations for object detection and tracking. This field + # stores per frame location, time offset, and confidence. + class GoogleCloudVideointelligenceV1p1beta1ObjectTrackingFrame + include Google::Apis::Core::Hashable + + # Normalized bounding box. + # The normalized vertex coordinates are relative to the original image. + # Range: [0, 1]. + # Corresponds to the JSON property `normalizedBoundingBox` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox] + attr_accessor :normalized_bounding_box + + # The timestamp of the frame in microseconds. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @normalized_bounding_box = args[:normalized_bounding_box] if args.key?(:normalized_bounding_box) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Alternative hypotheses (a.k.a. n-best list). 
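A rough sketch of one n-best hypothesis for the `GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative` class defined just below (invented values; require path assumed as before):

    require 'google/apis/videointelligence_v1p3beta1'

    V1p3 = Google::Apis::VideointelligenceV1p3beta1

    alt = V1p3::GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative.new(
      transcript: 'welcome to the show',
      confidence: 0.81,
      words: [
        V1p3::GoogleCloudVideointelligenceV1p1beta1WordInfo.new(
          word: 'welcome', start_time: '0s', end_time: '0.600s'
        ),
        V1p3::GoogleCloudVideointelligenceV1p1beta1WordInfo.new(
          word: 'to', start_time: '0.600s', end_time: '0.700s'
        )
      ]
    )
    puts alt.transcript
    alt.words.each { |w| puts "  #{w.word}: #{w.start_time}-#{w.end_time}" }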
+ class GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative + include Google::Apis::Core::Hashable + + # The confidence estimate between 0.0 and 1.0. A higher number + # indicates an estimated greater likelihood that the recognized words are + # correct. This field is typically provided only for the top hypothesis, and + # only for `is_final=true` results. Clients should not rely on the + # `confidence` field as it is not guaranteed to be accurate or consistent. + # The default of 0.0 is a sentinel value indicating `confidence` was not set. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Transcript text representing the words that the user spoke. + # Corresponds to the JSON property `transcript` + # @return [String] + attr_accessor :transcript + + # A list of word-specific information for each recognized word. + # Corresponds to the JSON property `words` + # @return [Array] + attr_accessor :words + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @transcript = args[:transcript] if args.key?(:transcript) + @words = args[:words] if args.key?(:words) + end + end + + # A speech recognition result corresponding to a portion of the audio. + class GoogleCloudVideointelligenceV1p1beta1SpeechTranscription + include Google::Apis::Core::Hashable + + # May contain one or more recognition hypotheses (up to the maximum specified + # in `max_alternatives`). These alternatives are ordered in terms of + # accuracy, with the top (first) alternative being the most probable, as + # ranked by the recognizer. + # Corresponds to the JSON property `alternatives` + # @return [Array] + attr_accessor :alternatives + + # Output only. The + # [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the + # language in this result. This language code was detected to have the most + # likelihood of being spoken in the audio. + # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @alternatives = args[:alternatives] if args.key?(:alternatives) + @language_code = args[:language_code] if args.key?(:language_code) + end + end + + # Annotations related to one detected OCR text snippet. This will contain the + # corresponding text, confidence value, and frame level information for each + # detection. + class GoogleCloudVideointelligenceV1p1beta1TextAnnotation + include Google::Apis::Core::Hashable + + # All video segments where OCR detected text appears. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + # The detected text. + # Corresponds to the JSON property `text` + # @return [String] + attr_accessor :text + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @segments = args[:segments] if args.key?(:segments) + @text = args[:text] if args.key?(:text) + end + end + + # Video frame level annotation results for text annotation (OCR). + # Contains information regarding timestamp and bounding box locations for the + # frames containing detected OCR text snippets. + class GoogleCloudVideointelligenceV1p1beta1TextFrame + include Google::Apis::Core::Hashable + + # Normalized bounding polygon for text (that might not be aligned with axis). 
+ # Contains list of the corner points in clockwise order starting from + # top-left corner. For example, for a rectangular bounding box: + # When the text is horizontal it might look like: + # 0----1 + # | | + # 3----2 + # When it's clockwise rotated 180 degrees around the top-left corner it + # becomes: + # 2----3 + # | | + # 1----0 + # and the vertex order will still be (0, 1, 2, 3). Note that values can be less + # than 0, or greater than 1 due to trignometric calculations for location of + # the box. + # Corresponds to the JSON property `rotatedBoundingBox` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingPoly] + attr_accessor :rotated_bounding_box + + # Timestamp of this frame. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @rotated_bounding_box = args[:rotated_bounding_box] if args.key?(:rotated_bounding_box) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for text detection. + class GoogleCloudVideointelligenceV1p1beta1TextSegment + include Google::Apis::Core::Hashable + + # Confidence for the track of detected text. It is calculated as the highest + # over all frames where OCR detected text appears. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Information related to the frames where OCR detected text appears. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @frames = args[:frames] if args.key?(:frames) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Annotation progress for a single video. + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress + include Google::Apis::Core::Hashable + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Approximate percentage processed thus far. Guaranteed to be + # 100 when fully processed. + # Corresponds to the JSON property `progressPercent` + # @return [Fixnum] + attr_accessor :progress_percent + + # Time when the request was received. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Time of the most recent update. + # Corresponds to the JSON property `updateTime` + # @return [String] + attr_accessor :update_time + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @progress_percent = args[:progress_percent] if args.key?(:progress_percent) + @start_time = args[:start_time] if args.key?(:start_time) + @update_time = args[:update_time] if args.key?(:update_time) + end + end + + # Annotation results for a single video. 
+ class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults + include Google::Apis::Core::Hashable + + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus] + attr_accessor :error + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + # Corresponds to the JSON property `explicitAnnotation` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation] + attr_accessor :explicit_annotation + + # Label annotations on frame level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `frameLabelAnnotations` + # @return [Array] + attr_accessor :frame_label_annotations + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). 
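Putting the per-video result fields described above together: a caller that already holds a deserialized `GoogleCloudVideointelligenceV1p1beta1AnnotateVideoResponse` (assumed here, along with the standard `message` field on `GoogleRpcStatus`) might check each video's embedded error before reading its annotations, along these lines:

    require 'google/apis/videointelligence_v1p3beta1'

    # `response` is assumed to be an already-populated
    # Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1AnnotateVideoResponse.
    def summarize(response)
      response.annotation_results.each do |result|
        if result.error
          warn "#{result.input_uri}: failed (#{result.error.message})"
          next
        end
        (result.segment_label_annotations || []).each do |label|
          puts "#{result.input_uri}: #{label.entity.description}"
        end
      end
    end

Each element of `annotation_results` corresponds to one input video, so per-video failures surface as an embedded `Status` rather than failing the whole operation, matching the partial-error pattern in the comment above.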
+ # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Annotations for list of objects detected and tracked in video. + # Corresponds to the JSON property `objectAnnotations` + # @return [Array] + attr_accessor :object_annotations + + # Label annotations on video level or user specified segment level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `segmentLabelAnnotations` + # @return [Array] + attr_accessor :segment_label_annotations + + # Shot annotations. Each shot is represented as a video segment. + # Corresponds to the JSON property `shotAnnotations` + # @return [Array] + attr_accessor :shot_annotations + + # Label annotations on shot level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `shotLabelAnnotations` + # @return [Array] + attr_accessor :shot_label_annotations + + # Speech transcription. + # Corresponds to the JSON property `speechTranscriptions` + # @return [Array] + attr_accessor :speech_transcriptions + + # OCR text detection and tracking. + # Annotations for list of detected text snippets. Each will have list of + # frame information associated with it. + # Corresponds to the JSON property `textAnnotations` + # @return [Array] + attr_accessor :text_annotations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @error = args[:error] if args.key?(:error) + @explicit_annotation = args[:explicit_annotation] if args.key?(:explicit_annotation) + @frame_label_annotations = args[:frame_label_annotations] if args.key?(:frame_label_annotations) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @object_annotations = args[:object_annotations] if args.key?(:object_annotations) + @segment_label_annotations = args[:segment_label_annotations] if args.key?(:segment_label_annotations) + @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations) + @shot_label_annotations = args[:shot_label_annotations] if args.key?(:shot_label_annotations) + @speech_transcriptions = args[:speech_transcriptions] if args.key?(:speech_transcriptions) + @text_annotations = args[:text_annotations] if args.key?(:text_annotations) + end + end + + # Video segment. + class GoogleCloudVideointelligenceV1p1beta1VideoSegment + include Google::Apis::Core::Hashable + + # Time-offset, relative to the beginning of the video, + # corresponding to the end of the segment (inclusive). + # Corresponds to the JSON property `endTimeOffset` + # @return [String] + attr_accessor :end_time_offset + + # Time-offset, relative to the beginning of the video, + # corresponding to the start of the segment (inclusive). + # Corresponds to the JSON property `startTimeOffset` + # @return [String] + attr_accessor :start_time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time_offset = args[:end_time_offset] if args.key?(:end_time_offset) + @start_time_offset = args[:start_time_offset] if args.key?(:start_time_offset) + end + end + + # Word-specific information for recognized words. Word information is only + # included in the response when certain request parameters are set, such + # as `enable_word_time_offsets`. + class GoogleCloudVideointelligenceV1p1beta1WordInfo + include Google::Apis::Core::Hashable + + # Output only. The confidence estimate between 0.0 and 1.0. 
A higher number + # indicates an estimated greater likelihood that the recognized words are + # correct. This field is set only for the top alternative. + # This field is not guaranteed to be accurate and users should not rely on it + # to be always provided. + # The default of 0.0 is a sentinel value indicating `confidence` was not set. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time offset relative to the beginning of the audio, and + # corresponding to the end of the spoken word. This field is only set if + # `enable_word_time_offsets=true` and only in the top hypothesis. This is an + # experimental feature and the accuracy of the time offset can vary. + # Corresponds to the JSON property `endTime` + # @return [String] + attr_accessor :end_time + + # Output only. A distinct integer value is assigned for every speaker within + # the audio. This field specifies which one of those speakers was detected to + # have spoken this word. Value ranges from 1 up to diarization_speaker_count, + # and is only set if speaker diarization is enabled. + # Corresponds to the JSON property `speakerTag` + # @return [Fixnum] + attr_accessor :speaker_tag + + # Time offset relative to the beginning of the audio, and + # corresponding to the start of the spoken word. This field is only set if + # `enable_word_time_offsets=true` and only in the top hypothesis. This is an + # experimental feature and the accuracy of the time offset can vary. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # The word corresponding to this set of information. + # Corresponds to the JSON property `word` + # @return [String] + attr_accessor :word + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @end_time = args[:end_time] if args.key?(:end_time) + @speaker_tag = args[:speaker_tag] if args.key?(:speaker_tag) + @start_time = args[:start_time] if args.key?(:start_time) + @word = args[:word] if args.key?(:word) + end + end + + # Video annotation progress. Included in the `metadata` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1p2beta1AnnotateVideoProgress + include Google::Apis::Core::Hashable + + # Progress metadata for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationProgress` + # @return [Array] + attr_accessor :annotation_progress + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress) + end + end + + # Video annotation response. Included in the `response` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1p2beta1AnnotateVideoResponse + include Google::Apis::Core::Hashable + + # Annotation results for all videos specified in `AnnotateVideoRequest`. 
+ # Corresponds to the JSON property `annotationResults`
+ # @return [Array]
+ attr_accessor :annotation_results
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @annotation_results = args[:annotation_results] if args.key?(:annotation_results)
+ end
+ end
+
+ # Detected entity from video analysis.
+ class GoogleCloudVideointelligenceV1p2beta1Entity
+ include Google::Apis::Core::Hashable
+
+ # Textual description, e.g. `Fixed-gear bicycle`.
+ # Corresponds to the JSON property `description`
+ # @return [String]
+ attr_accessor :description
+
+ # Opaque entity ID. Some IDs may be available in
+ # [Google Knowledge Graph Search
+ # API](https://developers.google.com/knowledge-graph/).
+ # Corresponds to the JSON property `entityId`
+ # @return [String]
+ attr_accessor :entity_id
+
+ # Language code for `description` in BCP-47 format.
+ # Corresponds to the JSON property `languageCode`
+ # @return [String]
+ attr_accessor :language_code
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @description = args[:description] if args.key?(:description)
+ @entity_id = args[:entity_id] if args.key?(:entity_id)
+ @language_code = args[:language_code] if args.key?(:language_code)
+ end
+ end
+
+ # Explicit content annotation (based on per-frame visual signals only).
+ # If no explicit content has been detected in a frame, no annotations are
+ # present for that frame.
+ class GoogleCloudVideointelligenceV1p2beta1ExplicitContentAnnotation
+ include Google::Apis::Core::Hashable
+
+ # All video frames where explicit content was detected.
+ # Corresponds to the JSON property `frames`
+ # @return [Array]
+ attr_accessor :frames
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @frames = args[:frames] if args.key?(:frames)
+ end
+ end
+
+ # Video frame level annotation results for explicit content.
+ class GoogleCloudVideointelligenceV1p2beta1ExplicitContentFrame
+ include Google::Apis::Core::Hashable
+
+ # Likelihood of the pornography content.
+ # Corresponds to the JSON property `pornographyLikelihood`
+ # @return [String]
+ attr_accessor :pornography_likelihood
+
+ # Time-offset, relative to the beginning of the video, corresponding to the
+ # video frame for this location.
+ # Corresponds to the JSON property `timeOffset`
+ # @return [String]
+ attr_accessor :time_offset
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @pornography_likelihood = args[:pornography_likelihood] if args.key?(:pornography_likelihood)
+ @time_offset = args[:time_offset] if args.key?(:time_offset)
+ end
+ end
+
+ # Label annotation.
+ class GoogleCloudVideointelligenceV1p2beta1LabelAnnotation
+ include Google::Apis::Core::Hashable
+
+ # Common categories for the detected entity.
+ # E.g. when the label is `Terrier` the category is likely `dog`. And in some
+ # cases there might be more than one category, e.g. `Terrier` could also be
+ # a `pet`.
+ # Corresponds to the JSON property `categoryEntities`
+ # @return [Array]
+ attr_accessor :category_entities
+
+ # Detected entity from video analysis.
+ # Corresponds to the JSON property `entity`
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1Entity]
+ attr_accessor :entity
+
+ # All video frames where a label was detected.
+ # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # All video segments where a label was detected. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @category_entities = args[:category_entities] if args.key?(:category_entities) + @entity = args[:entity] if args.key?(:entity) + @frames = args[:frames] if args.key?(:frames) + @segments = args[:segments] if args.key?(:segments) + end + end + + # Video frame level annotation results for label detection. + class GoogleCloudVideointelligenceV1p2beta1LabelFrame + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for label detection. + class GoogleCloudVideointelligenceV1p2beta1LabelSegment + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Normalized bounding box. + # The normalized vertex coordinates are relative to the original image. + # Range: [0, 1]. + class GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingBox + include Google::Apis::Core::Hashable + + # Bottom Y coordinate. + # Corresponds to the JSON property `bottom` + # @return [Float] + attr_accessor :bottom + + # Left X coordinate. + # Corresponds to the JSON property `left` + # @return [Float] + attr_accessor :left + + # Right X coordinate. + # Corresponds to the JSON property `right` + # @return [Float] + attr_accessor :right + + # Top Y coordinate. + # Corresponds to the JSON property `top` + # @return [Float] + attr_accessor :top + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @bottom = args[:bottom] if args.key?(:bottom) + @left = args[:left] if args.key?(:left) + @right = args[:right] if args.key?(:right) + @top = args[:top] if args.key?(:top) + end + end + + # Normalized bounding polygon for text (that might not be aligned with axis). + # Contains list of the corner points in clockwise order starting from + # top-left corner. 
For example, for a rectangular bounding box:
+ # When the text is horizontal it might look like:
+ # 0----1
+ # | |
+ # 3----2
+ # When it's clockwise rotated 180 degrees around the top-left corner it
+ # becomes:
+ # 2----3
+ # | |
+ # 1----0
+ # and the vertex order will still be (0, 1, 2, 3). Note that values can be less
+ # than 0, or greater than 1 due to trigonometric calculations for location of
+ # the box.
+ class GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingPoly
+ include Google::Apis::Core::Hashable
+
+ # Normalized vertices of the bounding polygon.
+ # Corresponds to the JSON property `vertices`
+ # @return [Array]
+ attr_accessor :vertices
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @vertices = args[:vertices] if args.key?(:vertices)
+ end
+ end
+
+ # A vertex represents a 2D point in the image.
+ # NOTE: the normalized vertex coordinates are relative to the original image
+ # and range from 0 to 1.
+ class GoogleCloudVideointelligenceV1p2beta1NormalizedVertex
+ include Google::Apis::Core::Hashable
+
+ # X coordinate.
+ # Corresponds to the JSON property `x`
+ # @return [Float]
+ attr_accessor :x
+
+ # Y coordinate.
+ # Corresponds to the JSON property `y`
+ # @return [Float]
+ attr_accessor :y
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @x = args[:x] if args.key?(:x)
+ @y = args[:y] if args.key?(:y)
+ end
+ end
+
+ # Annotations corresponding to one tracked object.
+ class GoogleCloudVideointelligenceV1p2beta1ObjectTrackingAnnotation
+ include Google::Apis::Core::Hashable
+
+ # Object category's labeling confidence of this track.
+ # Corresponds to the JSON property `confidence`
+ # @return [Float]
+ attr_accessor :confidence
+
+ # Detected entity from video analysis.
+ # Corresponds to the JSON property `entity`
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1Entity]
+ attr_accessor :entity
+
+ # Information corresponding to all frames where this object track appears.
+ # Non-streaming batch mode: it may be one or multiple ObjectTrackingFrame
+ # messages in frames.
+ # Streaming mode: it can only be one ObjectTrackingFrame message in frames.
+ # Corresponds to the JSON property `frames`
+ # @return [Array]
+ attr_accessor :frames
+
+ # Video segment.
+ # Corresponds to the JSON property `segment`
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment]
+ attr_accessor :segment
+
+ # Streaming mode ONLY.
+ # In streaming mode, we do not know the end time of a tracked object
+ # before it is completed. Hence, there is no VideoSegment info returned.
+ # Instead, we provide a uniquely identifiable integer track_id so that
+ # the customers can correlate the results of the ongoing
+ # ObjectTrackAnnotation of the same track_id over time.
+ # Corresponds to the JSON property `trackId`
+ # @return [Fixnum]
+ attr_accessor :track_id
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @confidence = args[:confidence] if args.key?(:confidence)
+ @entity = args[:entity] if args.key?(:entity)
+ @frames = args[:frames] if args.key?(:frames)
+ @segment = args[:segment] if args.key?(:segment)
+ @track_id = args[:track_id] if args.key?(:track_id)
+ end
+ end
+
+ # Video frame level annotations for object detection and tracking.
This field + # stores per frame location, time offset, and confidence. + class GoogleCloudVideointelligenceV1p2beta1ObjectTrackingFrame + include Google::Apis::Core::Hashable + + # Normalized bounding box. + # The normalized vertex coordinates are relative to the original image. + # Range: [0, 1]. + # Corresponds to the JSON property `normalizedBoundingBox` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingBox] + attr_accessor :normalized_bounding_box + + # The timestamp of the frame in microseconds. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @normalized_bounding_box = args[:normalized_bounding_box] if args.key?(:normalized_bounding_box) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Alternative hypotheses (a.k.a. n-best list). + class GoogleCloudVideointelligenceV1p2beta1SpeechRecognitionAlternative + include Google::Apis::Core::Hashable + + # The confidence estimate between 0.0 and 1.0. A higher number + # indicates an estimated greater likelihood that the recognized words are + # correct. This field is typically provided only for the top hypothesis, and + # only for `is_final=true` results. Clients should not rely on the + # `confidence` field as it is not guaranteed to be accurate or consistent. + # The default of 0.0 is a sentinel value indicating `confidence` was not set. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Transcript text representing the words that the user spoke. + # Corresponds to the JSON property `transcript` + # @return [String] + attr_accessor :transcript + + # A list of word-specific information for each recognized word. + # Corresponds to the JSON property `words` + # @return [Array] + attr_accessor :words + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @transcript = args[:transcript] if args.key?(:transcript) + @words = args[:words] if args.key?(:words) + end + end + + # A speech recognition result corresponding to a portion of the audio. + class GoogleCloudVideointelligenceV1p2beta1SpeechTranscription + include Google::Apis::Core::Hashable + + # May contain one or more recognition hypotheses (up to the maximum specified + # in `max_alternatives`). These alternatives are ordered in terms of + # accuracy, with the top (first) alternative being the most probable, as + # ranked by the recognizer. + # Corresponds to the JSON property `alternatives` + # @return [Array] + attr_accessor :alternatives + + # Output only. The + # [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the + # language in this result. This language code was detected to have the most + # likelihood of being spoken in the audio. + # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @alternatives = args[:alternatives] if args.key?(:alternatives) + @language_code = args[:language_code] if args.key?(:language_code) + end + end + + # Annotations related to one detected OCR text snippet. 
This will contain the
+ # corresponding text, confidence value, and frame level information for each
+ # detection.
+ class GoogleCloudVideointelligenceV1p2beta1TextAnnotation
+ include Google::Apis::Core::Hashable
+
+ # All video segments where OCR detected text appears.
+ # Corresponds to the JSON property `segments`
+ # @return [Array]
+ attr_accessor :segments
+
+ # The detected text.
+ # Corresponds to the JSON property `text`
+ # @return [String]
+ attr_accessor :text
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @segments = args[:segments] if args.key?(:segments)
+ @text = args[:text] if args.key?(:text)
+ end
+ end
+
+ # Video frame level annotation results for text annotation (OCR).
+ # Contains information regarding timestamp and bounding box locations for the
+ # frames containing detected OCR text snippets.
+ class GoogleCloudVideointelligenceV1p2beta1TextFrame
+ include Google::Apis::Core::Hashable
+
+ # Normalized bounding polygon for text (that might not be aligned with axis).
+ # Contains list of the corner points in clockwise order starting from
+ # top-left corner. For example, for a rectangular bounding box:
+ # When the text is horizontal it might look like:
+ # 0----1
+ # | |
+ # 3----2
+ # When it's clockwise rotated 180 degrees around the top-left corner it
+ # becomes:
+ # 2----3
+ # | |
+ # 1----0
+ # and the vertex order will still be (0, 1, 2, 3). Note that values can be less
+ # than 0, or greater than 1 due to trigonometric calculations for location of
+ # the box.
+ # Corresponds to the JSON property `rotatedBoundingBox`
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingPoly]
+ attr_accessor :rotated_bounding_box
+
+ # Timestamp of this frame.
+ # Corresponds to the JSON property `timeOffset`
+ # @return [String]
+ attr_accessor :time_offset
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @rotated_bounding_box = args[:rotated_bounding_box] if args.key?(:rotated_bounding_box)
+ @time_offset = args[:time_offset] if args.key?(:time_offset)
+ end
+ end
+
+ # Video segment level annotation results for text detection.
+ class GoogleCloudVideointelligenceV1p2beta1TextSegment
+ include Google::Apis::Core::Hashable
+
+ # Confidence for the track of detected text. It is calculated as the highest
+ # over all frames where OCR detected text appears.
+ # Corresponds to the JSON property `confidence`
+ # @return [Float]
+ attr_accessor :confidence
+
+ # Information related to the frames where OCR detected text appears.
+ # Corresponds to the JSON property `frames`
+ # @return [Array]
+ attr_accessor :frames
+
+ # Video segment.
+ # Corresponds to the JSON property `segment`
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment]
+ attr_accessor :segment
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @confidence = args[:confidence] if args.key?(:confidence)
+ @frames = args[:frames] if args.key?(:frames)
+ @segment = args[:segment] if args.key?(:segment)
+ end
+ end
+
+ # Annotation progress for a single video.
+ class GoogleCloudVideointelligenceV1p2beta1VideoAnnotationProgress
+ include Google::Apis::Core::Hashable
+
+ # Video file location in
+ # [Google Cloud Storage](https://cloud.google.com/storage/).
+ # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Approximate percentage processed thus far. Guaranteed to be + # 100 when fully processed. + # Corresponds to the JSON property `progressPercent` + # @return [Fixnum] + attr_accessor :progress_percent + + # Time when the request was received. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Time of the most recent update. + # Corresponds to the JSON property `updateTime` + # @return [String] + attr_accessor :update_time + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @progress_percent = args[:progress_percent] if args.key?(:progress_percent) + @start_time = args[:start_time] if args.key?(:start_time) + @update_time = args[:update_time] if args.key?(:update_time) + end + end + + # Annotation results for a single video. + class GoogleCloudVideointelligenceV1p2beta1VideoAnnotationResults + include Google::Apis::Core::Hashable + + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. 
If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus] + attr_accessor :error + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + # Corresponds to the JSON property `explicitAnnotation` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1ExplicitContentAnnotation] + attr_accessor :explicit_annotation + + # Label annotations on frame level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `frameLabelAnnotations` + # @return [Array] + attr_accessor :frame_label_annotations + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Annotations for list of objects detected and tracked in video. + # Corresponds to the JSON property `objectAnnotations` + # @return [Array] + attr_accessor :object_annotations + + # Label annotations on video level or user specified segment level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `segmentLabelAnnotations` + # @return [Array] + attr_accessor :segment_label_annotations + + # Shot annotations. Each shot is represented as a video segment. + # Corresponds to the JSON property `shotAnnotations` + # @return [Array] + attr_accessor :shot_annotations + + # Label annotations on shot level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `shotLabelAnnotations` + # @return [Array] + attr_accessor :shot_label_annotations + + # Speech transcription. + # Corresponds to the JSON property `speechTranscriptions` + # @return [Array] + attr_accessor :speech_transcriptions + + # OCR text detection and tracking. + # Annotations for list of detected text snippets. Each will have list of + # frame information associated with it. + # Corresponds to the JSON property `textAnnotations` + # @return [Array] + attr_accessor :text_annotations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @error = args[:error] if args.key?(:error) + @explicit_annotation = args[:explicit_annotation] if args.key?(:explicit_annotation) + @frame_label_annotations = args[:frame_label_annotations] if args.key?(:frame_label_annotations) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @object_annotations = args[:object_annotations] if args.key?(:object_annotations) + @segment_label_annotations = args[:segment_label_annotations] if args.key?(:segment_label_annotations) + @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations) + @shot_label_annotations = args[:shot_label_annotations] if args.key?(:shot_label_annotations) + @speech_transcriptions = args[:speech_transcriptions] if args.key?(:speech_transcriptions) + @text_annotations = args[:text_annotations] if args.key?(:text_annotations) + end + end + + # Video segment. + class GoogleCloudVideointelligenceV1p2beta1VideoSegment + include Google::Apis::Core::Hashable + + # Time-offset, relative to the beginning of the video, + # corresponding to the end of the segment (inclusive). 
+ # Corresponds to the JSON property `endTimeOffset` + # @return [String] + attr_accessor :end_time_offset + + # Time-offset, relative to the beginning of the video, + # corresponding to the start of the segment (inclusive). + # Corresponds to the JSON property `startTimeOffset` + # @return [String] + attr_accessor :start_time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time_offset = args[:end_time_offset] if args.key?(:end_time_offset) + @start_time_offset = args[:start_time_offset] if args.key?(:start_time_offset) + end + end + + # Word-specific information for recognized words. Word information is only + # included in the response when certain request parameters are set, such + # as `enable_word_time_offsets`. + class GoogleCloudVideointelligenceV1p2beta1WordInfo + include Google::Apis::Core::Hashable + + # Output only. The confidence estimate between 0.0 and 1.0. A higher number + # indicates an estimated greater likelihood that the recognized words are + # correct. This field is set only for the top alternative. + # This field is not guaranteed to be accurate and users should not rely on it + # to be always provided. + # The default of 0.0 is a sentinel value indicating `confidence` was not set. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time offset relative to the beginning of the audio, and + # corresponding to the end of the spoken word. This field is only set if + # `enable_word_time_offsets=true` and only in the top hypothesis. This is an + # experimental feature and the accuracy of the time offset can vary. + # Corresponds to the JSON property `endTime` + # @return [String] + attr_accessor :end_time + + # Output only. A distinct integer value is assigned for every speaker within + # the audio. This field specifies which one of those speakers was detected to + # have spoken this word. Value ranges from 1 up to diarization_speaker_count, + # and is only set if speaker diarization is enabled. + # Corresponds to the JSON property `speakerTag` + # @return [Fixnum] + attr_accessor :speaker_tag + + # Time offset relative to the beginning of the audio, and + # corresponding to the start of the spoken word. This field is only set if + # `enable_word_time_offsets=true` and only in the top hypothesis. This is an + # experimental feature and the accuracy of the time offset can vary. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # The word corresponding to this set of information. + # Corresponds to the JSON property `word` + # @return [String] + attr_accessor :word + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @end_time = args[:end_time] if args.key?(:end_time) + @speaker_tag = args[:speaker_tag] if args.key?(:speaker_tag) + @start_time = args[:start_time] if args.key?(:start_time) + @word = args[:word] if args.key?(:word) + end + end + + # Video annotation progress. Included in the `metadata` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoProgress + include Google::Apis::Core::Hashable + + # Progress metadata for all videos specified in `AnnotateVideoRequest`. 
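+ # Illustrative sketch only (not part of the generated API surface): reading this
+ # progress from a long-running operation, assuming `operation.metadata` has
+ # already been deserialized into this class and the per-video entries mirror the
+ # VideoAnnotationProgress fields shown earlier in this file.
+ #
+ #   progress = operation.metadata
+ #   progress.annotation_progress.to_a.each do |video_progress|
+ #     puts "#{video_progress.input_uri}: #{video_progress.progress_percent}%"
+ #   end
+ #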
+ # Corresponds to the JSON property `annotationProgress` + # @return [Array] + attr_accessor :annotation_progress + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress) + end + end + + # Video annotation request. + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoRequest + include Google::Apis::Core::Hashable + + # Requested video annotation features. + # Corresponds to the JSON property `features` + # @return [Array] + attr_accessor :features + + # The video data bytes. + # If unset, the input video(s) should be specified via `input_uri`. + # If set, `input_uri` should be unset. + # Corresponds to the JSON property `inputContent` + # NOTE: Values are automatically base64 encoded/decoded in the client library. + # @return [String] + attr_accessor :input_content + + # Input video location. Currently, only + # [Google Cloud Storage](https://cloud.google.com/storage/) URIs are + # supported, which must be specified in the following format: + # `gs://bucket-id/object-id` (other URI formats return + # google.rpc.Code.INVALID_ARGUMENT). For more information, see + # [Request URIs](/storage/docs/reference-uris). + # A video URI may include wildcards in `object-id`, and thus identify + # multiple videos. Supported wildcards: '*' to match 0 or more characters; + # '?' to match 1 character. If unset, the input video should be embedded + # in the request as `input_content`. If set, `input_content` should be unset. + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Optional cloud region where annotation should take place. Supported cloud + # regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region + # is specified, a region will be determined based on video file location. + # Corresponds to the JSON property `locationId` + # @return [String] + attr_accessor :location_id + + # Optional location where the output (in JSON format) should be stored. + # Currently, only [Google Cloud Storage](https://cloud.google.com/storage/) + # URIs are supported, which must be specified in the following format: + # `gs://bucket-id/object-id` (other URI formats return + # google.rpc.Code.INVALID_ARGUMENT). For more information, see + # [Request URIs](/storage/docs/reference-uris). + # Corresponds to the JSON property `outputUri` + # @return [String] + attr_accessor :output_uri + + # Video context and/or feature-specific parameters. + # Corresponds to the JSON property `videoContext` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoContext] + attr_accessor :video_context + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @features = args[:features] if args.key?(:features) + @input_content = args[:input_content] if args.key?(:input_content) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @location_id = args[:location_id] if args.key?(:location_id) + @output_uri = args[:output_uri] if args.key?(:output_uri) + @video_context = args[:video_context] if args.key?(:video_context) + end + end + + # Video annotation response. Included in the `response` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. 
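+ # Illustrative sketch only (not part of the generated API surface): building the
+ # request defined above and reading this response from a completed operation.
+ # The feature names and URI are placeholders; `operation` is assumed to be a
+ # finished long-running operation whose `response` is deserialized into this class.
+ #
+ #   request = Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1AnnotateVideoRequest.new(
+ #     input_uri: 'gs://bucket-id/object-id',
+ #     features: ['LABEL_DETECTION', 'SHOT_CHANGE_DETECTION']
+ #   )
+ #   # ... submit the request, poll the returned operation, then:
+ #   response = operation.response
+ #   response.annotation_results.to_a.each { |result| puts result.input_uri }
+ #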
+ class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoResponse
+ include Google::Apis::Core::Hashable
+
+ # Annotation results for all videos specified in `AnnotateVideoRequest`.
+ # Corresponds to the JSON property `annotationResults`
+ # @return [Array]
+ attr_accessor :annotation_results
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @annotation_results = args[:annotation_results] if args.key?(:annotation_results)
+ end
+ end
+
+ # Detected entity from video analysis.
+ class GoogleCloudVideointelligenceV1p3beta1Entity
+ include Google::Apis::Core::Hashable
+
+ # Textual description, e.g. `Fixed-gear bicycle`.
+ # Corresponds to the JSON property `description`
+ # @return [String]
+ attr_accessor :description
+
+ # Opaque entity ID. Some IDs may be available in
+ # [Google Knowledge Graph Search
+ # API](https://developers.google.com/knowledge-graph/).
+ # Corresponds to the JSON property `entityId`
+ # @return [String]
+ attr_accessor :entity_id
+
+ # Language code for `description` in BCP-47 format.
+ # Corresponds to the JSON property `languageCode`
+ # @return [String]
+ attr_accessor :language_code
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @description = args[:description] if args.key?(:description)
+ @entity_id = args[:entity_id] if args.key?(:entity_id)
+ @language_code = args[:language_code] if args.key?(:language_code)
+ end
+ end
+
+ # Explicit content annotation (based on per-frame visual signals only).
+ # If no explicit content has been detected in a frame, no annotations are
+ # present for that frame.
+ class GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation
+ include Google::Apis::Core::Hashable
+
+ # All video frames where explicit content was detected.
+ # Corresponds to the JSON property `frames`
+ # @return [Array]
+ attr_accessor :frames
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @frames = args[:frames] if args.key?(:frames)
+ end
+ end
+
+ # Config for EXPLICIT_CONTENT_DETECTION.
+ class GoogleCloudVideointelligenceV1p3beta1ExplicitContentDetectionConfig
+ include Google::Apis::Core::Hashable
+
+ # Model to use for explicit content detection.
+ # Supported values: "builtin/stable" (the default if unset) and
+ # "builtin/latest".
+ # Corresponds to the JSON property `model`
+ # @return [String]
+ attr_accessor :model
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @model = args[:model] if args.key?(:model)
+ end
+ end
+
+ # Video frame level annotation results for explicit content.
+ class GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame
+ include Google::Apis::Core::Hashable
+
+ # Likelihood of the pornography content.
+ # Corresponds to the JSON property `pornographyLikelihood`
+ # @return [String]
+ attr_accessor :pornography_likelihood
+
+ # Time-offset, relative to the beginning of the video, corresponding to the
+ # video frame for this location.
+ # Corresponds to the JSON property `timeOffset`
+ # @return [String]
+ attr_accessor :time_offset
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @pornography_likelihood = args[:pornography_likelihood] if args.key?(:pornography_likelihood)
+ @time_offset = args[:time_offset] if args.key?(:time_offset)
+ end
+ end
+
+ # Label annotation.
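+ # Illustrative sketch only (not part of the generated API surface): iterating
+ # labels taken from an annotation result, assuming `results` has been
+ # deserialized and its fields mirror the VideoAnnotationResults and label
+ # classes defined in this file.
+ #
+ #   results.segment_label_annotations.to_a.each do |label|
+ #     puts label.entity.description
+ #     label.segments.to_a.each do |seg|
+ #       puts "  confidence=#{seg.confidence} #{seg.segment.start_time_offset}..#{seg.segment.end_time_offset}"
+ #     end
+ #   end
+ #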
+ class GoogleCloudVideointelligenceV1p3beta1LabelAnnotation
+ include Google::Apis::Core::Hashable
+
+ # Common categories for the detected entity.
+ # E.g. when the label is `Terrier` the category is likely `dog`. And in some
+ # cases there might be more than one category, e.g. `Terrier` could also be
+ # a `pet`.
+ # Corresponds to the JSON property `categoryEntities`
+ # @return [Array]
+ attr_accessor :category_entities
+
+ # Detected entity from video analysis.
+ # Corresponds to the JSON property `entity`
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1Entity]
+ attr_accessor :entity
+
+ # All video frames where a label was detected.
+ # Corresponds to the JSON property `frames`
+ # @return [Array]
+ attr_accessor :frames
+
+ # All video segments where a label was detected.
+ # Corresponds to the JSON property `segments`
+ # @return [Array]
+ attr_accessor :segments
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @category_entities = args[:category_entities] if args.key?(:category_entities)
+ @entity = args[:entity] if args.key?(:entity)
+ @frames = args[:frames] if args.key?(:frames)
+ @segments = args[:segments] if args.key?(:segments)
+ end
+ end
+
+ # Config for LABEL_DETECTION.
+ class GoogleCloudVideointelligenceV1p3beta1LabelDetectionConfig
+ include Google::Apis::Core::Hashable
+
+ # The confidence threshold used to filter labels from frame-level
+ # detection. If not set, it defaults to 0.4. The valid
+ # range for this threshold is [0.1, 0.9]. Any value set outside of this
+ # range will be clipped.
+ # Note: for best results, please use the default threshold. We will update
+ # the default threshold every time we release a new model.
+ # Corresponds to the JSON property `frameConfidenceThreshold`
+ # @return [Float]
+ attr_accessor :frame_confidence_threshold
+
+ # What labels should be detected with LABEL_DETECTION, in addition to
+ # video-level labels or segment-level labels.
+ # If unspecified, defaults to `SHOT_MODE`.
+ # Corresponds to the JSON property `labelDetectionMode`
+ # @return [String]
+ attr_accessor :label_detection_mode
+
+ # Model to use for label detection.
+ # Supported values: "builtin/stable" (the default if unset) and
+ # "builtin/latest".
+ # Corresponds to the JSON property `model`
+ # @return [String]
+ attr_accessor :model
+
+ # Whether the video has been shot from a stationary (i.e. non-moving) camera.
+ # When set to true, it might improve detection accuracy for moving objects.
+ # Should be used with `SHOT_AND_FRAME_MODE` enabled.
+ # Corresponds to the JSON property `stationaryCamera`
+ # @return [Boolean]
+ attr_accessor :stationary_camera
+ alias_method :stationary_camera?, :stationary_camera
+
+ # The confidence threshold used to filter labels from
+ # video-level and shot-level detections. If not set, it defaults to 0.3.
+ # The valid range for this threshold is [0.1, 0.9]. Any value set
+ # outside of this range will be clipped.
+ # Note: for best results, please use the default threshold. We will update
+ # the default threshold every time we release a new model.
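+ # Illustrative sketch only (not part of the generated API surface): a label
+ # detection config using the documented default thresholds; the values shown
+ # are placeholders.
+ #
+ #   label_config = Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelDetectionConfig.new(
+ #     label_detection_mode: 'SHOT_AND_FRAME_MODE',
+ #     frame_confidence_threshold: 0.4,
+ #     video_confidence_threshold: 0.3
+ #   )
+ #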
+ # Corresponds to the JSON property `videoConfidenceThreshold` + # @return [Float] + attr_accessor :video_confidence_threshold + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @frame_confidence_threshold = args[:frame_confidence_threshold] if args.key?(:frame_confidence_threshold) + @label_detection_mode = args[:label_detection_mode] if args.key?(:label_detection_mode) + @model = args[:model] if args.key?(:model) + @stationary_camera = args[:stationary_camera] if args.key?(:stationary_camera) + @video_confidence_threshold = args[:video_confidence_threshold] if args.key?(:video_confidence_threshold) + end + end + + # Video frame level annotation results for label detection. + class GoogleCloudVideointelligenceV1p3beta1LabelFrame + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for label detection. + class GoogleCloudVideointelligenceV1p3beta1LabelSegment + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Normalized bounding box. + # The normalized vertex coordinates are relative to the original image. + # Range: [0, 1]. + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox + include Google::Apis::Core::Hashable + + # Bottom Y coordinate. + # Corresponds to the JSON property `bottom` + # @return [Float] + attr_accessor :bottom + + # Left X coordinate. + # Corresponds to the JSON property `left` + # @return [Float] + attr_accessor :left + + # Right X coordinate. + # Corresponds to the JSON property `right` + # @return [Float] + attr_accessor :right + + # Top Y coordinate. + # Corresponds to the JSON property `top` + # @return [Float] + attr_accessor :top + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @bottom = args[:bottom] if args.key?(:bottom) + @left = args[:left] if args.key?(:left) + @right = args[:right] if args.key?(:right) + @top = args[:top] if args.key?(:top) + end + end + + # Normalized bounding polygon for text (that might not be aligned with axis). + # Contains list of the corner points in clockwise order starting from + # top-left corner. 
For example, for a rectangular bounding box:
+ # When the text is horizontal it might look like:
+ # 0----1
+ # | |
+ # 3----2
+ # When it's clockwise rotated 180 degrees around the top-left corner it
+ # becomes:
+ # 2----3
+ # | |
+ # 1----0
+ # and the vertex order will still be (0, 1, 2, 3). Note that values can be less
+ # than 0, or greater than 1 due to trigonometric calculations for location of
+ # the box.
+ class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly
+ include Google::Apis::Core::Hashable
+
+ # Normalized vertices of the bounding polygon.
+ # Corresponds to the JSON property `vertices`
+ # @return [Array]
+ attr_accessor :vertices
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @vertices = args[:vertices] if args.key?(:vertices)
+ end
+ end
+
+ # A vertex represents a 2D point in the image.
+ # NOTE: the normalized vertex coordinates are relative to the original image
+ # and range from 0 to 1.
+ class GoogleCloudVideointelligenceV1p3beta1NormalizedVertex
+ include Google::Apis::Core::Hashable
+
+ # X coordinate.
+ # Corresponds to the JSON property `x`
+ # @return [Float]
+ attr_accessor :x
+
+ # Y coordinate.
+ # Corresponds to the JSON property `y`
+ # @return [Float]
+ attr_accessor :y
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @x = args[:x] if args.key?(:x)
+ @y = args[:y] if args.key?(:y)
+ end
+ end
+
+ # Annotations corresponding to one tracked object.
+ class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation
+ include Google::Apis::Core::Hashable
+
+ # Object category's labeling confidence of this track.
+ # Corresponds to the JSON property `confidence`
+ # @return [Float]
+ attr_accessor :confidence
+
+ # Detected entity from video analysis.
+ # Corresponds to the JSON property `entity`
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1Entity]
+ attr_accessor :entity
+
+ # Information corresponding to all frames where this object track appears.
+ # Non-streaming batch mode: it may be one or multiple ObjectTrackingFrame
+ # messages in frames.
+ # Streaming mode: it can only be one ObjectTrackingFrame message in frames.
+ # Corresponds to the JSON property `frames`
+ # @return [Array]
+ attr_accessor :frames
+
+ # Video segment.
+ # Corresponds to the JSON property `segment`
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment]
+ attr_accessor :segment
+
+ # Streaming mode ONLY.
+ # In streaming mode, we do not know the end time of a tracked object
+ # before it is completed. Hence, there is no VideoSegment info returned.
+ # Instead, we provide a uniquely identifiable integer track_id so that
+ # the customers can correlate the results of the ongoing
+ # ObjectTrackAnnotation of the same track_id over time.
+ # Corresponds to the JSON property `trackId`
+ # @return [Fixnum]
+ attr_accessor :track_id
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @confidence = args[:confidence] if args.key?(:confidence)
+ @entity = args[:entity] if args.key?(:entity)
+ @frames = args[:frames] if args.key?(:frames)
+ @segment = args[:segment] if args.key?(:segment)
+ @track_id = args[:track_id] if args.key?(:track_id)
+ end
+ end
+
+ # Video frame level annotations for object detection and tracking.
This field + # stores per frame location, time offset, and confidence. + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame + include Google::Apis::Core::Hashable + + # Normalized bounding box. + # The normalized vertex coordinates are relative to the original image. + # Range: [0, 1]. + # Corresponds to the JSON property `normalizedBoundingBox` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox] + attr_accessor :normalized_bounding_box + + # The timestamp of the frame in microseconds. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @normalized_bounding_box = args[:normalized_bounding_box] if args.key?(:normalized_bounding_box) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Config for SHOT_CHANGE_DETECTION. + class GoogleCloudVideointelligenceV1p3beta1ShotChangeDetectionConfig + include Google::Apis::Core::Hashable + + # Model to use for shot change detection. + # Supported values: "builtin/stable" (the default if unset) and + # "builtin/latest". + # Corresponds to the JSON property `model` + # @return [String] + attr_accessor :model + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @model = args[:model] if args.key?(:model) + end + end + + # Provides "hints" to the speech recognizer to favor specific words and phrases + # in the results. + class GoogleCloudVideointelligenceV1p3beta1SpeechContext + include Google::Apis::Core::Hashable + + # *Optional* A list of strings containing words and phrases "hints" so that + # the speech recognition is more likely to recognize them. This can be used + # to improve the accuracy for specific words and phrases, for example, if + # specific commands are typically spoken by the user. This can also be used + # to add additional words to the vocabulary of the recognizer. See + # [usage limits](https://cloud.google.com/speech/limits#content). + # Corresponds to the JSON property `phrases` + # @return [Array] + attr_accessor :phrases + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @phrases = args[:phrases] if args.key?(:phrases) + end + end + + # Alternative hypotheses (a.k.a. n-best list). + class GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative + include Google::Apis::Core::Hashable + + # The confidence estimate between 0.0 and 1.0. A higher number + # indicates an estimated greater likelihood that the recognized words are + # correct. This field is typically provided only for the top hypothesis, and + # only for `is_final=true` results. Clients should not rely on the + # `confidence` field as it is not guaranteed to be accurate or consistent. + # The default of 0.0 is a sentinel value indicating `confidence` was not set. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Transcript text representing the words that the user spoke. + # Corresponds to the JSON property `transcript` + # @return [String] + attr_accessor :transcript + + # A list of word-specific information for each recognized word. 
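+ # Illustrative sketch only (not part of the generated API surface): reading
+ # word-level timing from an alternative, assuming `alternative` is an instance
+ # of this class and each entry mirrors the WordInfo fields defined in this file.
+ #
+ #   alternative.words.to_a.each do |w|
+ #     puts "#{w.word} #{w.start_time}..#{w.end_time} speaker=#{w.speaker_tag}"
+ #   end
+ #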
+ # Corresponds to the JSON property `words`
+ # @return [Array]
+ attr_accessor :words
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @confidence = args[:confidence] if args.key?(:confidence)
+ @transcript = args[:transcript] if args.key?(:transcript)
+ @words = args[:words] if args.key?(:words)
+ end
+ end
+
+ # A speech recognition result corresponding to a portion of the audio.
+ class GoogleCloudVideointelligenceV1p3beta1SpeechTranscription
+ include Google::Apis::Core::Hashable
+
+ # May contain one or more recognition hypotheses (up to the maximum specified
+ # in `max_alternatives`). These alternatives are ordered in terms of
+ # accuracy, with the top (first) alternative being the most probable, as
+ # ranked by the recognizer.
+ # Corresponds to the JSON property `alternatives`
+ # @return [Array]
+ attr_accessor :alternatives
+
+ # Output only. The
+ # [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the
+ # language in this result. This language code was detected to have the highest
+ # likelihood of being spoken in the audio.
+ # Corresponds to the JSON property `languageCode`
+ # @return [String]
+ attr_accessor :language_code
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @alternatives = args[:alternatives] if args.key?(:alternatives)
+ @language_code = args[:language_code] if args.key?(:language_code)
+ end
+ end
+
+ # Config for SPEECH_TRANSCRIPTION.
+ class GoogleCloudVideointelligenceV1p3beta1SpeechTranscriptionConfig
+ include Google::Apis::Core::Hashable
+
+ # *Optional* For file formats, such as MXF or MKV, supporting multiple audio
+ # tracks, specify up to two tracks. Default: track 0.
+ # Corresponds to the JSON property `audioTracks`
+ # @return [Array]
+ attr_accessor :audio_tracks
+
+ # *Optional*
+ # If set, specifies the estimated number of speakers in the conversation.
+ # If not set, defaults to '2'.
+ # Ignored unless enable_speaker_diarization is set to true.
+ # Corresponds to the JSON property `diarizationSpeakerCount`
+ # @return [Fixnum]
+ attr_accessor :diarization_speaker_count
+
+ # *Optional* If 'true', adds punctuation to recognition result hypotheses.
+ # This feature is only available in select languages. Setting this for
+ # requests in other languages has no effect at all. The default 'false' value
+ # does not add punctuation to result hypotheses. NOTE: "This is currently
+ # offered as an experimental service, complimentary to all users. In the
+ # future this may be exclusively available as a premium feature."
+ # Corresponds to the JSON property `enableAutomaticPunctuation`
+ # @return [Boolean]
+ attr_accessor :enable_automatic_punctuation
+ alias_method :enable_automatic_punctuation?, :enable_automatic_punctuation
+
+ # *Optional* If 'true', enables speaker detection for each recognized word in
+ # the top alternative of the recognition result using a speaker_tag provided
+ # in the WordInfo.
+ # Note: When this is true, we send all the words from the beginning of the
+ # audio for the top alternative in every consecutive response.
+ # This is done in order to improve our speaker tags as our models learn to
+ # identify the speakers in the conversation over time.
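+ # Illustrative sketch only (not part of the generated API surface): a
+ # transcription config with diarization enabled; the language code and speaker
+ # count shown here are placeholders.
+ #
+ #   speech_config = Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1SpeechTranscriptionConfig.new(
+ #     language_code: 'en-US',
+ #     enable_speaker_diarization: true,
+ #     diarization_speaker_count: 2,
+ #     enable_automatic_punctuation: true
+ #   )
+ #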
+ # Corresponds to the JSON property `enableSpeakerDiarization` + # @return [Boolean] + attr_accessor :enable_speaker_diarization + alias_method :enable_speaker_diarization?, :enable_speaker_diarization + + # *Optional* If `true`, the top result includes a list of words and the + # confidence for those words. If `false`, no word-level confidence + # information is returned. The default is `false`. + # Corresponds to the JSON property `enableWordConfidence` + # @return [Boolean] + attr_accessor :enable_word_confidence + alias_method :enable_word_confidence?, :enable_word_confidence + + # *Optional* If set to `true`, the server will attempt to filter out + # profanities, replacing all but the initial character in each filtered word + # with asterisks, e.g. "f***". If set to `false` or omitted, profanities + # won't be filtered out. + # Corresponds to the JSON property `filterProfanity` + # @return [Boolean] + attr_accessor :filter_profanity + alias_method :filter_profanity?, :filter_profanity + + # *Required* The language of the supplied audio as a + # [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. + # Example: "en-US". + # See [Language Support](https://cloud.google.com/speech/docs/languages) + # for a list of the currently supported language codes. + # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + # *Optional* Maximum number of recognition hypotheses to be returned. + # Specifically, the maximum number of `SpeechRecognitionAlternative` messages + # within each `SpeechTranscription`. The server may return fewer than + # `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1` will + # return a maximum of one. If omitted, will return a maximum of one. + # Corresponds to the JSON property `maxAlternatives` + # @return [Fixnum] + attr_accessor :max_alternatives + + # *Optional* A means to provide context to assist the speech recognition. + # Corresponds to the JSON property `speechContexts` + # @return [Array] + attr_accessor :speech_contexts + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @audio_tracks = args[:audio_tracks] if args.key?(:audio_tracks) + @diarization_speaker_count = args[:diarization_speaker_count] if args.key?(:diarization_speaker_count) + @enable_automatic_punctuation = args[:enable_automatic_punctuation] if args.key?(:enable_automatic_punctuation) + @enable_speaker_diarization = args[:enable_speaker_diarization] if args.key?(:enable_speaker_diarization) + @enable_word_confidence = args[:enable_word_confidence] if args.key?(:enable_word_confidence) + @filter_profanity = args[:filter_profanity] if args.key?(:filter_profanity) + @language_code = args[:language_code] if args.key?(:language_code) + @max_alternatives = args[:max_alternatives] if args.key?(:max_alternatives) + @speech_contexts = args[:speech_contexts] if args.key?(:speech_contexts) + end + end + + # `StreamingAnnotateVideoResponse` is the only message returned to the client + # by `StreamingAnnotateVideo`. A series of zero or more + # `StreamingAnnotateVideoResponse` messages are streamed back to the client. + class GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse + include Google::Apis::Core::Hashable + + # Streaming annotation results corresponding to a portion of the video + # that is currently being processed. 
+ # Corresponds to the JSON property `annotationResults` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults] + attr_accessor :annotation_results + + # GCS URI that stores annotation results of one streaming session. + # It is a directory that can hold multiple files in JSON format. + # Example uri format: + # gs://bucket_id/object_id/cloud_project_name-session_id + # Corresponds to the JSON property `annotationResultsUri` + # @return [String] + attr_accessor :annotation_results_uri + + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. 
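+ # Illustrative sketch (editor's example, not part of the generated client):
+ # a caller might check this status before reading results, for instance:
+ #
+ #   if response.error
+ #     warn "streaming annotation failed (#{response.error.code}): #{response.error.message}"
+ #   end
+ #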
+ # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus] + attr_accessor :error + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_results = args[:annotation_results] if args.key?(:annotation_results) + @annotation_results_uri = args[:annotation_results_uri] if args.key?(:annotation_results_uri) + @error = args[:error] if args.key?(:error) + end + end + + # Streaming annotation results corresponding to a portion of the video + # that is currently being processed. + class GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults + include Google::Apis::Core::Hashable + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + # Corresponds to the JSON property `explicitAnnotation` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation] + attr_accessor :explicit_annotation + + # Label annotation results. + # Corresponds to the JSON property `labelAnnotations` + # @return [Array] + attr_accessor :label_annotations + + # Object tracking results. + # Corresponds to the JSON property `objectAnnotations` + # @return [Array] + attr_accessor :object_annotations + + # Shot annotation results. Each shot is represented as a video segment. + # Corresponds to the JSON property `shotAnnotations` + # @return [Array] + attr_accessor :shot_annotations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @explicit_annotation = args[:explicit_annotation] if args.key?(:explicit_annotation) + @label_annotations = args[:label_annotations] if args.key?(:label_annotations) + @object_annotations = args[:object_annotations] if args.key?(:object_annotations) + @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations) + end + end + + # Annotations related to one detected OCR text snippet. This will contain the + # corresponding text, confidence value, and frame level information for each + # detection. + class GoogleCloudVideointelligenceV1p3beta1TextAnnotation + include Google::Apis::Core::Hashable + + # All video segments where OCR detected text appears. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + # The detected text. + # Corresponds to the JSON property `text` + # @return [String] + attr_accessor :text + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @segments = args[:segments] if args.key?(:segments) + @text = args[:text] if args.key?(:text) + end + end + + # Config for TEXT_DETECTION. + class GoogleCloudVideointelligenceV1p3beta1TextDetectionConfig + include Google::Apis::Core::Hashable + + # Language hint can be specified if the language to be detected is known a + # priori. It can increase the accuracy of the detection. Language hint must + # be language code in BCP-47 format. + # Automatic language detection is performed if no hint is provided. 
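+ # Illustrative sketch (editor's example, not part of the generated client):
+ # hints are plain BCP-47 codes, for instance:
+ #
+ #   v1p3 = Google::Apis::VideointelligenceV1p3beta1
+ #   text_config = v1p3::GoogleCloudVideointelligenceV1p3beta1TextDetectionConfig.new(
+ #     language_hints: ['en-US'])
+ #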
+ # Corresponds to the JSON property `languageHints`
+ # @return [Array<String>]
+ attr_accessor :language_hints
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @language_hints = args[:language_hints] if args.key?(:language_hints)
+ end
+ end
+
+ # Video frame level annotation results for text annotation (OCR).
+ # Contains information regarding timestamp and bounding box locations for the
+ # frames containing detected OCR text snippets.
+ class GoogleCloudVideointelligenceV1p3beta1TextFrame
+ include Google::Apis::Core::Hashable
+
+ # Normalized bounding polygon for text (that might not be aligned with axis).
+ # Contains list of the corner points in clockwise order starting from
+ # top-left corner. For example, for a rectangular bounding box:
+ # When the text is horizontal it might look like:
+ # 0----1
+ # |    |
+ # 3----2
+ # When it's clockwise rotated 180 degrees around the top-left corner it
+ # becomes:
+ # 2----3
+ # |    |
+ # 1----0
+ # and the vertex order will still be (0, 1, 2, 3). Note that values can be less
+ # than 0, or greater than 1 due to trigonometric calculations for location of
+ # the box.
+ # Corresponds to the JSON property `rotatedBoundingBox`
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly]
+ attr_accessor :rotated_bounding_box
+
+ # Timestamp of this frame.
+ # Corresponds to the JSON property `timeOffset`
+ # @return [String]
+ attr_accessor :time_offset
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @rotated_bounding_box = args[:rotated_bounding_box] if args.key?(:rotated_bounding_box)
+ @time_offset = args[:time_offset] if args.key?(:time_offset)
+ end
+ end
+
+ # Video segment level annotation results for text detection.
+ class GoogleCloudVideointelligenceV1p3beta1TextSegment
+ include Google::Apis::Core::Hashable
+
+ # Confidence for the track of detected text. It is calculated as the highest
+ # over all frames where OCR detected text appears.
+ # Corresponds to the JSON property `confidence`
+ # @return [Float]
+ attr_accessor :confidence
+
+ # Information related to the frames where OCR detected text appears.
+ # Corresponds to the JSON property `frames`
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1TextFrame>]
+ attr_accessor :frames
+
+ # Video segment.
+ # Corresponds to the JSON property `segment`
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment]
+ attr_accessor :segment
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @confidence = args[:confidence] if args.key?(:confidence)
+ @frames = args[:frames] if args.key?(:frames)
+ @segment = args[:segment] if args.key?(:segment)
+ end
+ end
+
+ # Annotation progress for a single video.
+ class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress
+ include Google::Apis::Core::Hashable
+
+ # Video file location in
+ # [Google Cloud Storage](https://cloud.google.com/storage/).
+ # Corresponds to the JSON property `inputUri`
+ # @return [String]
+ attr_accessor :input_uri
+
+ # Approximate percentage processed thus far. Guaranteed to be
+ # 100 when fully processed.
+ # Corresponds to the JSON property `progressPercent`
+ # @return [Fixnum]
+ attr_accessor :progress_percent
+
+ # Time when the request was received.
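+ # Illustrative sketch (editor's example, not part of the generated client):
+ # a simple progress report from a `progress` message of this class, for
+ # instance:
+ #
+ #   puts "#{progress.input_uri}: #{progress.progress_percent}% " \
+ #        "(received #{progress.start_time}, updated #{progress.update_time})"
+ #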
+ # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Time of the most recent update. + # Corresponds to the JSON property `updateTime` + # @return [String] + attr_accessor :update_time + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @progress_percent = args[:progress_percent] if args.key?(:progress_percent) + @start_time = args[:start_time] if args.key?(:start_time) + @update_time = args[:update_time] if args.key?(:update_time) + end + end + + # Annotation results for a single video. + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults + include Google::Apis::Core::Hashable + + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus] + attr_accessor :error + + # Explicit content annotation (based on per-frame visual signals only). 
+ # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + # Corresponds to the JSON property `explicitAnnotation` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation] + attr_accessor :explicit_annotation + + # Label annotations on frame level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `frameLabelAnnotations` + # @return [Array] + attr_accessor :frame_label_annotations + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Annotations for list of objects detected and tracked in video. + # Corresponds to the JSON property `objectAnnotations` + # @return [Array] + attr_accessor :object_annotations + + # Label annotations on video level or user specified segment level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `segmentLabelAnnotations` + # @return [Array] + attr_accessor :segment_label_annotations + + # Shot annotations. Each shot is represented as a video segment. + # Corresponds to the JSON property `shotAnnotations` + # @return [Array] + attr_accessor :shot_annotations + + # Label annotations on shot level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `shotLabelAnnotations` + # @return [Array] + attr_accessor :shot_label_annotations + + # Speech transcription. + # Corresponds to the JSON property `speechTranscriptions` + # @return [Array] + attr_accessor :speech_transcriptions + + # OCR text detection and tracking. + # Annotations for list of detected text snippets. Each will have list of + # frame information associated with it. + # Corresponds to the JSON property `textAnnotations` + # @return [Array] + attr_accessor :text_annotations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @error = args[:error] if args.key?(:error) + @explicit_annotation = args[:explicit_annotation] if args.key?(:explicit_annotation) + @frame_label_annotations = args[:frame_label_annotations] if args.key?(:frame_label_annotations) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @object_annotations = args[:object_annotations] if args.key?(:object_annotations) + @segment_label_annotations = args[:segment_label_annotations] if args.key?(:segment_label_annotations) + @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations) + @shot_label_annotations = args[:shot_label_annotations] if args.key?(:shot_label_annotations) + @speech_transcriptions = args[:speech_transcriptions] if args.key?(:speech_transcriptions) + @text_annotations = args[:text_annotations] if args.key?(:text_annotations) + end + end + + # Video context and/or feature-specific parameters. + class GoogleCloudVideointelligenceV1p3beta1VideoContext + include Google::Apis::Core::Hashable + + # Config for EXPLICIT_CONTENT_DETECTION. + # Corresponds to the JSON property `explicitContentDetectionConfig` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentDetectionConfig] + attr_accessor :explicit_content_detection_config + + # Config for LABEL_DETECTION. 
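+ # Illustrative sketch (editor's example, not part of the generated client):
+ # a context combining the feature configs defined in this file, for instance:
+ #
+ #   v1p3 = Google::Apis::VideointelligenceV1p3beta1
+ #   context = v1p3::GoogleCloudVideointelligenceV1p3beta1VideoContext.new(
+ #     speech_transcription_config: speech_config,
+ #     text_detection_config: text_config)
+ #
+ # where `speech_config` and `text_config` are instances of the
+ # SpeechTranscriptionConfig and TextDetectionConfig classes above.
+ #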
+ # Corresponds to the JSON property `labelDetectionConfig` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelDetectionConfig] + attr_accessor :label_detection_config + + # Video segments to annotate. The segments may overlap and are not required + # to be contiguous or span the whole video. If unspecified, each video is + # treated as a single segment. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + # Config for SHOT_CHANGE_DETECTION. + # Corresponds to the JSON property `shotChangeDetectionConfig` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ShotChangeDetectionConfig] + attr_accessor :shot_change_detection_config + + # Config for SPEECH_TRANSCRIPTION. + # Corresponds to the JSON property `speechTranscriptionConfig` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1SpeechTranscriptionConfig] + attr_accessor :speech_transcription_config + + # Config for TEXT_DETECTION. + # Corresponds to the JSON property `textDetectionConfig` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1TextDetectionConfig] + attr_accessor :text_detection_config + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @explicit_content_detection_config = args[:explicit_content_detection_config] if args.key?(:explicit_content_detection_config) + @label_detection_config = args[:label_detection_config] if args.key?(:label_detection_config) + @segments = args[:segments] if args.key?(:segments) + @shot_change_detection_config = args[:shot_change_detection_config] if args.key?(:shot_change_detection_config) + @speech_transcription_config = args[:speech_transcription_config] if args.key?(:speech_transcription_config) + @text_detection_config = args[:text_detection_config] if args.key?(:text_detection_config) + end + end + + # Video segment. + class GoogleCloudVideointelligenceV1p3beta1VideoSegment + include Google::Apis::Core::Hashable + + # Time-offset, relative to the beginning of the video, + # corresponding to the end of the segment (inclusive). + # Corresponds to the JSON property `endTimeOffset` + # @return [String] + attr_accessor :end_time_offset + + # Time-offset, relative to the beginning of the video, + # corresponding to the start of the segment (inclusive). + # Corresponds to the JSON property `startTimeOffset` + # @return [String] + attr_accessor :start_time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time_offset = args[:end_time_offset] if args.key?(:end_time_offset) + @start_time_offset = args[:start_time_offset] if args.key?(:start_time_offset) + end + end + + # Word-specific information for recognized words. Word information is only + # included in the response when certain request parameters are set, such + # as `enable_word_time_offsets`. + class GoogleCloudVideointelligenceV1p3beta1WordInfo + include Google::Apis::Core::Hashable + + # Output only. The confidence estimate between 0.0 and 1.0. A higher number + # indicates an estimated greater likelihood that the recognized words are + # correct. This field is set only for the top alternative. + # This field is not guaranteed to be accurate and users should not rely on it + # to be always provided. + # The default of 0.0 is a sentinel value indicating `confidence` was not set. 
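+ # Illustrative sketch (editor's example, not part of the generated client):
+ # word-level details can be read from the top alternative of a
+ # `transcription` (a SpeechTranscription from this file), for instance:
+ #
+ #   best = transcription.alternatives&.first
+ #   (best&.words || []).each do |info|
+ #     puts "#{info.word} [speaker #{info.speaker_tag}] #{info.start_time}-#{info.end_time}"
+ #   end
+ #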
+ # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time offset relative to the beginning of the audio, and + # corresponding to the end of the spoken word. This field is only set if + # `enable_word_time_offsets=true` and only in the top hypothesis. This is an + # experimental feature and the accuracy of the time offset can vary. + # Corresponds to the JSON property `endTime` + # @return [String] + attr_accessor :end_time + + # Output only. A distinct integer value is assigned for every speaker within + # the audio. This field specifies which one of those speakers was detected to + # have spoken this word. Value ranges from 1 up to diarization_speaker_count, + # and is only set if speaker diarization is enabled. + # Corresponds to the JSON property `speakerTag` + # @return [Fixnum] + attr_accessor :speaker_tag + + # Time offset relative to the beginning of the audio, and + # corresponding to the start of the spoken word. This field is only set if + # `enable_word_time_offsets=true` and only in the top hypothesis. This is an + # experimental feature and the accuracy of the time offset can vary. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # The word corresponding to this set of information. + # Corresponds to the JSON property `word` + # @return [String] + attr_accessor :word + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @end_time = args[:end_time] if args.key?(:end_time) + @speaker_tag = args[:speaker_tag] if args.key?(:speaker_tag) + @start_time = args[:start_time] if args.key?(:start_time) + @word = args[:word] if args.key?(:word) + end + end + + # This resource represents a long-running operation that is the result of a + # network API call. + class GoogleLongrunningOperation + include Google::Apis::Core::Hashable + + # If the value is `false`, it means the operation is still in progress. + # If `true`, the operation is completed, and either `error` or `response` is + # available. + # Corresponds to the JSON property `done` + # @return [Boolean] + attr_accessor :done + alias_method :done?, :done + + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. 
When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus] + attr_accessor :error + + # Service-specific metadata associated with the operation. It typically + # contains progress information and common metadata such as create time. + # Some services might not provide such metadata. Any method that returns a + # long-running operation should document the metadata type, if any. + # Corresponds to the JSON property `metadata` + # @return [Hash] + attr_accessor :metadata + + # The server-assigned name, which is only unique within the same service that + # originally returns it. If you use the default HTTP mapping, the + # `name` should have the format of `operations/some/unique/name`. + # Corresponds to the JSON property `name` + # @return [String] + attr_accessor :name + + # The normal response of the operation in case of success. If the original + # method returns no data on success, such as `Delete`, the response is + # `google.protobuf.Empty`. If the original method is standard + # `Get`/`Create`/`Update`, the response should be the resource. For other + # methods, the response should have the type `XxxResponse`, where `Xxx` + # is the original method name. For example, if the original method name + # is `TakeSnapshot()`, the inferred response type is + # `TakeSnapshotResponse`. + # Corresponds to the JSON property `response` + # @return [Hash] + attr_accessor :response + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @done = args[:done] if args.key?(:done) + @error = args[:error] if args.key?(:error) + @metadata = args[:metadata] if args.key?(:metadata) + @name = args[:name] if args.key?(:name) + @response = args[:response] if args.key?(:response) + end + end + + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). 
The error model is designed to be:
+ # - Simple to use and understand for most users
+ # - Flexible enough to meet unexpected needs
+ # # Overview
+ # The `Status` message contains three pieces of data: error code, error
+ # message, and error details. The error code should be an enum value of
+ # google.rpc.Code, but it may accept additional error codes if needed. The
+ # error message should be a developer-facing English message that helps
+ # developers *understand* and *resolve* the error. If a localized user-facing
+ # error message is needed, put the localized message in the error details or
+ # localize it in the client. The optional error details may contain arbitrary
+ # information about the error. There is a predefined set of error detail types
+ # in the package `google.rpc` that can be used for common error conditions.
+ # # Language mapping
+ # The `Status` message is the logical representation of the error model, but it
+ # is not necessarily the actual wire format. When the `Status` message is
+ # exposed in different client libraries and different wire protocols, it can be
+ # mapped differently. For example, it will likely be mapped to some exceptions
+ # in Java, but more likely mapped to some error codes in C.
+ # # Other uses
+ # The error model and the `Status` message can be used in a variety of
+ # environments, either with or without APIs, to provide a
+ # consistent developer experience across different environments.
+ # Example uses of this error model include:
+ # - Partial errors. If a service needs to return partial errors to the client,
+ # it may embed the `Status` in the normal response to indicate the partial
+ # errors.
+ # - Workflow errors. A typical workflow has multiple steps. Each step may
+ # have a `Status` message for error reporting.
+ # - Batch operations. If a client uses batch request and batch response, the
+ # `Status` message should be used directly inside batch response, one for
+ # each error sub-response.
+ # - Asynchronous operations. If an API call embeds asynchronous operation
+ # results in its response, the status of those operations should be
+ # represented directly using the `Status` message.
+ # - Logging. If some API errors are stored in logs, the message `Status` could
+ # be used directly after any stripping needed for security/privacy reasons.
+ class GoogleRpcStatus
+ include Google::Apis::Core::Hashable
+
+ # The status code, which should be an enum value of google.rpc.Code.
+ # Corresponds to the JSON property `code`
+ # @return [Fixnum]
+ attr_accessor :code
+
+ # A list of messages that carry the error details. There is a common set of
+ # message types for APIs to use.
+ # Corresponds to the JSON property `details`
+ # @return [Array<Hash<String,Object>>]
+ attr_accessor :details
+
+ # A developer-facing error message, which should be in English. Any
+ # user-facing error message should be localized and sent in the
+ # google.rpc.Status.details field, or localized by the client.
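+ # Illustrative sketch (editor's example, not part of the generated client):
+ # this status typically surfaces through the long-running `operation` class
+ # defined above in this file; a caller might inspect it as, for instance:
+ #
+ #   if operation.done?
+ #     raise "#{operation.error.code}: #{operation.error.message}" if operation.error
+ #     annotate_response = operation.response
+ #   end
+ #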
+ # Corresponds to the JSON property `message` + # @return [String] + attr_accessor :message + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @code = args[:code] if args.key?(:code) + @details = args[:details] if args.key?(:details) + @message = args[:message] if args.key?(:message) + end + end + end + end +end diff --git a/generated/google/apis/videointelligence_v1p3beta1/representations.rb b/generated/google/apis/videointelligence_v1p3beta1/representations.rb new file mode 100644 index 000000000..f76702cff --- /dev/null +++ b/generated/google/apis/videointelligence_v1p3beta1/representations.rb @@ -0,0 +1,2005 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'date' +require 'google/apis/core/base_service' +require 'google/apis/core/json_representation' +require 'google/apis/core/hashable' +require 'google/apis/errors' + +module Google + module Apis + module VideointelligenceV1p3beta1 + + class GoogleCloudVideointelligenceV1AnnotateVideoProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1AnnotateVideoResponse + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1Entity + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1ExplicitContentAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1ExplicitContentFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1LabelAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1LabelFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1LabelSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1NormalizedBoundingBox + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1NormalizedBoundingPoly + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1NormalizedVertex + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class 
GoogleCloudVideointelligenceV1ObjectTrackingAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1ObjectTrackingFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1SpeechRecognitionAlternative + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1SpeechTranscription + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1TextAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1TextFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1TextSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1VideoAnnotationProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1VideoAnnotationResults + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1VideoSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1WordInfo + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2AnnotateVideoProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2AnnotateVideoResponse + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2Entity + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2ExplicitContentFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2LabelAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2LabelFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2LabelSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class 
GoogleCloudVideointelligenceV1beta2NormalizedBoundingBox + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2NormalizedBoundingPoly + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2NormalizedVertex + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2ObjectTrackingAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2ObjectTrackingFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2SpeechRecognitionAlternative + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2SpeechTranscription + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2TextAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2TextFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2TextSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2VideoAnnotationResults + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2VideoSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2WordInfo + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoResponse + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1Entity + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include 
Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1LabelAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1LabelFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1LabelSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingPoly + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1NormalizedVertex + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1ObjectTrackingAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1ObjectTrackingFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1SpeechTranscription + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1TextAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1TextFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1TextSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1VideoSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1WordInfo + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1AnnotateVideoProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1AnnotateVideoResponse + class 
Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1Entity + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1ExplicitContentAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1ExplicitContentFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1LabelAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1LabelFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1LabelSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingBox + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingPoly + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1NormalizedVertex + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1ObjectTrackingAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1ObjectTrackingFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1SpeechRecognitionAlternative + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1SpeechTranscription + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1TextAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1TextFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1TextSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1VideoAnnotationProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1VideoAnnotationResults + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + 
class GoogleCloudVideointelligenceV1p2beta1VideoSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p2beta1WordInfo + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoRequest + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoResponse + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1Entity + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentDetectionConfig + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1LabelAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1LabelDetectionConfig + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1LabelFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1LabelSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1NormalizedVertex + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1ShotChangeDetectionConfig + class Representation < 
Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1SpeechContext + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1SpeechTranscription + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1SpeechTranscriptionConfig + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1TextAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1TextDetectionConfig + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1TextFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1TextSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1VideoContext + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1VideoSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p3beta1WordInfo + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleLongrunningOperation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleRpcStatus + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1AnnotateVideoProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_progress, as: 'annotationProgress', class: 
Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoAnnotationProgress::Representation + + end + end + + class GoogleCloudVideointelligenceV1AnnotateVideoResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoAnnotationResults::Representation + + end + end + + class GoogleCloudVideointelligenceV1Entity + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :description, as: 'description' + property :entity_id, as: 'entityId' + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1ExplicitContentAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1ExplicitContentFrame::Representation + + end + end + + class GoogleCloudVideointelligenceV1ExplicitContentFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :pornography_likelihood, as: 'pornographyLikelihood' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1LabelAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1Entity, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1Entity::Representation + + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1Entity, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelFrame::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1LabelFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1LabelSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1NormalizedBoundingBox + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :bottom, as: 'bottom' + property :left, 
as: 'left' + property :right, as: 'right' + property :top, as: 'top' + end + end + + class GoogleCloudVideointelligenceV1NormalizedBoundingPoly + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :vertices, as: 'vertices', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1NormalizedVertex, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1NormalizedVertex::Representation + + end + end + + class GoogleCloudVideointelligenceV1NormalizedVertex + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :x, as: 'x' + property :y, as: 'y' + end + end + + class GoogleCloudVideointelligenceV1ObjectTrackingAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1Entity, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1ObjectTrackingFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1ObjectTrackingFrame::Representation + + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment::Representation + + property :track_id, :numeric_string => true, as: 'trackId' + end + end + + class GoogleCloudVideointelligenceV1ObjectTrackingFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :normalized_bounding_box, as: 'normalizedBoundingBox', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1NormalizedBoundingBox, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1NormalizedBoundingBox::Representation + + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1SpeechRecognitionAlternative + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :transcript, as: 'transcript' + collection :words, as: 'words', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1WordInfo, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1WordInfo::Representation + + end + end + + class GoogleCloudVideointelligenceV1SpeechTranscription + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :alternatives, as: 'alternatives', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1SpeechRecognitionAlternative, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1SpeechRecognitionAlternative::Representation + + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1TextAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1TextSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1TextSegment::Representation + + property :text, as: 'text' + end + end + + class 
GoogleCloudVideointelligenceV1TextFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :rotated_bounding_box, as: 'rotatedBoundingBox', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1NormalizedBoundingPoly, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1NormalizedBoundingPoly::Representation + + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1TextSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1TextFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1TextFrame::Representation + + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1VideoAnnotationProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :input_uri, as: 'inputUri' + property :progress_percent, as: 'progressPercent' + property :start_time, as: 'startTime' + property :update_time, as: 'updateTime' + end + end + + class GoogleCloudVideointelligenceV1VideoAnnotationResults + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :error, as: 'error', class: Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus::Representation + + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1ExplicitContentAnnotation::Representation + + collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelAnnotation::Representation + + property :input_uri, as: 'inputUri' + collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1ObjectTrackingAnnotation::Representation + + collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelAnnotation::Representation + + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment::Representation + + collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelAnnotation::Representation + + collection :speech_transcriptions, as: 
'speechTranscriptions', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1SpeechTranscription, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1SpeechTranscription::Representation + + collection :text_annotations, as: 'textAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1TextAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1TextAnnotation::Representation + + end + end + + class GoogleCloudVideointelligenceV1VideoSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :end_time_offset, as: 'endTimeOffset' + property :start_time_offset, as: 'startTimeOffset' + end + end + + class GoogleCloudVideointelligenceV1WordInfo + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :end_time, as: 'endTime' + property :speaker_tag, as: 'speakerTag' + property :start_time, as: 'startTime' + property :word, as: 'word' + end + end + + class GoogleCloudVideointelligenceV1beta2AnnotateVideoProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2AnnotateVideoResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoAnnotationResults::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2Entity + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :description, as: 'description' + property :entity_id, as: 'entityId' + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2ExplicitContentFrame::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2ExplicitContentFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :pornography_likelihood, as: 'pornographyLikelihood' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1beta2LabelAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2Entity, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2Entity::Representation + + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2Entity, decorator: 
Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelFrame::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2LabelFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1beta2LabelSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2NormalizedBoundingBox + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :bottom, as: 'bottom' + property :left, as: 'left' + property :right, as: 'right' + property :top, as: 'top' + end + end + + class GoogleCloudVideointelligenceV1beta2NormalizedBoundingPoly + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :vertices, as: 'vertices', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2NormalizedVertex, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2NormalizedVertex::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2NormalizedVertex + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :x, as: 'x' + property :y, as: 'y' + end + end + + class GoogleCloudVideointelligenceV1beta2ObjectTrackingAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2Entity, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2ObjectTrackingFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2ObjectTrackingFrame::Representation + + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment::Representation + + property :track_id, :numeric_string => true, as: 'trackId' + end + end + + class GoogleCloudVideointelligenceV1beta2ObjectTrackingFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :normalized_bounding_box, as: 'normalizedBoundingBox', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2NormalizedBoundingBox, decorator: 
Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2NormalizedBoundingBox::Representation + + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1beta2SpeechRecognitionAlternative + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :transcript, as: 'transcript' + collection :words, as: 'words', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2WordInfo, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2WordInfo::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2SpeechTranscription + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :alternatives, as: 'alternatives', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2SpeechRecognitionAlternative, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2SpeechRecognitionAlternative::Representation + + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1beta2TextAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2TextSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2TextSegment::Representation + + property :text, as: 'text' + end + end + + class GoogleCloudVideointelligenceV1beta2TextFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :rotated_bounding_box, as: 'rotatedBoundingBox', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2NormalizedBoundingPoly, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2NormalizedBoundingPoly::Representation + + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1beta2TextSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2TextFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2TextFrame::Representation + + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :input_uri, as: 'inputUri' + property :progress_percent, as: 'progressPercent' + property :start_time, as: 'startTime' + property :update_time, as: 'updateTime' + end + end + + class GoogleCloudVideointelligenceV1beta2VideoAnnotationResults + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :error, as: 'error', class: Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus::Representation + + property :explicit_annotation, as: 'explicitAnnotation', class: 
Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation::Representation + + collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelAnnotation::Representation + + property :input_uri, as: 'inputUri' + collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2ObjectTrackingAnnotation::Representation + + collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelAnnotation::Representation + + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment::Representation + + collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelAnnotation::Representation + + collection :speech_transcriptions, as: 'speechTranscriptions', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2SpeechTranscription, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2SpeechTranscription::Representation + + collection :text_annotations, as: 'textAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2TextAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2TextAnnotation::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2VideoSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :end_time_offset, as: 'endTimeOffset' + property :start_time_offset, as: 'startTimeOffset' + end + end + + class GoogleCloudVideointelligenceV1beta2WordInfo + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :end_time, as: 'endTime' + property :speaker_tag, as: 'speakerTag' + property :start_time, as: 'startTime' + property :word, as: 'word' + end + end + + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_results, as: 'annotationResults', 
class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1Entity + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :description, as: 'description' + property :entity_id, as: 'entityId' + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :pornography_likelihood, as: 'pornographyLikelihood' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1LabelAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1Entity, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1Entity::Representation + + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1Entity, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelFrame::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1LabelFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1LabelSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :bottom, as: 'bottom' + property :left, as: 'left' + property :right, as: 'right' + property :top, as: 'top' + end + end + + class GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingPoly + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :vertices, as: 'vertices', class: 
Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1NormalizedVertex, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1NormalizedVertex::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1NormalizedVertex + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :x, as: 'x' + property :y, as: 'y' + end + end + + class GoogleCloudVideointelligenceV1p1beta1ObjectTrackingAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1Entity, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1ObjectTrackingFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1ObjectTrackingFrame::Representation + + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment::Representation + + property :track_id, :numeric_string => true, as: 'trackId' + end + end + + class GoogleCloudVideointelligenceV1p1beta1ObjectTrackingFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :normalized_bounding_box, as: 'normalizedBoundingBox', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox::Representation + + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :transcript, as: 'transcript' + collection :words, as: 'words', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1WordInfo, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1WordInfo::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1SpeechTranscription + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :alternatives, as: 'alternatives', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative::Representation + + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1p1beta1TextAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1TextSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1TextSegment::Representation + + property :text, as: 'text' + end + end + + class GoogleCloudVideointelligenceV1p1beta1TextFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property 
:rotated_bounding_box, as: 'rotatedBoundingBox', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingPoly, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingPoly::Representation + + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1TextSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1TextFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1TextFrame::Representation + + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :input_uri, as: 'inputUri' + property :progress_percent, as: 'progressPercent' + property :start_time, as: 'startTime' + property :update_time, as: 'updateTime' + end + end + + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :error, as: 'error', class: Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus::Representation + + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation::Representation + + collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation::Representation + + property :input_uri, as: 'inputUri' + collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1ObjectTrackingAnnotation::Representation + + collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation::Representation + + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment::Representation + + collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation::Representation + + collection :speech_transcriptions, 
as: 'speechTranscriptions', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1SpeechTranscription, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1SpeechTranscription::Representation + + collection :text_annotations, as: 'textAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1TextAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1TextAnnotation::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1VideoSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :end_time_offset, as: 'endTimeOffset' + property :start_time_offset, as: 'startTimeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1WordInfo + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :end_time, as: 'endTime' + property :speaker_tag, as: 'speakerTag' + property :start_time, as: 'startTime' + property :word, as: 'word' + end + end + + class GoogleCloudVideointelligenceV1p2beta1AnnotateVideoProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoAnnotationProgress::Representation + + end + end + + class GoogleCloudVideointelligenceV1p2beta1AnnotateVideoResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoAnnotationResults::Representation + + end + end + + class GoogleCloudVideointelligenceV1p2beta1Entity + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :description, as: 'description' + property :entity_id, as: 'entityId' + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1p2beta1ExplicitContentAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1ExplicitContentFrame::Representation + + end + end + + class GoogleCloudVideointelligenceV1p2beta1ExplicitContentFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :pornography_likelihood, as: 'pornographyLikelihood' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p2beta1LabelAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1Entity, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1Entity::Representation + + property :entity, as: 'entity', class: 
Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1Entity, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelFrame::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p2beta1LabelFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p2beta1LabelSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingBox + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :bottom, as: 'bottom' + property :left, as: 'left' + property :right, as: 'right' + property :top, as: 'top' + end + end + + class GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingPoly + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :vertices, as: 'vertices', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1NormalizedVertex, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1NormalizedVertex::Representation + + end + end + + class GoogleCloudVideointelligenceV1p2beta1NormalizedVertex + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :x, as: 'x' + property :y, as: 'y' + end + end + + class GoogleCloudVideointelligenceV1p2beta1ObjectTrackingAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1Entity, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1ObjectTrackingFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1ObjectTrackingFrame::Representation + + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment::Representation + + property :track_id, :numeric_string => true, as: 'trackId' + end + end + + class GoogleCloudVideointelligenceV1p2beta1ObjectTrackingFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :normalized_bounding_box, as: 'normalizedBoundingBox', 
class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingBox, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingBox::Representation + + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p2beta1SpeechRecognitionAlternative + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :transcript, as: 'transcript' + collection :words, as: 'words', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1WordInfo, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1WordInfo::Representation + + end + end + + class GoogleCloudVideointelligenceV1p2beta1SpeechTranscription + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :alternatives, as: 'alternatives', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1SpeechRecognitionAlternative, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1SpeechRecognitionAlternative::Representation + + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1p2beta1TextAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1TextSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1TextSegment::Representation + + property :text, as: 'text' + end + end + + class GoogleCloudVideointelligenceV1p2beta1TextFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :rotated_bounding_box, as: 'rotatedBoundingBox', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingPoly, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingPoly::Representation + + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p2beta1TextSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1TextFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1TextFrame::Representation + + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p2beta1VideoAnnotationProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :input_uri, as: 'inputUri' + property :progress_percent, as: 'progressPercent' + property :start_time, as: 'startTime' + property :update_time, as: 'updateTime' + end + end + + class GoogleCloudVideointelligenceV1p2beta1VideoAnnotationResults + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :error, as: 'error', class: Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus, decorator: 
Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus::Representation + + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1ExplicitContentAnnotation::Representation + + collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelAnnotation::Representation + + property :input_uri, as: 'inputUri' + collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1ObjectTrackingAnnotation::Representation + + collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelAnnotation::Representation + + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment::Representation + + collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelAnnotation::Representation + + collection :speech_transcriptions, as: 'speechTranscriptions', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1SpeechTranscription, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1SpeechTranscription::Representation + + collection :text_annotations, as: 'textAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1TextAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1TextAnnotation::Representation + + end + end + + class GoogleCloudVideointelligenceV1p2beta1VideoSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :end_time_offset, as: 'endTimeOffset' + property :start_time_offset, as: 'startTimeOffset' + end + end + + class GoogleCloudVideointelligenceV1p2beta1WordInfo + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :end_time, as: 'endTime' + property :speaker_tag, as: 'speakerTag' + property :start_time, as: 'startTime' + property :word, as: 'word' + end + end + + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress::Representation + + end + end + + class 
GoogleCloudVideointelligenceV1p3beta1AnnotateVideoRequest + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :features, as: 'features' + property :input_content, :base64 => true, as: 'inputContent' + property :input_uri, as: 'inputUri' + property :location_id, as: 'locationId' + property :output_uri, as: 'outputUri' + property :video_context, as: 'videoContext', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoContext, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoContext::Representation + + end + end + + class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults::Representation + + end + end + + class GoogleCloudVideointelligenceV1p3beta1Entity + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :description, as: 'description' + property :entity_id, as: 'entityId' + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame::Representation + + end + end + + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentDetectionConfig + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :model, as: 'model' + end + end + + class GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :pornography_likelihood, as: 'pornographyLikelihood' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p3beta1LabelAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1Entity, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1Entity::Representation + + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1Entity, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelFrame::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p3beta1LabelDetectionConfig + # @private + class Representation < 
Google::Apis::Core::JsonRepresentation + property :frame_confidence_threshold, as: 'frameConfidenceThreshold' + property :label_detection_mode, as: 'labelDetectionMode' + property :model, as: 'model' + property :stationary_camera, as: 'stationaryCamera' + property :video_confidence_threshold, as: 'videoConfidenceThreshold' + end + end + + class GoogleCloudVideointelligenceV1p3beta1LabelFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p3beta1LabelSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :bottom, as: 'bottom' + property :left, as: 'left' + property :right, as: 'right' + property :top, as: 'top' + end + end + + class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :vertices, as: 'vertices', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedVertex, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedVertex::Representation + + end + end + + class GoogleCloudVideointelligenceV1p3beta1NormalizedVertex + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :x, as: 'x' + property :y, as: 'y' + end + end + + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1Entity, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame::Representation + + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation + + property :track_id, :numeric_string => true, as: 'trackId' + end + end + + class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :normalized_bounding_box, as: 'normalizedBoundingBox', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox::Representation + + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p3beta1ShotChangeDetectionConfig + # @private + class 
Representation < Google::Apis::Core::JsonRepresentation + property :model, as: 'model' + end + end + + class GoogleCloudVideointelligenceV1p3beta1SpeechContext + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :phrases, as: 'phrases' + end + end + + class GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :transcript, as: 'transcript' + collection :words, as: 'words', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1WordInfo, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1WordInfo::Representation + + end + end + + class GoogleCloudVideointelligenceV1p3beta1SpeechTranscription + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :alternatives, as: 'alternatives', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative::Representation + + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1p3beta1SpeechTranscriptionConfig + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :audio_tracks, as: 'audioTracks' + property :diarization_speaker_count, as: 'diarizationSpeakerCount' + property :enable_automatic_punctuation, as: 'enableAutomaticPunctuation' + property :enable_speaker_diarization, as: 'enableSpeakerDiarization' + property :enable_word_confidence, as: 'enableWordConfidence' + property :filter_profanity, as: 'filterProfanity' + property :language_code, as: 'languageCode' + property :max_alternatives, as: 'maxAlternatives' + collection :speech_contexts, as: 'speechContexts', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1SpeechContext, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1SpeechContext::Representation + + end + end + + class GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults::Representation + + property :annotation_results_uri, as: 'annotationResultsUri' + property :error, as: 'error', class: Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus::Representation + + end + end + + class GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation::Representation + + collection :label_annotations, as: 'labelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, 
decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation + + collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation::Representation + + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p3beta1TextAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1TextSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1TextSegment::Representation + + property :text, as: 'text' + end + end + + class GoogleCloudVideointelligenceV1p3beta1TextDetectionConfig + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :language_hints, as: 'languageHints' + end + end + + class GoogleCloudVideointelligenceV1p3beta1TextFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :rotated_bounding_box, as: 'rotatedBoundingBox', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly::Representation + + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p3beta1TextSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1TextFrame, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1TextFrame::Representation + + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :input_uri, as: 'inputUri' + property :progress_percent, as: 'progressPercent' + property :start_time, as: 'startTime' + property :update_time, as: 'updateTime' + end + end + + class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :error, as: 'error', class: Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus::Representation + + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation::Representation + + collection 
:frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation + + property :input_uri, as: 'inputUri' + collection :object_annotations, as: 'objectAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation::Representation + + collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation + + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation + + collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation::Representation + + collection :speech_transcriptions, as: 'speechTranscriptions', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1SpeechTranscription, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1SpeechTranscription::Representation + + collection :text_annotations, as: 'textAnnotations', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1TextAnnotation, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1TextAnnotation::Representation + + end + end + + class GoogleCloudVideointelligenceV1p3beta1VideoContext + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :explicit_content_detection_config, as: 'explicitContentDetectionConfig', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentDetectionConfig, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentDetectionConfig::Representation + + property :label_detection_config, as: 'labelDetectionConfig', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelDetectionConfig, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelDetectionConfig::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment::Representation + + property :shot_change_detection_config, as: 'shotChangeDetectionConfig', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ShotChangeDetectionConfig, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ShotChangeDetectionConfig::Representation + + property :speech_transcription_config, as: 'speechTranscriptionConfig', class: 
Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1SpeechTranscriptionConfig, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1SpeechTranscriptionConfig::Representation + + property :text_detection_config, as: 'textDetectionConfig', class: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1TextDetectionConfig, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1TextDetectionConfig::Representation + + end + end + + class GoogleCloudVideointelligenceV1p3beta1VideoSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :end_time_offset, as: 'endTimeOffset' + property :start_time_offset, as: 'startTimeOffset' + end + end + + class GoogleCloudVideointelligenceV1p3beta1WordInfo + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :end_time, as: 'endTime' + property :speaker_tag, as: 'speakerTag' + property :start_time, as: 'startTime' + property :word, as: 'word' + end + end + + class GoogleLongrunningOperation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :done, as: 'done' + property :error, as: 'error', class: Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus::Representation + + hash :metadata, as: 'metadata' + property :name, as: 'name' + hash :response, as: 'response' + end + end + + class GoogleRpcStatus + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :code, as: 'code' + collection :details, as: 'details' + property :message, as: 'message' + end + end + end + end +end diff --git a/generated/google/apis/videointelligence_v1p3beta1/service.rb b/generated/google/apis/videointelligence_v1p3beta1/service.rb new file mode 100644 index 000000000..81eea6bdf --- /dev/null +++ b/generated/google/apis/videointelligence_v1p3beta1/service.rb @@ -0,0 +1,94 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'google/apis/core/base_service' +require 'google/apis/core/json_representation' +require 'google/apis/core/hashable' +require 'google/apis/errors' + +module Google + module Apis + module VideointelligenceV1p3beta1 + # Cloud Video Intelligence API + # + # Detects objects, explicit content, and scene changes in videos. It also + # specifies the region for annotation and transcribes speech to text. Supports + # both asynchronous API and streaming API. + # + # @example + # require 'google/apis/videointelligence_v1p3beta1' + # + # Videointelligence = Google::Apis::VideointelligenceV1p3beta1 # Alias the module + # service = Videointelligence::CloudVideoIntelligenceService.new + # + # @see https://cloud.google.com/video-intelligence/docs/ + class CloudVideoIntelligenceService < Google::Apis::Core::BaseService + # @return [String] + # API key. 
Your API key identifies your project and provides you with API access, + # quota, and reports. Required unless you provide an OAuth 2.0 token. + attr_accessor :key + + # @return [String] + # Available to use for quota purposes for server-side applications. Can be any + # arbitrary string assigned to a user, but should not exceed 40 characters. + attr_accessor :quota_user + + def initialize + super('https://videointelligence.googleapis.com/', '') + @batch_path = 'batch' + end + + # Performs asynchronous video annotation. Progress and results can be + # retrieved through the `google.longrunning.Operations` interface. + # `Operation.metadata` contains `AnnotateVideoProgress` (progress). + # `Operation.response` contains `AnnotateVideoResponse` (results). + # @param [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1AnnotateVideoRequest] google_cloud_videointelligence_v1p3beta1_annotate_video_request_object + # @param [String] fields + # Selector specifying which fields to include in a partial response. + # @param [String] quota_user + # Available to use for quota purposes for server-side applications. Can be any + # arbitrary string assigned to a user, but should not exceed 40 characters. + # @param [Google::Apis::RequestOptions] options + # Request-specific options + # + # @yield [result, err] Result & error if block supplied + # @yieldparam result [Google::Apis::VideointelligenceV1p3beta1::GoogleLongrunningOperation] parsed result object + # @yieldparam err [StandardError] error object if request failed + # + # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleLongrunningOperation] + # + # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried + # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification + # @raise [Google::Apis::AuthorizationError] Authorization is required + def annotate_video(google_cloud_videointelligence_v1p3beta1_annotate_video_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) + command = make_simple_command(:post, 'v1p3beta1/videos:annotate', options) + command.request_representation = Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1AnnotateVideoRequest::Representation + command.request_object = google_cloud_videointelligence_v1p3beta1_annotate_video_request_object + command.response_representation = Google::Apis::VideointelligenceV1p3beta1::GoogleLongrunningOperation::Representation + command.response_class = Google::Apis::VideointelligenceV1p3beta1::GoogleLongrunningOperation + command.query['fields'] = fields unless fields.nil? + command.query['quotaUser'] = quota_user unless quota_user.nil? + execute_or_queue_command(command, &block) + end + + protected + + def apply_command_defaults(command) + command.query['key'] = key unless key.nil? + command.query['quotaUser'] = quota_user unless quota_user.nil? 
+ end + end + end + end +end diff --git a/generated/google/apis/vision_v1.rb b/generated/google/apis/vision_v1.rb index 10ea7a698..2ca5542e7 100644 --- a/generated/google/apis/vision_v1.rb +++ b/generated/google/apis/vision_v1.rb @@ -27,7 +27,7 @@ module Google # @see https://cloud.google.com/vision/ module VisionV1 VERSION = 'V1' - REVISION = '20190212' + REVISION = '20190309' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/vision_v1/classes.rb b/generated/google/apis/vision_v1/classes.rb index dc14833f8..3b9f89b4e 100644 --- a/generated/google/apis/vision_v1/classes.rb +++ b/generated/google/apis/vision_v1/classes.rb @@ -116,14 +116,14 @@ module Google # @return [Google::Apis::VisionV1::CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -501,7 +501,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -649,7 +653,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... 
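For reviewers, a minimal usage sketch of the v1p3beta1 service closed out above: annotate_video returns a google.longrunning operation and raises Google::Apis::ClientError / ServerError as its @raise tags describe. The API key, bucket URI, and feature list below are placeholder assumptions for illustration, not values taken from this patch.

# Illustrative sketch only; assumes API-key auth and a readable GCS video.
require 'google/apis/videointelligence_v1p3beta1'

video = Google::Apis::VideointelligenceV1p3beta1::CloudVideoIntelligenceService.new
video.key = ENV['GOOGLE_API_KEY'] # placeholder credential

request = Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1AnnotateVideoRequest.new(
  input_uri: 'gs://my-bucket/my-video.mp4', # placeholder URI
  features: ['LABEL_DETECTION']
)

begin
  op = video.annotate_video(request)
  # Operation.metadata carries AnnotateVideoProgress and Operation.response
  # carries AnnotateVideoResponse once done, per the generated method comment.
  puts "started operation: #{op.name}"
rescue Google::Apis::ClientError => e
  warn "invalid request (#{e.status_code}): #{e.message}" # not retryable as-is
rescue Google::Apis::ServerError => e
  warn "server error, retry later: #{e.message}"
end

Progress and final results would then be polled through the google.longrunning.Operations interface, as the method comment above notes.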
@@ -1253,14 +1261,14 @@ module Google # @return [Google::Apis::VisionV1::GoogleCloudVisionV1p1beta1CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -1501,7 +1509,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -3000,14 +3012,14 @@ module Google # @return [Google::Apis::VisionV1::GoogleCloudVisionV1p2beta1CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -3248,7 +3260,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. 
Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -4747,14 +4763,14 @@ module Google # @return [Google::Apis::VisionV1::GoogleCloudVisionV1p3beta1CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -5029,7 +5045,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -6610,14 +6630,14 @@ module Google # @return [Google::Apis::VisionV1::GoogleCloudVisionV1p4beta1CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. 
If a localized user-facing @@ -6931,7 +6951,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -9189,14 +9213,14 @@ module Google attr_accessor :done alias_method :done?, :done - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -9600,14 +9624,14 @@ module Google # @return [String] attr_accessor :display_name - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -9854,14 +9878,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. 
It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing diff --git a/generated/google/apis/vision_v1p1beta1.rb b/generated/google/apis/vision_v1p1beta1.rb index a72c0a457..038e2f53e 100644 --- a/generated/google/apis/vision_v1p1beta1.rb +++ b/generated/google/apis/vision_v1p1beta1.rb @@ -27,7 +27,7 @@ module Google # @see https://cloud.google.com/vision/ module VisionV1p1beta1 VERSION = 'V1p1beta1' - REVISION = '20190212' + REVISION = '20190309' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/vision_v1p1beta1/classes.rb b/generated/google/apis/vision_v1p1beta1/classes.rb index 3930f096d..acbc76e13 100644 --- a/generated/google/apis/vision_v1p1beta1/classes.rb +++ b/generated/google/apis/vision_v1p1beta1/classes.rb @@ -63,14 +63,14 @@ module Google # @return [Google::Apis::VisionV1p1beta1::CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -340,7 +340,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... 
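The reflowed Color comments in the hunks above make the point that red, green, and blue arrive as floats in [0, 1] with no absolute color space attached, so callers should assume sRGB. As a small illustration of the CSS rgba() formatting those comments mention, here is a hedged Ruby helper operating on a plain hash rather than the generated Color class:

# Illustrative helper; assumes sRGB channel floats in [0, 1], per the proto comment.
def css_rgba(color)
  r = (color[:red].to_f   * 255).round
  g = (color[:green].to_f * 255).round
  b = (color[:blue].to_f  * 255).round
  a = color.fetch(:alpha, 1.0) # treat a missing alpha as fully opaque
  format('rgba(%d, %d, %d, %.2f)', r, g, b, a)
end

css_rgba(red: 0.25, green: 0.5, blue: 0.75) # => "rgba(64, 128, 191, 1.00)"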
@@ -488,7 +492,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -1045,14 +1053,14 @@ module Google # @return [Google::Apis::VisionV1p1beta1::GoogleCloudVisionV1p1beta1CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -1388,7 +1396,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -3165,14 +3177,14 @@ module Google # @return [Google::Apis::VisionV1p1beta1::GoogleCloudVisionV1p2beta1CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. 
The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -3413,7 +3425,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -4912,14 +4928,14 @@ module Google # @return [Google::Apis::VisionV1p1beta1::GoogleCloudVisionV1p3beta1CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -5194,7 +5210,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -6775,14 +6795,14 @@ module Google # @return [Google::Apis::VisionV1p1beta1::GoogleCloudVisionV1p4beta1CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). 
The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -7096,7 +7116,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -8962,14 +8986,14 @@ module Google attr_accessor :done alias_method :done?, :done - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -9476,14 +9500,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. 
The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing diff --git a/generated/google/apis/vision_v1p2beta1.rb b/generated/google/apis/vision_v1p2beta1.rb index f59551fcf..73f4da826 100644 --- a/generated/google/apis/vision_v1p2beta1.rb +++ b/generated/google/apis/vision_v1p2beta1.rb @@ -27,7 +27,7 @@ module Google # @see https://cloud.google.com/vision/ module VisionV1p2beta1 VERSION = 'V1p2beta1' - REVISION = '20190212' + REVISION = '20190309' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/vision_v1p2beta1/classes.rb b/generated/google/apis/vision_v1p2beta1/classes.rb index b535f085f..76b50757e 100644 --- a/generated/google/apis/vision_v1p2beta1/classes.rb +++ b/generated/google/apis/vision_v1p2beta1/classes.rb @@ -63,14 +63,14 @@ module Google # @return [Google::Apis::VisionV1p2beta1::CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -340,7 +340,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -488,7 +492,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. 
+ # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -1013,14 +1021,14 @@ module Google # @return [Google::Apis::VisionV1p2beta1::GoogleCloudVisionV1p1beta1CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -1261,7 +1269,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -2792,14 +2804,14 @@ module Google # @return [Google::Apis::VisionV1p2beta1::GoogleCloudVisionV1p2beta1CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. 
If a localized user-facing @@ -3135,7 +3147,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -4912,14 +4928,14 @@ module Google # @return [Google::Apis::VisionV1p2beta1::GoogleCloudVisionV1p3beta1CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -5194,7 +5210,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -6775,14 +6795,14 @@ module Google # @return [Google::Apis::VisionV1p2beta1::GoogleCloudVisionV1p4beta1CropHintsAnnotation] attr_accessor :crop_hints_annotation - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. 
The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -7096,7 +7116,11 @@ module Google # can be trivially provided to the constructor of "java.awt.Color" in Java; it # can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" # method in iOS; and, with just a little work, it can be easily formatted into - # a CSS "rgba()" string in JavaScript, as well. Here are some examples: + # a CSS "rgba()" string in JavaScript, as well. + # Note: this proto does not carry information about the absolute color space + # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, + # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color + # space. # Example (Java): # import com.google.type.Color; # // ... @@ -8962,14 +8986,14 @@ module Google attr_accessor :done alias_method :done?, :done - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing @@ -9476,14 +9500,14 @@ module Google end end - # The `Status` type defines a logical error model that is suitable for different - # programming environments, including REST APIs and RPC APIs. It is used by - # [gRPC](https://github.com/grpc). The error model is designed to be: + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). The error model is designed to be: # - Simple to use and understand for most users # - Flexible enough to meet unexpected needs # # Overview - # The `Status` message contains three pieces of data: error code, error message, - # and error details. The error code should be an enum value of + # The `Status` message contains three pieces of data: error code, error + # message, and error details. The error code should be an enum value of # google.rpc.Code, but it may accept additional error codes if needed. The # error message should be a developer-facing English message that helps # developers *understand* and *resolve* the error. If a localized user-facing