From a024c3a722e88da0d58f4c867f49167975c7055c Mon Sep 17 00:00:00 2001
From: Google APIs
Date: Wed, 18 Apr 2018 00:36:37 +0000
Subject: [PATCH] Autogenerated update (2018-04-18)

Update:
- classroom_v1
- cloudkms_v1
- dns_v1beta2
- oslogin_v1beta
- toolresults_v1beta3

---
 api_names_out.yaml | 566 +++++
 generated/google/apis/classroom_v1.rb | 2 +-
 generated/google/apis/classroom_v1/classes.rb | 2 +-
 generated/google/apis/cloudkms_v1.rb | 4 +-
 generated/google/apis/cloudkms_v1/classes.rb | 34 +-
 generated/google/apis/cloudkms_v1/service.rb | 2 +-
 generated/google/apis/dns_v1beta2.rb | 2 +-
 generated/google/apis/dns_v1beta2/service.rb | 2 +-
 generated/google/apis/oslogin_v1beta.rb | 2 +-
 .../google/apis/oslogin_v1beta/classes.rb | 6 +
 .../apis/oslogin_v1beta/representations.rb | 1 +
 generated/google/apis/toolresults_v1beta3.rb | 2 +-
 generated/google/apis/videointelligence_v1.rb | 34 +
 .../apis/videointelligence_v1/classes.rb | 2149 +++++++++++++++++
 .../videointelligence_v1/representations.rb | 972 ++++++++
 .../apis/videointelligence_v1/service.rb | 246 ++
 .../google/apis/videointelligence_v1beta2.rb | 34 +
 .../apis/videointelligence_v1beta2/classes.rb | 2092 ++++++++++++++++
 .../representations.rb | 933 +++++++
 .../apis/videointelligence_v1beta2/service.rb | 92 +
 20 files changed, 7162 insertions(+), 15 deletions(-)
 create mode 100644 generated/google/apis/videointelligence_v1.rb
 create mode 100644 generated/google/apis/videointelligence_v1/classes.rb
 create mode 100644 generated/google/apis/videointelligence_v1/representations.rb
 create mode 100644 generated/google/apis/videointelligence_v1/service.rb
 create mode 100644 generated/google/apis/videointelligence_v1beta2.rb
 create mode 100644 generated/google/apis/videointelligence_v1beta2/classes.rb
 create mode 100644 generated/google/apis/videointelligence_v1beta2/representations.rb
 create mode 100644 generated/google/apis/videointelligence_v1beta2/service.rb

diff --git a/api_names_out.yaml b/api_names_out.yaml
index ceaf4b21c..a5ad4871a 100644
--- a/api_names_out.yaml
+++ b/api_names_out.yaml
@@ -59090,6 +59090,7 @@
 "/oslogin:v1beta/PosixAccount/gecos": gecos
 "/oslogin:v1beta/PosixAccount/gid": gid
 "/oslogin:v1beta/PosixAccount/homeDirectory": home_directory
+"/oslogin:v1beta/PosixAccount/operatingSystemType": operating_system_type
 "/oslogin:v1beta/PosixAccount/primary": primary
 "/oslogin:v1beta/PosixAccount/shell": shell
 "/oslogin:v1beta/PosixAccount/systemId": system_id
@@ -73279,6 +73280,294 @@
 "/vault:v1/vault.matters.undelete/matterId": matter_id
 "/vault:v1/vault.matters.update": update_matter
 "/vault:v1/vault.matters.update/matterId": matter_id
+"/videointelligence:v1/GoogleCloudVideointelligenceV1_AnnotateVideoProgress": google_cloud_videointelligence_v1_annotate_video_progress
+"/videointelligence:v1/GoogleCloudVideointelligenceV1_AnnotateVideoProgress/annotationProgress": annotation_progress
+"/videointelligence:v1/GoogleCloudVideointelligenceV1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress
+"/videointelligence:v1/GoogleCloudVideointelligenceV1_AnnotateVideoRequest": google_cloud_videointelligence_v1_annotate_video_request
+"/videointelligence:v1/GoogleCloudVideointelligenceV1_AnnotateVideoRequest/features": features
+"/videointelligence:v1/GoogleCloudVideointelligenceV1_AnnotateVideoRequest/features/feature": feature
+"/videointelligence:v1/GoogleCloudVideointelligenceV1_AnnotateVideoRequest/inputContent": input_content
+"/videointelligence:v1/GoogleCloudVideointelligenceV1_AnnotateVideoRequest/inputUri": input_uri +"/videointelligence:v1/GoogleCloudVideointelligenceV1_AnnotateVideoRequest/locationId": location_id +"/videointelligence:v1/GoogleCloudVideointelligenceV1_AnnotateVideoRequest/outputUri": output_uri +"/videointelligence:v1/GoogleCloudVideointelligenceV1_AnnotateVideoRequest/videoContext": video_context +"/videointelligence:v1/GoogleCloudVideointelligenceV1_AnnotateVideoResponse": google_cloud_videointelligence_v1_annotate_video_response +"/videointelligence:v1/GoogleCloudVideointelligenceV1_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1/GoogleCloudVideointelligenceV1_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1/GoogleCloudVideointelligenceV1_Entity": google_cloud_videointelligence_v1_entity +"/videointelligence:v1/GoogleCloudVideointelligenceV1_Entity/description": description +"/videointelligence:v1/GoogleCloudVideointelligenceV1_Entity/entityId": entity_id +"/videointelligence:v1/GoogleCloudVideointelligenceV1_Entity/languageCode": language_code +"/videointelligence:v1/GoogleCloudVideointelligenceV1_ExplicitContentAnnotation": google_cloud_videointelligence_v1_explicit_content_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1_ExplicitContentAnnotation/frames": frames +"/videointelligence:v1/GoogleCloudVideointelligenceV1_ExplicitContentAnnotation/frames/frame": frame +"/videointelligence:v1/GoogleCloudVideointelligenceV1_ExplicitContentDetectionConfig": google_cloud_videointelligence_v1_explicit_content_detection_config +"/videointelligence:v1/GoogleCloudVideointelligenceV1_ExplicitContentDetectionConfig/model": model +"/videointelligence:v1/GoogleCloudVideointelligenceV1_ExplicitContentFrame": google_cloud_videointelligence_v1_explicit_content_frame +"/videointelligence:v1/GoogleCloudVideointelligenceV1_ExplicitContentFrame/pornographyLikelihood": pornography_likelihood +"/videointelligence:v1/GoogleCloudVideointelligenceV1_ExplicitContentFrame/timeOffset": time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelAnnotation": google_cloud_videointelligence_v1_label_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelAnnotation/categoryEntities": category_entities +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelAnnotation/categoryEntities/category_entity": category_entity +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelAnnotation/entity": entity +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelAnnotation/frames": frames +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelAnnotation/frames/frame": frame +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelAnnotation/segments": segments +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelAnnotation/segments/segment": segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelDetectionConfig": google_cloud_videointelligence_v1_label_detection_config +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelDetectionConfig/labelDetectionMode": label_detection_mode +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelDetectionConfig/model": model +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelDetectionConfig/stationaryCamera": stationary_camera +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelFrame": google_cloud_videointelligence_v1_label_frame 
+"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelFrame/confidence": confidence +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelFrame/timeOffset": time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelSegment": google_cloud_videointelligence_v1_label_segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelSegment/confidence": confidence +"/videointelligence:v1/GoogleCloudVideointelligenceV1_LabelSegment/segment": segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1_ShotChangeDetectionConfig": google_cloud_videointelligence_v1_shot_change_detection_config +"/videointelligence:v1/GoogleCloudVideointelligenceV1_ShotChangeDetectionConfig/model": model +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationProgress": google_cloud_videointelligence_v1_video_annotation_progress +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationProgress/progressPercent": progress_percent +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationResults": google_cloud_videointelligence_v1_video_annotation_results +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/error": error +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/frameLabelAnnotations": frame_label_annotations +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/frameLabelAnnotations/frame_label_annotation": frame_label_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/segmentLabelAnnotations": segment_label_annotations +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/segmentLabelAnnotations/segment_label_annotation": segment_label_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/shotLabelAnnotations": shot_label_annotations +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoAnnotationResults/shotLabelAnnotations/shot_label_annotation": shot_label_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoContext": google_cloud_videointelligence_v1_video_context +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoContext/explicitContentDetectionConfig": explicit_content_detection_config +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoContext/labelDetectionConfig": label_detection_config +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoContext/segments": segments +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoContext/segments/segment": segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoContext/shotChangeDetectionConfig": shot_change_detection_config +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoSegment": 
google_cloud_videointelligence_v1_video_segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_AnnotateVideoProgress": google_cloud_videointelligence_v1beta1_annotate_video_progress +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_AnnotateVideoProgress/annotationProgress": annotation_progress +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_AnnotateVideoResponse": google_cloud_videointelligence_v1beta1_annotate_video_response +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_LabelAnnotation": google_cloud_videointelligence_v1beta1_label_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_LabelAnnotation/description": description +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_LabelAnnotation/languageCode": language_code +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_LabelAnnotation/locations": locations +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_LabelAnnotation/locations/location": location +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_LabelLocation": google_cloud_videointelligence_v1beta1_label_location +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_LabelLocation/confidence": confidence +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_LabelLocation/level": level +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_LabelLocation/segment": segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_SafeSearchAnnotation": google_cloud_videointelligence_v1beta1_safe_search_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_SafeSearchAnnotation/adult": adult +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_SafeSearchAnnotation/medical": medical +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_SafeSearchAnnotation/racy": racy +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_SafeSearchAnnotation/spoof": spoof +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_SafeSearchAnnotation/timeOffset": time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_SafeSearchAnnotation/violent": violent +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoAnnotationProgress": google_cloud_videointelligence_v1beta1_video_annotation_progress +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoAnnotationProgress/progressPercent": progress_percent +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults": google_cloud_videointelligence_v1beta1_video_annotation_results 
+"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/error": error +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/labelAnnotations": label_annotations +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/labelAnnotations/label_annotation": label_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/safeSearchAnnotations": safe_search_annotations +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/safeSearchAnnotations/safe_search_annotation": safe_search_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoSegment": google_cloud_videointelligence_v1beta1_video_segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta1_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_AnnotateVideoProgress": google_cloud_videointelligence_v1beta2_annotate_video_progress +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_AnnotateVideoProgress/annotationProgress": annotation_progress +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_AnnotateVideoResponse": google_cloud_videointelligence_v1beta2_annotate_video_response +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_Entity": google_cloud_videointelligence_v1beta2_entity +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_Entity/description": description +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_Entity/entityId": entity_id +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_Entity/languageCode": language_code +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_ExplicitContentAnnotation": google_cloud_videointelligence_v1beta2_explicit_content_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_ExplicitContentAnnotation/frames": frames +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_ExplicitContentAnnotation/frames/frame": frame +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_ExplicitContentFrame": google_cloud_videointelligence_v1beta2_explicit_content_frame +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_ExplicitContentFrame/pornographyLikelihood": pornography_likelihood +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_ExplicitContentFrame/timeOffset": time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation": google_cloud_videointelligence_v1beta2_label_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/categoryEntities": category_entities 
+"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/categoryEntities/category_entity": category_entity +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/entity": entity +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/frames": frames +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/frames/frame": frame +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/segments": segments +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/segments/segment": segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_LabelFrame": google_cloud_videointelligence_v1beta2_label_frame +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_LabelFrame/confidence": confidence +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_LabelFrame/timeOffset": time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_LabelSegment": google_cloud_videointelligence_v1beta2_label_segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_LabelSegment/confidence": confidence +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_LabelSegment/segment": segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress": google_cloud_videointelligence_v1beta2_video_annotation_progress +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress/progressPercent": progress_percent +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults": google_cloud_videointelligence_v1beta2_video_annotation_results +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/error": error +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/frameLabelAnnotations": frame_label_annotations +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/frameLabelAnnotations/frame_label_annotation": frame_label_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/segmentLabelAnnotations": segment_label_annotations +? 
"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/segmentLabelAnnotations/segment_label_annotation" +: segment_label_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/shotLabelAnnotations": shot_label_annotations +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/shotLabelAnnotations/shot_label_annotation": shot_label_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoSegment": google_cloud_videointelligence_v1beta2_video_segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1beta2_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoProgress": google_cloud_videointelligence_v1p1beta1_annotate_video_progress +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoProgress/annotationProgress": annotation_progress +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoResponse": google_cloud_videointelligence_v1p1beta1_annotate_video_response +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_EmotionAttribute": google_cloud_videointelligence_v1p1beta1_emotion_attribute +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_EmotionAttribute/emotion": emotion +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_EmotionAttribute/score": score +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_Entity": google_cloud_videointelligence_v1p1beta1_entity +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_Entity/description": description +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_Entity/entityId": entity_id +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_Entity/languageCode": language_code +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentAnnotation": google_cloud_videointelligence_v1p1beta1_explicit_content_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentAnnotation/frames": frames +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentAnnotation/frames/frame": frame +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentFrame": google_cloud_videointelligence_v1p1beta1_explicit_content_frame +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentFrame/pornographyLikelihood": pornography_likelihood +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentFrame/timeOffset": time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAnnotation": google_cloud_videointelligence_v1p1beta1_face_detection_annotation 
+"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAnnotation/frames": frames +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAnnotation/frames/frame": frame +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAnnotation/segments": segments +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAnnotation/segments/segment": segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAttribute": google_cloud_videointelligence_v1p1beta1_face_detection_attribute +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAttribute/emotions": emotions +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAttribute/emotions/emotion": emotion +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAttribute/normalizedBoundingBox": normalized_bounding_box +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionFrame": google_cloud_videointelligence_v1p1beta1_face_detection_frame +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionFrame/attributes": attributes +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionFrame/attributes/attribute": attribute +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionFrame/timeOffset": time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceSegment": google_cloud_videointelligence_v1p1beta1_face_segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_FaceSegment/segment": segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation": google_cloud_videointelligence_v1p1beta1_label_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/categoryEntities": category_entities +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/categoryEntities/category_entity": category_entity +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/entity": entity +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/frames": frames +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/frames/frame": frame +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/segments": segments +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/segments/segment": segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_LabelFrame": google_cloud_videointelligence_v1p1beta1_label_frame +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_LabelFrame/confidence": confidence +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_LabelFrame/timeOffset": time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_LabelSegment": google_cloud_videointelligence_v1p1beta1_label_segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_LabelSegment/confidence": confidence +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_LabelSegment/segment": segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox": google_cloud_videointelligence_v1p1beta1_normalized_bounding_box +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox/bottom": bottom +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox/left": left 
+"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox/right": right +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox/top": top +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative": google_cloud_videointelligence_v1p1beta1_speech_recognition_alternative +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative/confidence": confidence +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative/transcript": transcript +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative/words": words +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative/words/word": word +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_SpeechTranscription": google_cloud_videointelligence_v1p1beta1_speech_transcription +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_SpeechTranscription/alternatives": alternatives +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_SpeechTranscription/alternatives/alternative": alternative +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress": google_cloud_videointelligence_v1p1beta1_video_annotation_progress +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress/progressPercent": progress_percent +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults": google_cloud_videointelligence_v1p1beta1_video_annotation_results +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/error": error +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/faceDetectionAnnotations": face_detection_annotations +? "/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/faceDetectionAnnotations/face_detection_annotation" +: face_detection_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/frameLabelAnnotations": frame_label_annotations +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/frameLabelAnnotations/frame_label_annotation": frame_label_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/segmentLabelAnnotations": segment_label_annotations +? 
"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/segmentLabelAnnotations/segment_label_annotation" +: segment_label_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/shotLabelAnnotations": shot_label_annotations +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/shotLabelAnnotations/shot_label_annotation": shot_label_annotation +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/speechTranscriptions": speech_transcriptions +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/speechTranscriptions/speech_transcription": speech_transcription +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoSegment": google_cloud_videointelligence_v1p1beta1_video_segment +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_WordInfo": google_cloud_videointelligence_v1p1beta1_word_info +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_WordInfo/endTime": end_time +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_WordInfo/startTime": start_time +"/videointelligence:v1/GoogleCloudVideointelligenceV1p1beta1_WordInfo/word": word +"/videointelligence:v1/GoogleLongrunning_CancelOperationRequest": google_longrunning_cancel_operation_request +"/videointelligence:v1/GoogleLongrunning_ListOperationsResponse": google_longrunning_list_operations_response +"/videointelligence:v1/GoogleLongrunning_ListOperationsResponse/nextPageToken": next_page_token +"/videointelligence:v1/GoogleLongrunning_ListOperationsResponse/operations": operations +"/videointelligence:v1/GoogleLongrunning_ListOperationsResponse/operations/operation": operation +"/videointelligence:v1/GoogleLongrunning_Operation": google_longrunning_operation +"/videointelligence:v1/GoogleLongrunning_Operation/done": done +"/videointelligence:v1/GoogleLongrunning_Operation/error": error +"/videointelligence:v1/GoogleLongrunning_Operation/metadata": metadata +"/videointelligence:v1/GoogleLongrunning_Operation/metadata/metadatum": metadatum +"/videointelligence:v1/GoogleLongrunning_Operation/name": name +"/videointelligence:v1/GoogleLongrunning_Operation/response": response +"/videointelligence:v1/GoogleLongrunning_Operation/response/response": response +"/videointelligence:v1/GoogleProtobuf_Empty": google_protobuf_empty +"/videointelligence:v1/GoogleRpc_Status": google_rpc_status +"/videointelligence:v1/GoogleRpc_Status/code": code +"/videointelligence:v1/GoogleRpc_Status/details": details +"/videointelligence:v1/GoogleRpc_Status/details/detail": detail +"/videointelligence:v1/GoogleRpc_Status/details/detail/detail": detail +"/videointelligence:v1/GoogleRpc_Status/message": message +"/videointelligence:v1/fields": fields +"/videointelligence:v1/key": key +"/videointelligence:v1/quotaUser": quota_user +"/videointelligence:v1/videointelligence.operations.cancel": cancel_operation +"/videointelligence:v1/videointelligence.operations.cancel/name": name 
+"/videointelligence:v1/videointelligence.operations.delete": delete_operation +"/videointelligence:v1/videointelligence.operations.delete/name": name +"/videointelligence:v1/videointelligence.operations.get": get_operation +"/videointelligence:v1/videointelligence.operations.get/name": name +"/videointelligence:v1/videointelligence.operations.list": list_operations +"/videointelligence:v1/videointelligence.operations.list/filter": filter +"/videointelligence:v1/videointelligence.operations.list/name": name +"/videointelligence:v1/videointelligence.operations.list/pageSize": page_size +"/videointelligence:v1/videointelligence.operations.list/pageToken": page_token +"/videointelligence:v1/videointelligence.videos.annotate": annotate_video "/videointelligence:v1beta1/GoogleCloudVideointelligenceV1_AnnotateVideoProgress": google_cloud_videointelligence_v1_annotate_video_progress "/videointelligence:v1beta1/GoogleCloudVideointelligenceV1_AnnotateVideoProgress/annotationProgress": annotation_progress "/videointelligence:v1beta1/GoogleCloudVideointelligenceV1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress @@ -73567,6 +73856,283 @@ "/videointelligence:v1beta1/key": key "/videointelligence:v1beta1/quotaUser": quota_user "/videointelligence:v1beta1/videointelligence.videos.annotate": annotate_video +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_AnnotateVideoProgress": google_cloud_videointelligence_v1_annotate_video_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_AnnotateVideoProgress/annotationProgress": annotation_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_AnnotateVideoResponse": google_cloud_videointelligence_v1_annotate_video_response +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_Entity": google_cloud_videointelligence_v1_entity +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_Entity/description": description +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_Entity/entityId": entity_id +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_Entity/languageCode": language_code +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_ExplicitContentAnnotation": google_cloud_videointelligence_v1_explicit_content_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_ExplicitContentAnnotation/frames": frames +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_ExplicitContentAnnotation/frames/frame": frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_ExplicitContentFrame": google_cloud_videointelligence_v1_explicit_content_frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_ExplicitContentFrame/pornographyLikelihood": pornography_likelihood +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_ExplicitContentFrame/timeOffset": time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_LabelAnnotation": google_cloud_videointelligence_v1_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_LabelAnnotation/categoryEntities": category_entities 
+"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_LabelAnnotation/categoryEntities/category_entity": category_entity +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_LabelAnnotation/entity": entity +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_LabelAnnotation/frames": frames +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_LabelAnnotation/frames/frame": frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_LabelAnnotation/segments": segments +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_LabelAnnotation/segments/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_LabelFrame": google_cloud_videointelligence_v1_label_frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_LabelFrame/confidence": confidence +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_LabelFrame/timeOffset": time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_LabelSegment": google_cloud_videointelligence_v1_label_segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_LabelSegment/confidence": confidence +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_LabelSegment/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationProgress": google_cloud_videointelligence_v1_video_annotation_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationProgress/progressPercent": progress_percent +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationResults": google_cloud_videointelligence_v1_video_annotation_results +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationResults/error": error +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationResults/frameLabelAnnotations": frame_label_annotations +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationResults/frameLabelAnnotations/frame_label_annotation": frame_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationResults/segmentLabelAnnotations": segment_label_annotations +? 
"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationResults/segmentLabelAnnotations/segment_label_annotation" +: segment_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationResults/shotLabelAnnotations": shot_label_annotations +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoAnnotationResults/shotLabelAnnotations/shot_label_annotation": shot_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoSegment": google_cloud_videointelligence_v1_video_segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_AnnotateVideoProgress": google_cloud_videointelligence_v1beta1_annotate_video_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_AnnotateVideoProgress/annotationProgress": annotation_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_AnnotateVideoResponse": google_cloud_videointelligence_v1beta1_annotate_video_response +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_LabelAnnotation": google_cloud_videointelligence_v1beta1_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_LabelAnnotation/description": description +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_LabelAnnotation/languageCode": language_code +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_LabelAnnotation/locations": locations +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_LabelAnnotation/locations/location": location +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_LabelLocation": google_cloud_videointelligence_v1beta1_label_location +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_LabelLocation/confidence": confidence +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_LabelLocation/level": level +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_LabelLocation/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_SafeSearchAnnotation": google_cloud_videointelligence_v1beta1_safe_search_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_SafeSearchAnnotation/adult": adult +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_SafeSearchAnnotation/medical": medical +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_SafeSearchAnnotation/racy": racy +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_SafeSearchAnnotation/spoof": spoof +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_SafeSearchAnnotation/timeOffset": time_offset 
+"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_SafeSearchAnnotation/violent": violent +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoAnnotationProgress": google_cloud_videointelligence_v1beta1_video_annotation_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoAnnotationProgress/progressPercent": progress_percent +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults": google_cloud_videointelligence_v1beta1_video_annotation_results +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/error": error +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/labelAnnotations": label_annotations +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/labelAnnotations/label_annotation": label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/safeSearchAnnotations": safe_search_annotations +? "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/safeSearchAnnotations/safe_search_annotation" +: safe_search_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoSegment": google_cloud_videointelligence_v1beta1_video_segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta1_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_AnnotateVideoProgress": google_cloud_videointelligence_v1beta2_annotate_video_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_AnnotateVideoProgress/annotationProgress": annotation_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_AnnotateVideoRequest": google_cloud_videointelligence_v1beta2_annotate_video_request +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_AnnotateVideoRequest/features": features +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_AnnotateVideoRequest/features/feature": feature +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_AnnotateVideoRequest/inputContent": input_content +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_AnnotateVideoRequest/inputUri": input_uri +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_AnnotateVideoRequest/locationId": location_id +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_AnnotateVideoRequest/outputUri": output_uri 
+"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_AnnotateVideoRequest/videoContext": video_context +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_AnnotateVideoResponse": google_cloud_videointelligence_v1beta2_annotate_video_response +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_Entity": google_cloud_videointelligence_v1beta2_entity +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_Entity/description": description +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_Entity/entityId": entity_id +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_Entity/languageCode": language_code +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_ExplicitContentAnnotation": google_cloud_videointelligence_v1beta2_explicit_content_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_ExplicitContentAnnotation/frames": frames +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_ExplicitContentAnnotation/frames/frame": frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_ExplicitContentDetectionConfig": google_cloud_videointelligence_v1beta2_explicit_content_detection_config +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_ExplicitContentDetectionConfig/model": model +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_ExplicitContentFrame": google_cloud_videointelligence_v1beta2_explicit_content_frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_ExplicitContentFrame/pornographyLikelihood": pornography_likelihood +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_ExplicitContentFrame/timeOffset": time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelAnnotation": google_cloud_videointelligence_v1beta2_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/categoryEntities": category_entities +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/categoryEntities/category_entity": category_entity +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/entity": entity +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/frames": frames +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/frames/frame": frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/segments": segments +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelAnnotation/segments/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelDetectionConfig": google_cloud_videointelligence_v1beta2_label_detection_config +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelDetectionConfig/labelDetectionMode": label_detection_mode +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelDetectionConfig/model": model +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelDetectionConfig/stationaryCamera": stationary_camera +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelFrame": google_cloud_videointelligence_v1beta2_label_frame 
+"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelFrame/confidence": confidence +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelFrame/timeOffset": time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelSegment": google_cloud_videointelligence_v1beta2_label_segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelSegment/confidence": confidence +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_LabelSegment/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_ShotChangeDetectionConfig": google_cloud_videointelligence_v1beta2_shot_change_detection_config +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_ShotChangeDetectionConfig/model": model +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress": google_cloud_videointelligence_v1beta2_video_annotation_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress/progressPercent": progress_percent +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults": google_cloud_videointelligence_v1beta2_video_annotation_results +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/error": error +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/frameLabelAnnotations": frame_label_annotations +? "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/frameLabelAnnotations/frame_label_annotation" +: frame_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/segmentLabelAnnotations": segment_label_annotations +? 
"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/segmentLabelAnnotations/segment_label_annotation" +: segment_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/shotLabelAnnotations": shot_label_annotations +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoAnnotationResults/shotLabelAnnotations/shot_label_annotation": shot_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoContext": google_cloud_videointelligence_v1beta2_video_context +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoContext/explicitContentDetectionConfig": explicit_content_detection_config +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoContext/labelDetectionConfig": label_detection_config +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoContext/segments": segments +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoContext/segments/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoContext/shotChangeDetectionConfig": shot_change_detection_config +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoSegment": google_cloud_videointelligence_v1beta2_video_segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1beta2_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoProgress": google_cloud_videointelligence_v1p1beta1_annotate_video_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoProgress/annotationProgress": annotation_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoProgress/annotationProgress/annotation_progress": annotation_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoResponse": google_cloud_videointelligence_v1p1beta1_annotate_video_response +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoResponse/annotationResults": annotation_results +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_AnnotateVideoResponse/annotationResults/annotation_result": annotation_result +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_EmotionAttribute": google_cloud_videointelligence_v1p1beta1_emotion_attribute +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_EmotionAttribute/emotion": emotion +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_EmotionAttribute/score": score +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_Entity": google_cloud_videointelligence_v1p1beta1_entity +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_Entity/description": description +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_Entity/entityId": entity_id +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_Entity/languageCode": language_code +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentAnnotation": 
google_cloud_videointelligence_v1p1beta1_explicit_content_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentAnnotation/frames": frames +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentAnnotation/frames/frame": frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentFrame": google_cloud_videointelligence_v1p1beta1_explicit_content_frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentFrame/pornographyLikelihood": pornography_likelihood +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_ExplicitContentFrame/timeOffset": time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAnnotation": google_cloud_videointelligence_v1p1beta1_face_detection_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAnnotation/frames": frames +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAnnotation/frames/frame": frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAnnotation/segments": segments +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAnnotation/segments/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAttribute": google_cloud_videointelligence_v1p1beta1_face_detection_attribute +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAttribute/emotions": emotions +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAttribute/emotions/emotion": emotion +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionAttribute/normalizedBoundingBox": normalized_bounding_box +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionFrame": google_cloud_videointelligence_v1p1beta1_face_detection_frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionFrame/attributes": attributes +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionFrame/attributes/attribute": attribute +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceDetectionFrame/timeOffset": time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceSegment": google_cloud_videointelligence_v1p1beta1_face_segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_FaceSegment/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation": google_cloud_videointelligence_v1p1beta1_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/categoryEntities": category_entities +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/categoryEntities/category_entity": category_entity +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/entity": entity +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/frames": frames +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/frames/frame": frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/segments": segments +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_LabelAnnotation/segments/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_LabelFrame": 
google_cloud_videointelligence_v1p1beta1_label_frame +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_LabelFrame/confidence": confidence +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_LabelFrame/timeOffset": time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_LabelSegment": google_cloud_videointelligence_v1p1beta1_label_segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_LabelSegment/confidence": confidence +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_LabelSegment/segment": segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox": google_cloud_videointelligence_v1p1beta1_normalized_bounding_box +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox/bottom": bottom +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox/left": left +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox/right": right +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_NormalizedBoundingBox/top": top +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative": google_cloud_videointelligence_v1p1beta1_speech_recognition_alternative +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative/confidence": confidence +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative/transcript": transcript +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative/words": words +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_SpeechRecognitionAlternative/words/word": word +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_SpeechTranscription": google_cloud_videointelligence_v1p1beta1_speech_transcription +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_SpeechTranscription/alternatives": alternatives +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_SpeechTranscription/alternatives/alternative": alternative +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress": google_cloud_videointelligence_v1p1beta1_video_annotation_progress +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress/inputUri": input_uri +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress/progressPercent": progress_percent +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress/startTime": start_time +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationProgress/updateTime": update_time +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults": google_cloud_videointelligence_v1p1beta1_video_annotation_results +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/error": error +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/explicitAnnotation": explicit_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/faceDetectionAnnotations": face_detection_annotations +? 
"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/faceDetectionAnnotations/face_detection_annotation" +: face_detection_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/frameLabelAnnotations": frame_label_annotations +? "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/frameLabelAnnotations/frame_label_annotation" +: frame_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/inputUri": input_uri +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/segmentLabelAnnotations": segment_label_annotations +? "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/segmentLabelAnnotations/segment_label_annotation" +: segment_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/shotAnnotations": shot_annotations +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/shotAnnotations/shot_annotation": shot_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/shotLabelAnnotations": shot_label_annotations +? "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/shotLabelAnnotations/shot_label_annotation" +: shot_label_annotation +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/speechTranscriptions": speech_transcriptions +? "/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoAnnotationResults/speechTranscriptions/speech_transcription" +: speech_transcription +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoSegment": google_cloud_videointelligence_v1p1beta1_video_segment +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoSegment/endTimeOffset": end_time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_VideoSegment/startTimeOffset": start_time_offset +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_WordInfo": google_cloud_videointelligence_v1p1beta1_word_info +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_WordInfo/endTime": end_time +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_WordInfo/startTime": start_time +"/videointelligence:v1beta2/GoogleCloudVideointelligenceV1p1beta1_WordInfo/word": word +"/videointelligence:v1beta2/GoogleLongrunning_Operation": google_longrunning_operation +"/videointelligence:v1beta2/GoogleLongrunning_Operation/done": done +"/videointelligence:v1beta2/GoogleLongrunning_Operation/error": error +"/videointelligence:v1beta2/GoogleLongrunning_Operation/metadata": metadata +"/videointelligence:v1beta2/GoogleLongrunning_Operation/metadata/metadatum": metadatum +"/videointelligence:v1beta2/GoogleLongrunning_Operation/name": name +"/videointelligence:v1beta2/GoogleLongrunning_Operation/response": response +"/videointelligence:v1beta2/GoogleLongrunning_Operation/response/response": response +"/videointelligence:v1beta2/GoogleRpc_Status": google_rpc_status +"/videointelligence:v1beta2/GoogleRpc_Status/code": code +"/videointelligence:v1beta2/GoogleRpc_Status/details": details +"/videointelligence:v1beta2/GoogleRpc_Status/details/detail": detail +"/videointelligence:v1beta2/GoogleRpc_Status/details/detail/detail": detail +"/videointelligence:v1beta2/GoogleRpc_Status/message": message 
+"/videointelligence:v1beta2/fields": fields +"/videointelligence:v1beta2/key": key +"/videointelligence:v1beta2/quotaUser": quota_user +"/videointelligence:v1beta2/videointelligence.videos.annotate": annotate_video "/vision:v1/AnnotateImageRequest": annotate_image_request "/vision:v1/AnnotateImageRequest/features": features "/vision:v1/AnnotateImageRequest/features/feature": feature diff --git a/generated/google/apis/classroom_v1.rb b/generated/google/apis/classroom_v1.rb index 808718b6c..8f4135864 100644 --- a/generated/google/apis/classroom_v1.rb +++ b/generated/google/apis/classroom_v1.rb @@ -25,7 +25,7 @@ module Google # @see https://developers.google.com/classroom/ module ClassroomV1 VERSION = 'V1' - REVISION = '20180412' + REVISION = '20180413' # View and manage announcements in Google Classroom AUTH_CLASSROOM_ANNOUNCEMENTS = 'https://www.googleapis.com/auth/classroom.announcements' diff --git a/generated/google/apis/classroom_v1/classes.rb b/generated/google/apis/classroom_v1/classes.rb index 65b4ed2ac..08b00a6dc 100644 --- a/generated/google/apis/classroom_v1/classes.rb +++ b/generated/google/apis/classroom_v1/classes.rb @@ -315,7 +315,7 @@ module Google # Name of the course. # For example, "10th Grade Biology". - # The name is required. It must be between 1 and 750 characters and a valid + # The name is required. It must be between 1 and 50 characters and a valid # UTF-8 string. # Corresponds to the JSON property `name` # @return [String] diff --git a/generated/google/apis/cloudkms_v1.rb b/generated/google/apis/cloudkms_v1.rb index b424c0be5..3c2fbd216 100644 --- a/generated/google/apis/cloudkms_v1.rb +++ b/generated/google/apis/cloudkms_v1.rb @@ -18,7 +18,7 @@ require 'google/apis/cloudkms_v1/representations.rb' module Google module Apis - # Google Cloud Key Management Service (KMS) API + # Cloud Key Management Service (KMS) API # # Manages encryption for your cloud services the same way you do on-premises. # You can generate, use, rotate, and destroy AES256 encryption keys. @@ -26,7 +26,7 @@ module Google # @see https://cloud.google.com/kms/ module CloudkmsV1 VERSION = 'V1' - REVISION = '20180329' + REVISION = '20180413' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/cloudkms_v1/classes.rb b/generated/google/apis/cloudkms_v1/classes.rb index b8d9f2c86..96c6641ec 100644 --- a/generated/google/apis/cloudkms_v1/classes.rb +++ b/generated/google/apis/cloudkms_v1/classes.rb @@ -626,11 +626,11 @@ module Google # Defines an Identity and Access Management (IAM) policy. It is used to # specify access control policies for Cloud Platform resources. - # A `Policy` consists of a list of `bindings`. A `Binding` binds a list of + # A `Policy` consists of a list of `bindings`. A `binding` binds a list of # `members` to a `role`, where the members can be user accounts, Google groups, # Google domains, and service accounts. A `role` is a named list of permissions # defined by IAM. 
- # **Example** + # **JSON Example** # ` # "bindings": [ # ` @@ -639,7 +639,7 @@ module Google # "user:mike@example.com", # "group:admins@example.com", # "domain:google.com", - # "serviceAccount:my-other-app@appspot.gserviceaccount.com", + # "serviceAccount:my-other-app@appspot.gserviceaccount.com" # ] # `, # ` @@ -648,6 +648,17 @@ module Google # ` # ] # ` + # **YAML Example** + # bindings: + # - members: + # - user:mike@example.com + # - group:admins@example.com + # - domain:google.com + # - serviceAccount:my-other-app@appspot.gserviceaccount.com + # role: roles/owner + # - members: + # - user:sean@example.com + # role: roles/viewer # For a description of IAM and its features, see the # [IAM developer's guide](https://cloud.google.com/iam/docs). class Policy @@ -715,11 +726,11 @@ module Google # Defines an Identity and Access Management (IAM) policy. It is used to # specify access control policies for Cloud Platform resources. - # A `Policy` consists of a list of `bindings`. A `Binding` binds a list of + # A `Policy` consists of a list of `bindings`. A `binding` binds a list of # `members` to a `role`, where the members can be user accounts, Google groups, # Google domains, and service accounts. A `role` is a named list of permissions # defined by IAM. - # **Example** + # **JSON Example** # ` # "bindings": [ # ` @@ -728,7 +739,7 @@ module Google # "user:mike@example.com", # "group:admins@example.com", # "domain:google.com", - # "serviceAccount:my-other-app@appspot.gserviceaccount.com", + # "serviceAccount:my-other-app@appspot.gserviceaccount.com" # ] # `, # ` @@ -737,6 +748,17 @@ module Google # ` # ] # ` + # **YAML Example** + # bindings: + # - members: + # - user:mike@example.com + # - group:admins@example.com + # - domain:google.com + # - serviceAccount:my-other-app@appspot.gserviceaccount.com + # role: roles/owner + # - members: + # - user:sean@example.com + # role: roles/viewer # For a description of IAM and its features, see the # [IAM developer's guide](https://cloud.google.com/iam/docs). # Corresponds to the JSON property `policy` diff --git a/generated/google/apis/cloudkms_v1/service.rb b/generated/google/apis/cloudkms_v1/service.rb index ce0b8c0b7..6d8dee567 100644 --- a/generated/google/apis/cloudkms_v1/service.rb +++ b/generated/google/apis/cloudkms_v1/service.rb @@ -20,7 +20,7 @@ require 'google/apis/errors' module Google module Apis module CloudkmsV1 - # Google Cloud Key Management Service (KMS) API + # Cloud Key Management Service (KMS) API # # Manages encryption for your cloud services the same way you do on-premises. # You can generate, use, rotate, and destroy AES256 encryption keys. diff --git a/generated/google/apis/dns_v1beta2.rb b/generated/google/apis/dns_v1beta2.rb index 7719c91ec..a734d651c 100644 --- a/generated/google/apis/dns_v1beta2.rb +++ b/generated/google/apis/dns_v1beta2.rb @@ -25,7 +25,7 @@ module Google # @see https://developers.google.com/cloud-dns module DnsV1beta2 VERSION = 'V1beta2' - REVISION = '20180314' + REVISION = '20180412' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/dns_v1beta2/service.rb b/generated/google/apis/dns_v1beta2/service.rb index 63ea963e3..8429d3a03 100644 --- a/generated/google/apis/dns_v1beta2/service.rb +++ b/generated/google/apis/dns_v1beta2/service.rb @@ -568,7 +568,7 @@ module Google execute_or_queue_command(command, &block) end - # Update an existing ManagedZone. 
This method supports patch semantics. + # Apply a partial update to an existing ManagedZone. # @param [String] project # Identifies the project addressed by this request. # @param [String] managed_zone diff --git a/generated/google/apis/oslogin_v1beta.rb b/generated/google/apis/oslogin_v1beta.rb index e9481ceab..a53f7710b 100644 --- a/generated/google/apis/oslogin_v1beta.rb +++ b/generated/google/apis/oslogin_v1beta.rb @@ -25,7 +25,7 @@ module Google # @see https://cloud.google.com/compute/docs/oslogin/rest/ module OsloginV1beta VERSION = 'V1beta' - REVISION = '20180306' + REVISION = '20180413' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/oslogin_v1beta/classes.rb b/generated/google/apis/oslogin_v1beta/classes.rb index 97d786629..efca8fae7 100644 --- a/generated/google/apis/oslogin_v1beta/classes.rb +++ b/generated/google/apis/oslogin_v1beta/classes.rb @@ -117,6 +117,11 @@ module Google # @return [String] attr_accessor :home_directory + # The operating system type where this account applies. + # Corresponds to the JSON property `operatingSystemType` + # @return [String] + attr_accessor :operating_system_type + # Only one POSIX account can be marked as primary. # Corresponds to the JSON property `primary` # @return [Boolean] @@ -154,6 +159,7 @@ module Google @gecos = args[:gecos] if args.key?(:gecos) @gid = args[:gid] if args.key?(:gid) @home_directory = args[:home_directory] if args.key?(:home_directory) + @operating_system_type = args[:operating_system_type] if args.key?(:operating_system_type) @primary = args[:primary] if args.key?(:primary) @shell = args[:shell] if args.key?(:shell) @system_id = args[:system_id] if args.key?(:system_id) diff --git a/generated/google/apis/oslogin_v1beta/representations.rb b/generated/google/apis/oslogin_v1beta/representations.rb index e9c96c241..2673589be 100644 --- a/generated/google/apis/oslogin_v1beta/representations.rb +++ b/generated/google/apis/oslogin_v1beta/representations.rb @@ -84,6 +84,7 @@ module Google property :gecos, as: 'gecos' property :gid, :numeric_string => true, as: 'gid' property :home_directory, as: 'homeDirectory' + property :operating_system_type, as: 'operatingSystemType' property :primary, as: 'primary' property :shell, as: 'shell' property :system_id, as: 'systemId' diff --git a/generated/google/apis/toolresults_v1beta3.rb b/generated/google/apis/toolresults_v1beta3.rb index bb87e9813..29600213d 100644 --- a/generated/google/apis/toolresults_v1beta3.rb +++ b/generated/google/apis/toolresults_v1beta3.rb @@ -25,7 +25,7 @@ module Google # @see https://firebase.google.com/docs/test-lab/ module ToolresultsV1beta3 VERSION = 'V1beta3' - REVISION = '20180320' + REVISION = '20180416' # View and manage your data across Google Cloud Platform services AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' diff --git a/generated/google/apis/videointelligence_v1.rb b/generated/google/apis/videointelligence_v1.rb new file mode 100644 index 000000000..c3618d580 --- /dev/null +++ b/generated/google/apis/videointelligence_v1.rb @@ -0,0 +1,34 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'google/apis/videointelligence_v1/service.rb' +require 'google/apis/videointelligence_v1/classes.rb' +require 'google/apis/videointelligence_v1/representations.rb' + +module Google + module Apis + # Cloud Video Intelligence API + # + # Cloud Video Intelligence API. + # + # @see https://cloud.google.com/video-intelligence/docs/ + module VideointelligenceV1 + VERSION = 'V1' + REVISION = '20180411' + + # View and manage your data across Google Cloud Platform services + AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' + end + end +end diff --git a/generated/google/apis/videointelligence_v1/classes.rb b/generated/google/apis/videointelligence_v1/classes.rb new file mode 100644 index 000000000..e74d56ac8 --- /dev/null +++ b/generated/google/apis/videointelligence_v1/classes.rb @@ -0,0 +1,2149 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'date' +require 'google/apis/core/base_service' +require 'google/apis/core/json_representation' +require 'google/apis/core/hashable' +require 'google/apis/errors' + +module Google + module Apis + module VideointelligenceV1 + + # Video annotation progress. Included in the `metadata` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1AnnotateVideoProgress + include Google::Apis::Core::Hashable + + # Progress metadata for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationProgress` + # @return [Array] + attr_accessor :annotation_progress + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress) + end + end + + # Video annotation request. + class GoogleCloudVideointelligenceV1AnnotateVideoRequest + include Google::Apis::Core::Hashable + + # Requested video annotation features. + # Corresponds to the JSON property `features` + # @return [Array] + attr_accessor :features + + # The video data bytes. + # If unset, the input video(s) should be specified via `input_uri`. + # If set, `input_uri` should be unset. + # Corresponds to the JSON property `inputContent` + # NOTE: Values are automatically base64 encoded/decoded in the client library. + # @return [String] + attr_accessor :input_content + + # Input video location. 
Currently, only + # [Google Cloud Storage](https://cloud.google.com/storage/) URIs are + # supported, which must be specified in the following format: + # `gs://bucket-id/object-id` (other URI formats return + # google.rpc.Code.INVALID_ARGUMENT). For more information, see + # [Request URIs](/storage/docs/reference-uris). + # A video URI may include wildcards in `object-id`, and thus identify + # multiple videos. Supported wildcards: '*' to match 0 or more characters; + # '?' to match 1 character. If unset, the input video should be embedded + # in the request as `input_content`. If set, `input_content` should be unset. + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Optional cloud region where annotation should take place. Supported cloud + # regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region + # is specified, a region will be determined based on video file location. + # Corresponds to the JSON property `locationId` + # @return [String] + attr_accessor :location_id + + # Optional location where the output (in JSON format) should be stored. + # Currently, only [Google Cloud Storage](https://cloud.google.com/storage/) + # URIs are supported, which must be specified in the following format: + # `gs://bucket-id/object-id` (other URI formats return + # google.rpc.Code.INVALID_ARGUMENT). For more information, see + # [Request URIs](/storage/docs/reference-uris). + # Corresponds to the JSON property `outputUri` + # @return [String] + attr_accessor :output_uri + + # Video context and/or feature-specific parameters. + # Corresponds to the JSON property `videoContext` + # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1VideoContext] + attr_accessor :video_context + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @features = args[:features] if args.key?(:features) + @input_content = args[:input_content] if args.key?(:input_content) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @location_id = args[:location_id] if args.key?(:location_id) + @output_uri = args[:output_uri] if args.key?(:output_uri) + @video_context = args[:video_context] if args.key?(:video_context) + end + end + + # Video annotation response. Included in the `response` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1AnnotateVideoResponse + include Google::Apis::Core::Hashable + + # Annotation results for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationResults` + # @return [Array] + attr_accessor :annotation_results + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_results = args[:annotation_results] if args.key?(:annotation_results) + end + end + + # Detected entity from video analysis. + class GoogleCloudVideointelligenceV1Entity + include Google::Apis::Core::Hashable + + # Textual description, e.g. `Fixed-gear bicycle`. + # Corresponds to the JSON property `description` + # @return [String] + attr_accessor :description + + # Opaque entity ID. Some IDs may be available in + # [Google Knowledge Graph Search + # API](https://developers.google.com/knowledge-graph/). + # Corresponds to the JSON property `entityId` + # @return [String] + attr_accessor :entity_id + + # Language code for `description` in BCP-47 format. 
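Taken together, the AnnotateVideoRequest fields above reduce to a small client-side call. A minimal sketch, assuming the generated v1 service class is named CloudVideoIntelligenceService (check service.rb), that application-default credentials are available, and that the gs:// object exists; the v1beta2 name map earlier in this patch lists the method as annotate_video, and the v1 service is assumed to expose the same name:

    require 'google/apis/videointelligence_v1'
    require 'googleauth'

    Vi = Google::Apis::VideointelligenceV1

    service = Vi::CloudVideoIntelligenceService.new   # class name assumed; see service.rb
    service.authorization = Google::Auth.get_application_default(
      [Vi::AUTH_CLOUD_PLATFORM]
    )

    request = Vi::GoogleCloudVideointelligenceV1AnnotateVideoRequest.new(
      input_uri:   'gs://example-bucket/example-video.mp4',   # hypothetical object
      features:    ['LABEL_DETECTION', 'SHOT_CHANGE_DETECTION'],
      location_id: 'us-east1'                                  # optional, per the field docs
    )

    # Annotation is asynchronous: the call returns a long-running operation.
    operation = service.annotate_video(request)
    puts "started #{operation.name}"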
+ # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @description = args[:description] if args.key?(:description) + @entity_id = args[:entity_id] if args.key?(:entity_id) + @language_code = args[:language_code] if args.key?(:language_code) + end + end + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + class GoogleCloudVideointelligenceV1ExplicitContentAnnotation + include Google::Apis::Core::Hashable + + # All video frames where explicit content was detected. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @frames = args[:frames] if args.key?(:frames) + end + end + + # Config for EXPLICIT_CONTENT_DETECTION. + class GoogleCloudVideointelligenceV1ExplicitContentDetectionConfig + include Google::Apis::Core::Hashable + + # Model to use for explicit content detection. + # Supported values: "builtin/stable" (the default if unset) and + # "builtin/latest". + # Corresponds to the JSON property `model` + # @return [String] + attr_accessor :model + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @model = args[:model] if args.key?(:model) + end + end + + # Video frame level annotation results for explicit content. + class GoogleCloudVideointelligenceV1ExplicitContentFrame + include Google::Apis::Core::Hashable + + # Likelihood of the pornography content.. + # Corresponds to the JSON property `pornographyLikelihood` + # @return [String] + attr_accessor :pornography_likelihood + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @pornography_likelihood = args[:pornography_likelihood] if args.key?(:pornography_likelihood) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Label annotation. + class GoogleCloudVideointelligenceV1LabelAnnotation + include Google::Apis::Core::Hashable + + # Common categories for the detected entity. + # E.g. when the label is `Terrier` the category is likely `dog`. And in some + # cases there might be more than one categories e.g. `Terrier` could also be + # a `pet`. + # Corresponds to the JSON property `categoryEntities` + # @return [Array] + attr_accessor :category_entities + + # Detected entity from video analysis. + # Corresponds to the JSON property `entity` + # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1Entity] + attr_accessor :entity + + # All video frames where a label was detected. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # All video segments where a label was detected. 
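Each ExplicitContentFrame pairs a likelihood string with a time offset, so filtering a finished ExplicitContentAnnotation is a short exercise. A small helper sketch, assuming the likelihood values follow the usual Cloud likelihood enum ('LIKELY', 'VERY_LIKELY', and so on), since this patch only types the field as a String:

    # 'annotation' is a GoogleCloudVideointelligenceV1ExplicitContentAnnotation
    # taken from a finished video annotation result.
    def flagged_offsets(annotation)
      (annotation.frames || []).select do |frame|
        # Enum spelling assumed, e.g. 'LIKELY', 'VERY_LIKELY'.
        %w[LIKELY VERY_LIKELY].include?(frame.pornography_likelihood)
      end.map(&:time_offset)
    end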
+ # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @category_entities = args[:category_entities] if args.key?(:category_entities) + @entity = args[:entity] if args.key?(:entity) + @frames = args[:frames] if args.key?(:frames) + @segments = args[:segments] if args.key?(:segments) + end + end + + # Config for LABEL_DETECTION. + class GoogleCloudVideointelligenceV1LabelDetectionConfig + include Google::Apis::Core::Hashable + + # What labels should be detected with LABEL_DETECTION, in addition to + # video-level labels or segment-level labels. + # If unspecified, defaults to `SHOT_MODE`. + # Corresponds to the JSON property `labelDetectionMode` + # @return [String] + attr_accessor :label_detection_mode + + # Model to use for label detection. + # Supported values: "builtin/stable" (the default if unset) and + # "builtin/latest". + # Corresponds to the JSON property `model` + # @return [String] + attr_accessor :model + + # Whether the video has been shot from a stationary (i.e. non-moving) camera. + # When set to true, might improve detection accuracy for moving objects. + # Should be used with `SHOT_AND_FRAME_MODE` enabled. + # Corresponds to the JSON property `stationaryCamera` + # @return [Boolean] + attr_accessor :stationary_camera + alias_method :stationary_camera?, :stationary_camera + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @label_detection_mode = args[:label_detection_mode] if args.key?(:label_detection_mode) + @model = args[:model] if args.key?(:model) + @stationary_camera = args[:stationary_camera] if args.key?(:stationary_camera) + end + end + + # Video frame level annotation results for label detection. + class GoogleCloudVideointelligenceV1LabelFrame + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for label detection. + class GoogleCloudVideointelligenceV1LabelSegment + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Config for SHOT_CHANGE_DETECTION. + class GoogleCloudVideointelligenceV1ShotChangeDetectionConfig + include Google::Apis::Core::Hashable + + # Model to use for shot change detection. 
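The three LABEL_DETECTION knobs documented above combine into a single config object. The values below are the ones the field docs themselves name (SHOT_AND_FRAME_MODE, the stationary-camera hint, and the "builtin/stable" model); wiring the config into a request happens through the video context shown further down:

    require 'google/apis/videointelligence_v1'

    # Ask for frame-level as well as shot-level labels, and hint that the
    # camera does not move (only meaningful together with SHOT_AND_FRAME_MODE).
    label_config =
      Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1LabelDetectionConfig.new(
        label_detection_mode: 'SHOT_AND_FRAME_MODE',
        stationary_camera: true,
        model: 'builtin/stable'   # the documented default
      )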
+ # Supported values: "builtin/stable" (the default if unset) and + # "builtin/latest". + # Corresponds to the JSON property `model` + # @return [String] + attr_accessor :model + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @model = args[:model] if args.key?(:model) + end + end + + # Annotation progress for a single video. + class GoogleCloudVideointelligenceV1VideoAnnotationProgress + include Google::Apis::Core::Hashable + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Approximate percentage processed thus far. + # Guaranteed to be 100 when fully processed. + # Corresponds to the JSON property `progressPercent` + # @return [Fixnum] + attr_accessor :progress_percent + + # Time when the request was received. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Time of the most recent update. + # Corresponds to the JSON property `updateTime` + # @return [String] + attr_accessor :update_time + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @progress_percent = args[:progress_percent] if args.key?(:progress_percent) + @start_time = args[:start_time] if args.key?(:start_time) + @update_time = args[:update_time] if args.key?(:update_time) + end + end + + # Annotation results for a single video. + class GoogleCloudVideointelligenceV1VideoAnnotationResults + include Google::Apis::Core::Hashable + + # The `Status` type defines a logical error model that is suitable for different + # programming environments, including REST APIs and RPC APIs. It is used by + # [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error message, + # and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. 
If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1::GoogleRpcStatus] + attr_accessor :error + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + # Corresponds to the JSON property `explicitAnnotation` + # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1ExplicitContentAnnotation] + attr_accessor :explicit_annotation + + # Label annotations on frame level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `frameLabelAnnotations` + # @return [Array] + attr_accessor :frame_label_annotations + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Label annotations on video level or user specified segment level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `segmentLabelAnnotations` + # @return [Array] + attr_accessor :segment_label_annotations + + # Shot annotations. Each shot is represented as a video segment. + # Corresponds to the JSON property `shotAnnotations` + # @return [Array] + attr_accessor :shot_annotations + + # Label annotations on shot level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `shotLabelAnnotations` + # @return [Array] + attr_accessor :shot_label_annotations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @error = args[:error] if args.key?(:error) + @explicit_annotation = args[:explicit_annotation] if args.key?(:explicit_annotation) + @frame_label_annotations = args[:frame_label_annotations] if args.key?(:frame_label_annotations) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @segment_label_annotations = args[:segment_label_annotations] if args.key?(:segment_label_annotations) + @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations) + @shot_label_annotations = args[:shot_label_annotations] if args.key?(:shot_label_annotations) + end + end + + # Video context and/or feature-specific parameters. + class GoogleCloudVideointelligenceV1VideoContext + include Google::Apis::Core::Hashable + + # Config for EXPLICIT_CONTENT_DETECTION. + # Corresponds to the JSON property `explicitContentDetectionConfig` + # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1ExplicitContentDetectionConfig] + attr_accessor :explicit_content_detection_config + + # Config for LABEL_DETECTION. 
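Reading a finished result back is mostly attribute navigation over the classes above. A sketch, assuming `results` holds one GoogleCloudVideointelligenceV1VideoAnnotationResults pulled out of the completed operation:

    # 'results' is a GoogleCloudVideointelligenceV1VideoAnnotationResults.
    if results.error
      warn "annotation of #{results.input_uri} failed: #{results.error.message}"
    else
      (results.segment_label_annotations || []).each do |label|
        best = (label.segments || []).max_by(&:confidence)
        next unless best
        puts format('%-30s %.2f  %s .. %s',
                    label.entity && label.entity.description,
                    best.confidence,
                    best.segment && best.segment.start_time_offset,
                    best.segment && best.segment.end_time_offset)
      end
      puts "#{(results.shot_annotations || []).size} shots detected"
    end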
+ # Corresponds to the JSON property `labelDetectionConfig` + # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1LabelDetectionConfig] + attr_accessor :label_detection_config + + # Video segments to annotate. The segments may overlap and are not required + # to be contiguous or span the whole video. If unspecified, each video + # is treated as a single segment. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + # Config for SHOT_CHANGE_DETECTION. + # Corresponds to the JSON property `shotChangeDetectionConfig` + # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1ShotChangeDetectionConfig] + attr_accessor :shot_change_detection_config + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @explicit_content_detection_config = args[:explicit_content_detection_config] if args.key?(:explicit_content_detection_config) + @label_detection_config = args[:label_detection_config] if args.key?(:label_detection_config) + @segments = args[:segments] if args.key?(:segments) + @shot_change_detection_config = args[:shot_change_detection_config] if args.key?(:shot_change_detection_config) + end + end + + # Video segment. + class GoogleCloudVideointelligenceV1VideoSegment + include Google::Apis::Core::Hashable + + # Time-offset, relative to the beginning of the video, + # corresponding to the end of the segment (inclusive). + # Corresponds to the JSON property `endTimeOffset` + # @return [String] + attr_accessor :end_time_offset + + # Time-offset, relative to the beginning of the video, + # corresponding to the start of the segment (inclusive). + # Corresponds to the JSON property `startTimeOffset` + # @return [String] + attr_accessor :start_time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time_offset = args[:end_time_offset] if args.key?(:end_time_offset) + @start_time_offset = args[:start_time_offset] if args.key?(:start_time_offset) + end + end + + # Video annotation progress. Included in the `metadata` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1beta1AnnotateVideoProgress + include Google::Apis::Core::Hashable + + # Progress metadata for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationProgress` + # @return [Array] + attr_accessor :annotation_progress + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress) + end + end + + # Video annotation response. Included in the `response` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1beta1AnnotateVideoResponse + include Google::Apis::Core::Hashable + + # Annotation results for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationResults` + # @return [Array] + attr_accessor :annotation_results + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_results = args[:annotation_results] if args.key?(:annotation_results) + end + end + + # Label annotation. 
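VideoContext is the envelope that carries the per-feature configs plus any sub-segments to restrict annotation to. A sketch that annotates only the first two minutes; the "Ns" duration-string spelling for the offsets is an assumption about the JSON encoding, since this patch types them as plain Strings, and `label_config` and `request` refer to the earlier sketches:

    vi = Google::Apis::VideointelligenceV1

    context = vi::GoogleCloudVideointelligenceV1VideoContext.new(
      segments: [
        vi::GoogleCloudVideointelligenceV1VideoSegment.new(
          start_time_offset: '0s',     # offsets are relative to the start of the video
          end_time_offset:   '120s'    # duration-string format assumed
        )
      ],
      label_detection_config: label_config
    )
    request.video_context = context   # 'request' from the annotate_video sketch above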
+ class GoogleCloudVideointelligenceV1beta1LabelAnnotation + include Google::Apis::Core::Hashable + + # Textual description, e.g. `Fixed-gear bicycle`. + # Corresponds to the JSON property `description` + # @return [String] + attr_accessor :description + + # Language code for `description` in BCP-47 format. + # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + # Where the label was detected and with what confidence. + # Corresponds to the JSON property `locations` + # @return [Array] + attr_accessor :locations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @description = args[:description] if args.key?(:description) + @language_code = args[:language_code] if args.key?(:language_code) + @locations = args[:locations] if args.key?(:locations) + end + end + + # Label location. + class GoogleCloudVideointelligenceV1beta1LabelLocation + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Label level. + # Corresponds to the JSON property `level` + # @return [String] + attr_accessor :level + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @level = args[:level] if args.key?(:level) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Safe search annotation (based on per-frame visual signals only). + # If no unsafe content has been detected in a frame, no annotations + # are present for that frame. If only some types of unsafe content + # have been detected in a frame, the likelihood is set to `UNKNOWN` + # for all other types of unsafe content. + class GoogleCloudVideointelligenceV1beta1SafeSearchAnnotation + include Google::Apis::Core::Hashable + + # Likelihood of adult content. + # Corresponds to the JSON property `adult` + # @return [String] + attr_accessor :adult + + # Likelihood of medical content. + # Corresponds to the JSON property `medical` + # @return [String] + attr_accessor :medical + + # Likelihood of racy content. + # Corresponds to the JSON property `racy` + # @return [String] + attr_accessor :racy + + # Likelihood that an obvious modification was made to the original + # version to make it appear funny or offensive. + # Corresponds to the JSON property `spoof` + # @return [String] + attr_accessor :spoof + + # Video time offset in microseconds. + # Corresponds to the JSON property `timeOffset` + # @return [Fixnum] + attr_accessor :time_offset + + # Likelihood of violent content. + # Corresponds to the JSON property `violent` + # @return [String] + attr_accessor :violent + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @adult = args[:adult] if args.key?(:adult) + @medical = args[:medical] if args.key?(:medical) + @racy = args[:racy] if args.key?(:racy) + @spoof = args[:spoof] if args.key?(:spoof) + @time_offset = args[:time_offset] if args.key?(:time_offset) + @violent = args[:violent] if args.key?(:violent) + end + end + + # Annotation progress for a single video. 
+ class GoogleCloudVideointelligenceV1beta1VideoAnnotationProgress + include Google::Apis::Core::Hashable + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Approximate percentage processed thus far. + # Guaranteed to be 100 when fully processed. + # Corresponds to the JSON property `progressPercent` + # @return [Fixnum] + attr_accessor :progress_percent + + # Time when the request was received. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Time of the most recent update. + # Corresponds to the JSON property `updateTime` + # @return [String] + attr_accessor :update_time + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @progress_percent = args[:progress_percent] if args.key?(:progress_percent) + @start_time = args[:start_time] if args.key?(:start_time) + @update_time = args[:update_time] if args.key?(:update_time) + end + end + + # Annotation results for a single video. + class GoogleCloudVideointelligenceV1beta1VideoAnnotationResults + include Google::Apis::Core::Hashable + + # The `Status` type defines a logical error model that is suitable for different + # programming environments, including REST APIs and RPC APIs. It is used by + # [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error message, + # and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. 
If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1::GoogleRpcStatus] + attr_accessor :error + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Label annotations. There is exactly one element for each unique label. + # Corresponds to the JSON property `labelAnnotations` + # @return [Array] + attr_accessor :label_annotations + + # Safe search annotations. + # Corresponds to the JSON property `safeSearchAnnotations` + # @return [Array] + attr_accessor :safe_search_annotations + + # Shot annotations. Each shot is represented as a video segment. + # Corresponds to the JSON property `shotAnnotations` + # @return [Array] + attr_accessor :shot_annotations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @error = args[:error] if args.key?(:error) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @label_annotations = args[:label_annotations] if args.key?(:label_annotations) + @safe_search_annotations = args[:safe_search_annotations] if args.key?(:safe_search_annotations) + @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations) + end + end + + # Video segment. + class GoogleCloudVideointelligenceV1beta1VideoSegment + include Google::Apis::Core::Hashable + + # End offset in microseconds (inclusive). Unset means 0. + # Corresponds to the JSON property `endTimeOffset` + # @return [Fixnum] + attr_accessor :end_time_offset + + # Start offset in microseconds (inclusive). Unset means 0. + # Corresponds to the JSON property `startTimeOffset` + # @return [Fixnum] + attr_accessor :start_time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time_offset = args[:end_time_offset] if args.key?(:end_time_offset) + @start_time_offset = args[:start_time_offset] if args.key?(:start_time_offset) + end + end + + # Video annotation progress. Included in the `metadata` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1beta2AnnotateVideoProgress + include Google::Apis::Core::Hashable + + # Progress metadata for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationProgress` + # @return [Array] + attr_accessor :annotation_progress + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress) + end + end + + # Video annotation response. Included in the `response` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1beta2AnnotateVideoResponse + include Google::Apis::Core::Hashable + + # Annotation results for all videos specified in `AnnotateVideoRequest`. 
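One type change worth flagging: the v1beta1 segment offsets just above are integers in microseconds (unset meaning 0), whereas the v1 and v1beta2 segments elsewhere in this file carry duration strings. Converting the beta1 form to seconds is plain arithmetic; a sketch, assuming `segment` is a GoogleCloudVideointelligenceV1beta1VideoSegment:

    start_s = (segment.start_time_offset || 0) / 1_000_000.0   # microseconds -> seconds
    end_s   = (segment.end_time_offset   || 0) / 1_000_000.0
    puts format('segment %.3fs .. %.3fs', start_s, end_s)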
+ # Corresponds to the JSON property `annotationResults` + # @return [Array] + attr_accessor :annotation_results + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_results = args[:annotation_results] if args.key?(:annotation_results) + end + end + + # Detected entity from video analysis. + class GoogleCloudVideointelligenceV1beta2Entity + include Google::Apis::Core::Hashable + + # Textual description, e.g. `Fixed-gear bicycle`. + # Corresponds to the JSON property `description` + # @return [String] + attr_accessor :description + + # Opaque entity ID. Some IDs may be available in + # [Google Knowledge Graph Search + # API](https://developers.google.com/knowledge-graph/). + # Corresponds to the JSON property `entityId` + # @return [String] + attr_accessor :entity_id + + # Language code for `description` in BCP-47 format. + # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @description = args[:description] if args.key?(:description) + @entity_id = args[:entity_id] if args.key?(:entity_id) + @language_code = args[:language_code] if args.key?(:language_code) + end + end + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + class GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation + include Google::Apis::Core::Hashable + + # All video frames where explicit content was detected. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @frames = args[:frames] if args.key?(:frames) + end + end + + # Video frame level annotation results for explicit content. + class GoogleCloudVideointelligenceV1beta2ExplicitContentFrame + include Google::Apis::Core::Hashable + + # Likelihood of the pornography content.. + # Corresponds to the JSON property `pornographyLikelihood` + # @return [String] + attr_accessor :pornography_likelihood + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @pornography_likelihood = args[:pornography_likelihood] if args.key?(:pornography_likelihood) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Label annotation. + class GoogleCloudVideointelligenceV1beta2LabelAnnotation + include Google::Apis::Core::Hashable + + # Common categories for the detected entity. + # E.g. when the label is `Terrier` the category is likely `dog`. And in some + # cases there might be more than one categories e.g. `Terrier` could also be + # a `pet`. + # Corresponds to the JSON property `categoryEntities` + # @return [Array] + attr_accessor :category_entities + + # Detected entity from video analysis. + # Corresponds to the JSON property `entity` + # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2Entity] + attr_accessor :entity + + # All video frames where a label was detected. 
+ # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # All video segments where a label was detected. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @category_entities = args[:category_entities] if args.key?(:category_entities) + @entity = args[:entity] if args.key?(:entity) + @frames = args[:frames] if args.key?(:frames) + @segments = args[:segments] if args.key?(:segments) + end + end + + # Video frame level annotation results for label detection. + class GoogleCloudVideointelligenceV1beta2LabelFrame + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for label detection. + class GoogleCloudVideointelligenceV1beta2LabelSegment + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Annotation progress for a single video. + class GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress + include Google::Apis::Core::Hashable + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Approximate percentage processed thus far. + # Guaranteed to be 100 when fully processed. + # Corresponds to the JSON property `progressPercent` + # @return [Fixnum] + attr_accessor :progress_percent + + # Time when the request was received. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Time of the most recent update. + # Corresponds to the JSON property `updateTime` + # @return [String] + attr_accessor :update_time + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @progress_percent = args[:progress_percent] if args.key?(:progress_percent) + @start_time = args[:start_time] if args.key?(:start_time) + @update_time = args[:update_time] if args.key?(:update_time) + end + end + + # Annotation results for a single video. 
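While the operation is still running, per-video progress rides in the operation's metadata rather than its response. The sketch below reads it straight out of that metadata; treating the metadata as a plain Hash keyed by the JSON field names ('annotationProgress', 'progressPercent', and so on) is an assumption about how the generated client surfaces it:

    # 'operation' is the long-running operation returned by annotate_video.
    unless operation.done
      progress = (operation.metadata || {})['annotationProgress'] || []
      progress.each do |p|
        puts "#{p['inputUri']}: #{p['progressPercent']}% (updated #{p['updateTime']})"
      end
    end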
+ class GoogleCloudVideointelligenceV1beta2VideoAnnotationResults + include Google::Apis::Core::Hashable + + # The `Status` type defines a logical error model that is suitable for different + # programming environments, including REST APIs and RPC APIs. It is used by + # [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error message, + # and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1::GoogleRpcStatus] + attr_accessor :error + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + # Corresponds to the JSON property `explicitAnnotation` + # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation] + attr_accessor :explicit_annotation + + # Label annotations on frame level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `frameLabelAnnotations` + # @return [Array] + attr_accessor :frame_label_annotations + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). 
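+        # (Editor's note: typically a `gs://bucket-name/object-name` style URI; the
+        # example format here is an editorial illustration, not generated text.)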
+ # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Label annotations on video level or user specified segment level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `segmentLabelAnnotations` + # @return [Array] + attr_accessor :segment_label_annotations + + # Shot annotations. Each shot is represented as a video segment. + # Corresponds to the JSON property `shotAnnotations` + # @return [Array] + attr_accessor :shot_annotations + + # Label annotations on shot level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `shotLabelAnnotations` + # @return [Array] + attr_accessor :shot_label_annotations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @error = args[:error] if args.key?(:error) + @explicit_annotation = args[:explicit_annotation] if args.key?(:explicit_annotation) + @frame_label_annotations = args[:frame_label_annotations] if args.key?(:frame_label_annotations) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @segment_label_annotations = args[:segment_label_annotations] if args.key?(:segment_label_annotations) + @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations) + @shot_label_annotations = args[:shot_label_annotations] if args.key?(:shot_label_annotations) + end + end + + # Video segment. + class GoogleCloudVideointelligenceV1beta2VideoSegment + include Google::Apis::Core::Hashable + + # Time-offset, relative to the beginning of the video, + # corresponding to the end of the segment (inclusive). + # Corresponds to the JSON property `endTimeOffset` + # @return [String] + attr_accessor :end_time_offset + + # Time-offset, relative to the beginning of the video, + # corresponding to the start of the segment (inclusive). + # Corresponds to the JSON property `startTimeOffset` + # @return [String] + attr_accessor :start_time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time_offset = args[:end_time_offset] if args.key?(:end_time_offset) + @start_time_offset = args[:start_time_offset] if args.key?(:start_time_offset) + end + end + + # Video annotation progress. Included in the `metadata` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoProgress + include Google::Apis::Core::Hashable + + # Progress metadata for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationProgress` + # @return [Array] + attr_accessor :annotation_progress + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress) + end + end + + # Video annotation response. Included in the `response` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoResponse + include Google::Apis::Core::Hashable + + # Annotation results for all videos specified in `AnnotateVideoRequest`. 
+        # Corresponds to the JSON property `annotationResults`
+        # @return [Array]
+        attr_accessor :annotation_results
+
+        def initialize(**args)
+          update!(**args)
+        end
+
+        # Update properties of this object
+        def update!(**args)
+          @annotation_results = args[:annotation_results] if args.key?(:annotation_results)
+        end
+      end
+
+      # Emotion attribute.
+      class GoogleCloudVideointelligenceV1p1beta1EmotionAttribute
+        include Google::Apis::Core::Hashable
+
+        # Emotion entry.
+        # Corresponds to the JSON property `emotion`
+        # @return [String]
+        attr_accessor :emotion
+
+        # Confidence score.
+        # Corresponds to the JSON property `score`
+        # @return [Float]
+        attr_accessor :score
+
+        def initialize(**args)
+          update!(**args)
+        end
+
+        # Update properties of this object
+        def update!(**args)
+          @emotion = args[:emotion] if args.key?(:emotion)
+          @score = args[:score] if args.key?(:score)
+        end
+      end
+
+      # Detected entity from video analysis.
+      class GoogleCloudVideointelligenceV1p1beta1Entity
+        include Google::Apis::Core::Hashable
+
+        # Textual description, e.g. `Fixed-gear bicycle`.
+        # Corresponds to the JSON property `description`
+        # @return [String]
+        attr_accessor :description
+
+        # Opaque entity ID. Some IDs may be available in
+        # [Google Knowledge Graph Search
+        # API](https://developers.google.com/knowledge-graph/).
+        # Corresponds to the JSON property `entityId`
+        # @return [String]
+        attr_accessor :entity_id
+
+        # Language code for `description` in BCP-47 format.
+        # Corresponds to the JSON property `languageCode`
+        # @return [String]
+        attr_accessor :language_code
+
+        def initialize(**args)
+          update!(**args)
+        end
+
+        # Update properties of this object
+        def update!(**args)
+          @description = args[:description] if args.key?(:description)
+          @entity_id = args[:entity_id] if args.key?(:entity_id)
+          @language_code = args[:language_code] if args.key?(:language_code)
+        end
+      end
+
+      # Explicit content annotation (based on per-frame visual signals only).
+      # If no explicit content has been detected in a frame, no annotations are
+      # present for that frame.
+      class GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation
+        include Google::Apis::Core::Hashable
+
+        # All video frames where explicit content was detected.
+        # Corresponds to the JSON property `frames`
+        # @return [Array]
+        attr_accessor :frames
+
+        def initialize(**args)
+          update!(**args)
+        end
+
+        # Update properties of this object
+        def update!(**args)
+          @frames = args[:frames] if args.key?(:frames)
+        end
+      end
+
+      # Video frame level annotation results for explicit content.
+      class GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame
+        include Google::Apis::Core::Hashable
+
+        # Likelihood of the pornography content.
+        # Corresponds to the JSON property `pornographyLikelihood`
+        # @return [String]
+        attr_accessor :pornography_likelihood
+
+        # Time-offset, relative to the beginning of the video, corresponding to the
+        # video frame for this location.
+        # Corresponds to the JSON property `timeOffset`
+        # @return [String]
+        attr_accessor :time_offset
+
+        def initialize(**args)
+          update!(**args)
+        end
+
+        # Update properties of this object
+        def update!(**args)
+          @pornography_likelihood = args[:pornography_likelihood] if args.key?(:pornography_likelihood)
+          @time_offset = args[:time_offset] if args.key?(:time_offset)
+        end
+      end
+
+      # Face detection annotation.
+      class GoogleCloudVideointelligenceV1p1beta1FaceDetectionAnnotation
+        include Google::Apis::Core::Hashable
+
+        # All video frames where a face was detected.
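+        # Editor's note: a minimal illustrative sketch (not generated code) showing
+        # how a client might walk these frames, assuming `face` is an instance of
+        # this class and uses only accessors defined on the nested frame/attribute
+        # types below:
+        #
+        #   face.frames.to_a.each do |frame|
+        #     frame.attributes.to_a.each do |attr|
+        #       box = attr.normalized_bounding_box
+        #       puts [frame.time_offset, box&.left, box&.top].inspect
+        #     end
+        #   end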
+        # Corresponds to the JSON property `frames`
+        # @return [Array]
+        attr_accessor :frames
+
+        # All video segments where a face was detected.
+        # Corresponds to the JSON property `segments`
+        # @return [Array]
+        attr_accessor :segments
+
+        def initialize(**args)
+          update!(**args)
+        end
+
+        # Update properties of this object
+        def update!(**args)
+          @frames = args[:frames] if args.key?(:frames)
+          @segments = args[:segments] if args.key?(:segments)
+        end
+      end
+
+      # Face detection attribute.
+      class GoogleCloudVideointelligenceV1p1beta1FaceDetectionAttribute
+        include Google::Apis::Core::Hashable
+
+        # Emotion attributes.
+        # Corresponds to the JSON property `emotions`
+        # @return [Array]
+        attr_accessor :emotions
+
+        # Normalized bounding box.
+        # The normalized vertex coordinates are relative to the original image.
+        # Range: [0, 1].
+        # Corresponds to the JSON property `normalizedBoundingBox`
+        # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox]
+        attr_accessor :normalized_bounding_box
+
+        def initialize(**args)
+          update!(**args)
+        end
+
+        # Update properties of this object
+        def update!(**args)
+          @emotions = args[:emotions] if args.key?(:emotions)
+          @normalized_bounding_box = args[:normalized_bounding_box] if args.key?(:normalized_bounding_box)
+        end
+      end
+
+      # Video frame level annotation results for face detection.
+      class GoogleCloudVideointelligenceV1p1beta1FaceDetectionFrame
+        include Google::Apis::Core::Hashable
+
+        # Face attributes in a frame.
+        # There can be more than one attribute if the same face is detected in
+        # multiple locations within the current frame.
+        # Corresponds to the JSON property `attributes`
+        # @return [Array]
+        attr_accessor :attributes
+
+        # Time-offset, relative to the beginning of the video,
+        # corresponding to the video frame for this location.
+        # Corresponds to the JSON property `timeOffset`
+        # @return [String]
+        attr_accessor :time_offset
+
+        def initialize(**args)
+          update!(**args)
+        end
+
+        # Update properties of this object
+        def update!(**args)
+          @attributes = args[:attributes] if args.key?(:attributes)
+          @time_offset = args[:time_offset] if args.key?(:time_offset)
+        end
+      end
+
+      # Video segment level annotation results for face detection.
+      class GoogleCloudVideointelligenceV1p1beta1FaceSegment
+        include Google::Apis::Core::Hashable
+
+        # Video segment.
+        # Corresponds to the JSON property `segment`
+        # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1VideoSegment]
+        attr_accessor :segment
+
+        def initialize(**args)
+          update!(**args)
+        end
+
+        # Update properties of this object
+        def update!(**args)
+          @segment = args[:segment] if args.key?(:segment)
+        end
+      end
+
+      # Label annotation.
+      class GoogleCloudVideointelligenceV1p1beta1LabelAnnotation
+        include Google::Apis::Core::Hashable
+
+        # Common categories for the detected entity.
+        # E.g. when the label is `Terrier` the category is likely `dog`. And in some
+        # cases there might be more than one category, e.g. `Terrier` could also be
+        # a `pet`.
+        # Corresponds to the JSON property `categoryEntities`
+        # @return [Array]
+        attr_accessor :category_entities
+
+        # Detected entity from video analysis.
+        # Corresponds to the JSON property `entity`
+        # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1Entity]
+        attr_accessor :entity
+
+        # All video frames where a label was detected.
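+        # Editor's note: illustrative only (not generated code). Assuming `label` is
+        # an instance of this class, per-segment confidences could be read as:
+        #
+        #   label.segments.to_a.each do |seg|
+        #     range = "#{seg.segment&.start_time_offset}..#{seg.segment&.end_time_offset}"
+        #     puts "#{label.entity&.description}: #{seg.confidence} in #{range}"
+        #   end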
+ # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # All video segments where a label was detected. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @category_entities = args[:category_entities] if args.key?(:category_entities) + @entity = args[:entity] if args.key?(:entity) + @frames = args[:frames] if args.key?(:frames) + @segments = args[:segments] if args.key?(:segments) + end + end + + # Video frame level annotation results for label detection. + class GoogleCloudVideointelligenceV1p1beta1LabelFrame + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for label detection. + class GoogleCloudVideointelligenceV1p1beta1LabelSegment + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Normalized bounding box. + # The normalized vertex coordinates are relative to the original image. + # Range: [0, 1]. + class GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox + include Google::Apis::Core::Hashable + + # Bottom Y coordinate. + # Corresponds to the JSON property `bottom` + # @return [Float] + attr_accessor :bottom + + # Left X coordinate. + # Corresponds to the JSON property `left` + # @return [Float] + attr_accessor :left + + # Right X coordinate. + # Corresponds to the JSON property `right` + # @return [Float] + attr_accessor :right + + # Top Y coordinate. + # Corresponds to the JSON property `top` + # @return [Float] + attr_accessor :top + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @bottom = args[:bottom] if args.key?(:bottom) + @left = args[:left] if args.key?(:left) + @right = args[:right] if args.key?(:right) + @top = args[:top] if args.key?(:top) + end + end + + # Alternative hypotheses (a.k.a. n-best list). + class GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative + include Google::Apis::Core::Hashable + + # Output only. The confidence estimate between 0.0 and 1.0. A higher number + # indicates an estimated greater likelihood that the recognized words are + # correct. This field is typically provided only for the top hypothesis, and + # only for `is_final=true` results. 
Clients should not rely on the + # `confidence` field as it is not guaranteed to be accurate or consistent. + # The default of 0.0 is a sentinel value indicating `confidence` was not set. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Output only. Transcript text representing the words that the user spoke. + # Corresponds to the JSON property `transcript` + # @return [String] + attr_accessor :transcript + + # Output only. A list of word-specific information for each recognized word. + # Corresponds to the JSON property `words` + # @return [Array] + attr_accessor :words + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @transcript = args[:transcript] if args.key?(:transcript) + @words = args[:words] if args.key?(:words) + end + end + + # A speech recognition result corresponding to a portion of the audio. + class GoogleCloudVideointelligenceV1p1beta1SpeechTranscription + include Google::Apis::Core::Hashable + + # Output only. May contain one or more recognition hypotheses (up to the + # maximum specified in `max_alternatives`). + # These alternatives are ordered in terms of accuracy, with the top (first) + # alternative being the most probable, as ranked by the recognizer. + # Corresponds to the JSON property `alternatives` + # @return [Array] + attr_accessor :alternatives + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @alternatives = args[:alternatives] if args.key?(:alternatives) + end + end + + # Annotation progress for a single video. + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress + include Google::Apis::Core::Hashable + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Approximate percentage processed thus far. + # Guaranteed to be 100 when fully processed. + # Corresponds to the JSON property `progressPercent` + # @return [Fixnum] + attr_accessor :progress_percent + + # Time when the request was received. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Time of the most recent update. + # Corresponds to the JSON property `updateTime` + # @return [String] + attr_accessor :update_time + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @progress_percent = args[:progress_percent] if args.key?(:progress_percent) + @start_time = args[:start_time] if args.key?(:start_time) + @update_time = args[:update_time] if args.key?(:update_time) + end + end + + # Annotation results for a single video. + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults + include Google::Apis::Core::Hashable + + # The `Status` type defines a logical error model that is suitable for different + # programming environments, including REST APIs and RPC APIs. It is used by + # [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error message, + # and error details. 
The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1::GoogleRpcStatus] + attr_accessor :error + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + # Corresponds to the JSON property `explicitAnnotation` + # @return [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation] + attr_accessor :explicit_annotation + + # Face detection annotations. + # Corresponds to the JSON property `faceDetectionAnnotations` + # @return [Array] + attr_accessor :face_detection_annotations + + # Label annotations on frame level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `frameLabelAnnotations` + # @return [Array] + attr_accessor :frame_label_annotations + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Label annotations on video level or user specified segment level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `segmentLabelAnnotations` + # @return [Array] + attr_accessor :segment_label_annotations + + # Shot annotations. Each shot is represented as a video segment. 
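+        # Editor's note: an illustrative sketch (not generated code), assuming
+        # `results` is an instance of this class:
+        #
+        #   results.shot_annotations.to_a.each_with_index do |shot, i|
+        #     puts "shot #{i}: #{shot.start_time_offset} .. #{shot.end_time_offset}"
+        #   end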
+ # Corresponds to the JSON property `shotAnnotations` + # @return [Array] + attr_accessor :shot_annotations + + # Label annotations on shot level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `shotLabelAnnotations` + # @return [Array] + attr_accessor :shot_label_annotations + + # Speech transcription. + # Corresponds to the JSON property `speechTranscriptions` + # @return [Array] + attr_accessor :speech_transcriptions + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @error = args[:error] if args.key?(:error) + @explicit_annotation = args[:explicit_annotation] if args.key?(:explicit_annotation) + @face_detection_annotations = args[:face_detection_annotations] if args.key?(:face_detection_annotations) + @frame_label_annotations = args[:frame_label_annotations] if args.key?(:frame_label_annotations) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @segment_label_annotations = args[:segment_label_annotations] if args.key?(:segment_label_annotations) + @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations) + @shot_label_annotations = args[:shot_label_annotations] if args.key?(:shot_label_annotations) + @speech_transcriptions = args[:speech_transcriptions] if args.key?(:speech_transcriptions) + end + end + + # Video segment. + class GoogleCloudVideointelligenceV1p1beta1VideoSegment + include Google::Apis::Core::Hashable + + # Time-offset, relative to the beginning of the video, + # corresponding to the end of the segment (inclusive). + # Corresponds to the JSON property `endTimeOffset` + # @return [String] + attr_accessor :end_time_offset + + # Time-offset, relative to the beginning of the video, + # corresponding to the start of the segment (inclusive). + # Corresponds to the JSON property `startTimeOffset` + # @return [String] + attr_accessor :start_time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time_offset = args[:end_time_offset] if args.key?(:end_time_offset) + @start_time_offset = args[:start_time_offset] if args.key?(:start_time_offset) + end + end + + # Word-specific information for recognized words. Word information is only + # included in the response when certain request parameters are set, such + # as `enable_word_time_offsets`. + class GoogleCloudVideointelligenceV1p1beta1WordInfo + include Google::Apis::Core::Hashable + + # Output only. Time offset relative to the beginning of the audio, and + # corresponding to the end of the spoken word. This field is only set if + # `enable_word_time_offsets=true` and only in the top hypothesis. This is an + # experimental feature and the accuracy of the time offset can vary. + # Corresponds to the JSON property `endTime` + # @return [String] + attr_accessor :end_time + + # Output only. Time offset relative to the beginning of the audio, and + # corresponding to the start of the spoken word. This field is only set if + # `enable_word_time_offsets=true` and only in the top hypothesis. This is an + # experimental feature and the accuracy of the time offset can vary. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Output only. The word corresponding to this set of information. 
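+        # Editor's note: purely illustrative (not generated code). Assuming `words`
+        # is the `words` array of a SpeechRecognitionAlternative, a caption line
+        # could be assembled as:
+        #
+        #   line = words.map { |w| "#{w.word} [#{w.start_time}-#{w.end_time}]" }.join(' ')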
+ # Corresponds to the JSON property `word` + # @return [String] + attr_accessor :word + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time = args[:end_time] if args.key?(:end_time) + @start_time = args[:start_time] if args.key?(:start_time) + @word = args[:word] if args.key?(:word) + end + end + + # The request message for Operations.CancelOperation. + class GoogleLongrunningCancelOperationRequest + include Google::Apis::Core::Hashable + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + end + end + + # The response message for Operations.ListOperations. + class GoogleLongrunningListOperationsResponse + include Google::Apis::Core::Hashable + + # The standard List next-page token. + # Corresponds to the JSON property `nextPageToken` + # @return [String] + attr_accessor :next_page_token + + # A list of operations that matches the specified filter in the request. + # Corresponds to the JSON property `operations` + # @return [Array] + attr_accessor :operations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @next_page_token = args[:next_page_token] if args.key?(:next_page_token) + @operations = args[:operations] if args.key?(:operations) + end + end + + # This resource represents a long-running operation that is the result of a + # network API call. + class GoogleLongrunningOperation + include Google::Apis::Core::Hashable + + # If the value is `false`, it means the operation is still in progress. + # If `true`, the operation is completed, and either `error` or `response` is + # available. + # Corresponds to the JSON property `done` + # @return [Boolean] + attr_accessor :done + alias_method :done?, :done + + # The `Status` type defines a logical error model that is suitable for different + # programming environments, including REST APIs and RPC APIs. It is used by + # [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error message, + # and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. 
+ # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1::GoogleRpcStatus] + attr_accessor :error + + # Service-specific metadata associated with the operation. It typically + # contains progress information and common metadata such as create time. + # Some services might not provide such metadata. Any method that returns a + # long-running operation should document the metadata type, if any. + # Corresponds to the JSON property `metadata` + # @return [Hash] + attr_accessor :metadata + + # The server-assigned name, which is only unique within the same service that + # originally returns it. If you use the default HTTP mapping, the + # `name` should have the format of `operations/some/unique/name`. + # Corresponds to the JSON property `name` + # @return [String] + attr_accessor :name + + # The normal response of the operation in case of success. If the original + # method returns no data on success, such as `Delete`, the response is + # `google.protobuf.Empty`. If the original method is standard + # `Get`/`Create`/`Update`, the response should be the resource. For other + # methods, the response should have the type `XxxResponse`, where `Xxx` + # is the original method name. For example, if the original method name + # is `TakeSnapshot()`, the inferred response type is + # `TakeSnapshotResponse`. + # Corresponds to the JSON property `response` + # @return [Hash] + attr_accessor :response + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @done = args[:done] if args.key?(:done) + @error = args[:error] if args.key?(:error) + @metadata = args[:metadata] if args.key?(:metadata) + @name = args[:name] if args.key?(:name) + @response = args[:response] if args.key?(:response) + end + end + + # A generic empty message that you can re-use to avoid defining duplicated + # empty messages in your APIs. A typical example is to use it as the request + # or the response type of an API method. For instance: + # service Foo ` + # rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + # ` + # The JSON representation for `Empty` is empty JSON object ````. + class GoogleProtobufEmpty + include Google::Apis::Core::Hashable + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + end + end + + # The `Status` type defines a logical error model that is suitable for different + # programming environments, including REST APIs and RPC APIs. It is used by + # [gRPC](https://github.com/grpc). 
The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error message, + # and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + class GoogleRpcStatus + include Google::Apis::Core::Hashable + + # The status code, which should be an enum value of google.rpc.Code. + # Corresponds to the JSON property `code` + # @return [Fixnum] + attr_accessor :code + + # A list of messages that carry the error details. There is a common set of + # message types for APIs to use. + # Corresponds to the JSON property `details` + # @return [Array>] + attr_accessor :details + + # A developer-facing error message, which should be in English. Any + # user-facing error message should be localized and sent in the + # google.rpc.Status.details field, or localized by the client. 
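+        # Editor's note: illustrative only (not generated code). Assuming `op` is a
+        # finished GoogleLongrunningOperation whose `error` field is set:
+        #
+        #   if op.done? && op.error
+        #     warn "annotation failed: #{op.error.code} #{op.error.message}"
+        #   end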
+ # Corresponds to the JSON property `message` + # @return [String] + attr_accessor :message + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @code = args[:code] if args.key?(:code) + @details = args[:details] if args.key?(:details) + @message = args[:message] if args.key?(:message) + end + end + end + end +end diff --git a/generated/google/apis/videointelligence_v1/representations.rb b/generated/google/apis/videointelligence_v1/representations.rb new file mode 100644 index 000000000..aaa568acb --- /dev/null +++ b/generated/google/apis/videointelligence_v1/representations.rb @@ -0,0 +1,972 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'date' +require 'google/apis/core/base_service' +require 'google/apis/core/json_representation' +require 'google/apis/core/hashable' +require 'google/apis/errors' + +module Google + module Apis + module VideointelligenceV1 + + class GoogleCloudVideointelligenceV1AnnotateVideoProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1AnnotateVideoRequest + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1AnnotateVideoResponse + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1Entity + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1ExplicitContentAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1ExplicitContentDetectionConfig + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1ExplicitContentFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1LabelAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1LabelDetectionConfig + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1LabelFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1LabelSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class 
GoogleCloudVideointelligenceV1ShotChangeDetectionConfig + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1VideoAnnotationProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1VideoAnnotationResults + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1VideoContext + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1VideoSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1AnnotateVideoProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1AnnotateVideoResponse + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1LabelAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1LabelLocation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1SafeSearchAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1VideoAnnotationProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1VideoAnnotationResults + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1VideoSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2AnnotateVideoProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2AnnotateVideoResponse + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2Entity + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2ExplicitContentFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2LabelAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include 
Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2LabelFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2LabelSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2VideoAnnotationResults + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2VideoSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoResponse + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1EmotionAttribute + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1Entity + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1FaceDetectionAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1FaceDetectionAttribute + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1FaceDetectionFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1FaceSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1LabelAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1LabelFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1LabelSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox + class Representation < 
Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1SpeechTranscription + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1VideoSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1WordInfo + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleLongrunningCancelOperationRequest + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleLongrunningListOperationsResponse + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleLongrunningOperation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleProtobufEmpty + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleRpcStatus + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1AnnotateVideoProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1VideoAnnotationProgress::Representation + + end + end + + class GoogleCloudVideointelligenceV1AnnotateVideoRequest + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :features, as: 'features' + property :input_content, :base64 => true, as: 'inputContent' + property :input_uri, as: 'inputUri' + property :location_id, as: 'locationId' + property :output_uri, as: 'outputUri' + property :video_context, as: 'videoContext', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1VideoContext, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1VideoContext::Representation + + end + end + + class GoogleCloudVideointelligenceV1AnnotateVideoResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1VideoAnnotationResults::Representation + + end + end + + class 
GoogleCloudVideointelligenceV1Entity + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :description, as: 'description' + property :entity_id, as: 'entityId' + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1ExplicitContentAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1ExplicitContentFrame::Representation + + end + end + + class GoogleCloudVideointelligenceV1ExplicitContentDetectionConfig + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :model, as: 'model' + end + end + + class GoogleCloudVideointelligenceV1ExplicitContentFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :pornography_likelihood, as: 'pornographyLikelihood' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1LabelAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1Entity, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1Entity::Representation + + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1Entity, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1LabelFrame, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1LabelFrame::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1LabelSegment, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1LabelSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1LabelDetectionConfig + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :label_detection_mode, as: 'labelDetectionMode' + property :model, as: 'model' + property :stationary_camera, as: 'stationaryCamera' + end + end + + class GoogleCloudVideointelligenceV1LabelFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1LabelSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1VideoSegment, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1ShotChangeDetectionConfig + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :model, as: 'model' + end + end + + class GoogleCloudVideointelligenceV1VideoAnnotationProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :input_uri, as: 'inputUri' + property :progress_percent, as: 'progressPercent' + property :start_time, as: 'startTime' + property 
:update_time, as: 'updateTime' + end + end + + class GoogleCloudVideointelligenceV1VideoAnnotationResults + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :error, as: 'error', class: Google::Apis::VideointelligenceV1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1::GoogleRpcStatus::Representation + + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1ExplicitContentAnnotation::Representation + + collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1LabelAnnotation::Representation + + property :input_uri, as: 'inputUri' + collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1LabelAnnotation::Representation + + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1VideoSegment, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1VideoSegment::Representation + + collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1LabelAnnotation::Representation + + end + end + + class GoogleCloudVideointelligenceV1VideoContext + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :explicit_content_detection_config, as: 'explicitContentDetectionConfig', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1ExplicitContentDetectionConfig, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1ExplicitContentDetectionConfig::Representation + + property :label_detection_config, as: 'labelDetectionConfig', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1LabelDetectionConfig, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1LabelDetectionConfig::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1VideoSegment, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1VideoSegment::Representation + + property :shot_change_detection_config, as: 'shotChangeDetectionConfig', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1ShotChangeDetectionConfig, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1ShotChangeDetectionConfig::Representation + + end + end + + class GoogleCloudVideointelligenceV1VideoSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :end_time_offset, as: 'endTimeOffset' + property :start_time_offset, as: 'startTimeOffset' + end + end + + class GoogleCloudVideointelligenceV1beta1AnnotateVideoProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1VideoAnnotationProgress, decorator: 
Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1VideoAnnotationProgress::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta1AnnotateVideoResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1VideoAnnotationResults::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta1LabelAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :description, as: 'description' + property :language_code, as: 'languageCode' + collection :locations, as: 'locations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1LabelLocation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1LabelLocation::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta1LabelLocation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :level, as: 'level' + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta1SafeSearchAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :adult, as: 'adult' + property :medical, as: 'medical' + property :racy, as: 'racy' + property :spoof, as: 'spoof' + property :time_offset, :numeric_string => true, as: 'timeOffset' + property :violent, as: 'violent' + end + end + + class GoogleCloudVideointelligenceV1beta1VideoAnnotationProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :input_uri, as: 'inputUri' + property :progress_percent, as: 'progressPercent' + property :start_time, as: 'startTime' + property :update_time, as: 'updateTime' + end + end + + class GoogleCloudVideointelligenceV1beta1VideoAnnotationResults + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :error, as: 'error', class: Google::Apis::VideointelligenceV1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1::GoogleRpcStatus::Representation + + property :input_uri, as: 'inputUri' + collection :label_annotations, as: 'labelAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1LabelAnnotation::Representation + + collection :safe_search_annotations, as: 'safeSearchAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1SafeSearchAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1SafeSearchAnnotation::Representation + + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta1VideoSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property 
:end_time_offset, :numeric_string => true, as: 'endTimeOffset' + property :start_time_offset, :numeric_string => true, as: 'startTimeOffset' + end + end + + class GoogleCloudVideointelligenceV1beta2AnnotateVideoProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2AnnotateVideoResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2VideoAnnotationResults::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2Entity + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :description, as: 'description' + property :entity_id, as: 'entityId' + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2ExplicitContentFrame::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2ExplicitContentFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :pornography_likelihood, as: 'pornographyLikelihood' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1beta2LabelAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2Entity, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2Entity::Representation + + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2Entity, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2LabelFrame, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2LabelFrame::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2LabelSegment, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2LabelSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2LabelFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1beta2LabelSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :segment, as: 'segment', class: 
Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2VideoSegment, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :input_uri, as: 'inputUri' + property :progress_percent, as: 'progressPercent' + property :start_time, as: 'startTime' + property :update_time, as: 'updateTime' + end + end + + class GoogleCloudVideointelligenceV1beta2VideoAnnotationResults + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :error, as: 'error', class: Google::Apis::VideointelligenceV1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1::GoogleRpcStatus::Representation + + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation::Representation + + collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2LabelAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2LabelAnnotation::Representation + + property :input_uri, as: 'inputUri' + collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2LabelAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2LabelAnnotation::Representation + + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2VideoSegment, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2VideoSegment::Representation + + collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2LabelAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1beta2LabelAnnotation::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2VideoSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :end_time_offset, as: 'endTimeOffset' + property :start_time_offset, as: 'startTimeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1EmotionAttribute + # @private + class Representation < 
Google::Apis::Core::JsonRepresentation + property :emotion, as: 'emotion' + property :score, as: 'score' + end + end + + class GoogleCloudVideointelligenceV1p1beta1Entity + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :description, as: 'description' + property :entity_id, as: 'entityId' + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :pornography_likelihood, as: 'pornographyLikelihood' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1FaceDetectionAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1FaceDetectionFrame, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1FaceDetectionFrame::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1FaceSegment, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1FaceSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1FaceDetectionAttribute + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :emotions, as: 'emotions', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1EmotionAttribute, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1EmotionAttribute::Representation + + property :normalized_bounding_box, as: 'normalizedBoundingBox', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1FaceDetectionFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :attributes, as: 'attributes', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1FaceDetectionAttribute, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1FaceDetectionAttribute::Representation + + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1FaceSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1LabelAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :category_entities, as: 'categoryEntities', class: 
Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1Entity, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1Entity::Representation + + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1Entity, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1LabelFrame, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1LabelFrame::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1LabelSegment, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1LabelSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1LabelFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1LabelSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :bottom, as: 'bottom' + property :left, as: 'left' + property :right, as: 'right' + property :top, as: 'top' + end + end + + class GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :transcript, as: 'transcript' + collection :words, as: 'words', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1WordInfo, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1WordInfo::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1SpeechTranscription + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :alternatives, as: 'alternatives', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :input_uri, as: 'inputUri' + property :progress_percent, as: 'progressPercent' + property :start_time, as: 'startTime' + property :update_time, as: 'updateTime' + end + end + + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :error, as: 'error', class: Google::Apis::VideointelligenceV1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1::GoogleRpcStatus::Representation + + property :explicit_annotation, as: 'explicitAnnotation', class: 
Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation::Representation + + collection :face_detection_annotations, as: 'faceDetectionAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1FaceDetectionAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1FaceDetectionAnnotation::Representation + + collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation::Representation + + property :input_uri, as: 'inputUri' + collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation::Representation + + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1VideoSegment::Representation + + collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation::Representation + + collection :speech_transcriptions, as: 'speechTranscriptions', class: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1SpeechTranscription, decorator: Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1p1beta1SpeechTranscription::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1VideoSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :end_time_offset, as: 'endTimeOffset' + property :start_time_offset, as: 'startTimeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1WordInfo + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :end_time, as: 'endTime' + property :start_time, as: 'startTime' + property :word, as: 'word' + end + end + + class GoogleLongrunningCancelOperationRequest + # @private + class Representation < Google::Apis::Core::JsonRepresentation + end + end + + class GoogleLongrunningListOperationsResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :next_page_token, as: 'nextPageToken' + collection :operations, as: 'operations', class: Google::Apis::VideointelligenceV1::GoogleLongrunningOperation, decorator: Google::Apis::VideointelligenceV1::GoogleLongrunningOperation::Representation + + end + end + + class GoogleLongrunningOperation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :done, as: 'done' + property :error, as: 'error', class: Google::Apis::VideointelligenceV1::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1::GoogleRpcStatus::Representation + + hash :metadata, as: 'metadata' + property :name, as: 'name' + hash :response, as: 'response' + end + end + + class GoogleProtobufEmpty + # @private + class Representation < Google::Apis::Core::JsonRepresentation + end + end + + class 
GoogleRpcStatus + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :code, as: 'code' + collection :details, as: 'details' + property :message, as: 'message' + end + end + end + end +end diff --git a/generated/google/apis/videointelligence_v1/service.rb b/generated/google/apis/videointelligence_v1/service.rb new file mode 100644 index 000000000..480f4c7a9 --- /dev/null +++ b/generated/google/apis/videointelligence_v1/service.rb @@ -0,0 +1,246 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'google/apis/core/base_service' +require 'google/apis/core/json_representation' +require 'google/apis/core/hashable' +require 'google/apis/errors' + +module Google + module Apis + module VideointelligenceV1 + # Cloud Video Intelligence API + # + # Cloud Video Intelligence API. + # + # @example + # require 'google/apis/videointelligence_v1' + # + # Videointelligence = Google::Apis::VideointelligenceV1 # Alias the module + # service = Videointelligence::CloudVideoIntelligenceService.new + # + # @see https://cloud.google.com/video-intelligence/docs/ + class CloudVideoIntelligenceService < Google::Apis::Core::BaseService + # @return [String] + # API key. Your API key identifies your project and provides you with API access, + # quota, and reports. Required unless you provide an OAuth 2.0 token. + attr_accessor :key + + # @return [String] + # Available to use for quota purposes for server-side applications. Can be any + # arbitrary string assigned to a user, but should not exceed 40 characters. + attr_accessor :quota_user + + def initialize + super('https://videointelligence.googleapis.com/', '') + @batch_path = 'batch' + end + + # Starts asynchronous cancellation on a long-running operation. The server + # makes a best effort to cancel the operation, but success is not + # guaranteed. If the server doesn't support this method, it returns + # `google.rpc.Code.UNIMPLEMENTED`. Clients can use + # Operations.GetOperation or + # other methods to check whether the cancellation succeeded or whether the + # operation completed despite cancellation. On successful cancellation, + # the operation is not deleted; instead, it becomes an operation with + # an Operation.error value with a google.rpc.Status.code of 1, + # corresponding to `Code.CANCELLED`. + # @param [String] name + # The name of the operation resource to be cancelled. + # @param [Google::Apis::VideointelligenceV1::GoogleLongrunningCancelOperationRequest] google_longrunning_cancel_operation_request_object + # @param [String] fields + # Selector specifying which fields to include in a partial response. + # @param [String] quota_user + # Available to use for quota purposes for server-side applications. Can be any + # arbitrary string assigned to a user, but should not exceed 40 characters. 
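+        #
+        # A hypothetical usage sketch, not part of the generated documentation:
+        # the operation name below is a placeholder, and the service is assumed
+        # to have been configured with suitable authorization beforehand.
+        #
+        #   service = Google::Apis::VideointelligenceV1::CloudVideoIntelligenceService.new
+        #   service.cancel_operation('example-operation-id') # placeholder name
+        #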
+ # @param [Google::Apis::RequestOptions] options + # Request-specific options + # + # @yield [result, err] Result & error if block supplied + # @yieldparam result [Google::Apis::VideointelligenceV1::GoogleProtobufEmpty] parsed result object + # @yieldparam err [StandardError] error object if request failed + # + # @return [Google::Apis::VideointelligenceV1::GoogleProtobufEmpty] + # + # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried + # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification + # @raise [Google::Apis::AuthorizationError] Authorization is required + def cancel_operation(name, google_longrunning_cancel_operation_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) + command = make_simple_command(:post, 'v1/operations/{+name}:cancel', options) + command.request_representation = Google::Apis::VideointelligenceV1::GoogleLongrunningCancelOperationRequest::Representation + command.request_object = google_longrunning_cancel_operation_request_object + command.response_representation = Google::Apis::VideointelligenceV1::GoogleProtobufEmpty::Representation + command.response_class = Google::Apis::VideointelligenceV1::GoogleProtobufEmpty + command.params['name'] = name unless name.nil? + command.query['fields'] = fields unless fields.nil? + command.query['quotaUser'] = quota_user unless quota_user.nil? + execute_or_queue_command(command, &block) + end + + # Deletes a long-running operation. This method indicates that the client is + # no longer interested in the operation result. It does not cancel the + # operation. If the server doesn't support this method, it returns + # `google.rpc.Code.UNIMPLEMENTED`. + # @param [String] name + # The name of the operation resource to be deleted. + # @param [String] fields + # Selector specifying which fields to include in a partial response. + # @param [String] quota_user + # Available to use for quota purposes for server-side applications. Can be any + # arbitrary string assigned to a user, but should not exceed 40 characters. + # @param [Google::Apis::RequestOptions] options + # Request-specific options + # + # @yield [result, err] Result & error if block supplied + # @yieldparam result [Google::Apis::VideointelligenceV1::GoogleProtobufEmpty] parsed result object + # @yieldparam err [StandardError] error object if request failed + # + # @return [Google::Apis::VideointelligenceV1::GoogleProtobufEmpty] + # + # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried + # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification + # @raise [Google::Apis::AuthorizationError] Authorization is required + def delete_operation(name, fields: nil, quota_user: nil, options: nil, &block) + command = make_simple_command(:delete, 'v1/operations/{+name}', options) + command.response_representation = Google::Apis::VideointelligenceV1::GoogleProtobufEmpty::Representation + command.response_class = Google::Apis::VideointelligenceV1::GoogleProtobufEmpty + command.params['name'] = name unless name.nil? + command.query['fields'] = fields unless fields.nil? + command.query['quotaUser'] = quota_user unless quota_user.nil? + execute_or_queue_command(command, &block) + end + + # Gets the latest state of a long-running operation. Clients can use this + # method to poll the operation result at intervals as recommended by the API + # service. 
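+        #
+        # A hypothetical polling sketch, not part of the generated documentation
+        # (the operation name is a placeholder for the name returned by a prior
+        # `annotate_video` call):
+        #
+        #   op = service.get_operation('example-operation-id')
+        #   until op.done
+        #     sleep 10
+        #     op = service.get_operation('example-operation-id')
+        #   end
+        #   p(op.error || op.response)
+        #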
+ # @param [String] name + # The name of the operation resource. + # @param [String] fields + # Selector specifying which fields to include in a partial response. + # @param [String] quota_user + # Available to use for quota purposes for server-side applications. Can be any + # arbitrary string assigned to a user, but should not exceed 40 characters. + # @param [Google::Apis::RequestOptions] options + # Request-specific options + # + # @yield [result, err] Result & error if block supplied + # @yieldparam result [Google::Apis::VideointelligenceV1::GoogleLongrunningOperation] parsed result object + # @yieldparam err [StandardError] error object if request failed + # + # @return [Google::Apis::VideointelligenceV1::GoogleLongrunningOperation] + # + # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried + # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification + # @raise [Google::Apis::AuthorizationError] Authorization is required + def get_operation(name, fields: nil, quota_user: nil, options: nil, &block) + command = make_simple_command(:get, 'v1/operations/{+name}', options) + command.response_representation = Google::Apis::VideointelligenceV1::GoogleLongrunningOperation::Representation + command.response_class = Google::Apis::VideointelligenceV1::GoogleLongrunningOperation + command.params['name'] = name unless name.nil? + command.query['fields'] = fields unless fields.nil? + command.query['quotaUser'] = quota_user unless quota_user.nil? + execute_or_queue_command(command, &block) + end + + # Lists operations that match the specified filter in the request. If the + # server doesn't support this method, it returns `UNIMPLEMENTED`. + # NOTE: the `name` binding allows API services to override the binding + # to use different resource name schemes, such as `users/*/operations`. To + # override the binding, API services can add a binding such as + # `"/v1/`name=users/*`/operations"` to their service configuration. + # For backwards compatibility, the default name includes the operations + # collection id, however overriding users must ensure the name binding + # is the parent resource, without the operations collection id. + # @param [String] filter + # The standard list filter. + # @param [String] name + # The name of the operation's parent resource. + # @param [Fixnum] page_size + # The standard list page size. + # @param [String] page_token + # The standard list page token. + # @param [String] fields + # Selector specifying which fields to include in a partial response. + # @param [String] quota_user + # Available to use for quota purposes for server-side applications. Can be any + # arbitrary string assigned to a user, but should not exceed 40 characters. 
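+        #
+        # A hypothetical paging sketch, not part of the generated documentation;
+        # the page size is arbitrary and `operations` may be nil on an empty page:
+        #
+        #   page_token = nil
+        #   loop do
+        #     page = service.list_operations(page_size: 50, page_token: page_token)
+        #     (page.operations || []).each { |op| puts "#{op.name} done=#{op.done}" }
+        #     page_token = page.next_page_token
+        #     break if page_token.nil?
+        #   end
+        #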
+ # @param [Google::Apis::RequestOptions] options + # Request-specific options + # + # @yield [result, err] Result & error if block supplied + # @yieldparam result [Google::Apis::VideointelligenceV1::GoogleLongrunningListOperationsResponse] parsed result object + # @yieldparam err [StandardError] error object if request failed + # + # @return [Google::Apis::VideointelligenceV1::GoogleLongrunningListOperationsResponse] + # + # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried + # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification + # @raise [Google::Apis::AuthorizationError] Authorization is required + def list_operations(filter: nil, name: nil, page_size: nil, page_token: nil, fields: nil, quota_user: nil, options: nil, &block) + command = make_simple_command(:get, 'v1/operations', options) + command.response_representation = Google::Apis::VideointelligenceV1::GoogleLongrunningListOperationsResponse::Representation + command.response_class = Google::Apis::VideointelligenceV1::GoogleLongrunningListOperationsResponse + command.query['filter'] = filter unless filter.nil? + command.query['name'] = name unless name.nil? + command.query['pageSize'] = page_size unless page_size.nil? + command.query['pageToken'] = page_token unless page_token.nil? + command.query['fields'] = fields unless fields.nil? + command.query['quotaUser'] = quota_user unless quota_user.nil? + execute_or_queue_command(command, &block) + end + + # Performs asynchronous video annotation. Progress and results can be + # retrieved through the `google.longrunning.Operations` interface. + # `Operation.metadata` contains `AnnotateVideoProgress` (progress). + # `Operation.response` contains `AnnotateVideoResponse` (results). + # @param [Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1AnnotateVideoRequest] google_cloud_videointelligence_v1_annotate_video_request_object + # @param [String] fields + # Selector specifying which fields to include in a partial response. + # @param [String] quota_user + # Available to use for quota purposes for server-side applications. Can be any + # arbitrary string assigned to a user, but should not exceed 40 characters. 
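+        #
+        # A minimal, hypothetical usage sketch, not part of the generated
+        # documentation: it assumes application-default credentials via the
+        # googleauth gem, a readable gs:// URI, and that the request class
+        # exposes `input_uri` and `features` accessors as generated.
+        #
+        #   require 'google/apis/videointelligence_v1'
+        #   require 'googleauth'
+        #
+        #   service = Google::Apis::VideointelligenceV1::CloudVideoIntelligenceService.new
+        #   service.authorization = Google::Auth.get_application_default(
+        #     ['https://www.googleapis.com/auth/cloud-platform'])
+        #
+        #   request = Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1AnnotateVideoRequest.new(
+        #     input_uri: 'gs://example-bucket/example-video.mp4', # placeholder URI
+        #     features: ['LABEL_DETECTION'])
+        #
+        #   operation = service.annotate_video(request)
+        #   puts operation.name # poll this name with get_operation until done
+        #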
+ # @param [Google::Apis::RequestOptions] options + # Request-specific options + # + # @yield [result, err] Result & error if block supplied + # @yieldparam result [Google::Apis::VideointelligenceV1::GoogleLongrunningOperation] parsed result object + # @yieldparam err [StandardError] error object if request failed + # + # @return [Google::Apis::VideointelligenceV1::GoogleLongrunningOperation] + # + # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried + # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification + # @raise [Google::Apis::AuthorizationError] Authorization is required + def annotate_video(google_cloud_videointelligence_v1_annotate_video_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) + command = make_simple_command(:post, 'v1/videos:annotate', options) + command.request_representation = Google::Apis::VideointelligenceV1::GoogleCloudVideointelligenceV1AnnotateVideoRequest::Representation + command.request_object = google_cloud_videointelligence_v1_annotate_video_request_object + command.response_representation = Google::Apis::VideointelligenceV1::GoogleLongrunningOperation::Representation + command.response_class = Google::Apis::VideointelligenceV1::GoogleLongrunningOperation + command.query['fields'] = fields unless fields.nil? + command.query['quotaUser'] = quota_user unless quota_user.nil? + execute_or_queue_command(command, &block) + end + + protected + + def apply_command_defaults(command) + command.query['key'] = key unless key.nil? + command.query['quotaUser'] = quota_user unless quota_user.nil? + end + end + end + end +end diff --git a/generated/google/apis/videointelligence_v1beta2.rb b/generated/google/apis/videointelligence_v1beta2.rb new file mode 100644 index 000000000..75fd2e4bd --- /dev/null +++ b/generated/google/apis/videointelligence_v1beta2.rb @@ -0,0 +1,34 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'google/apis/videointelligence_v1beta2/service.rb' +require 'google/apis/videointelligence_v1beta2/classes.rb' +require 'google/apis/videointelligence_v1beta2/representations.rb' + +module Google + module Apis + # Cloud Video Intelligence API + # + # Cloud Video Intelligence API. + # + # @see https://cloud.google.com/video-intelligence/docs/ + module VideointelligenceV1beta2 + VERSION = 'V1beta2' + REVISION = '20180411' + + # View and manage your data across Google Cloud Platform services + AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform' + end + end +end diff --git a/generated/google/apis/videointelligence_v1beta2/classes.rb b/generated/google/apis/videointelligence_v1beta2/classes.rb new file mode 100644 index 000000000..330630bf7 --- /dev/null +++ b/generated/google/apis/videointelligence_v1beta2/classes.rb @@ -0,0 +1,2092 @@ +# Copyright 2015 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'date' +require 'google/apis/core/base_service' +require 'google/apis/core/json_representation' +require 'google/apis/core/hashable' +require 'google/apis/errors' + +module Google + module Apis + module VideointelligenceV1beta2 + + # Video annotation progress. Included in the `metadata` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1AnnotateVideoProgress + include Google::Apis::Core::Hashable + + # Progress metadata for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationProgress` + # @return [Array] + attr_accessor :annotation_progress + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress) + end + end + + # Video annotation response. Included in the `response` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1AnnotateVideoResponse + include Google::Apis::Core::Hashable + + # Annotation results for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationResults` + # @return [Array] + attr_accessor :annotation_results + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_results = args[:annotation_results] if args.key?(:annotation_results) + end + end + + # Detected entity from video analysis. + class GoogleCloudVideointelligenceV1Entity + include Google::Apis::Core::Hashable + + # Textual description, e.g. `Fixed-gear bicycle`. + # Corresponds to the JSON property `description` + # @return [String] + attr_accessor :description + + # Opaque entity ID. Some IDs may be available in + # [Google Knowledge Graph Search + # API](https://developers.google.com/knowledge-graph/). + # Corresponds to the JSON property `entityId` + # @return [String] + attr_accessor :entity_id + + # Language code for `description` in BCP-47 format. + # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @description = args[:description] if args.key?(:description) + @entity_id = args[:entity_id] if args.key?(:entity_id) + @language_code = args[:language_code] if args.key?(:language_code) + end + end + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + class GoogleCloudVideointelligenceV1ExplicitContentAnnotation + include Google::Apis::Core::Hashable + + # All video frames where explicit content was detected. 
+ # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @frames = args[:frames] if args.key?(:frames) + end + end + + # Video frame level annotation results for explicit content. + class GoogleCloudVideointelligenceV1ExplicitContentFrame + include Google::Apis::Core::Hashable + + # Likelihood of the pornography content.. + # Corresponds to the JSON property `pornographyLikelihood` + # @return [String] + attr_accessor :pornography_likelihood + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @pornography_likelihood = args[:pornography_likelihood] if args.key?(:pornography_likelihood) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Label annotation. + class GoogleCloudVideointelligenceV1LabelAnnotation + include Google::Apis::Core::Hashable + + # Common categories for the detected entity. + # E.g. when the label is `Terrier` the category is likely `dog`. And in some + # cases there might be more than one categories e.g. `Terrier` could also be + # a `pet`. + # Corresponds to the JSON property `categoryEntities` + # @return [Array] + attr_accessor :category_entities + + # Detected entity from video analysis. + # Corresponds to the JSON property `entity` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1Entity] + attr_accessor :entity + + # All video frames where a label was detected. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # All video segments where a label was detected. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @category_entities = args[:category_entities] if args.key?(:category_entities) + @entity = args[:entity] if args.key?(:entity) + @frames = args[:frames] if args.key?(:frames) + @segments = args[:segments] if args.key?(:segments) + end + end + + # Video frame level annotation results for label detection. + class GoogleCloudVideointelligenceV1LabelFrame + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for label detection. + class GoogleCloudVideointelligenceV1LabelSegment + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Video segment. 
+ # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Annotation progress for a single video. + class GoogleCloudVideointelligenceV1VideoAnnotationProgress + include Google::Apis::Core::Hashable + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Approximate percentage processed thus far. + # Guaranteed to be 100 when fully processed. + # Corresponds to the JSON property `progressPercent` + # @return [Fixnum] + attr_accessor :progress_percent + + # Time when the request was received. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Time of the most recent update. + # Corresponds to the JSON property `updateTime` + # @return [String] + attr_accessor :update_time + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @progress_percent = args[:progress_percent] if args.key?(:progress_percent) + @start_time = args[:start_time] if args.key?(:start_time) + @update_time = args[:update_time] if args.key?(:update_time) + end + end + + # Annotation results for a single video. + class GoogleCloudVideointelligenceV1VideoAnnotationResults + include Google::Apis::Core::Hashable + + # The `Status` type defines a logical error model that is suitable for different + # programming environments, including REST APIs and RPC APIs. It is used by + # [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error message, + # and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. 
If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus] + attr_accessor :error + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + # Corresponds to the JSON property `explicitAnnotation` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1ExplicitContentAnnotation] + attr_accessor :explicit_annotation + + # Label annotations on frame level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `frameLabelAnnotations` + # @return [Array] + attr_accessor :frame_label_annotations + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Label annotations on video level or user specified segment level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `segmentLabelAnnotations` + # @return [Array] + attr_accessor :segment_label_annotations + + # Shot annotations. Each shot is represented as a video segment. + # Corresponds to the JSON property `shotAnnotations` + # @return [Array] + attr_accessor :shot_annotations + + # Label annotations on shot level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `shotLabelAnnotations` + # @return [Array] + attr_accessor :shot_label_annotations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @error = args[:error] if args.key?(:error) + @explicit_annotation = args[:explicit_annotation] if args.key?(:explicit_annotation) + @frame_label_annotations = args[:frame_label_annotations] if args.key?(:frame_label_annotations) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @segment_label_annotations = args[:segment_label_annotations] if args.key?(:segment_label_annotations) + @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations) + @shot_label_annotations = args[:shot_label_annotations] if args.key?(:shot_label_annotations) + end + end + + # Video segment. + class GoogleCloudVideointelligenceV1VideoSegment + include Google::Apis::Core::Hashable + + # Time-offset, relative to the beginning of the video, + # corresponding to the end of the segment (inclusive). + # Corresponds to the JSON property `endTimeOffset` + # @return [String] + attr_accessor :end_time_offset + + # Time-offset, relative to the beginning of the video, + # corresponding to the start of the segment (inclusive). 
+ # Corresponds to the JSON property `startTimeOffset` + # @return [String] + attr_accessor :start_time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time_offset = args[:end_time_offset] if args.key?(:end_time_offset) + @start_time_offset = args[:start_time_offset] if args.key?(:start_time_offset) + end + end + + # Video annotation progress. Included in the `metadata` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1beta1AnnotateVideoProgress + include Google::Apis::Core::Hashable + + # Progress metadata for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationProgress` + # @return [Array] + attr_accessor :annotation_progress + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress) + end + end + + # Video annotation response. Included in the `response` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1beta1AnnotateVideoResponse + include Google::Apis::Core::Hashable + + # Annotation results for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationResults` + # @return [Array] + attr_accessor :annotation_results + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_results = args[:annotation_results] if args.key?(:annotation_results) + end + end + + # Label annotation. + class GoogleCloudVideointelligenceV1beta1LabelAnnotation + include Google::Apis::Core::Hashable + + # Textual description, e.g. `Fixed-gear bicycle`. + # Corresponds to the JSON property `description` + # @return [String] + attr_accessor :description + + # Language code for `description` in BCP-47 format. + # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + # Where the label was detected and with what confidence. + # Corresponds to the JSON property `locations` + # @return [Array] + attr_accessor :locations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @description = args[:description] if args.key?(:description) + @language_code = args[:language_code] if args.key?(:language_code) + @locations = args[:locations] if args.key?(:locations) + end + end + + # Label location. + class GoogleCloudVideointelligenceV1beta1LabelLocation + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Label level. + # Corresponds to the JSON property `level` + # @return [String] + attr_accessor :level + + # Video segment. 
+ # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @level = args[:level] if args.key?(:level) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Safe search annotation (based on per-frame visual signals only). + # If no unsafe content has been detected in a frame, no annotations + # are present for that frame. If only some types of unsafe content + # have been detected in a frame, the likelihood is set to `UNKNOWN` + # for all other types of unsafe content. + class GoogleCloudVideointelligenceV1beta1SafeSearchAnnotation + include Google::Apis::Core::Hashable + + # Likelihood of adult content. + # Corresponds to the JSON property `adult` + # @return [String] + attr_accessor :adult + + # Likelihood of medical content. + # Corresponds to the JSON property `medical` + # @return [String] + attr_accessor :medical + + # Likelihood of racy content. + # Corresponds to the JSON property `racy` + # @return [String] + attr_accessor :racy + + # Likelihood that an obvious modification was made to the original + # version to make it appear funny or offensive. + # Corresponds to the JSON property `spoof` + # @return [String] + attr_accessor :spoof + + # Video time offset in microseconds. + # Corresponds to the JSON property `timeOffset` + # @return [Fixnum] + attr_accessor :time_offset + + # Likelihood of violent content. + # Corresponds to the JSON property `violent` + # @return [String] + attr_accessor :violent + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @adult = args[:adult] if args.key?(:adult) + @medical = args[:medical] if args.key?(:medical) + @racy = args[:racy] if args.key?(:racy) + @spoof = args[:spoof] if args.key?(:spoof) + @time_offset = args[:time_offset] if args.key?(:time_offset) + @violent = args[:violent] if args.key?(:violent) + end + end + + # Annotation progress for a single video. + class GoogleCloudVideointelligenceV1beta1VideoAnnotationProgress + include Google::Apis::Core::Hashable + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Approximate percentage processed thus far. + # Guaranteed to be 100 when fully processed. + # Corresponds to the JSON property `progressPercent` + # @return [Fixnum] + attr_accessor :progress_percent + + # Time when the request was received. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Time of the most recent update. + # Corresponds to the JSON property `updateTime` + # @return [String] + attr_accessor :update_time + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @progress_percent = args[:progress_percent] if args.key?(:progress_percent) + @start_time = args[:start_time] if args.key?(:start_time) + @update_time = args[:update_time] if args.key?(:update_time) + end + end + + # Annotation results for a single video. 
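+      # A hypothetical sketch of how results of this shape are typically read
+      # back, assuming the long-running operation's `response` field, which this
+      # client surfaces as a plain hash keyed by the camelCase JSON names:
+      #
+      #   if operation.done && operation.error.nil?
+      #     (operation.response['annotationResults'] || []).each do |result|
+      #       puts result['inputUri']
+      #       (result['labelAnnotations'] || []).each { |l| puts l['description'] }
+      #     end
+      #   end
+      #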
+ class GoogleCloudVideointelligenceV1beta1VideoAnnotationResults + include Google::Apis::Core::Hashable + + # The `Status` type defines a logical error model that is suitable for different + # programming environments, including REST APIs and RPC APIs. It is used by + # [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error message, + # and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus] + attr_accessor :error + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Label annotations. There is exactly one element for each unique label. + # Corresponds to the JSON property `labelAnnotations` + # @return [Array] + attr_accessor :label_annotations + + # Safe search annotations. + # Corresponds to the JSON property `safeSearchAnnotations` + # @return [Array] + attr_accessor :safe_search_annotations + + # Shot annotations. Each shot is represented as a video segment. 
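# Editorial usage sketch (not part of the generated patch): walking the v1beta1
# result classes defined above. `results` is assumed to be a populated
# GoogleCloudVideointelligenceV1beta1VideoAnnotationResults; v1beta1 offsets are
# integer microseconds, per the field comments.
def print_v1beta1_labels(results)
  (results.label_annotations || []).each do |label|
    puts "#{label.description} [#{label.language_code}]"
    (label.locations || []).each do |location|
      segment = location.segment
      next unless segment
      from_s = segment.start_time_offset.to_i / 1_000_000.0
      to_s   = segment.end_time_offset.to_i / 1_000_000.0
      puts "  #{location.level} confidence=#{location.confidence} #{from_s}s..#{to_s}s"
    end
  end
end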
+ # Corresponds to the JSON property `shotAnnotations` + # @return [Array] + attr_accessor :shot_annotations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @error = args[:error] if args.key?(:error) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @label_annotations = args[:label_annotations] if args.key?(:label_annotations) + @safe_search_annotations = args[:safe_search_annotations] if args.key?(:safe_search_annotations) + @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations) + end + end + + # Video segment. + class GoogleCloudVideointelligenceV1beta1VideoSegment + include Google::Apis::Core::Hashable + + # End offset in microseconds (inclusive). Unset means 0. + # Corresponds to the JSON property `endTimeOffset` + # @return [Fixnum] + attr_accessor :end_time_offset + + # Start offset in microseconds (inclusive). Unset means 0. + # Corresponds to the JSON property `startTimeOffset` + # @return [Fixnum] + attr_accessor :start_time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time_offset = args[:end_time_offset] if args.key?(:end_time_offset) + @start_time_offset = args[:start_time_offset] if args.key?(:start_time_offset) + end + end + + # Video annotation progress. Included in the `metadata` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1beta2AnnotateVideoProgress + include Google::Apis::Core::Hashable + + # Progress metadata for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationProgress` + # @return [Array] + attr_accessor :annotation_progress + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress) + end + end + + # Video annotation request. + class GoogleCloudVideointelligenceV1beta2AnnotateVideoRequest + include Google::Apis::Core::Hashable + + # Requested video annotation features. + # Corresponds to the JSON property `features` + # @return [Array] + attr_accessor :features + + # The video data bytes. + # If unset, the input video(s) should be specified via `input_uri`. + # If set, `input_uri` should be unset. + # Corresponds to the JSON property `inputContent` + # NOTE: Values are automatically base64 encoded/decoded in the client library. + # @return [String] + attr_accessor :input_content + + # Input video location. Currently, only + # [Google Cloud Storage](https://cloud.google.com/storage/) URIs are + # supported, which must be specified in the following format: + # `gs://bucket-id/object-id` (other URI formats return + # google.rpc.Code.INVALID_ARGUMENT). For more information, see + # [Request URIs](/storage/docs/reference-uris). + # A video URI may include wildcards in `object-id`, and thus identify + # multiple videos. Supported wildcards: '*' to match 0 or more characters; + # '?' to match 1 character. If unset, the input video should be embedded + # in the request as `input_content`. If set, `input_content` should be unset. + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Optional cloud region where annotation should take place. Supported cloud + # regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. 
If no region + # is specified, a region will be determined based on video file location. + # Corresponds to the JSON property `locationId` + # @return [String] + attr_accessor :location_id + + # Optional location where the output (in JSON format) should be stored. + # Currently, only [Google Cloud Storage](https://cloud.google.com/storage/) + # URIs are supported, which must be specified in the following format: + # `gs://bucket-id/object-id` (other URI formats return + # google.rpc.Code.INVALID_ARGUMENT). For more information, see + # [Request URIs](/storage/docs/reference-uris). + # Corresponds to the JSON property `outputUri` + # @return [String] + attr_accessor :output_uri + + # Video context and/or feature-specific parameters. + # Corresponds to the JSON property `videoContext` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2VideoContext] + attr_accessor :video_context + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @features = args[:features] if args.key?(:features) + @input_content = args[:input_content] if args.key?(:input_content) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @location_id = args[:location_id] if args.key?(:location_id) + @output_uri = args[:output_uri] if args.key?(:output_uri) + @video_context = args[:video_context] if args.key?(:video_context) + end + end + + # Video annotation response. Included in the `response` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1beta2AnnotateVideoResponse + include Google::Apis::Core::Hashable + + # Annotation results for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationResults` + # @return [Array] + attr_accessor :annotation_results + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_results = args[:annotation_results] if args.key?(:annotation_results) + end + end + + # Detected entity from video analysis. + class GoogleCloudVideointelligenceV1beta2Entity + include Google::Apis::Core::Hashable + + # Textual description, e.g. `Fixed-gear bicycle`. + # Corresponds to the JSON property `description` + # @return [String] + attr_accessor :description + + # Opaque entity ID. Some IDs may be available in + # [Google Knowledge Graph Search + # API](https://developers.google.com/knowledge-graph/). + # Corresponds to the JSON property `entityId` + # @return [String] + attr_accessor :entity_id + + # Language code for `description` in BCP-47 format. + # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @description = args[:description] if args.key?(:description) + @entity_id = args[:entity_id] if args.key?(:entity_id) + @language_code = args[:language_code] if args.key?(:language_code) + end + end + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + class GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation + include Google::Apis::Core::Hashable + + # All video frames where explicit content was detected. 
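# Editorial usage sketch (not part of the generated patch): assembling the
# request object documented above. The feature names are taken from the
# "Config for ..." comments in this file (LABEL_DETECTION, SHOT_CHANGE_DETECTION,
# EXPLICIT_CONTENT_DETECTION); the bucket and object names are placeholders, and
# the assembled request would be handed to the annotate call generated in
# videointelligence_v1beta2/service.rb.
require 'google/apis/videointelligence_v1beta2'

vi = Google::Apis::VideointelligenceV1beta2

request = vi::GoogleCloudVideointelligenceV1beta2AnnotateVideoRequest.new(
  input_uri: 'gs://my-bucket/my-video.mp4',      # or input_content for inline bytes
  features: ['LABEL_DETECTION', 'SHOT_CHANGE_DETECTION'],
  location_id: 'us-east1',                       # optional; one of the regions listed above
  output_uri: 'gs://my-bucket/annotations.json'  # optional JSON output location
)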
+ # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @frames = args[:frames] if args.key?(:frames) + end + end + + # Config for EXPLICIT_CONTENT_DETECTION. + class GoogleCloudVideointelligenceV1beta2ExplicitContentDetectionConfig + include Google::Apis::Core::Hashable + + # Model to use for explicit content detection. + # Supported values: "builtin/stable" (the default if unset) and + # "builtin/latest". + # Corresponds to the JSON property `model` + # @return [String] + attr_accessor :model + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @model = args[:model] if args.key?(:model) + end + end + + # Video frame level annotation results for explicit content. + class GoogleCloudVideointelligenceV1beta2ExplicitContentFrame + include Google::Apis::Core::Hashable + + # Likelihood of the pornography content.. + # Corresponds to the JSON property `pornographyLikelihood` + # @return [String] + attr_accessor :pornography_likelihood + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @pornography_likelihood = args[:pornography_likelihood] if args.key?(:pornography_likelihood) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Label annotation. + class GoogleCloudVideointelligenceV1beta2LabelAnnotation + include Google::Apis::Core::Hashable + + # Common categories for the detected entity. + # E.g. when the label is `Terrier` the category is likely `dog`. And in some + # cases there might be more than one categories e.g. `Terrier` could also be + # a `pet`. + # Corresponds to the JSON property `categoryEntities` + # @return [Array] + attr_accessor :category_entities + + # Detected entity from video analysis. + # Corresponds to the JSON property `entity` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2Entity] + attr_accessor :entity + + # All video frames where a label was detected. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # All video segments where a label was detected. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @category_entities = args[:category_entities] if args.key?(:category_entities) + @entity = args[:entity] if args.key?(:entity) + @frames = args[:frames] if args.key?(:frames) + @segments = args[:segments] if args.key?(:segments) + end + end + + # Config for LABEL_DETECTION. + class GoogleCloudVideointelligenceV1beta2LabelDetectionConfig + include Google::Apis::Core::Hashable + + # What labels should be detected with LABEL_DETECTION, in addition to + # video-level labels or segment-level labels. + # If unspecified, defaults to `SHOT_MODE`. + # Corresponds to the JSON property `labelDetectionMode` + # @return [String] + attr_accessor :label_detection_mode + + # Model to use for label detection. + # Supported values: "builtin/stable" (the default if unset) and + # "builtin/latest". 
+ # Corresponds to the JSON property `model` + # @return [String] + attr_accessor :model + + # Whether the video has been shot from a stationary (i.e. non-moving) camera. + # When set to true, might improve detection accuracy for moving objects. + # Should be used with `SHOT_AND_FRAME_MODE` enabled. + # Corresponds to the JSON property `stationaryCamera` + # @return [Boolean] + attr_accessor :stationary_camera + alias_method :stationary_camera?, :stationary_camera + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @label_detection_mode = args[:label_detection_mode] if args.key?(:label_detection_mode) + @model = args[:model] if args.key?(:model) + @stationary_camera = args[:stationary_camera] if args.key?(:stationary_camera) + end + end + + # Video frame level annotation results for label detection. + class GoogleCloudVideointelligenceV1beta2LabelFrame + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for label detection. + class GoogleCloudVideointelligenceV1beta2LabelSegment + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Config for SHOT_CHANGE_DETECTION. + class GoogleCloudVideointelligenceV1beta2ShotChangeDetectionConfig + include Google::Apis::Core::Hashable + + # Model to use for shot change detection. + # Supported values: "builtin/stable" (the default if unset) and + # "builtin/latest". + # Corresponds to the JSON property `model` + # @return [String] + attr_accessor :model + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @model = args[:model] if args.key?(:model) + end + end + + # Annotation progress for a single video. + class GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress + include Google::Apis::Core::Hashable + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Approximate percentage processed thus far. + # Guaranteed to be 100 when fully processed. + # Corresponds to the JSON property `progressPercent` + # @return [Fixnum] + attr_accessor :progress_percent + + # Time when the request was received. 
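# Editorial usage sketch (not part of the generated patch): a label detection
# config using only values named in the surrounding comments (`SHOT_AND_FRAME_MODE`
# is the mode that pairs with `stationary_camera`, and "builtin/stable" /
# "builtin/latest" are the documented model strings).
require 'google/apis/videointelligence_v1beta2'

vi = Google::Apis::VideointelligenceV1beta2

label_config = vi::GoogleCloudVideointelligenceV1beta2LabelDetectionConfig.new(
  label_detection_mode: 'SHOT_AND_FRAME_MODE',
  stationary_camera: true,     # hint that the footage comes from a fixed camera
  model: 'builtin/stable'
)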
+ # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Time of the most recent update. + # Corresponds to the JSON property `updateTime` + # @return [String] + attr_accessor :update_time + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @progress_percent = args[:progress_percent] if args.key?(:progress_percent) + @start_time = args[:start_time] if args.key?(:start_time) + @update_time = args[:update_time] if args.key?(:update_time) + end + end + + # Annotation results for a single video. + class GoogleCloudVideointelligenceV1beta2VideoAnnotationResults + include Google::Apis::Core::Hashable + + # The `Status` type defines a logical error model that is suitable for different + # programming environments, including REST APIs and RPC APIs. It is used by + # [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error message, + # and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus] + attr_accessor :error + + # Explicit content annotation (based on per-frame visual signals only). 
+ # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + # Corresponds to the JSON property `explicitAnnotation` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation] + attr_accessor :explicit_annotation + + # Label annotations on frame level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `frameLabelAnnotations` + # @return [Array] + attr_accessor :frame_label_annotations + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Label annotations on video level or user specified segment level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `segmentLabelAnnotations` + # @return [Array] + attr_accessor :segment_label_annotations + + # Shot annotations. Each shot is represented as a video segment. + # Corresponds to the JSON property `shotAnnotations` + # @return [Array] + attr_accessor :shot_annotations + + # Label annotations on shot level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `shotLabelAnnotations` + # @return [Array] + attr_accessor :shot_label_annotations + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @error = args[:error] if args.key?(:error) + @explicit_annotation = args[:explicit_annotation] if args.key?(:explicit_annotation) + @frame_label_annotations = args[:frame_label_annotations] if args.key?(:frame_label_annotations) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @segment_label_annotations = args[:segment_label_annotations] if args.key?(:segment_label_annotations) + @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations) + @shot_label_annotations = args[:shot_label_annotations] if args.key?(:shot_label_annotations) + end + end + + # Video context and/or feature-specific parameters. + class GoogleCloudVideointelligenceV1beta2VideoContext + include Google::Apis::Core::Hashable + + # Config for EXPLICIT_CONTENT_DETECTION. + # Corresponds to the JSON property `explicitContentDetectionConfig` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2ExplicitContentDetectionConfig] + attr_accessor :explicit_content_detection_config + + # Config for LABEL_DETECTION. + # Corresponds to the JSON property `labelDetectionConfig` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2LabelDetectionConfig] + attr_accessor :label_detection_config + + # Video segments to annotate. The segments may overlap and are not required + # to be contiguous or span the whole video. If unspecified, each video + # is treated as a single segment. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + # Config for SHOT_CHANGE_DETECTION. 
+ # Corresponds to the JSON property `shotChangeDetectionConfig` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2ShotChangeDetectionConfig] + attr_accessor :shot_change_detection_config + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @explicit_content_detection_config = args[:explicit_content_detection_config] if args.key?(:explicit_content_detection_config) + @label_detection_config = args[:label_detection_config] if args.key?(:label_detection_config) + @segments = args[:segments] if args.key?(:segments) + @shot_change_detection_config = args[:shot_change_detection_config] if args.key?(:shot_change_detection_config) + end + end + + # Video segment. + class GoogleCloudVideointelligenceV1beta2VideoSegment + include Google::Apis::Core::Hashable + + # Time-offset, relative to the beginning of the video, + # corresponding to the end of the segment (inclusive). + # Corresponds to the JSON property `endTimeOffset` + # @return [String] + attr_accessor :end_time_offset + + # Time-offset, relative to the beginning of the video, + # corresponding to the start of the segment (inclusive). + # Corresponds to the JSON property `startTimeOffset` + # @return [String] + attr_accessor :start_time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time_offset = args[:end_time_offset] if args.key?(:end_time_offset) + @start_time_offset = args[:start_time_offset] if args.key?(:start_time_offset) + end + end + + # Video annotation progress. Included in the `metadata` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoProgress + include Google::Apis::Core::Hashable + + # Progress metadata for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationProgress` + # @return [Array] + attr_accessor :annotation_progress + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress) + end + end + + # Video annotation response. Included in the `response` + # field of the `Operation` returned by the `GetOperation` + # call of the `google::longrunning::Operations` service. + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoResponse + include Google::Apis::Core::Hashable + + # Annotation results for all videos specified in `AnnotateVideoRequest`. + # Corresponds to the JSON property `annotationResults` + # @return [Array] + attr_accessor :annotation_results + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @annotation_results = args[:annotation_results] if args.key?(:annotation_results) + end + end + + # Emotion attribute. + class GoogleCloudVideointelligenceV1p1beta1EmotionAttribute + include Google::Apis::Core::Hashable + + # Emotion entry. + # Corresponds to the JSON property `emotion` + # @return [String] + attr_accessor :emotion + + # Confidence score. 
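# Editorial usage sketch (not part of the generated patch): a video context
# combining the config and segment classes defined above. Segment offsets are
# plain strings in this API surface; the "0s" / "30s" duration spelling used
# here is an assumption (the usual JSON duration encoding), not something this
# file itself specifies.
require 'google/apis/videointelligence_v1beta2'

vi = Google::Apis::VideointelligenceV1beta2

context = vi::GoogleCloudVideointelligenceV1beta2VideoContext.new(
  segments: [
    vi::GoogleCloudVideointelligenceV1beta2VideoSegment.new(
      start_time_offset: '0s', end_time_offset: '30s'
    )
  ],
  label_detection_config: vi::GoogleCloudVideointelligenceV1beta2LabelDetectionConfig.new(
    label_detection_mode: 'SHOT_MODE'
  ),
  shot_change_detection_config: vi::GoogleCloudVideointelligenceV1beta2ShotChangeDetectionConfig.new(
    model: 'builtin/stable'
  )
)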
+ # Corresponds to the JSON property `score` + # @return [Float] + attr_accessor :score + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @emotion = args[:emotion] if args.key?(:emotion) + @score = args[:score] if args.key?(:score) + end + end + + # Detected entity from video analysis. + class GoogleCloudVideointelligenceV1p1beta1Entity + include Google::Apis::Core::Hashable + + # Textual description, e.g. `Fixed-gear bicycle`. + # Corresponds to the JSON property `description` + # @return [String] + attr_accessor :description + + # Opaque entity ID. Some IDs may be available in + # [Google Knowledge Graph Search + # API](https://developers.google.com/knowledge-graph/). + # Corresponds to the JSON property `entityId` + # @return [String] + attr_accessor :entity_id + + # Language code for `description` in BCP-47 format. + # Corresponds to the JSON property `languageCode` + # @return [String] + attr_accessor :language_code + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @description = args[:description] if args.key?(:description) + @entity_id = args[:entity_id] if args.key?(:entity_id) + @language_code = args[:language_code] if args.key?(:language_code) + end + end + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation + include Google::Apis::Core::Hashable + + # All video frames where explicit content was detected. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @frames = args[:frames] if args.key?(:frames) + end + end + + # Video frame level annotation results for explicit content. + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame + include Google::Apis::Core::Hashable + + # Likelihood of the pornography content.. + # Corresponds to the JSON property `pornographyLikelihood` + # @return [String] + attr_accessor :pornography_likelihood + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @pornography_likelihood = args[:pornography_likelihood] if args.key?(:pornography_likelihood) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Face detection annotation. + class GoogleCloudVideointelligenceV1p1beta1FaceDetectionAnnotation + include Google::Apis::Core::Hashable + + # All video frames where a face was detected. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # All video segments where a face was detected. + # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @frames = args[:frames] if args.key?(:frames) + @segments = args[:segments] if args.key?(:segments) + end + end + + # Face detection attribute. 
+ class GoogleCloudVideointelligenceV1p1beta1FaceDetectionAttribute + include Google::Apis::Core::Hashable + + # Emotion attributes. + # Corresponds to the JSON property `emotions` + # @return [Array] + attr_accessor :emotions + + # Normalized bounding box. + # The normalized vertex coordinates are relative to the original image. + # Range: [0, 1]. + # Corresponds to the JSON property `normalizedBoundingBox` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox] + attr_accessor :normalized_bounding_box + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @emotions = args[:emotions] if args.key?(:emotions) + @normalized_bounding_box = args[:normalized_bounding_box] if args.key?(:normalized_bounding_box) + end + end + + # Video frame level annotation results for face detection. + class GoogleCloudVideointelligenceV1p1beta1FaceDetectionFrame + include Google::Apis::Core::Hashable + + # Face attributes in a frame. + # There can be more than one attributes if the same face is detected in + # multiple locations within the current frame. + # Corresponds to the JSON property `attributes` + # @return [Array] + attr_accessor :attributes + + # Time-offset, relative to the beginning of the video, + # corresponding to the video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @attributes = args[:attributes] if args.key?(:attributes) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for face detection. + class GoogleCloudVideointelligenceV1p1beta1FaceSegment + include Google::Apis::Core::Hashable + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Label annotation. + class GoogleCloudVideointelligenceV1p1beta1LabelAnnotation + include Google::Apis::Core::Hashable + + # Common categories for the detected entity. + # E.g. when the label is `Terrier` the category is likely `dog`. And in some + # cases there might be more than one categories e.g. `Terrier` could also be + # a `pet`. + # Corresponds to the JSON property `categoryEntities` + # @return [Array] + attr_accessor :category_entities + + # Detected entity from video analysis. + # Corresponds to the JSON property `entity` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1Entity] + attr_accessor :entity + + # All video frames where a label was detected. + # Corresponds to the JSON property `frames` + # @return [Array] + attr_accessor :frames + + # All video segments where a label was detected. 
+ # Corresponds to the JSON property `segments` + # @return [Array] + attr_accessor :segments + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @category_entities = args[:category_entities] if args.key?(:category_entities) + @entity = args[:entity] if args.key?(:entity) + @frames = args[:frames] if args.key?(:frames) + @segments = args[:segments] if args.key?(:segments) + end + end + + # Video frame level annotation results for label detection. + class GoogleCloudVideointelligenceV1p1beta1LabelFrame + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Time-offset, relative to the beginning of the video, corresponding to the + # video frame for this location. + # Corresponds to the JSON property `timeOffset` + # @return [String] + attr_accessor :time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @time_offset = args[:time_offset] if args.key?(:time_offset) + end + end + + # Video segment level annotation results for label detection. + class GoogleCloudVideointelligenceV1p1beta1LabelSegment + include Google::Apis::Core::Hashable + + # Confidence that the label is accurate. Range: [0, 1]. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Video segment. + # Corresponds to the JSON property `segment` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1VideoSegment] + attr_accessor :segment + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @segment = args[:segment] if args.key?(:segment) + end + end + + # Normalized bounding box. + # The normalized vertex coordinates are relative to the original image. + # Range: [0, 1]. + class GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox + include Google::Apis::Core::Hashable + + # Bottom Y coordinate. + # Corresponds to the JSON property `bottom` + # @return [Float] + attr_accessor :bottom + + # Left X coordinate. + # Corresponds to the JSON property `left` + # @return [Float] + attr_accessor :left + + # Right X coordinate. + # Corresponds to the JSON property `right` + # @return [Float] + attr_accessor :right + + # Top Y coordinate. + # Corresponds to the JSON property `top` + # @return [Float] + attr_accessor :top + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @bottom = args[:bottom] if args.key?(:bottom) + @left = args[:left] if args.key?(:left) + @right = args[:right] if args.key?(:right) + @top = args[:top] if args.key?(:top) + end + end + + # Alternative hypotheses (a.k.a. n-best list). + class GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative + include Google::Apis::Core::Hashable + + # Output only. The confidence estimate between 0.0 and 1.0. A higher number + # indicates an estimated greater likelihood that the recognized words are + # correct. This field is typically provided only for the top hypothesis, and + # only for `is_final=true` results. Clients should not rely on the + # `confidence` field as it is not guaranteed to be accurate or consistent. 
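# Editorial usage sketch (not part of the generated patch): reading the p1beta1
# face detection classes above. `annotation` is assumed to be a populated
# GoogleCloudVideointelligenceV1p1beta1FaceDetectionAnnotation.
def print_face_frames(annotation)
  (annotation.frames || []).each do |frame|
    (frame.attributes || []).each do |face_attribute|
      box = face_attribute.normalized_bounding_box
      if box
        puts "face at #{frame.time_offset}: left=#{box.left} top=#{box.top} right=#{box.right} bottom=#{box.bottom}"
      end
      (face_attribute.emotions || []).each do |emotion|
        puts "  #{emotion.emotion}: #{emotion.score}"
      end
    end
  end
end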
+ # The default of 0.0 is a sentinel value indicating `confidence` was not set. + # Corresponds to the JSON property `confidence` + # @return [Float] + attr_accessor :confidence + + # Output only. Transcript text representing the words that the user spoke. + # Corresponds to the JSON property `transcript` + # @return [String] + attr_accessor :transcript + + # Output only. A list of word-specific information for each recognized word. + # Corresponds to the JSON property `words` + # @return [Array] + attr_accessor :words + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @confidence = args[:confidence] if args.key?(:confidence) + @transcript = args[:transcript] if args.key?(:transcript) + @words = args[:words] if args.key?(:words) + end + end + + # A speech recognition result corresponding to a portion of the audio. + class GoogleCloudVideointelligenceV1p1beta1SpeechTranscription + include Google::Apis::Core::Hashable + + # Output only. May contain one or more recognition hypotheses (up to the + # maximum specified in `max_alternatives`). + # These alternatives are ordered in terms of accuracy, with the top (first) + # alternative being the most probable, as ranked by the recognizer. + # Corresponds to the JSON property `alternatives` + # @return [Array] + attr_accessor :alternatives + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @alternatives = args[:alternatives] if args.key?(:alternatives) + end + end + + # Annotation progress for a single video. + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress + include Google::Apis::Core::Hashable + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Approximate percentage processed thus far. + # Guaranteed to be 100 when fully processed. + # Corresponds to the JSON property `progressPercent` + # @return [Fixnum] + attr_accessor :progress_percent + + # Time when the request was received. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Time of the most recent update. + # Corresponds to the JSON property `updateTime` + # @return [String] + attr_accessor :update_time + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @progress_percent = args[:progress_percent] if args.key?(:progress_percent) + @start_time = args[:start_time] if args.key?(:start_time) + @update_time = args[:update_time] if args.key?(:update_time) + end + end + + # Annotation results for a single video. + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults + include Google::Apis::Core::Hashable + + # The `Status` type defines a logical error model that is suitable for different + # programming environments, including REST APIs and RPC APIs. It is used by + # [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error message, + # and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. 
The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus] + attr_accessor :error + + # Explicit content annotation (based on per-frame visual signals only). + # If no explicit content has been detected in a frame, no annotations are + # present for that frame. + # Corresponds to the JSON property `explicitAnnotation` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation] + attr_accessor :explicit_annotation + + # Face detection annotations. + # Corresponds to the JSON property `faceDetectionAnnotations` + # @return [Array] + attr_accessor :face_detection_annotations + + # Label annotations on frame level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `frameLabelAnnotations` + # @return [Array] + attr_accessor :frame_label_annotations + + # Video file location in + # [Google Cloud Storage](https://cloud.google.com/storage/). + # Corresponds to the JSON property `inputUri` + # @return [String] + attr_accessor :input_uri + + # Label annotations on video level or user specified segment level. + # There is exactly one element for each unique label. + # Corresponds to the JSON property `segmentLabelAnnotations` + # @return [Array] + attr_accessor :segment_label_annotations + + # Shot annotations. Each shot is represented as a video segment. + # Corresponds to the JSON property `shotAnnotations` + # @return [Array] + attr_accessor :shot_annotations + + # Label annotations on shot level. 
+ # There is exactly one element for each unique label. + # Corresponds to the JSON property `shotLabelAnnotations` + # @return [Array] + attr_accessor :shot_label_annotations + + # Speech transcription. + # Corresponds to the JSON property `speechTranscriptions` + # @return [Array] + attr_accessor :speech_transcriptions + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @error = args[:error] if args.key?(:error) + @explicit_annotation = args[:explicit_annotation] if args.key?(:explicit_annotation) + @face_detection_annotations = args[:face_detection_annotations] if args.key?(:face_detection_annotations) + @frame_label_annotations = args[:frame_label_annotations] if args.key?(:frame_label_annotations) + @input_uri = args[:input_uri] if args.key?(:input_uri) + @segment_label_annotations = args[:segment_label_annotations] if args.key?(:segment_label_annotations) + @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations) + @shot_label_annotations = args[:shot_label_annotations] if args.key?(:shot_label_annotations) + @speech_transcriptions = args[:speech_transcriptions] if args.key?(:speech_transcriptions) + end + end + + # Video segment. + class GoogleCloudVideointelligenceV1p1beta1VideoSegment + include Google::Apis::Core::Hashable + + # Time-offset, relative to the beginning of the video, + # corresponding to the end of the segment (inclusive). + # Corresponds to the JSON property `endTimeOffset` + # @return [String] + attr_accessor :end_time_offset + + # Time-offset, relative to the beginning of the video, + # corresponding to the start of the segment (inclusive). + # Corresponds to the JSON property `startTimeOffset` + # @return [String] + attr_accessor :start_time_offset + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time_offset = args[:end_time_offset] if args.key?(:end_time_offset) + @start_time_offset = args[:start_time_offset] if args.key?(:start_time_offset) + end + end + + # Word-specific information for recognized words. Word information is only + # included in the response when certain request parameters are set, such + # as `enable_word_time_offsets`. + class GoogleCloudVideointelligenceV1p1beta1WordInfo + include Google::Apis::Core::Hashable + + # Output only. Time offset relative to the beginning of the audio, and + # corresponding to the end of the spoken word. This field is only set if + # `enable_word_time_offsets=true` and only in the top hypothesis. This is an + # experimental feature and the accuracy of the time offset can vary. + # Corresponds to the JSON property `endTime` + # @return [String] + attr_accessor :end_time + + # Output only. Time offset relative to the beginning of the audio, and + # corresponding to the start of the spoken word. This field is only set if + # `enable_word_time_offsets=true` and only in the top hypothesis. This is an + # experimental feature and the accuracy of the time offset can vary. + # Corresponds to the JSON property `startTime` + # @return [String] + attr_accessor :start_time + + # Output only. The word corresponding to this set of information. 
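# Editorial usage sketch (not part of the generated patch): reading the speech
# transcription classes above. `results` is assumed to be a populated
# GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults; alternatives are
# ordered most-probable first, per the field comment.
def print_transcripts(results)
  (results.speech_transcriptions || []).each do |transcription|
    best = (transcription.alternatives || []).first
    next unless best
    puts "#{best.transcript} (confidence #{best.confidence})"
    (best.words || []).each do |word_info|
      puts "  #{word_info.word}: #{word_info.start_time} - #{word_info.end_time}"
    end
  end
end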
+ # Corresponds to the JSON property `word` + # @return [String] + attr_accessor :word + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @end_time = args[:end_time] if args.key?(:end_time) + @start_time = args[:start_time] if args.key?(:start_time) + @word = args[:word] if args.key?(:word) + end + end + + # This resource represents a long-running operation that is the result of a + # network API call. + class GoogleLongrunningOperation + include Google::Apis::Core::Hashable + + # If the value is `false`, it means the operation is still in progress. + # If `true`, the operation is completed, and either `error` or `response` is + # available. + # Corresponds to the JSON property `done` + # @return [Boolean] + attr_accessor :done + alias_method :done?, :done + + # The `Status` type defines a logical error model that is suitable for different + # programming environments, including REST APIs and RPC APIs. It is used by + # [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error message, + # and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. 
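# Editorial usage sketch (not part of the generated patch): inspecting the
# long-running operation wrapper defined here. `op` is assumed to be a
# GoogleLongrunningOperation returned by an annotate call; re-polling it through
# an operations service is outside the scope of this file.
def report_operation(op)
  unless op.done?
    puts "#{op.name}: still running, metadata: #{op.metadata.inspect}"
    return
  end
  if op.error
    puts "#{op.name}: failed (code #{op.error.code}): #{op.error.message}"
  else
    puts "#{op.name}: finished, response: #{op.response.inspect}"
  end
end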
+ # Corresponds to the JSON property `error` + # @return [Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus] + attr_accessor :error + + # Service-specific metadata associated with the operation. It typically + # contains progress information and common metadata such as create time. + # Some services might not provide such metadata. Any method that returns a + # long-running operation should document the metadata type, if any. + # Corresponds to the JSON property `metadata` + # @return [Hash] + attr_accessor :metadata + + # The server-assigned name, which is only unique within the same service that + # originally returns it. If you use the default HTTP mapping, the + # `name` should have the format of `operations/some/unique/name`. + # Corresponds to the JSON property `name` + # @return [String] + attr_accessor :name + + # The normal response of the operation in case of success. If the original + # method returns no data on success, such as `Delete`, the response is + # `google.protobuf.Empty`. If the original method is standard + # `Get`/`Create`/`Update`, the response should be the resource. For other + # methods, the response should have the type `XxxResponse`, where `Xxx` + # is the original method name. For example, if the original method name + # is `TakeSnapshot()`, the inferred response type is + # `TakeSnapshotResponse`. + # Corresponds to the JSON property `response` + # @return [Hash] + attr_accessor :response + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @done = args[:done] if args.key?(:done) + @error = args[:error] if args.key?(:error) + @metadata = args[:metadata] if args.key?(:metadata) + @name = args[:name] if args.key?(:name) + @response = args[:response] if args.key?(:response) + end + end + + # The `Status` type defines a logical error model that is suitable for different + # programming environments, including REST APIs and RPC APIs. It is used by + # [gRPC](https://github.com/grpc). The error model is designed to be: + # - Simple to use and understand for most users + # - Flexible enough to meet unexpected needs + # # Overview + # The `Status` message contains three pieces of data: error code, error message, + # and error details. The error code should be an enum value of + # google.rpc.Code, but it may accept additional error codes if needed. The + # error message should be a developer-facing English message that helps + # developers *understand* and *resolve* the error. If a localized user-facing + # error message is needed, put the localized message in the error details or + # localize it in the client. The optional error details may contain arbitrary + # information about the error. There is a predefined set of error detail types + # in the package `google.rpc` that can be used for common error conditions. + # # Language mapping + # The `Status` message is the logical representation of the error model, but it + # is not necessarily the actual wire format. When the `Status` message is + # exposed in different client libraries and different wire protocols, it can be + # mapped differently. For example, it will likely be mapped to some exceptions + # in Java, but more likely mapped to some error codes in C. + # # Other uses + # The error model and the `Status` message can be used in a variety of + # environments, either with or without APIs, to provide a + # consistent developer experience across different environments. + # Example uses of this error model include: + # - Partial errors. 
If a service needs to return partial errors to the client, + # it may embed the `Status` in the normal response to indicate the partial + # errors. + # - Workflow errors. A typical workflow has multiple steps. Each step may + # have a `Status` message for error reporting. + # - Batch operations. If a client uses batch request and batch response, the + # `Status` message should be used directly inside batch response, one for + # each error sub-response. + # - Asynchronous operations. If an API call embeds asynchronous operation + # results in its response, the status of those operations should be + # represented directly using the `Status` message. + # - Logging. If some API errors are stored in logs, the message `Status` could + # be used directly after any stripping needed for security/privacy reasons. + class GoogleRpcStatus + include Google::Apis::Core::Hashable + + # The status code, which should be an enum value of google.rpc.Code. + # Corresponds to the JSON property `code` + # @return [Fixnum] + attr_accessor :code + + # A list of messages that carry the error details. There is a common set of + # message types for APIs to use. + # Corresponds to the JSON property `details` + # @return [Array<Hash<String,Object>>] + attr_accessor :details + + # A developer-facing error message, which should be in English. Any + # user-facing error message should be localized and sent in the + # google.rpc.Status.details field, or localized by the client. + # Corresponds to the JSON property `message` + # @return [String] + attr_accessor :message + + def initialize(**args) + update!(**args) + end + + # Update properties of this object + def update!(**args) + @code = args[:code] if args.key?(:code) + @details = args[:details] if args.key?(:details) + @message = args[:message] if args.key?(:message) + end + end + end + end +end diff --git a/generated/google/apis/videointelligence_v1beta2/representations.rb b/generated/google/apis/videointelligence_v1beta2/representations.rb new file mode 100644 index 000000000..bfa5ec367 --- /dev/null +++ b/generated/google/apis/videointelligence_v1beta2/representations.rb @@ -0,0 +1,933 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
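# Editorial note (not part of the generated patch): the class declarations that
# follow are forward declarations only; each `Representation` is reopened later
# in this file (beyond this excerpt) to declare its JSON field mappings. Below is
# a hedged sketch of the shape such a definition usually takes with the
# Google::Apis::Core::JsonRepresentation DSL; the mappings shown are illustrative,
# not copied from the generated file.
module Google
  module Apis
    module VideointelligenceV1beta2
      class GoogleCloudVideointelligenceV1beta2VideoSegment
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :end_time_offset, as: 'endTimeOffset'
          property :start_time_offset, as: 'startTimeOffset'
        end
      end
    end
  end
end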
+ +require 'date' +require 'google/apis/core/base_service' +require 'google/apis/core/json_representation' +require 'google/apis/core/hashable' +require 'google/apis/errors' + +module Google + module Apis + module VideointelligenceV1beta2 + + class GoogleCloudVideointelligenceV1AnnotateVideoProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1AnnotateVideoResponse + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1Entity + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1ExplicitContentAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1ExplicitContentFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1LabelAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1LabelFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1LabelSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1VideoAnnotationProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1VideoAnnotationResults + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1VideoSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1AnnotateVideoProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1AnnotateVideoResponse + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1LabelAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1LabelLocation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1SafeSearchAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1VideoAnnotationProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1VideoAnnotationResults + class Representation < Google::Apis::Core::JsonRepresentation; end + + include 
Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta1VideoSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2AnnotateVideoProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2AnnotateVideoRequest + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2AnnotateVideoResponse + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2Entity + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2ExplicitContentDetectionConfig + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2ExplicitContentFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2LabelAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2LabelDetectionConfig + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2LabelFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2LabelSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2ShotChangeDetectionConfig + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2VideoAnnotationResults + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2VideoContext + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1beta2VideoSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoResponse + class Representation < 
Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1EmotionAttribute + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1Entity + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1FaceDetectionAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1FaceDetectionAttribute + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1FaceDetectionFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1FaceSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1LabelAnnotation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1LabelFrame + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1LabelSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1SpeechTranscription + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1VideoSegment + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1p1beta1WordInfo + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class 
GoogleLongrunningOperation + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleRpcStatus + class Representation < Google::Apis::Core::JsonRepresentation; end + + include Google::Apis::Core::JsonObjectSupport + end + + class GoogleCloudVideointelligenceV1AnnotateVideoProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1VideoAnnotationProgress::Representation + + end + end + + class GoogleCloudVideointelligenceV1AnnotateVideoResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1VideoAnnotationResults::Representation + + end + end + + class GoogleCloudVideointelligenceV1Entity + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :description, as: 'description' + property :entity_id, as: 'entityId' + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1ExplicitContentAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1ExplicitContentFrame::Representation + + end + end + + class GoogleCloudVideointelligenceV1ExplicitContentFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :pornography_likelihood, as: 'pornographyLikelihood' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1LabelAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1Entity, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1Entity::Representation + + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1Entity, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1LabelFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1LabelFrame::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1LabelSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1LabelSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1LabelFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1LabelSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + 
property :confidence, as: 'confidence' + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1VideoAnnotationProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :input_uri, as: 'inputUri' + property :progress_percent, as: 'progressPercent' + property :start_time, as: 'startTime' + property :update_time, as: 'updateTime' + end + end + + class GoogleCloudVideointelligenceV1VideoAnnotationResults + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :error, as: 'error', class: Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus::Representation + + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1ExplicitContentAnnotation::Representation + + collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1LabelAnnotation::Representation + + property :input_uri, as: 'inputUri' + collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1LabelAnnotation::Representation + + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1VideoSegment::Representation + + collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1LabelAnnotation::Representation + + end + end + + class GoogleCloudVideointelligenceV1VideoSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :end_time_offset, as: 'endTimeOffset' + property :start_time_offset, as: 'startTimeOffset' + end + end + + class GoogleCloudVideointelligenceV1beta1AnnotateVideoProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1VideoAnnotationProgress::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta1AnnotateVideoResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1VideoAnnotationResults::Representation + + end + end + + class 
GoogleCloudVideointelligenceV1beta1LabelAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :description, as: 'description' + property :language_code, as: 'languageCode' + collection :locations, as: 'locations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1LabelLocation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1LabelLocation::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta1LabelLocation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :level, as: 'level' + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta1SafeSearchAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :adult, as: 'adult' + property :medical, as: 'medical' + property :racy, as: 'racy' + property :spoof, as: 'spoof' + property :time_offset, :numeric_string => true, as: 'timeOffset' + property :violent, as: 'violent' + end + end + + class GoogleCloudVideointelligenceV1beta1VideoAnnotationProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :input_uri, as: 'inputUri' + property :progress_percent, as: 'progressPercent' + property :start_time, as: 'startTime' + property :update_time, as: 'updateTime' + end + end + + class GoogleCloudVideointelligenceV1beta1VideoAnnotationResults + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :error, as: 'error', class: Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus::Representation + + property :input_uri, as: 'inputUri' + collection :label_annotations, as: 'labelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1LabelAnnotation::Representation + + collection :safe_search_annotations, as: 'safeSearchAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1SafeSearchAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1SafeSearchAnnotation::Representation + + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta1VideoSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :end_time_offset, :numeric_string => true, as: 'endTimeOffset' + property :start_time_offset, :numeric_string => true, as: 'startTimeOffset' + end + end + + class GoogleCloudVideointelligenceV1beta2AnnotateVideoProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress, decorator: 
Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2AnnotateVideoRequest + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :features, as: 'features' + property :input_content, :base64 => true, as: 'inputContent' + property :input_uri, as: 'inputUri' + property :location_id, as: 'locationId' + property :output_uri, as: 'outputUri' + property :video_context, as: 'videoContext', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2VideoContext, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2VideoContext::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2AnnotateVideoResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2VideoAnnotationResults::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2Entity + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :description, as: 'description' + property :entity_id, as: 'entityId' + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2ExplicitContentFrame::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2ExplicitContentDetectionConfig + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :model, as: 'model' + end + end + + class GoogleCloudVideointelligenceV1beta2ExplicitContentFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :pornography_likelihood, as: 'pornographyLikelihood' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1beta2LabelAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2Entity, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2Entity::Representation + + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2Entity, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2LabelFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2LabelFrame::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2LabelSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2LabelSegment::Representation + + end + end + + class 
GoogleCloudVideointelligenceV1beta2LabelDetectionConfig + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :label_detection_mode, as: 'labelDetectionMode' + property :model, as: 'model' + property :stationary_camera, as: 'stationaryCamera' + end + end + + class GoogleCloudVideointelligenceV1beta2LabelFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1beta2LabelSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2ShotChangeDetectionConfig + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :model, as: 'model' + end + end + + class GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :input_uri, as: 'inputUri' + property :progress_percent, as: 'progressPercent' + property :start_time, as: 'startTime' + property :update_time, as: 'updateTime' + end + end + + class GoogleCloudVideointelligenceV1beta2VideoAnnotationResults + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :error, as: 'error', class: Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus::Representation + + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation::Representation + + collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2LabelAnnotation::Representation + + property :input_uri, as: 'inputUri' + collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2LabelAnnotation::Representation + + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2VideoSegment::Representation + + collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2LabelAnnotation::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2VideoContext + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :explicit_content_detection_config, as: 'explicitContentDetectionConfig', class: 
Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2ExplicitContentDetectionConfig, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2ExplicitContentDetectionConfig::Representation + + property :label_detection_config, as: 'labelDetectionConfig', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2LabelDetectionConfig, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2LabelDetectionConfig::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2VideoSegment::Representation + + property :shot_change_detection_config, as: 'shotChangeDetectionConfig', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2ShotChangeDetectionConfig, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2ShotChangeDetectionConfig::Representation + + end + end + + class GoogleCloudVideointelligenceV1beta2VideoSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :end_time_offset, as: 'endTimeOffset' + property :start_time_offset, as: 'startTimeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_progress, as: 'annotationProgress', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoResponse + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :annotation_results, as: 'annotationResults', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1EmotionAttribute + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :emotion, as: 'emotion' + property :score, as: 'score' + end + end + + class GoogleCloudVideointelligenceV1p1beta1Entity + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :description, as: 'description' + property :entity_id, as: 'entityId' + property :language_code, as: 'languageCode' + end + end + + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :pornography_likelihood, as: 'pornographyLikelihood' + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1FaceDetectionAnnotation + # @private + class 
Representation < Google::Apis::Core::JsonRepresentation + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1FaceDetectionFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1FaceDetectionFrame::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1FaceSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1FaceSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1FaceDetectionAttribute + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :emotions, as: 'emotions', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1EmotionAttribute, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1EmotionAttribute::Representation + + property :normalized_bounding_box, as: 'normalizedBoundingBox', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1FaceDetectionFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :attributes, as: 'attributes', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1FaceDetectionAttribute, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1FaceDetectionAttribute::Representation + + property :time_offset, as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1FaceSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1LabelAnnotation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :category_entities, as: 'categoryEntities', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1Entity, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1Entity::Representation + + property :entity, as: 'entity', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1Entity, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1Entity::Representation + + collection :frames, as: 'frames', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1LabelFrame, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1LabelFrame::Representation + + collection :segments, as: 'segments', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1LabelSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1LabelSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1LabelFrame + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :time_offset, 
as: 'timeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1LabelSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :segment, as: 'segment', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1VideoSegment::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :bottom, as: 'bottom' + property :left, as: 'left' + property :right, as: 'right' + property :top, as: 'top' + end + end + + class GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :confidence, as: 'confidence' + property :transcript, as: 'transcript' + collection :words, as: 'words', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1WordInfo, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1WordInfo::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1SpeechTranscription + # @private + class Representation < Google::Apis::Core::JsonRepresentation + collection :alternatives, as: 'alternatives', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :input_uri, as: 'inputUri' + property :progress_percent, as: 'progressPercent' + property :start_time, as: 'startTime' + property :update_time, as: 'updateTime' + end + end + + class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :error, as: 'error', class: Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus::Representation + + property :explicit_annotation, as: 'explicitAnnotation', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation::Representation + + collection :face_detection_annotations, as: 'faceDetectionAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1FaceDetectionAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1FaceDetectionAnnotation::Representation + + collection :frame_label_annotations, as: 'frameLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation::Representation + + property :input_uri, as: 'inputUri' + collection :segment_label_annotations, as: 'segmentLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation, decorator: 
Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation::Representation + + collection :shot_annotations, as: 'shotAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1VideoSegment, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1VideoSegment::Representation + + collection :shot_label_annotations, as: 'shotLabelAnnotations', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation::Representation + + collection :speech_transcriptions, as: 'speechTranscriptions', class: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1SpeechTranscription, decorator: Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1p1beta1SpeechTranscription::Representation + + end + end + + class GoogleCloudVideointelligenceV1p1beta1VideoSegment + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :end_time_offset, as: 'endTimeOffset' + property :start_time_offset, as: 'startTimeOffset' + end + end + + class GoogleCloudVideointelligenceV1p1beta1WordInfo + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :end_time, as: 'endTime' + property :start_time, as: 'startTime' + property :word, as: 'word' + end + end + + class GoogleLongrunningOperation + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :done, as: 'done' + property :error, as: 'error', class: Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus, decorator: Google::Apis::VideointelligenceV1beta2::GoogleRpcStatus::Representation + + hash :metadata, as: 'metadata' + property :name, as: 'name' + hash :response, as: 'response' + end + end + + class GoogleRpcStatus + # @private + class Representation < Google::Apis::Core::JsonRepresentation + property :code, as: 'code' + collection :details, as: 'details' + property :message, as: 'message' + end + end + end + end +end diff --git a/generated/google/apis/videointelligence_v1beta2/service.rb b/generated/google/apis/videointelligence_v1beta2/service.rb new file mode 100644 index 000000000..641629609 --- /dev/null +++ b/generated/google/apis/videointelligence_v1beta2/service.rb @@ -0,0 +1,92 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'google/apis/core/base_service' +require 'google/apis/core/json_representation' +require 'google/apis/core/hashable' +require 'google/apis/errors' + +module Google + module Apis + module VideointelligenceV1beta2 + # Cloud Video Intelligence API + # + # Cloud Video Intelligence API. 
+ # + # @example + # require 'google/apis/videointelligence_v1beta2' + # + # Videointelligence = Google::Apis::VideointelligenceV1beta2 # Alias the module + # service = Videointelligence::CloudVideoIntelligenceService.new + # + # @see https://cloud.google.com/video-intelligence/docs/ + class CloudVideoIntelligenceService < Google::Apis::Core::BaseService + # @return [String] + # API key. Your API key identifies your project and provides you with API access, + # quota, and reports. Required unless you provide an OAuth 2.0 token. + attr_accessor :key + + # @return [String] + # Available to use for quota purposes for server-side applications. Can be any + # arbitrary string assigned to a user, but should not exceed 40 characters. + attr_accessor :quota_user + + def initialize + super('https://videointelligence.googleapis.com/', '') + @batch_path = 'batch' + end + + # Performs asynchronous video annotation. Progress and results can be + # retrieved through the `google.longrunning.Operations` interface. + # `Operation.metadata` contains `AnnotateVideoProgress` (progress). + # `Operation.response` contains `AnnotateVideoResponse` (results). + # @param [Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2AnnotateVideoRequest] google_cloud_videointelligence_v1beta2_annotate_video_request_object + # @param [String] fields + # Selector specifying which fields to include in a partial response. + # @param [String] quota_user + # Available to use for quota purposes for server-side applications. Can be any + # arbitrary string assigned to a user, but should not exceed 40 characters. + # @param [Google::Apis::RequestOptions] options + # Request-specific options + # + # @yield [result, err] Result & error if block supplied + # @yieldparam result [Google::Apis::VideointelligenceV1beta2::GoogleLongrunningOperation] parsed result object + # @yieldparam err [StandardError] error object if request failed + # + # @return [Google::Apis::VideointelligenceV1beta2::GoogleLongrunningOperation] + # + # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried + # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification + # @raise [Google::Apis::AuthorizationError] Authorization is required + def annotate_video(google_cloud_videointelligence_v1beta2_annotate_video_request_object = nil, fields: nil, quota_user: nil, options: nil, &block) + command = make_simple_command(:post, 'v1beta2/videos:annotate', options) + command.request_representation = Google::Apis::VideointelligenceV1beta2::GoogleCloudVideointelligenceV1beta2AnnotateVideoRequest::Representation + command.request_object = google_cloud_videointelligence_v1beta2_annotate_video_request_object + command.response_representation = Google::Apis::VideointelligenceV1beta2::GoogleLongrunningOperation::Representation + command.response_class = Google::Apis::VideointelligenceV1beta2::GoogleLongrunningOperation + command.query['fields'] = fields unless fields.nil? + command.query['quotaUser'] = quota_user unless quota_user.nil? + execute_or_queue_command(command, &block) + end + + protected + + def apply_command_defaults(command) + command.query['key'] = key unless key.nil? + command.query['quotaUser'] = quota_user unless quota_user.nil? + end + end + end + end +end
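As a usage sketch for the new endpoint, the snippet below strings the generated pieces together: it builds a beta2 annotate request, sends it with annotate_video, and looks at the returned long-running operation. The API key, bucket path, and chosen feature are placeholder values; an OAuth 2.0 token can be used instead of a key, as noted on the `key` accessor.

    require 'google/apis/videointelligence_v1beta2'

    videointelligence = Google::Apis::VideointelligenceV1beta2
    service = videointelligence::CloudVideoIntelligenceService.new
    service.key = 'YOUR_API_KEY' # placeholder credential

    request = videointelligence::GoogleCloudVideointelligenceV1beta2AnnotateVideoRequest.new(
      input_uri: 'gs://your-bucket/your-video.mp4', # placeholder Cloud Storage URI
      features: ['LABEL_DETECTION']
    )

    # POSTs to v1beta2/videos:annotate and returns a GoogleLongrunningOperation.
    operation = service.annotate_video(request)

    # Annotation runs asynchronously, so the operation is normally not done yet.
    # Its metadata hash carries AnnotateVideoProgress; once done, either `error`
    # (a GoogleRpcStatus) or `response` (an AnnotateVideoResponse-shaped hash)
    # is populated.
    puts "Started #{operation.name}, done=#{operation.done.inspect}"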