// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.cloud.video.transcoder.v1beta1;

import "google/api/field_behavior.proto";
import "google/api/resource.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";

option go_package = "google.golang.org/genproto/googleapis/cloud/video/transcoder/v1beta1;transcoder";
option java_multiple_files = true;
option java_outer_classname = "ResourcesProto";
option java_package = "com.google.cloud.video.transcoder.v1beta1";

// Transcoding job resource.
message Job {
  option (google.api.resource) = {
    type: "transcoder.googleapis.com/Job"
    pattern: "projects/{project}/locations/{location}/jobs/{job}"
  };

  // The origin URI.
  message OriginUri {
    // HLS manifest URI per https://tools.ietf.org/html/rfc8216#section-4.3.4.
    // If multiple HLS manifests are created, only the first one is listed.
    string hls = 1;

    // Dash manifest URI. If multiple Dash manifests are created, only the first
    // one is listed.
    string dash = 2;
  }

  // The current state of the job.
  enum ProcessingState {
    // The processing state is not specified.
    PROCESSING_STATE_UNSPECIFIED = 0;

    // The job is enqueued and will be picked up for processing soon.
    PENDING = 1;

    // The job is being processed.
    RUNNING = 2;

    // The job has been completed successfully.
    SUCCEEDED = 3;

    // The job has failed. For additional information, see `failure_reason` and
    // `failure_details`
    FAILED = 4;
  }

  // The resource name of the job.
  // Format: `projects/{project}/locations/{location}/jobs/{job}`
  string name = 1;

  // Input only. Specify the `input_uri` to populate empty `uri` fields in each element of
  // `Job.config.inputs` or `JobTemplate.config.inputs` when using template.
  // URI of the media. Input files must be at least 5 seconds in duration and
  // stored in Cloud Storage (for example, `gs://bucket/inputs/file.mp4`).
  string input_uri = 2 [(google.api.field_behavior) = INPUT_ONLY];

  // Input only. Specify the `output_uri` to populate an empty `Job.config.output.uri` or
  // `JobTemplate.config.output.uri` when using template.
  // URI for the output file(s). For example, `gs://my-bucket/outputs/`.
  string output_uri = 3 [(google.api.field_behavior) = INPUT_ONLY];

  // Specify the `job_config` for the transcoding job. If you don't specify the
  // `job_config`, the API selects `templateId`; this template ID is set to
  // `preset/web-hd` by default. When you use a `template_id` to create a job,
  // the `Job.config` is populated by the `JobTemplate.config`.
oneof job_config { // Input only. Specify the `template_id` to use for populating `Job.config`. The default // is `preset/web-hd`. // // Preset Transcoder templates: // - `preset/{preset_id}` // // - User defined JobTemplate: // `{job_template_id}` string template_id = 4 [(google.api.field_behavior) = INPUT_ONLY]; // The configuration for this job. JobConfig config = 5; } // Specify the priority of the job. Enter a value between 0 and 100, where 0 // is the lowest priority and 100 is the highest priority. The default is 0. int32 priority = 6; // Output only. The origin URI. // OriginUri origin_uri = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The current state of the job. ProcessingState state = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Estimated fractional progress, from `0` to `1` for each // step. // Progress progress = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A description of the reason for the failure. This property is // always present when `state` is `FAILED`. string failure_reason = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. List of failure details. This property may contain additional // information about the failure when `failure_reason` is present. // repeated FailureDetail failure_details = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The time the job was created. google.protobuf.Timestamp create_time = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The time the transcoding started. google.protobuf.Timestamp start_time = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The time the transcoding finished. google.protobuf.Timestamp end_time = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; // Job time to live value in days, which will be effective after job // completion. Job should be deleted automatically after the given TTL. Enter // a value between 1 and 90. The default is 30. 
int32 ttl_after_completion_days = 15; } // Transcoding job template resource. message JobTemplate { option (google.api.resource) = { type: "transcoder.googleapis.com/JobTemplate" pattern: "projects/{project}/locations/{location}/jobTemplates/{job_template}" }; // The resource name of the job template. // Format: // `projects/{project}/locations/{location}/jobTemplates/{job_template}` string name = 1; // The configuration for this template. JobConfig config = 2; } // Job configuration message JobConfig { // List of input assets stored in Cloud Storage. repeated Input inputs = 1; // List of `Edit atom`s. Defines the ultimate timeline of the resulting // file or manifest. repeated EditAtom edit_list = 2; // List of elementary streams. repeated ElementaryStream elementary_streams = 3; // List of multiplexing settings for output streams. repeated MuxStream mux_streams = 4; // List of output manifests. repeated Manifest manifests = 5; // Output configuration. Output output = 6; // List of ad breaks. Specifies where to insert ad break tags in the output // manifests. repeated AdBreak ad_breaks = 7; // Destination on Pub/Sub. PubsubDestination pubsub_destination = 8; // List of output sprite sheets. repeated SpriteSheet sprite_sheets = 9; // List of overlays on the output video, in descending Z-order. repeated Overlay overlays = 10; } // Input asset. message Input { // A unique key for this input. Must be specified when using advanced // mapping and edit lists. string key = 1; // URI of the media. Input files must be at least 5 seconds in duration and // stored in Cloud Storage (for example, `gs://bucket/inputs/file.mp4`). // If empty, the value will be populated from `Job.input_uri`. string uri = 2; // Preprocessing configurations. PreprocessingConfig preprocessing_config = 3; } // Location of output file(s) in a Cloud Storage bucket. message Output { // URI for the output file(s). For example, `gs://my-bucket/outputs/`. 
// If empty the value is populated from `Job.output_uri`. string uri = 1; } // Edit atom. message EditAtom { // A unique key for this atom. Must be specified when using advanced // mapping. string key = 1; // List of `Input.key`s identifying files that should be used in this atom. // The listed `inputs` must have the same timeline. repeated string inputs = 2; // End time in seconds for the atom, relative to the input file timeline. // When `end_time_offset` is not specified, the `inputs` are used until // the end of the atom. google.protobuf.Duration end_time_offset = 3; // Start time in seconds for the atom, relative to the input file timeline. // The default is `0s`. google.protobuf.Duration start_time_offset = 4; } // Ad break. message AdBreak { // Start time in seconds for the ad break, relative to the output file // timeline. The default is `0s`. google.protobuf.Duration start_time_offset = 1; } // Encoding of an input file such as an audio, video, or text track. // Elementary streams must be packaged before // mapping and sharing between different output formats. message ElementaryStream { // A unique key for this elementary stream. string key = 4; // Encoding of an audio, video, or text track. oneof elementary_stream { // Encoding of a video stream. VideoStream video_stream = 1; // Encoding of an audio stream. AudioStream audio_stream = 2; // Encoding of a text stream. For example, closed captions or subtitles. TextStream text_stream = 3; } } // Multiplexing settings for output stream. message MuxStream { // A unique key for this multiplexed stream. HLS media manifests will be // named `MuxStream.key` with the `".m3u8"` extension suffix. string key = 1; // The name of the generated file. The default is `MuxStream.key` with the // extension suffix corresponding to the `MuxStream.container`. // // Individual segments also have an incremental 10-digit zero-padded suffix // starting from 0 before the extension, such as `"mux_stream0000000123.ts"`. 
string file_name = 2; // The container format. The default is `"mp4"` // // Supported container formats: // - 'ts' // - 'fmp4'- the corresponding file extension is `".m4s"` // - 'mp4' // - 'vtt' string container = 3; // List of `ElementaryStream.key`s multiplexed in this stream. repeated string elementary_streams = 4; // Segment settings for `"ts"`, `"fmp4"` and `"vtt"`. SegmentSettings segment_settings = 5; // Encryption settings. Encryption encryption = 6; } // Manifest configuration. message Manifest { // The manifest type can be either `"HLS"` or `"DASH"`. enum ManifestType { // The manifest type is not specified. MANIFEST_TYPE_UNSPECIFIED = 0; // Create `"HLS"` manifest. The corresponding file extension is `".m3u8"`. HLS = 1; // Create `"DASH"` manifest. The corresponding file extension is `".mpd"`. DASH = 2; } // The name of the generated file. The default is `"manifest"` with the // extension suffix corresponding to the `Manifest.type`. string file_name = 1; // Required. Type of the manifest, can be "HLS" or "DASH". ManifestType type = 2 [(google.api.field_behavior) = REQUIRED]; // Required. List of user given `MuxStream.key`s that should appear in this manifest. // // When `Manifest.type` is `HLS`, a media manifest with name `MuxStream.key` // and `.m3u8` extension is generated for each element of the // `Manifest.mux_streams`. repeated string mux_streams = 3 [(google.api.field_behavior) = REQUIRED]; } // A Pub/Sub destination. message PubsubDestination { // The name of the Pub/Sub topic to publish job completion notification // to. For example: `projects/{project}/topics/{topic}`. string topic = 1; } // Sprite sheet configuration. message SpriteSheet { // Format type. The default is `"jpeg"`. // // Supported formats: // - 'jpeg' string format = 1; // Required. File name prefix for the generated sprite sheets. 
// // Each sprite sheet has an incremental 10-digit zero-padded suffix starting // from 0 before the extension, such as `"sprite_sheet0000000123.jpeg"`. string file_prefix = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The width of sprite in pixels. Must be an even integer. To preserve the // source aspect ratio, set the [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_width_pixels] field or // the [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_height_pixels] field, but not both (the API will // automatically calculate the missing field). int32 sprite_width_pixels = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The height of sprite in pixels. Must be an even integer. To preserve the // source aspect ratio, set the [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_height_pixels] field or // the [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_width_pixels] field, but not both (the API will // automatically calculate the missing field). int32 sprite_height_pixels = 4 [(google.api.field_behavior) = REQUIRED]; // The maximum number of sprites per row in a sprite sheet. The default is 0, // which indicates no maximum limit. int32 column_count = 5; // The maximum number of rows per sprite sheet. When the sprite sheet is full, // a new sprite sheet is created. The default is 0, which indicates no maximum // limit. int32 row_count = 6; // Start time in seconds, relative to the output file timeline. Determines the // first sprite to pick. The default is `0s`. google.protobuf.Duration start_time_offset = 7; // End time in seconds, relative to the output file timeline. When // `end_time_offset` is not specified, the sprites are generated until the end // of the output file. 
google.protobuf.Duration end_time_offset = 8; // Specify either total number of sprites or interval to create sprites. oneof extraction_strategy { // Total number of sprites. Create the specified number of sprites // distributed evenly across the timeline of the output media. The default // is 100. int32 total_count = 9; // Starting from `0s`, create sprites at regular intervals. Specify the // interval value in seconds. google.protobuf.Duration interval = 10; } // The quality of the generated sprite sheet. Enter a value between 1 // and 100, where 1 is the lowest quality and 100 is the highest quality. // The default is 100. A high quality value corresponds to a low image data // compression ratio. int32 quality = 11; } // Overlay configuration. message Overlay { // 2D normalized coordinates. Default: `{0.0, 0.0}` message NormalizedCoordinate { // Normalized x coordinate. double x = 1; // Normalized y coordinate. double y = 2; } // Overlaid jpeg image. message Image { // Required. URI of the JPEG image in Cloud Storage. For example, // `gs://bucket/inputs/image.jpeg`. JPEG is the only supported image type. string uri = 1 [(google.api.field_behavior) = REQUIRED]; // Normalized image resolution, based on output video resolution. Valid // values: `0.0`–`1.0`. To respect the original image aspect ratio, set // either `x` or `y` to `0.0`. To use the original image resolution, set // both `x` and `y` to `0.0`. NormalizedCoordinate resolution = 2; // Target image opacity. Valid values are from `1.0` (solid, default) to // `0.0` (transparent), exclusive. Set this to a value greater than `0.0`. double alpha = 3; } // Display static overlay object. message AnimationStatic { // Normalized coordinates based on output video resolution. Valid // values: `0.0`–`1.0`. `xy` is the upper-left coordinate of the overlay // object. For example, use the x and y coordinates {0,0} to position the // top-left corner of the overlay animation in the top-left corner of the // output video. 
NormalizedCoordinate xy = 1; // The time to start displaying the overlay object, in seconds. Default: 0 google.protobuf.Duration start_time_offset = 2; } // Display overlay object with fade animation. message AnimationFade { // Required. Type of fade animation: `FADE_IN` or `FADE_OUT`. FadeType fade_type = 1 [(google.api.field_behavior) = REQUIRED]; // Normalized coordinates based on output video resolution. Valid // values: `0.0`–`1.0`. `xy` is the upper-left coordinate of the overlay // object. For example, use the x and y coordinates {0,0} to position the // top-left corner of the overlay animation in the top-left corner of the // output video. NormalizedCoordinate xy = 2; // The time to start the fade animation, in seconds. Default: 0 google.protobuf.Duration start_time_offset = 3; // The time to end the fade animation, in seconds. Default: // `start_time_offset` + 1s google.protobuf.Duration end_time_offset = 4; } // End previous overlay animation from the video. Without AnimationEnd, the // overlay object will keep the state of previous animation until the end of // the video. message AnimationEnd { // The time to end overlay object, in seconds. Default: 0 google.protobuf.Duration start_time_offset = 1; } // Animation types. message Animation { // Animations can be static or fade, or they can end the previous animation. oneof animation_type { // Display static overlay object. AnimationStatic animation_static = 1; // Display overlay object with fade animation. AnimationFade animation_fade = 2; // End previous animation. AnimationEnd animation_end = 3; } } // Fade type for the overlay: `FADE_IN` or `FADE_OUT`. enum FadeType { // The fade type is not specified. FADE_TYPE_UNSPECIFIED = 0; // Fade the overlay object into view. FADE_IN = 1; // Fade the overlay object out of view. FADE_OUT = 2; } // Image overlay. Image image = 1; // List of Animations. The list should be chronological, without any time // overlap. 
repeated Animation animations = 2; } // Preprocessing configurations. message PreprocessingConfig { // Color preprocessing configuration. message Color { // Control color saturation of the video. Enter a value between -1 and 1, // where -1 is fully desaturated and 1 is maximum saturation. 0 is no // change. The default is 0. double saturation = 1; // Control black and white contrast of the video. Enter a value between -1 // and 1, where -1 is minimum contrast and 1 is maximum contrast. 0 is no // change. The default is 0. double contrast = 2; // Control brightness of the video. Enter a value between -1 and 1, where -1 // is minimum brightness and 1 is maximum brightness. 0 is no change. The // default is 0. double brightness = 3; } // Denoise preprocessing configuration. message Denoise { // Set strength of the denoise. Enter a value between 0 and 1. The higher // the value, the smoother the image. 0 is no denoising. The default is 0. double strength = 1; // Set the denoiser mode. The default is `"standard"`. // // Supported denoiser modes: // - 'standard' // - 'grain' string tune = 2; } // Deblock preprocessing configuration. message Deblock { // Set strength of the deblocker. Enter a value between 0 and 1. The higher // the value, the stronger the block removal. 0 is no deblocking. The // default is 0. double strength = 1; // Enable deblocker. The default is `false`. bool enabled = 2; } // Audio preprocessing configuration. message Audio { // Specify audio loudness normalization in loudness units relative to full // scale (LUFS). 
Enter a value between -24 and 0 (the default), where: // // * -24 is the Advanced Television Systems Committee (ATSC A/85) standard // * -23 is the EU R128 broadcast standard // * -19 is the prior standard for online mono audio // * -18 is the ReplayGain standard // * -16 is the prior standard for stereo audio // * -14 is the new online audio standard recommended by Spotify, as well // as Amazon Echo // * 0 disables normalization double lufs = 1; // Enable boosting high frequency components. The default is `false`. bool high_boost = 2; // Enable boosting low frequency components. The default is `false`. bool low_boost = 3; } // Video cropping configuration for the input video. The cropped input video // is scaled to match the output resolution. message Crop { // The number of pixels to crop from the top. The default is 0. int32 top_pixels = 1; // The number of pixels to crop from the bottom. The default is 0. int32 bottom_pixels = 2; // The number of pixels to crop from the left. The default is 0. int32 left_pixels = 3; // The number of pixels to crop from the right. The default is 0. int32 right_pixels = 4; } // Pad filter configuration for the input video. The padded input video // is scaled after padding with black to match the output resolution. message Pad { // The number of pixels to add to the top. The default is 0. int32 top_pixels = 1; // The number of pixels to add to the bottom. The default is 0. int32 bottom_pixels = 2; // The number of pixels to add to the left. The default is 0. int32 left_pixels = 3; // The number of pixels to add to the right. The default is 0. int32 right_pixels = 4; } // Color preprocessing configuration. Color color = 1; // Denoise preprocessing configuration. Denoise denoise = 2; // Deblock preprocessing configuration. Deblock deblock = 3; // Audio preprocessing configuration. Audio audio = 4; // Specify the video cropping configuration. Crop crop = 5; // Specify the video pad filter configuration. 
Pad pad = 6; } // Video stream resource. message VideoStream { // Codec type. The following codecs are supported: // // * `h264` (default) // * `h265` // * `vp9` string codec = 1; // Enforces the specified codec profile. The following profiles are supported: // // * `baseline` // * `main` // * `high` (default) // // The available options are // FFmpeg-compatible. Note that certain values for this // field may cause the transcoder to override other fields you set in the // `VideoStream` message. string profile = 2; // Enforces the specified codec tune. The available options are // FFmpeg-compatible. Note that certain values for this // field may cause the transcoder to override other fields you set in the // `VideoStream` message. string tune = 3; // Enforces the specified codec preset. The default is `veryfast`. The // available options are // FFmpeg-compatible. Note that certain values for this // field may cause the transcoder to override other fields you set in the // `VideoStream` message. string preset = 4; // The height of the video in pixels. Must be an even integer. // When not specified, the height is adjusted to match the specified width and // input aspect ratio. If both are omitted, the input height is used. int32 height_pixels = 5; // The width of the video in pixels. Must be an even integer. // When not specified, the width is adjusted to match the specified height and // input aspect ratio. If both are omitted, the input width is used. int32 width_pixels = 6; // Pixel format to use. The default is `"yuv420p"`. // // Supported pixel formats: // - 'yuv420p' pixel format. // - 'yuv422p' pixel format. // - 'yuv444p' pixel format. // - 'yuv420p10' 10-bit HDR pixel format. // - 'yuv422p10' 10-bit HDR pixel format. // - 'yuv444p10' 10-bit HDR pixel format. // - 'yuv420p12' 12-bit HDR pixel format. // - 'yuv422p12' 12-bit HDR pixel format. // - 'yuv444p12' 12-bit HDR pixel format. string pixel_format = 7; // Required. The video bitrate in bits per second. 
The minimum value is 1,000. // The maximum value for H264/H265 is 800,000,000. The maximum value for VP9 // is 480,000,000. int32 bitrate_bps = 8 [(google.api.field_behavior) = REQUIRED]; // Specify the `rate_control_mode`. The default is `"vbr"`. // // Supported rate control modes: // - 'vbr' - variable bitrate // - 'crf' - constant rate factor string rate_control_mode = 9; // Use two-pass encoding strategy to achieve better video quality. // `VideoStream.rate_control_mode` must be `"vbr"`. The default is `false`. bool enable_two_pass = 10; // Target CRF level. Must be between 10 and 36, where 10 is the highest // quality and 36 is the most efficient compression. The default is 21. int32 crf_level = 11; // Size of the Video Buffering Verifier (VBV) buffer in bits. Must be greater // than zero. The default is equal to `VideoStream.bitrate_bps`. int32 vbv_size_bits = 12; // Initial fullness of the Video Buffering Verifier (VBV) buffer in bits. Must // be greater than zero. The default is equal to 90% of // `VideoStream.vbv_size_bits`. int32 vbv_fullness_bits = 13; // Specifies whether an open Group of Pictures (GOP) structure should be // allowed or not. The default is `false`. bool allow_open_gop = 14; // GOP mode can be either by frame count or duration. oneof gop_mode { // Select the GOP size based on the specified frame count. Must be greater // than zero. int32 gop_frame_count = 15; // Select the GOP size based on the specified duration. The default is // `"3s"`. Note that `gopDuration` must be less than or equal to // [`segmentDuration`](#SegmentSettings), and // [`segmentDuration`](#SegmentSettings) must be divisible by `gopDuration`. google.protobuf.Duration gop_duration = 16; } // The entropy coder to use. The default is `"cabac"`. // // Supported entropy coders: // - 'cavlc' // - 'cabac' string entropy_coder = 17; // Allow B-pyramid for reference frame selection. This may not be supported // on all decoders. The default is `false`. 
bool b_pyramid = 18; // The number of consecutive B-frames. Must be greater than or equal to zero. // Must be less than `VideoStream.gop_frame_count` if set. The default is 0. int32 b_frame_count = 19; // Required. The target video frame rate in frames per second (FPS). Must be less than // or equal to 120. Will default to the input frame rate if larger than the // input frame rate. The API will generate an output FPS that is divisible by // the input FPS, and smaller or equal to the target FPS. See // [Calculate frame // rate](https://cloud.google.com/transcoder/docs/concepts/frame-rate) for // more information. double frame_rate = 20 [(google.api.field_behavior) = REQUIRED]; // Specify the intensity of the adaptive quantizer (AQ). Must be between 0 and // 1, where 0 disables the quantizer and 1 maximizes the quantizer. A // higher value equals a lower bitrate but smoother image. The default is 0. double aq_strength = 21; } // Audio stream resource. message AudioStream { // The mapping for the `Job.edit_list` atoms with audio `EditAtom.inputs`. message AudioAtom { // The audio channel. message AudioChannel { // Identifies which input file, track, and channel should be used. message AudioChannelInput { // Required. The `Input.key` that identifies the input file. string key = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The zero-based index of the track in the input file. int32 track = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The zero-based index of the channel in the input file. int32 channel = 3 [(google.api.field_behavior) = REQUIRED]; // Audio volume control in dB. Negative values decrease volume, // positive values increase. The default is 0. double gain_db = 4; } // List of `Job.inputs` for this audio channel. repeated AudioChannelInput inputs = 2; } // Required. The `EditAtom.key` that references the atom with audio inputs in the // `Job.edit_list`. 
string key = 1 [(google.api.field_behavior) = REQUIRED]; // List of `Channel`s for this audio stream. // for in-depth explanation. repeated AudioChannel channels = 2; } // The codec for this audio stream. The default is `"aac"`. // // Supported audio codecs: // - 'aac' // - 'aac-he' // - 'aac-he-v2' // - 'mp3' // - 'ac3' // - 'eac3' string codec = 1; // Required. Audio bitrate in bits per second. Must be between 1 and 10,000,000. int32 bitrate_bps = 2 [(google.api.field_behavior) = REQUIRED]; // Number of audio channels. Must be between 1 and 6. The default is 2. int32 channel_count = 3; // A list of channel names specifying layout of the audio channels. // This only affects the metadata embedded in the container headers, if // supported by the specified format. The default is `["fl", "fr"]`. // // Supported channel names: // - 'fl' - Front left channel // - 'fr' - Front right channel // - 'sl' - Side left channel // - 'sr' - Side right channel // - 'fc' - Front center channel // - 'lfe' - Low frequency repeated string channel_layout = 4; // The mapping for the `Job.edit_list` atoms with audio `EditAtom.inputs`. repeated AudioAtom mapping = 5; // The audio sample rate in Hertz. The default is 48000 Hertz. int32 sample_rate_hertz = 6; } // Encoding of a text stream. For example, closed captions or subtitles. message TextStream { // The mapping for the `Job.edit_list` atoms with text `EditAtom.inputs`. message TextAtom { // Identifies which input file and track should be used. message TextInput { // Required. The `Input.key` that identifies the input file. string key = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The zero-based index of the track in the input file. int32 track = 2 [(google.api.field_behavior) = REQUIRED]; } // Required. The `EditAtom.key` that references atom with text inputs in the // `Job.edit_list`. string key = 1 [(google.api.field_behavior) = REQUIRED]; // List of `Job.inputs` that should be embedded in this atom. 
Only one // input is supported. repeated TextInput inputs = 2; } // The codec for this text stream. The default is `"webvtt"`. // // Supported text codecs: // - 'srt' // - 'ttml' // - 'cea608' // - 'cea708' // - 'webvtt' string codec = 1; // Required. The BCP-47 language code, such as `"en-US"` or `"sr-Latn"`. For more // information, see // https://www.unicode.org/reports/tr35/#Unicode_locale_identifier. string language_code = 2 [(google.api.field_behavior) = REQUIRED]; // The mapping for the `Job.edit_list` atoms with text `EditAtom.inputs`. repeated TextAtom mapping = 3; } // Segment settings for `"ts"`, `"fmp4"` and `"vtt"`. message SegmentSettings { // Duration of the segments in seconds. The default is `"6.0s"`. Note that // `segmentDuration` must be greater than or equal to // [`gopDuration`](#videostream), and `segmentDuration` must be divisible by // [`gopDuration`](#videostream). google.protobuf.Duration segment_duration = 1; // Required. Create an individual segment file. The default is `false`. bool individual_segments = 3 [(google.api.field_behavior) = REQUIRED]; } // Encryption settings. message Encryption { // Configuration for AES-128 encryption. message Aes128Encryption { // Required. URI of the key delivery service. This URI is inserted into the M3U8 // header. string key_uri = 1 [(google.api.field_behavior) = REQUIRED]; } // Configuration for SAMPLE-AES encryption. message SampleAesEncryption { // Required. URI of the key delivery service. This URI is inserted into the M3U8 // header. string key_uri = 1 [(google.api.field_behavior) = REQUIRED]; } // Configuration for MPEG Common Encryption (MPEG-CENC). message MpegCommonEncryption { // Required. 128 bit Key ID represented as lowercase hexadecimal digits for use with // common encryption. string key_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. Specify the encryption scheme. 
// // Supported encryption schemes: // - 'cenc' // - 'cbcs' string scheme = 2 [(google.api.field_behavior) = REQUIRED]; } // Required. 128 bit encryption key represented as lowercase hexadecimal digits. string key = 1 [(google.api.field_behavior) = REQUIRED]; // Required. 128 bit Initialization Vector (IV) represented as lowercase hexadecimal // digits. string iv = 2 [(google.api.field_behavior) = REQUIRED]; // Encryption mode can be either `aes` or `cenc`. oneof encryption_mode { // Configuration for AES-128 encryption. Aes128Encryption aes_128 = 3; // Configuration for SAMPLE-AES encryption. SampleAesEncryption sample_aes = 4; // Configuration for MPEG Common Encryption (MPEG-CENC). MpegCommonEncryption mpeg_cenc = 5; } } // Estimated fractional progress for each step, from `0` to `1`. message Progress { // Estimated fractional progress for `analyzing` step. double analyzed = 1; // Estimated fractional progress for `encoding` step. double encoded = 2; // Estimated fractional progress for `uploading` step. double uploaded = 3; // Estimated fractional progress for `notifying` step. double notified = 4; } // Additional information about the reasons for the failure. message FailureDetail { // A description of the failure. string description = 1; }