// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.cloud.video.transcoder.v1;
import "google/api/field_behavior.proto";
import "google/api/resource.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/status.proto";
option go_package = "cloud.google.com/go/video/transcoder/apiv1/transcoderpb;transcoderpb";
option java_multiple_files = true;
option java_outer_classname = "ResourcesProto";
option java_package = "com.google.cloud.video.transcoder.v1";
// Transcoding job resource.
message Job {
  option (google.api.resource) = {
    type: "transcoder.googleapis.com/Job"
    pattern: "projects/{project}/locations/{location}/jobs/{job}"
  };

  // The current state of the job.
  enum ProcessingState {
    // The processing state is not specified.
    PROCESSING_STATE_UNSPECIFIED = 0;

    // The job is enqueued and will be picked up for processing soon.
    PENDING = 1;

    // The job is being processed.
    RUNNING = 2;

    // The job has been completed successfully.
    SUCCEEDED = 3;

    // The job has failed. For additional information, see the `error` field,
    // which is always populated when the state is `FAILED`.
    FAILED = 4;
  }

  // The processing mode of the job.
  enum ProcessingMode {
    // The job processing mode is not specified.
    PROCESSING_MODE_UNSPECIFIED = 0;

    // The job processing mode is interactive mode.
    // Interactive jobs will either be run or rejected if quota does not
    // allow for them.
    PROCESSING_MODE_INTERACTIVE = 1;

    // The job processing mode is batch mode.
    // Batch mode allows queuing of jobs.
    PROCESSING_MODE_BATCH = 2;
  }

  // The optimization strategy of the job. The default is `AUTODETECT`.
  enum OptimizationStrategy {
    // The optimization strategy is not specified.
    OPTIMIZATION_STRATEGY_UNSPECIFIED = 0;

    // Prioritize job processing speed.
    AUTODETECT = 1;

    // Disable all optimizations.
    DISABLED = 2;
  }

  // The resource name of the job.
  // Format: `projects/{project_number}/locations/{location}/jobs/{job}`
  string name = 1;

  // Input only. Specify the `input_uri` to populate empty `uri` fields in each
  // element of `Job.config.inputs` or `JobTemplate.config.inputs` when using
  // template. URI of the media. Input files must be at least 5 seconds in
  // duration and stored in Cloud Storage (for example,
  // `gs://bucket/inputs/file.mp4`). See [Supported input and output
  // formats](https://cloud.google.com/transcoder/docs/concepts/supported-input-and-output-formats).
  string input_uri = 2 [(google.api.field_behavior) = INPUT_ONLY];

  // Input only. Specify the `output_uri` to populate an empty
  // `Job.config.output.uri` or `JobTemplate.config.output.uri` when using
  // template. URI for the output file(s). For example,
  // `gs://my-bucket/outputs/`. See [Supported input and output
  // formats](https://cloud.google.com/transcoder/docs/concepts/supported-input-and-output-formats).
  string output_uri = 3 [(google.api.field_behavior) = INPUT_ONLY];

  // Specify the `job_config` for the transcoding job. If you don't specify the
  // `job_config`, the API selects `templateId`; this template ID is set to
  // `preset/web-hd` by default. When you use a `template_id` to create a job,
  // the `Job.config` is populated by the `JobTemplate.config`.
  oneof job_config {
    // Input only. Specify the `template_id` to use for populating `Job.config`.
    // The default is `preset/web-hd`, which is the only supported preset.
    //
    // User defined JobTemplate: `{job_template_id}`
    string template_id = 4 [(google.api.field_behavior) = INPUT_ONLY];

    // The configuration for this job.
    JobConfig config = 5;
  }

  // Output only. The current state of the job.
  ProcessingState state = 8 [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. The time the job was created.
  google.protobuf.Timestamp create_time = 12
      [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. The time the transcoding started.
  google.protobuf.Timestamp start_time = 13
      [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. The time the transcoding finished.
  google.protobuf.Timestamp end_time = 14
      [(google.api.field_behavior) = OUTPUT_ONLY];

  // Job time to live value in days, which will be effective after job
  // completion. Job should be deleted automatically after the given TTL. Enter
  // a value between 1 and 90. The default is 30.
  int32 ttl_after_completion_days = 15;

  // The labels associated with this job. You can use these to organize and
  // group your jobs.
  map<string, string> labels = 16;

  // Output only. An error object that describes the reason for the failure.
  // This property is always present when `state` is `FAILED`.
  google.rpc.Status error = 17 [(google.api.field_behavior) = OUTPUT_ONLY];

  // The processing mode of the job.
  // The default is `PROCESSING_MODE_INTERACTIVE`.
  ProcessingMode mode = 20;

  // The processing priority of a batch job.
  // This field can only be set for batch mode jobs. The default value is 0.
  // This value cannot be negative. Higher values correspond to higher
  // priorities for the job.
  int32 batch_mode_priority = 21;

  // Optional. The optimization strategy of the job. The default is
  // `AUTODETECT`.
  OptimizationStrategy optimization = 22
      [(google.api.field_behavior) = OPTIONAL];
}
// Transcoding job template resource.
message JobTemplate {
  option (google.api.resource) = {
    type: "transcoder.googleapis.com/JobTemplate"
    pattern: "projects/{project}/locations/{location}/jobTemplates/{job_template}"
  };

  // The resource name of the job template.
  // Format:
  // `projects/{project_number}/locations/{location}/jobTemplates/{job_template}`
  string name = 1;

  // The configuration for this template.
  JobConfig config = 2;

  // The labels associated with this job template. You can use these to organize
  // and group your job templates.
  map<string, string> labels = 3;
}
// Job configuration.
message JobConfig {
  // List of input assets stored in Cloud Storage.
  repeated Input inputs = 1;

  // List of edit atoms. Defines the ultimate timeline of the resulting
  // file or manifest.
  repeated EditAtom edit_list = 2;

  // List of elementary streams.
  repeated ElementaryStream elementary_streams = 3;

  // List of multiplexing settings for output streams.
  repeated MuxStream mux_streams = 4;

  // List of output manifests.
  repeated Manifest manifests = 5;

  // Output configuration.
  Output output = 6;

  // List of ad breaks. Specifies where to insert ad break tags in the output
  // manifests.
  repeated AdBreak ad_breaks = 7;

  // Destination on Pub/Sub.
  PubsubDestination pubsub_destination = 8;

  // List of output sprite sheets.
  // Sprite sheets require at least one VideoStream in the JobConfig.
  repeated SpriteSheet sprite_sheets = 9;

  // List of overlays on the output video, in descending Z-order.
  repeated Overlay overlays = 10;

  // List of encryption configurations for the content.
  // Each configuration has an ID. Specify this ID in the
  // [MuxStream.encryption_id][google.cloud.video.transcoder.v1.MuxStream.encryption_id]
  // field to indicate the configuration to use for that `MuxStream` output.
  repeated Encryption encryptions = 11;
}
// Input asset.
message Input {
  // A unique key for this input. Must be specified when using advanced
  // mapping and edit lists.
  string key = 1;

  // URI of the media. Input files must be at least 5 seconds in duration and
  // stored in Cloud Storage (for example, `gs://bucket/inputs/file.mp4`).
  // If empty, the value is populated from `Job.input_uri`. See
  // [Supported input and output
  // formats](https://cloud.google.com/transcoder/docs/concepts/supported-input-and-output-formats).
  string uri = 2;

  // Preprocessing configurations.
  PreprocessingConfig preprocessing_config = 3;
}
// Location of output file(s) in a Cloud Storage bucket.
message Output {
  // URI for the output file(s). For example, `gs://my-bucket/outputs/`.
  // If empty, the value is populated from `Job.output_uri`. See
  // [Supported input and output
  // formats](https://cloud.google.com/transcoder/docs/concepts/supported-input-and-output-formats).
  string uri = 1;
}
// Edit atom.
message EditAtom {
  // A unique key for this atom. Must be specified when using advanced
  // mapping.
  string key = 1;

  // List of `Input.key` values identifying files that should be used in this
  // atom. The listed `inputs` must have the same timeline.
  repeated string inputs = 2;

  // End time in seconds for the atom, relative to the input file timeline.
  // When `end_time_offset` is not specified, the `inputs` are used until
  // the end of the atom.
  google.protobuf.Duration end_time_offset = 3;

  // Start time in seconds for the atom, relative to the input file timeline.
  // The default is `0s`.
  google.protobuf.Duration start_time_offset = 4;
}
// Ad break.
message AdBreak {
  // Start time in seconds for the ad break, relative to the output file
  // timeline. The default is `0s`.
  google.protobuf.Duration start_time_offset = 1;
}
// Encoding of an input file such as an audio, video, or text track.
// Elementary streams must be packaged before
// mapping and sharing between different output formats.
message ElementaryStream {
  // A unique key for this elementary stream.
  string key = 4;

  // Encoding of an audio, video, or text track.
  oneof elementary_stream {
    // Encoding of a video stream.
    VideoStream video_stream = 1;

    // Encoding of an audio stream.
    AudioStream audio_stream = 2;

    // Encoding of a text stream. For example, closed captions or subtitles.
    TextStream text_stream = 3;
  }
}
// Multiplexing settings for output stream.
message MuxStream {
  // A unique key for this multiplexed stream. HLS media manifests will be
  // named `MuxStream.key` with the `.m3u8` extension suffix.
  string key = 1;

  // The name of the generated file. The default is `MuxStream.key` with the
  // extension suffix corresponding to the `MuxStream.container`.
  //
  // Individual segments also have an incremental 10-digit zero-padded suffix
  // starting from 0 before the extension, such as `mux_stream0000000123.ts`.
  string file_name = 2;

  // The container format. The default is `mp4`.
  //
  // Supported container formats:
  //
  // - `ts`
  // - `fmp4`- the corresponding file extension is `.m4s`
  // - `mp4`
  // - `vtt`
  //
  // See also:
  // [Supported input and output
  // formats](https://cloud.google.com/transcoder/docs/concepts/supported-input-and-output-formats)
  string container = 3;

  // List of `ElementaryStream.key` values multiplexed in this stream.
  repeated string elementary_streams = 4;

  // Segment settings for `ts`, `fmp4` and `vtt`.
  SegmentSettings segment_settings = 5;

  // Identifier of the encryption configuration to use. If omitted, output will
  // be unencrypted.
  string encryption_id = 7;
}
// Manifest configuration.
message Manifest {
  // The manifest type, which corresponds to the adaptive streaming format used.
  enum ManifestType {
    // The manifest type is not specified.
    MANIFEST_TYPE_UNSPECIFIED = 0;

    // Create an HLS manifest. The corresponding file extension is `.m3u8`.
    HLS = 1;

    // Create an MPEG-DASH manifest. The corresponding file extension is `.mpd`.
    DASH = 2;
  }

  // `DASH` manifest configuration.
  message DashConfig {
    // The segment reference scheme for a `DASH` manifest.
    enum SegmentReferenceScheme {
      // The segment reference scheme is not specified.
      SEGMENT_REFERENCE_SCHEME_UNSPECIFIED = 0;

      // Lists the URLs of media files for each segment.
      SEGMENT_LIST = 1;

      // Lists each segment from a template with $Number$ variable.
      SEGMENT_TEMPLATE_NUMBER = 2;
    }

    // The segment reference scheme for a `DASH` manifest. The default is
    // `SEGMENT_LIST`.
    SegmentReferenceScheme segment_reference_scheme = 1;
  }

  // The name of the generated file. The default is `manifest` with the
  // extension suffix corresponding to the `Manifest.type`.
  string file_name = 1;

  // Required. Type of the manifest.
  ManifestType type = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. List of user given `MuxStream.key` values that should appear in
  // this manifest.
  //
  // When `Manifest.type` is `HLS`, a media manifest with name `MuxStream.key`
  // and `.m3u8` extension is generated for each element of the
  // `Manifest.mux_streams`.
  repeated string mux_streams = 3 [(google.api.field_behavior) = REQUIRED];

  // Specifies the manifest configuration.
  oneof manifest_config {
    // `DASH` manifest configuration.
    DashConfig dash = 4;
  }
}
// A Pub/Sub destination.
message PubsubDestination {
  // The name of the Pub/Sub topic to publish job completion notification
  // to. For example: `projects/{project}/topics/{topic}`.
  string topic = 1;
}
// Sprite sheet configuration.
message SpriteSheet {
  // Format type. The default is `jpeg`.
  //
  // Supported formats:
  //
  // - `jpeg`
  string format = 1;

  // Required. File name prefix for the generated sprite sheets.
  //
  // Each sprite sheet has an incremental 10-digit zero-padded suffix starting
  // from 0 before the extension, such as `sprite_sheet0000000123.jpeg`.
  string file_prefix = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The width of sprite in pixels. Must be an even integer. To
  // preserve the source aspect ratio, set the
  // [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels]
  // field or the
  // [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels]
  // field, but not both (the API will automatically calculate the missing
  // field).
  //
  // For portrait videos that contain horizontal ASR and rotation metadata,
  // provide the width, in pixels, per the horizontal ASR. The API calculates
  // the height per the horizontal ASR. The API detects any rotation metadata
  // and swaps the requested height and width for the output.
  int32 sprite_width_pixels = 3 [(google.api.field_behavior) = REQUIRED];

  // Required. The height of sprite in pixels. Must be an even integer. To
  // preserve the source aspect ratio, set the
  // [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels]
  // field or the
  // [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels]
  // field, but not both (the API will automatically calculate the missing
  // field).
  //
  // For portrait videos that contain horizontal ASR and rotation metadata,
  // provide the height, in pixels, per the horizontal ASR. The API calculates
  // the width per the horizontal ASR. The API detects any rotation metadata
  // and swaps the requested height and width for the output.
  int32 sprite_height_pixels = 4 [(google.api.field_behavior) = REQUIRED];

  // The maximum number of sprites per row in a sprite sheet. The default is 0,
  // which indicates no maximum limit.
  int32 column_count = 5;

  // The maximum number of rows per sprite sheet. When the sprite sheet is full,
  // a new sprite sheet is created. The default is 0, which indicates no maximum
  // limit.
  int32 row_count = 6;

  // Start time in seconds, relative to the output file timeline. Determines the
  // first sprite to pick. The default is `0s`.
  google.protobuf.Duration start_time_offset = 7;

  // End time in seconds, relative to the output file timeline. When
  // `end_time_offset` is not specified, the sprites are generated until the end
  // of the output file.
  google.protobuf.Duration end_time_offset = 8;

  // Specify either total number of sprites or interval to create sprites.
  oneof extraction_strategy {
    // Total number of sprites. Create the specified number of sprites
    // distributed evenly across the timeline of the output media. The default
    // is 100.
    int32 total_count = 9;

    // Starting from `0s`, create sprites at regular intervals. Specify the
    // interval value in seconds.
    google.protobuf.Duration interval = 10;
  }

  // The quality of the generated sprite sheet. Enter a value between 1
  // and 100, where 1 is the lowest quality and 100 is the highest quality.
  // The default is 100. A high quality value corresponds to a low image data
  // compression ratio.
  int32 quality = 11;
}
// Overlay configuration.
message Overlay {
  // 2D normalized coordinates. Default: `{0.0, 0.0}`
  message NormalizedCoordinate {
    // Normalized x coordinate.
    double x = 1;

    // Normalized y coordinate.
    double y = 2;
  }

  // Overlaid image.
  message Image {
    // Required. URI of the image in Cloud Storage. For example,
    // `gs://bucket/inputs/image.png`. Only PNG and JPEG images are supported.
    string uri = 1 [(google.api.field_behavior) = REQUIRED];

    // Normalized image resolution, based on output video resolution. Valid
    // values: `0.0`–`1.0`. To respect the original image aspect ratio, set
    // either `x` or `y` to `0.0`. To use the original image resolution, set
    // both `x` and `y` to `0.0`.
    NormalizedCoordinate resolution = 2;

    // Target image opacity. Valid values are from `1.0` (solid, default) to
    // `0.0` (transparent), exclusive. Set this to a value greater than `0.0`.
    double alpha = 3;
  }

  // Display static overlay object.
  message AnimationStatic {
    // Normalized coordinates based on output video resolution. Valid
    // values: `0.0`–`1.0`. `xy` is the upper-left coordinate of the overlay
    // object. For example, use the x and y coordinates {0,0} to position the
    // top-left corner of the overlay animation in the top-left corner of the
    // output video.
    NormalizedCoordinate xy = 1;

    // The time to start displaying the overlay object, in seconds. Default: 0
    google.protobuf.Duration start_time_offset = 2;
  }

  // Fade type for the overlay: `FADE_IN` or `FADE_OUT`.
  enum FadeType {
    // The fade type is not specified.
    FADE_TYPE_UNSPECIFIED = 0;

    // Fade the overlay object into view.
    FADE_IN = 1;

    // Fade the overlay object out of view.
    FADE_OUT = 2;
  }

  // Display overlay object with fade animation.
  message AnimationFade {
    // Required. Type of fade animation: `FADE_IN` or `FADE_OUT`.
    FadeType fade_type = 1 [(google.api.field_behavior) = REQUIRED];

    // Normalized coordinates based on output video resolution. Valid
    // values: `0.0`–`1.0`. `xy` is the upper-left coordinate of the overlay
    // object. For example, use the x and y coordinates {0,0} to position the
    // top-left corner of the overlay animation in the top-left corner of the
    // output video.
    NormalizedCoordinate xy = 2;

    // The time to start the fade animation, in seconds. Default: 0
    google.protobuf.Duration start_time_offset = 3;

    // The time to end the fade animation, in seconds. Default:
    // `start_time_offset` + 1s
    google.protobuf.Duration end_time_offset = 4;
  }

  // End previous overlay animation from the video. Without AnimationEnd, the
  // overlay object will keep the state of previous animation until the end of
  // the video.
  message AnimationEnd {
    // The time to end overlay object, in seconds. Default: 0
    google.protobuf.Duration start_time_offset = 1;
  }

  // Animation types.
  message Animation {
    // Animations can be static or fade, or they can end the previous animation.
    oneof animation_type {
      // Display static overlay object.
      AnimationStatic animation_static = 1;

      // Display overlay object with fade animation.
      AnimationFade animation_fade = 2;

      // End previous animation.
      AnimationEnd animation_end = 3;
    }
  }

  // Image overlay.
  Image image = 1;

  // List of animations. The list should be chronological, without any time
  // overlap.
  repeated Animation animations = 2;
}
// Preprocessing configurations.
message PreprocessingConfig {
  // Color preprocessing configuration.
  //
  // **Note:** This configuration is not supported.
  message Color {
    // Control color saturation of the video. Enter a value between -1 and 1,
    // where -1 is fully desaturated and 1 is maximum saturation. 0 is no
    // change. The default is 0.
    double saturation = 1;

    // Control black and white contrast of the video. Enter a value between -1
    // and 1, where -1 is minimum contrast and 1 is maximum contrast. 0 is no
    // change. The default is 0.
    double contrast = 2;

    // Control brightness of the video. Enter a value between -1 and 1, where -1
    // is minimum brightness and 1 is maximum brightness. 0 is no change. The
    // default is 0.
    double brightness = 3;
  }

  // Denoise preprocessing configuration.
  //
  // **Note:** This configuration is not supported.
  message Denoise {
    // Set strength of the denoise. Enter a value between 0 and 1. The higher
    // the value, the smoother the image. 0 is no denoising. The default is 0.
    double strength = 1;

    // Set the denoiser mode. The default is `standard`.
    //
    // Supported denoiser modes:
    //
    // - `standard`
    // - `grain`
    string tune = 2;
  }

  // Deblock preprocessing configuration.
  //
  // **Note:** This configuration is not supported.
  message Deblock {
    // Set strength of the deblocker. Enter a value between 0 and 1. The higher
    // the value, the stronger the block removal. 0 is no deblocking. The
    // default is 0.
    double strength = 1;

    // Enable deblocker. The default is `false`.
    bool enabled = 2;
  }

  // Audio preprocessing configuration.
  message Audio {
    // Specify audio loudness normalization in loudness units relative to full
    // scale (LUFS). Enter a value between -24 and 0 (the default), where:
    //
    // * -24 is the Advanced Television Systems Committee (ATSC A/85) standard
    // * -23 is the EU R128 broadcast standard
    // * -19 is the prior standard for online mono audio
    // * -18 is the ReplayGain standard
    // * -16 is the prior standard for stereo audio
    // * -14 is the new online audio standard recommended by Spotify, as well
    //   as Amazon Echo
    // * 0 disables normalization
    double lufs = 1;

    // Enable boosting high frequency components. The default is `false`.
    //
    // **Note:** This field is not supported.
    bool high_boost = 2;

    // Enable boosting low frequency components. The default is `false`.
    //
    // **Note:** This field is not supported.
    bool low_boost = 3;
  }

  // Video cropping configuration for the input video. The cropped input video
  // is scaled to match the output resolution.
  message Crop {
    // The number of pixels to crop from the top. The default is 0.
    int32 top_pixels = 1;

    // The number of pixels to crop from the bottom. The default is 0.
    int32 bottom_pixels = 2;

    // The number of pixels to crop from the left. The default is 0.
    int32 left_pixels = 3;

    // The number of pixels to crop from the right. The default is 0.
    int32 right_pixels = 4;
  }

  // Pad filter configuration for the input video. The padded input video
  // is scaled after padding with black to match the output resolution.
  message Pad {
    // The number of pixels to add to the top. The default is 0.
    int32 top_pixels = 1;

    // The number of pixels to add to the bottom. The default is 0.
    int32 bottom_pixels = 2;

    // The number of pixels to add to the left. The default is 0.
    int32 left_pixels = 3;

    // The number of pixels to add to the right. The default is 0.
    int32 right_pixels = 4;
  }

  // Deinterlace configuration for input video.
  message Deinterlace {
    // Yet Another Deinterlacing Filter Configuration.
    message YadifConfig {
      // Specifies the deinterlacing mode to adopt.
      // The default is `send_frame`.
      // Supported values:
      //
      // - `send_frame`: Output one frame for each frame
      // - `send_field`: Output one frame for each field
      string mode = 1;

      // Disable spacial interlacing.
      // The default is `false`.
      bool disable_spatial_interlacing = 2;

      // The picture field parity assumed for the input interlaced video.
      // The default is `auto`.
      // Supported values:
      //
      // - `tff`: Assume the top field is first
      // - `bff`: Assume the bottom field is first
      // - `auto`: Enable automatic detection of field parity
      string parity = 3;

      // Deinterlace all frames rather than just the frames identified as
      // interlaced. The default is `false`.
      bool deinterlace_all_frames = 4;
    }

    // Bob Weaver Deinterlacing Filter Configuration.
    message BwdifConfig {
      // Specifies the deinterlacing mode to adopt.
      // The default is `send_frame`.
      // Supported values:
      //
      // - `send_frame`: Output one frame for each frame
      // - `send_field`: Output one frame for each field
      string mode = 1;

      // The picture field parity assumed for the input interlaced video.
      // The default is `auto`.
      // Supported values:
      //
      // - `tff`: Assume the top field is first
      // - `bff`: Assume the bottom field is first
      // - `auto`: Enable automatic detection of field parity
      string parity = 2;

      // Deinterlace all frames rather than just the frames identified as
      // interlaced. The default is `false`.
      bool deinterlace_all_frames = 3;
    }

    // Specify the video deinterlacing filter. The default is `yadif`.
    oneof deinterlacing_filter {
      // Specifies the Yet Another Deinterlacing Filter Configuration.
      YadifConfig yadif = 1;

      // Specifies the Bob Weaver Deinterlacing Filter Configuration.
      BwdifConfig bwdif = 2;
    }
  }

  // Color preprocessing configuration.
  Color color = 1;

  // Denoise preprocessing configuration.
  Denoise denoise = 2;

  // Deblock preprocessing configuration.
  Deblock deblock = 3;

  // Audio preprocessing configuration.
  Audio audio = 4;

  // Specify the video cropping configuration.
  Crop crop = 5;

  // Specify the video pad filter configuration.
  Pad pad = 6;

  // Specify the video deinterlace configuration.
  Deinterlace deinterlace = 7;
}
// Video stream resource.
message VideoStream {
// H264 codec settings.
message H264CodecSettings {
  // The width of the video in pixels. Must be an even integer.
  // When not specified, the width is adjusted to match the specified height
  // and input aspect ratio. If both are omitted, the input width is used.
  //
  // For portrait videos that contain horizontal ASR and rotation metadata,
  // provide the width, in pixels, per the horizontal ASR. The API calculates
  // the height per the horizontal ASR. The API detects any rotation metadata
  // and swaps the requested height and width for the output.
  int32 width_pixels = 1;

  // The height of the video in pixels. Must be an even integer.
  // When not specified, the height is adjusted to match the specified width
  // and input aspect ratio. If both are omitted, the input height is used.
  //
  // For portrait videos that contain horizontal ASR and rotation metadata,
  // provide the height, in pixels, per the horizontal ASR. The API calculates
  // the width per the horizontal ASR. The API detects any rotation metadata
  // and swaps the requested height and width for the output.
  int32 height_pixels = 2;

  // Required. The target video frame rate in frames per second (FPS). Must be
  // less than or equal to 120. Will default to the input frame rate if larger
  // than the input frame rate. The API will generate an output FPS that is
  // divisible by the input FPS, and smaller or equal to the target FPS. See
  // [Calculating frame
  // rate](https://cloud.google.com/transcoder/docs/concepts/frame-rate) for
  // more information.
  double frame_rate = 3 [(google.api.field_behavior) = REQUIRED];

  // Required. The video bitrate in bits per second. The minimum value is
  // 1,000. The maximum value is 800,000,000.
  int32 bitrate_bps = 4 [(google.api.field_behavior) = REQUIRED];

  // Pixel format to use. The default is `yuv420p`.
  //
  // Supported pixel formats:
  //
  // - `yuv420p` pixel format
  // - `yuv422p` pixel format
  // - `yuv444p` pixel format
  // - `yuv420p10` 10-bit HDR pixel format
  // - `yuv422p10` 10-bit HDR pixel format
  // - `yuv444p10` 10-bit HDR pixel format
  // - `yuv420p12` 12-bit HDR pixel format
  // - `yuv422p12` 12-bit HDR pixel format
  // - `yuv444p12` 12-bit HDR pixel format
  string pixel_format = 5;

  // Specify the `rate_control_mode`. The default is `vbr`.
  //
  // Supported rate control modes:
  //
  // - `vbr` - variable bitrate
  // - `crf` - constant rate factor
  string rate_control_mode = 6;

  // Target CRF level. Must be between 10 and 36, where 10 is the highest
  // quality and 36 is the most efficient compression. The default is 21.
  int32 crf_level = 7;

  // Specifies whether an open Group of Pictures (GOP) structure should be
  // allowed or not. The default is `false`.
  bool allow_open_gop = 8;

  // GOP mode can be either by frame count or duration.
  oneof gop_mode {
    // Select the GOP size based on the specified frame count. Must be greater
    // than zero.
    int32 gop_frame_count = 9;

    // Select the GOP size based on the specified duration. The default is
    // `3s`. Note that `gopDuration` must be less than or equal to
    // [`segmentDuration`](#SegmentSettings), and
    // [`segmentDuration`](#SegmentSettings) must be divisible by
    // `gopDuration`.
    google.protobuf.Duration gop_duration = 10;
  }

  // Use two-pass encoding strategy to achieve better video quality.
  // `VideoStream.rate_control_mode` must be `vbr`. The default is `false`.
  bool enable_two_pass = 11;

  // Size of the Video Buffering Verifier (VBV) buffer in bits. Must be
  // greater than zero. The default is equal to `VideoStream.bitrate_bps`.
  int32 vbv_size_bits = 12;

  // Initial fullness of the Video Buffering Verifier (VBV) buffer in bits.
  // Must be greater than zero. The default is equal to 90% of
  // `VideoStream.vbv_size_bits`.
  int32 vbv_fullness_bits = 13;

  // The entropy coder to use. The default is `cabac`.
  //
  // Supported entropy coders:
  //
  // - `cavlc`
  // - `cabac`
  string entropy_coder = 14;

  // Allow B-pyramid for reference frame selection. This may not be supported
  // on all decoders. The default is `false`.
  bool b_pyramid = 15;

  // The number of consecutive B-frames. Must be greater than or equal to
  // zero. Must be less than `VideoStream.gop_frame_count` if set. The default
  // is 0.
  int32 b_frame_count = 16;

  // Specify the intensity of the adaptive quantizer (AQ). Must be between 0
  // and 1, where 0 disables the quantizer and 1 maximizes the quantizer. A
  // higher value equals a lower bitrate but smoother image. The default is 0.
  double aq_strength = 17;

  // Enforces the specified codec profile. The following profiles are
  // supported:
  //
  // * `baseline`
  // * `main`
  // * `high` (default)
  //
  // The available options are
  // [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.264#Tune).
  // Note that certain values for this field may cause the
  // transcoder to override other fields you set in the `H264CodecSettings`
  // message.
  string profile = 18;

  // Enforces the specified codec tune. The available options are
  // [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.264#Tune).
  // Note that certain values for this field may cause the
  // transcoder to override other fields you set in the `H264CodecSettings`
  // message.
  string tune = 19;

  // Enforces the specified codec preset. The default is `veryfast`. The
  // available options are
  // [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.264#Preset).
  // Note that certain values for this field may cause the
  // transcoder to override other fields you set in the `H264CodecSettings`
  // message.
  string preset = 20;
}
// H265 codec settings.
message H265CodecSettings {
// The width of the video in pixels. Must be an even integer.
// When not specified, the width is adjusted to match the specified height
// and input aspect ratio. If both are omitted, the input width is used.
//
// For portrait videos that contain horizontal ASR and rotation metadata,
// provide the width, in pixels, per the horizontal ASR. The API calculates
// the height per the horizontal ASR. The API detects any rotation metadata
// and swaps the requested height and width for the output.
int32 width_pixels = 1;
// The height of the video in pixels. Must be an even integer.
// When not specified, the height is adjusted to match the specified width
// and input aspect ratio. If both are omitted, the input height is used.
//
// For portrait videos that contain horizontal ASR and rotation metadata,
// provide the height, in pixels, per the horizontal ASR. The API calculates
// the width per the horizontal ASR. The API detects any rotation metadata
// and swaps the requested height and width for the output.
int32 height_pixels = 2;
// Required. The target video frame rate in frames per second (FPS). Must be
// less than or equal to 120. Will default to the input frame rate if larger
// than the input frame rate. The API will generate an output FPS that is
// divisible by the input FPS, and smaller or equal to the target FPS. See
// [Calculating frame
// rate](https://cloud.google.com/transcoder/docs/concepts/frame-rate) for
// more information.
double frame_rate = 3 [(google.api.field_behavior) = REQUIRED];
// Required. The video bitrate in bits per second. The minimum value is
// 1,000. The maximum value is 800,000,000.
int32 bitrate_bps = 4 [(google.api.field_behavior) = REQUIRED];
// Pixel format to use. The default is `yuv420p`.
//
// Supported pixel formats:
//
// - `yuv420p` pixel format
// - `yuv422p` pixel format
// - `yuv444p` pixel format
// - `yuv420p10` 10-bit HDR pixel format
// - `yuv422p10` 10-bit HDR pixel format
// - `yuv444p10` 10-bit HDR pixel format
// - `yuv420p12` 12-bit HDR pixel format
// - `yuv422p12` 12-bit HDR pixel format
// - `yuv444p12` 12-bit HDR pixel format
string pixel_format = 5;
// Specify the `rate_control_mode`. The default is `vbr`.
//
// Supported rate control modes:
//
// - `vbr` - variable bitrate
// - `crf` - constant rate factor
string rate_control_mode = 6;
// Target CRF level. Must be between 10 and 36, where 10 is the highest
// quality and 36 is the most efficient compression. The default is 21.
//
// NOTE(review): presumably only consulted when `rate_control_mode` is
// `crf` — confirm against the encoder backend.
int32 crf_level = 7;
// Specifies whether an open Group of Pictures (GOP) structure should be
// allowed or not. The default is `false`.
bool allow_open_gop = 8;
// GOP mode can be either by frame count or duration.
oneof gop_mode {
// Select the GOP size based on the specified frame count. Must be greater
// than zero.
int32 gop_frame_count = 9;
// Select the GOP size based on the specified duration. The default is
// `3s`. Note that `gopDuration` must be less than or equal to
// [`segmentDuration`](#SegmentSettings), and
// [`segmentDuration`](#SegmentSettings) must be divisible by
// `gopDuration`.
google.protobuf.Duration gop_duration = 10;
}
// Use two-pass encoding strategy to achieve better video quality.
// `VideoStream.rate_control_mode` must be `vbr`. The default is `false`.
bool enable_two_pass = 11;
// Size of the Video Buffering Verifier (VBV) buffer in bits. Must be
// greater than zero. The default is equal to `VideoStream.bitrate_bps`.
int32 vbv_size_bits = 12;
// Initial fullness of the Video Buffering Verifier (VBV) buffer in bits.
// Must be greater than zero. The default is equal to 90% of
// `VideoStream.vbv_size_bits`.
int32 vbv_fullness_bits = 13;
// Allow B-pyramid for reference frame selection. This may not be supported
// on all decoders. The default is `false`.
bool b_pyramid = 14;
// The number of consecutive B-frames. Must be greater than or equal to
// zero. Must be less than `VideoStream.gop_frame_count` if set. The default
// is 0.
int32 b_frame_count = 15;
// Specify the intensity of the adaptive quantizer (AQ). Must be between 0
// and 1, where 0 disables the quantizer and 1 maximizes the quantizer. A
// higher value equals a lower bitrate but smoother image. The default is 0.
double aq_strength = 16;
// Enforces the specified codec profile. The following profiles are
// supported:
//
// * 8-bit profiles
// * `main` (default)
// * `main-intra`
// * `mainstillpicture`
// * 10-bit profiles
// * `main10` (default)
// * `main10-intra`
// * `main422-10`
// * `main422-10-intra`
// * `main444-10`
// * `main444-10-intra`
// * 12-bit profiles
// * `main12` (default)
// * `main12-intra`
// * `main422-12`
// * `main422-12-intra`
// * `main444-12`
// * `main444-12-intra`
//
// The available options are
// [FFmpeg-compatible](https://x265.readthedocs.io/).
// Note that certain values for this field may cause the
// transcoder to override other fields you set in the `H265CodecSettings`
// message.
string profile = 17;
// Enforces the specified codec tune. The available options are
// [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.265).
// Note that certain values for this field may cause the
// transcoder to override other fields you set in the `H265CodecSettings`
// message.
string tune = 18;
// Enforces the specified codec preset. The default is `veryfast`. The
// available options are
// [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.265).
// Note that certain values for this field may cause the
// transcoder to override other fields you set in the `H265CodecSettings`
// message.
string preset = 19;
}
// VP9 codec settings.
message Vp9CodecSettings {
// The width of the video in pixels. Must be an even integer.
// When not specified, the width is adjusted to match the specified height
// and input aspect ratio. If both are omitted, the input width is used.
//
// For portrait videos that contain horizontal ASR and rotation metadata,
// provide the width, in pixels, per the horizontal ASR. The API calculates
// the height per the horizontal ASR. The API detects any rotation metadata
// and swaps the requested height and width for the output.
int32 width_pixels = 1;
// The height of the video in pixels. Must be an even integer.
// When not specified, the height is adjusted to match the specified width
// and input aspect ratio. If both are omitted, the input height is used.
//
// For portrait videos that contain horizontal ASR and rotation metadata,
// provide the height, in pixels, per the horizontal ASR. The API calculates
// the width per the horizontal ASR. The API detects any rotation metadata
// and swaps the requested height and width for the output.
int32 height_pixels = 2;
// Required. The target video frame rate in frames per second (FPS). Must be
// less than or equal to 120. Will default to the input frame rate if larger
// than the input frame rate. The API will generate an output FPS that is
// divisible by the input FPS, and smaller or equal to the target FPS. See
// [Calculating frame
// rate](https://cloud.google.com/transcoder/docs/concepts/frame-rate) for
// more information.
double frame_rate = 3 [(google.api.field_behavior) = REQUIRED];
// Required. The video bitrate in bits per second. The minimum value is
// 1,000. The maximum value is 480,000,000.
int32 bitrate_bps = 4 [(google.api.field_behavior) = REQUIRED];
// Pixel format to use. The default is `yuv420p`.
//
// Supported pixel formats:
//
// - `yuv420p` pixel format
// - `yuv422p` pixel format
// - `yuv444p` pixel format
// - `yuv420p10` 10-bit HDR pixel format
// - `yuv422p10` 10-bit HDR pixel format
// - `yuv444p10` 10-bit HDR pixel format
// - `yuv420p12` 12-bit HDR pixel format
// - `yuv422p12` 12-bit HDR pixel format
// - `yuv444p12` 12-bit HDR pixel format
string pixel_format = 5;
// Specify the `rate_control_mode`. The default is `vbr`.
//
// Supported rate control modes:
//
// - `vbr` - variable bitrate
string rate_control_mode = 6;
// Target CRF level. Must be between 10 and 36, where 10 is the highest
// quality and 36 is the most efficient compression. The default is 21.
//
// **Note:** This field is not supported.
//
// Marked deprecated because, per the note above, setting it has no
// effect; the field (and its number) is retained for wire compatibility.
int32 crf_level = 7 [deprecated = true];
// GOP mode can be either by frame count or duration.
oneof gop_mode {
// Select the GOP size based on the specified frame count. Must be greater
// than zero.
int32 gop_frame_count = 8;
// Select the GOP size based on the specified duration. The default is
// `3s`. Note that `gopDuration` must be less than or equal to
// [`segmentDuration`](#SegmentSettings), and
// [`segmentDuration`](#SegmentSettings) must be divisible by
// `gopDuration`.
google.protobuf.Duration gop_duration = 9;
}
// Enforces the specified codec profile. The following profiles are
// supported:
//
// * `profile0` (default)
// * `profile1`
// * `profile2`
// * `profile3`
//
// The available options are
// [WebM-compatible](https://www.webmproject.org/vp9/profiles/).
// Note that certain values for this field may cause the
// transcoder to override other fields you set in the `Vp9CodecSettings`
// message.
string profile = 10;
}
// Codec settings can be h264, h265, or vp9.
//
// Standard proto3 `oneof` semantics apply: at most one of these fields
// may be set, and setting one clears the others.
oneof codec_settings {
// H264 codec settings.
H264CodecSettings h264 = 1;
// H265 codec settings.
H265CodecSettings h265 = 2;
// VP9 codec settings.
Vp9CodecSettings vp9 = 3;
}
}
// Audio stream resource.
message AudioStream {
// The mapping for the `Job.edit_list` atoms with audio `EditAtom.inputs`.
message AudioMapping {
// Required. The `EditAtom.key` that references the atom with audio inputs
// in the `Job.edit_list`.
string atom_key = 1 [(google.api.field_behavior) = REQUIRED];
// Required. The `Input.key` that identifies the input file.
string input_key = 2 [(google.api.field_behavior) = REQUIRED];
// Required. The zero-based index of the track in the input file.
int32 input_track = 3 [(google.api.field_behavior) = REQUIRED];
// Required. The zero-based index of the channel in the input audio stream.
int32 input_channel = 4 [(google.api.field_behavior) = REQUIRED];
// Required. The zero-based index of the channel in the output audio stream.
int32 output_channel = 5 [(google.api.field_behavior) = REQUIRED];
// Audio volume control in dB. Negative values decrease volume,
// positive values increase. The default is 0.
double gain_db = 6;
}
// The codec for this audio stream. The default is `aac`.
//
// Supported audio codecs:
//
// - `aac`
// - `aac-he`
// - `aac-he-v2`
// - `mp3`
// - `ac3`
// - `eac3`
string codec = 1;
// Required. Audio bitrate in bits per second. Must be between 1 and
// 10,000,000.
int32 bitrate_bps = 2 [(google.api.field_behavior) = REQUIRED];
// Number of audio channels. Must be between 1 and 6. The default is 2.
int32 channel_count = 3;
// A list of channel names specifying layout of the audio channels.
// This only affects the metadata embedded in the container headers, if
// supported by the specified format. The default is `["fl", "fr"]`.
//
// Supported channel names:
//
// - `fl` - Front left channel
// - `fr` - Front right channel
// - `sl` - Side left channel
// - `sr` - Side right channel
// - `fc` - Front center channel
// - `lfe` - Low frequency
repeated string channel_layout = 4;
// The mapping for the `Job.edit_list` atoms with audio `EditAtom.inputs`.
//
// NOTE(review): singular name for a repeated field (style guides prefer a
// plural noun); renaming would change the JSON key on a published API, so
// it must stay as-is.
repeated AudioMapping mapping = 5;
// The audio sample rate in Hertz. The default is 48000 Hertz.
int32 sample_rate_hertz = 6;
// The BCP-47 language code, such as `en-US` or `sr-Latn`. For more
// information, see
// https://www.unicode.org/reports/tr35/#Unicode_locale_identifier. Not
// supported in MP4 files.
string language_code = 7;
// The name for this particular audio stream that
// will be added to the HLS/DASH manifest. Not supported in MP4 files.
string display_name = 8;
}
// Encoding of a text stream. For example, closed captions or subtitles.
message TextStream {
// The mapping for the `Job.edit_list` atoms with text `EditAtom.inputs`.
message TextMapping {
// Required. The `EditAtom.key` that references atom with text inputs in the
// `Job.edit_list`.
string atom_key = 1 [(google.api.field_behavior) = REQUIRED];
// Required. The `Input.key` that identifies the input file.
string input_key = 2 [(google.api.field_behavior) = REQUIRED];
// Required. The zero-based index of the track in the input file.
int32 input_track = 3 [(google.api.field_behavior) = REQUIRED];
}
// The codec for this text stream. The default is `webvtt`.
//
// Supported text codecs:
//
// - `srt`
// - `ttml`
// - `cea608`
// - `cea708`
// - `webvtt`
string codec = 1;
// The BCP-47 language code, such as `en-US` or `sr-Latn`. For more
// information, see
// https://www.unicode.org/reports/tr35/#Unicode_locale_identifier. Not
// supported in MP4 files.
string language_code = 2;
// The mapping for the `Job.edit_list` atoms with text `EditAtom.inputs`.
//
// NOTE(review): singular name for a repeated field (style guides prefer a
// plural noun); renaming would change the JSON key on a published API, so
// it must stay as-is.
repeated TextMapping mapping = 3;
// The name for this particular text stream that
// will be added to the HLS/DASH manifest. Not supported in MP4 files.
string display_name = 4;
}
// Segment settings for `ts`, `fmp4` and `vtt`.
message SegmentSettings {
// Duration of the segments in seconds. The default is `6.0s`. Note that
// `segmentDuration` must be greater than or equal to
// [`gopDuration`](#videostream), and `segmentDuration` must be divisible by
// [`gopDuration`](#videostream).
google.protobuf.Duration segment_duration = 1;
// Required. Create an individual segment file. The default is `false`.
//
// NOTE(review): field number 2 is skipped with no `reserved` statement —
// if it was previously used, add `reserved 2;` to prevent accidental
// reuse. Also, a REQUIRED field with a documented default is
// contradictory; confirm which is intended.
bool individual_segments = 3 [(google.api.field_behavior) = REQUIRED];
}
// Encryption settings.
message Encryption {
// Field numbers 1 and 2 are unused (presumably fields removed before GA).
// Reserve them so they cannot be accidentally reused with different
// semantics, which would be a wire-format break for old writers.
reserved 1, 2;
// Configuration for AES-128 encryption.
message Aes128Encryption {}
// Configuration for SAMPLE-AES encryption.
message SampleAesEncryption {}
// Configuration for MPEG Common Encryption (MPEG-CENC).
message MpegCommonEncryption {
// Field number 1 is unused (presumably a removed field); reserve it to
// prevent accidental reuse.
reserved 1;
// Required. Specify the encryption scheme.
//
// Supported encryption schemes:
//
// - `cenc`
// - `cbcs`
string scheme = 2 [(google.api.field_behavior) = REQUIRED];
}
// Configuration for secrets stored in Google Secret Manager.
message SecretManagerSource {
// Required. The name of the Secret Version containing the encryption key in
// the following format:
// `projects/{project}/secrets/{secret_id}/versions/{version_number}`
//
// Note that only numbered versions are supported. Aliases like "latest" are
// not supported.
string secret_version = 1 [(google.api.field_behavior) = REQUIRED];
}
// Widevine configuration.
message Widevine {}
// Fairplay configuration.
message Fairplay {}
// Playready configuration.
message Playready {}
// Clearkey configuration.
message Clearkey {}
// Defines configuration for DRM systems in use.
message DrmSystems {
// Widevine configuration.
Widevine widevine = 1;
// Fairplay configuration.
Fairplay fairplay = 2;
// Playready configuration.
Playready playready = 3;
// Clearkey configuration.
Clearkey clearkey = 4;
}
// Required. Identifier for this set of encryption options.
string id = 6 [(google.api.field_behavior) = REQUIRED];
// Encryption mode can be either `aes` or `cenc`.
oneof encryption_mode {
// Configuration for AES-128 encryption.
Aes128Encryption aes_128 = 3;
// Configuration for SAMPLE-AES encryption.
SampleAesEncryption sample_aes = 4;
// Configuration for MPEG Common Encryption (MPEG-CENC).
MpegCommonEncryption mpeg_cenc = 5;
}
// Defines where content keys are stored.
oneof secret_source {
// Keys are stored in Google Secret Manager.
SecretManagerSource secret_manager_key_source = 7;
}
// Required. DRM system(s) to use; at least one must be specified. If a
// DRM system is omitted, it is considered disabled.
DrmSystems drm_systems = 8 [(google.api.field_behavior) = REQUIRED];
}