Source code for cgl.plugins.otio.tools.aaf.aaf_encoder
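"""Convert Unreal renders into an AAF file with embedded DNxHD media and clip metadata."""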
import pathlib
import tempfile
import shutil
import argparse
import aaf2
import opentimelineio as otio
from cgl.plugins.otio.tools.aaf import aaf_embedded_media_tool
def mob_name(clip):
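    """Build a mob name of the form <sequence>_<shot>_<task>_<major>.<minor>
    from the clip's 'cgl' metadata, falling back to the plain clip name when
    any of those fields is missing or malformed."""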
    name = clip.name
    cgl = clip.metadata.get('cgl', {})
    task = cgl.get("task", None)
    if task is None:
        return name
    major_version = cgl.get("major_version", None)
    if not isinstance(major_version, int):
        return name
    minor_version = cgl.get("minor_version", None)
    if not isinstance(minor_version, int):
        return name
    sequence = cgl.get("sequence", None)
    if not sequence:
        return name
    shot = cgl.get("shot", None)
    if not shot:
        return name
    return f"{sequence}_{shot}_{task}_{major_version:03}.{minor_version:03}"
def update_metadata(f, mob_id, clip):
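    """Copy clip metadata onto the AAF mob identified by mob_id: rename the mob,
    mirror the 'unreal'/'cgl'/'shotgrid' metadata namespaces into mob comments,
    and record the source-range in/out positions as mob attributes."""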
    mob = f.content.mobs.get(mob_id, None)
    assert mob
    mob.name = mob_name(clip)
    comments = mob.comments
    for prefix in ('unreal', 'cgl', 'shotgrid'):
        metadata = clip.metadata.get(prefix, {})
        for key, value in metadata.items():
            key_name = f"{prefix}:{key}"
            comments[key_name] = str(value)
    # mark in and out pos
    in_pos = int(clip.source_range.start_time.to_frames())
    out_pos = in_pos + int(clip.source_range.duration.to_frames())
    tag = f.create.TaggedValue("_IN", in_pos)
    mob['MobAttributeList'].append(tag)
    tag = f.create.TaggedValue("_USER_POS", in_pos)
    mob['MobAttributeList'].append(tag)
    tag = f.create.TaggedValue("_OUT", out_pos)
    mob['MobAttributeList'].append(tag)
    # color = (1.0, 0.0, 1.0)
    # tag = f.create.TaggedValue("_COLOR_R", min(int(color[0] * 65535), 65535))
    # mob['MobAttributeList'].append(tag)
    # tag = f.create.TaggedValue("_COLOR_G", min(int(color[1] * 65535), 65535))
    # mob['MobAttributeList'].append(tag)
    # tag = f.create.TaggedValue("_COLOR_B", min(int(color[2] * 65535), 65535))
    # mob['MobAttributeList'].append(tag)
def encode_aaf_media(aaf_path, output_dnxhr_path, otio_timeline_path, working_dir):
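    """Wrap the DNxHD media at output_dnxhr_path into an AAF at aaf_path,
    optionally taking the mob name, start timecode and metadata from the single
    clip found in otio_timeline_path."""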
    video_profile_name = "dnx_1080p_36_24"
    audio_profile_name = "pcm_48000_s16le"
    ignore_alpha = True
    frame_rate = 24
    start_timecode = None
    clip = None
    if otio_timeline_path:
        timeline = otio.adapters.read_from_file(otio_timeline_path)
        clips = timeline.find_clips()
        assert len(clips) == 1, "expected exactly one clip in the otio timeline"
        clip = clips[0]
        start_timecode = clip.media_reference.available_range.start_time.to_timecode()
    # when no timeline is supplied, fall back to None names, matching the
    # start_timecode default above
    mob_id = aaf_embedded_media_tool.create_aaf_file([output_dnxhr_path],
                                                     aaf_path,
                                                     aaf_mob_name=clip.name if clip else None,
                                                     aaf_tape_name=clip.name if clip else None,
                                                     aaf_start_timecode=start_timecode,
                                                     aaf_start_timecode_rate=frame_rate,
                                                     working_dir=working_dir,
                                                     frame_rate=frame_rate,
                                                     video_profile_name=video_profile_name,
                                                     audio_profile_name=audio_profile_name,
                                                     ignore_alpha=ignore_alpha,
                                                     use_embedded_timecode=True,
                                                     copy_dnxhd_streams=True)
    if clip:
        with aaf2.open(aaf_path, 'rw') as f:
            update_metadata(f, mob_id, clip)
def run_cli():
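    """Parse command-line arguments and run the encoder inside a temporary
    working directory that is removed afterwards."""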
    parser = argparse.ArgumentParser(
        prog='unreal aaf encoder',
        description='converts unreal renders to aaf format')
    parser.add_argument('-v', '--video', type=pathlib.Path, required=True,
                        help="file containing dnxhd media")
    parser.add_argument('-t', '--otio-timeline', type=pathlib.Path, dest='timeline',
                        help="OpenTimelineIO file containing clip metadata")
    parser.add_argument('output_aaf_path', type=pathlib.Path)
    args = parser.parse_args()
    temp_dir = tempfile.mkdtemp(suffix="-review")
    try:
        encode_aaf_media(str(args.output_aaf_path),
                         str(args.video),
                         str(args.timeline) if args.timeline else None,
                         temp_dir)
    finally:
        shutil.rmtree(temp_dir)
if __name__ == "__main__":
    run_cli()
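# Example invocation (hypothetical paths):
#   python -m cgl.plugins.otio.tools.aaf.aaf_encoder \
#       -v /renders/shot010.mxf -t /renders/shot010.otio /review/shot010.aaf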