How to use the opentimelineio.schema.Stack class in OpenTimelineIO

To help you get started, we’ve selected a few examples that show how opentimelineio.schema.Stack is commonly used in public projects.

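Before the project examples, here is a minimal sketch of the class in use (clip and track names are illustrative; it only assumes the opentimelineio package is importable):

import opentimelineio as otio

# A Stack composes its children in depth order (they overlap in time),
# while a Track lays its children out one after another.
timeline = otio.schema.Timeline(name="example")

# timeline.tracks is itself a Stack; appending a Track nests it there.
track = otio.schema.Track(name="V1", kind=otio.schema.TrackKind.Video)
track.append(otio.schema.Clip(name="shot_01"))
timeline.tracks.append(track)

# A Stack can also sit inside a Track to represent a nested composition.
nested = otio.schema.Stack(name="nested")
nested.append(otio.schema.Clip(name="shot_02"))
track.append(nested)

print(otio.adapters.write_to_string(timeline, "otio_json"))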

From tests/test_composition.py in PixarAnimationStudios/OpenTimelineIO:
def test_cons(self):
        st = otio.schema.Stack(name="test")
        self.assertEqual(st.name, "test")
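
otio.schema.Stack is a Composition, so in addition to name it accepts children, source_range, and metadata keyword arguments, all of which appear in the examples that follow.
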
From tests/test_composition.py in PixarAnimationStudios/OpenTimelineIO:
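        # tl (an otio.schema.Timeline) and tr1 (an otio.schema.Track)
        # are created earlier in this test.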
tl.tracks.append(tr1)
        c1 = otio.schema.Clip(name="c1")
        tr1.append(c1)
        c2 = otio.schema.Clip(name="c2")
        tr1.append(c2)
        c3 = otio.schema.Clip(name="c3")
        tr1.append(c3)

        tr2 = otio.schema.Track(name="tr2")
        tl.tracks.append(tr2)
        c4 = otio.schema.Clip(name="c4")
        tr2.append(c4)
        c5 = otio.schema.Clip(name="c5")
        tr2.append(c5)

        st = otio.schema.Stack(name="st")
        tr2.append(st)
        c6 = otio.schema.Clip(name="c6")
        st.append(c6)
        tr3 = otio.schema.Track(name="tr3")
        c7 = otio.schema.Clip(name="c7")
        tr3.append(c7)
        c8 = otio.schema.Clip(name="c8")
        tr3.append(c8)
        st.append(tr3)

        self.assertEqual(2, len(tl.tracks))
        self.assertEqual(3, len(tr1))
        self.assertEqual(3, len(tr2))
        self.assertEqual(2, len(st))
        self.assertEqual(2, len(tr3))
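
Note that len() counts only a composition's direct children: st holds c6 and tr3, so len(st) is 2 even though tr3 itself contains two clips.
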
From tests/test_composition.py in PixarAnimationStudios/OpenTimelineIO:
def test_str(self):
        st = otio.schema.Stack(name="foo", children=[])
        self.assertMultiLineEqual(
            str(st),
            "Stack(" +
            str(st.name) + ", " +
            str(list(st)) + ", " +
            str(st.source_range) + ", " +
            str(st.metadata) +
            ")"
        )
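
For an empty Stack named "foo" this comes out as something like Stack(foo, [], None, {}), since the name, child list, source_range, and metadata are each passed through str().
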
From tests/test_filter_algorithms.py in PixarAnimationStudios/OpenTimelineIO:
def test_copy_stack(self):
        """Test a no op reduce that copies the timeline."""

        md = {'test': 'bar'}
        tr = otio.schema.Stack(name='foo', metadata=md)
        tr.append(otio.schema.Clip(name='cl1', metadata=md))

        result = otio.algorithms.filtered_with_sequence_context(
            tr,
            # no op - ignore all arguments and return original thing
            lambda _, thing, __: thing
        )
        self.assertJsonEqual(tr, result)
        self.assertIsNot(tr[0], result)
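
The lambda ignores the neighboring items that filtered_with_sequence_context passes in on either side of each visited item and returns the item itself, so the result serializes identically to the input while being a separate copy, which is what the two assertions verify.
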
From src/py-opentimelineio/opentimelineio/console/otioconvert.py in PixarAnimationStudios/OpenTimelineIO:
"media linker"
        )
    except ValueError as exc:
        sys.stderr.write("\n" + str(exc) + "\n")
        sys.exit(1)

    result_tl = otio.adapters.read_from_file(
        args.input,
        in_adapter,
        media_linker_name=media_linker_name,
        media_linker_argument_map=ml_args,
        **read_adapter_arg_map
    )

    if args.tracks:
        result_tracks = copy.deepcopy(otio.schema.Stack())
        del result_tracks[:]
        for track in args.tracks.split(","):
            tr = result_tl.tracks[int(track)]
            del result_tl.tracks[int(track)]
            print("track {0} is of kind: '{1}'".format(track, tr.kind))
            result_tracks.append(tr)
        result_tl.tracks = result_tracks

    # handle trim arguments
    if args.begin is not None and args.end is not None:
        result_tl = otio.algorithms.timeline_trimmed_to_range(
            result_tl,
            otio.opentime.range_from_start_end_time(args.begin, args.end)
        )

    try:
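
When a track selection is passed (args.tracks), each chosen track is deleted from the original timeline before being appended to the replacement Stack; an OTIO item can belong to only one parent composition at a time, so it must be removed from one container before it can be added to another.
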
From contrib/opentimelineio_contrib/adapters/aaf_adapter/aaf_writer.py in PixarAnimationStudios/OpenTimelineIO:
def _stackify_nested_groups(timeline):
    """
    Ensure that all nesting in a given timeline is in a stack container.
    This conforms with how AAF thinks about nesting, there needs
    to be an outer container, even if it's just one object.
    """
    copied = copy.deepcopy(timeline)
    for track in copied.tracks:
        for i, child in enumerate(track.each_child()):
            is_nested = isinstance(child, otio.schema.Track)
            is_parent_in_stack = isinstance(child.parent(), otio.schema.Stack)
            if is_nested and not is_parent_in_stack:
                stack = otio.schema.Stack()
                track.remove(child)
                stack.append(child)
                track.insert(i, stack)
    return copied
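
As a rough usage sketch (names are illustrative; it assumes import copy and import opentimelineio as otio alongside the function above), a track nested directly inside another track comes back wrapped in a Stack:

tl = otio.schema.Timeline(name="demo")
outer = otio.schema.Track(name="outer")
inner = otio.schema.Track(name="inner")
inner.append(otio.schema.Clip(name="c1"))
outer.append(inner)
tl.tracks.append(outer)

wrapped = _stackify_nested_groups(tl)
# The nested track now lives inside a Stack: wrapped.tracks[0][0] is a
# Stack whose single child is the copied "inner" track.
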
From contrib/opentimelineio_contrib/adapters/fcpx_xml.py in PixarAnimationStudios/OpenTimelineIO:
def _squence_to_stack(self, sequence_element, name="", source_range=None):
        timeline_items = []
        lanes = []
        stack = otio.schema.Stack(name=name, source_range=source_range)
        for element in sequence_element.iter():
            if element.tag not in COMPOSABLE_ELEMENTS:
                continue
            composable = self._build_composable(
                element,
                sequence_element.get("format")
            )

            offset, lane = self._offset_and_lane(
                element,
                sequence_element.get("format")
            )

            timeline_items.append(
                {
                    "track": lane,

From contrib/opentimelineio_contrib/adapters/xges.py in PixarAnimationStudios/OpenTimelineIO:
def _otio_item_from_uri_clip(self, clip):
        asset_id = self._get_attrib(clip, "asset-id", str)
        sub_project_asset = self._asset_by_id(asset_id, "GESTimeline")
        if sub_project_asset is not None:
            # this clip refers to a sub project
            sub_ges = XGES(self._findonly(sub_project_asset, "./ges"))
            otio_stack = otio.schema.Stack()
            sub_ges._fill_otio_stack_from_ges(otio_stack)
            otio_stack.name = self._get_name(clip)
            self._add_properties_and_metadatas_to_otio(
                otio_stack, sub_project_asset, "sub-project-asset")
            # NOTE: we include asset-id in the metadata, so that two
            # stacks that refer to a single sub-project will not be
            # split into separate assets when converting from
            # xges->otio->xges
            self._add_to_otio_metadata(otio_stack, "asset-id", asset_id)
            uri_clip_asset = self._asset_by_id(asset_id, "GESUriClip")
            if uri_clip_asset is None:
                show_ignore(
                    "Did not find the expected GESUriClip asset with "
                    "the id {}".format(asset_id))
            else:
                self._add_properties_and_metadatas_to_otio(
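
A clip that references a sub-project is thus converted into a nested Stack filled from the sub-project's own ges element, and the original asset-id is kept in the stack's metadata so that a shared sub-project is not split into separate assets when round-tripping xges -> otio -> xges.
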
From src/opentimelineview/timeline_widget.py in PixarAnimationStudios/OpenTimelineIO:
def _add_tracks(self):
        video_tracks_top = track_widgets.TIME_SLIDER_HEIGHT
        audio_tracks_top = track_widgets.TIME_SLIDER_HEIGHT

        video_tracks = []
        audio_tracks = []
        other_tracks = []

        if isinstance(self.composition, otio.schema.Stack):
            video_tracks = [
                t for t in self.composition
                if t.kind == otio.schema.TrackKind.Video and list(t)
            ]
            audio_tracks = [
                t for t in self.composition
                if t.kind == otio.schema.TrackKind.Audio and list(t)
            ]
            video_tracks.reverse()

            other_tracks = [
                t for t in self.composition
                if (
                    t.kind not in (
                        otio.schema.TrackKind.Video,
                        otio.schema.TrackKind.Audio
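
When the composition being displayed is a Stack, its children are partitioned by TrackKind into video, audio, and other tracks; empty tracks are skipped (the list(t) check), and the video list is reversed before layout.
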
From src/opentimelineview/track_widgets.py in PixarAnimationStudios/OpenTimelineIO:
def _populate(self):
        track_map = self.track.range_of_all_children()
        for n, item in enumerate(self.track):
            timeline_range = track_map[item]

            rect = QtCore.QRectF(
                0,
                0,
                otio.opentime.to_seconds(timeline_range.duration) *
                TIME_MULTIPLIER,
                TRACK_HEIGHT
            )

            if isinstance(item, otio.schema.Clip):
                new_item = ClipItem(item, timeline_range, rect)
            elif isinstance(item, otio.schema.Stack):
                new_item = NestedItem(item, timeline_range, rect)
            elif isinstance(item, otio.schema.Track):
                new_item = NestedItem(item, timeline_range, rect)
            elif isinstance(item, otio.schema.Gap):
                new_item = GapItem(item, timeline_range, rect)
            elif isinstance(item, otio.schema.Transition):
                new_item = TransitionItem(item, timeline_range, rect)
            else:
                print("Warning: could not add item {} to UI.".format(item))
                continue

            new_item.setParentItem(self)
            new_item.setX(
                otio.opentime.to_seconds(timeline_range.start_time) *
                TIME_MULTIPLIER
            )
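
range_of_all_children() supplies each child's range within the parent track, which is used both to size each widget (duration in seconds times TIME_MULTIPLIER) and to position it (start time, likewise scaled). Stack and Track children are both drawn as NestedItem, reflecting that either kind of composition can be nested inside a track.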