How to use the opentimelineio.schema.Clip function in OpenTimelineIO

To help you get started, we’ve selected a few OpenTimelineIO examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github PixarAnimationStudios / OpenTimelineIO / tests / test_cmx_3600_adapter.py View on Github external
trans = otio.schema.Transition(
            in_offset=otio.opentime.RationalTime(6.0, 24.0),
            out_offset=otio.opentime.RationalTime(6.0, 24.0)
        )
        cl2 = otio.schema.Clip(
            metadata={'cmx_3600': {'reel': 'Reel2'}},
            source_range=otio.opentime.TimeRange(
                start_time=otio.opentime.RationalTime(24.0, 24.0),
                duration=otio.opentime.RationalTime(24.0, 24.0)
            )
        )
        trans2 = otio.schema.Transition(
            in_offset=otio.opentime.RationalTime(6.0, 24.0),
            out_offset=otio.opentime.RationalTime(6.0, 24.0)
        )
        cl3 = otio.schema.Clip(
            metadata={'cmx_3600': {'reel': 'Reel3'}},
            source_range=otio.opentime.TimeRange(
                start_time=otio.opentime.RationalTime(24.0, 24.0),
                duration=otio.opentime.RationalTime(24.0, 24.0)
            )
        )
        tl.tracks[0].extend([cl, trans, cl2, trans2, cl3])

        result = otio.adapters.write_to_string(
            tl,
            adapter_name='cmx_3600',
            style='nucoda'
        )

        expected = '''TITLE: Double Transition
github PixarAnimationStudios / OpenTimelineIO / tests / test_cmx_3600_adapter.py View on Github external
def test_nucoda_edl_write_with_double_transition(self):
        track = otio.schema.Track()
        tl = otio.schema.Timeline("Double Transition", tracks=[track])

        cl = otio.schema.Clip(
            metadata={'cmx_3600': {'reel': 'Reel1'}},
            source_range=otio.opentime.TimeRange(
                start_time=otio.opentime.RationalTime(24.0, 24.0),
                duration=otio.opentime.RationalTime(24.0, 24.0)
            )
        )
        trans = otio.schema.Transition(
            in_offset=otio.opentime.RationalTime(6.0, 24.0),
            out_offset=otio.opentime.RationalTime(6.0, 24.0)
        )
        cl2 = otio.schema.Clip(
            metadata={'cmx_3600': {'reel': 'Reel2'}},
            source_range=otio.opentime.TimeRange(
                start_time=otio.opentime.RationalTime(24.0, 24.0),
                duration=otio.opentime.RationalTime(24.0, 24.0)
            )
        )
        trans2 = otio.schema.Transition(
            in_offset=otio.opentime.RationalTime(6.0, 24.0),
            out_offset=otio.opentime.RationalTime(6.0, 24.0)
        )
        cl3 = otio.schema.Clip(
            metadata={'cmx_3600': {'reel': 'Reel3'}},
            source_range=otio.opentime.TimeRange(
                start_time=otio.opentime.RationalTime(24.0, 24.0),
                duration=otio.opentime.RationalTime(24.0, 24.0)
            )
github PixarAnimationStudios / OpenTimelineIO / tests / test_filter_algorithms.py View on Github external
def test_insert_tuple(self):
    """Verify filtered_with_sequence_context can replace one clip with three.

    The reducing callback returns a 3-tuple for every Clip it visits; the
    filtered track should then hold three copies of the clip while keeping
    the original track's name and metadata.
    """
    shared_md = {'test': 'bar'}
    track = otio.schema.Track(name='foo', metadata=shared_md)
    track.append(otio.schema.Clip(name='cl1', metadata=shared_md))

    def _replicate(_, item, __):
        # Non-clip items pass through untouched; clips come back tripled.
        if isinstance(item, otio.schema.Clip):
            return (item, copy.deepcopy(item), copy.deepcopy(item))
        return item

    result = otio.algorithms.filtered_with_sequence_context(
        track,
        _replicate
    )
    self.assertEqual(3, len(result))
    self.assertEqual(track.metadata, result.metadata)

    # Growing the source track to three copies of the clip should make it
    # json-equivalent to the filtered result.
    track.extend((copy.deepcopy(track[0]), copy.deepcopy(track[0])))
    self.assertJsonEqual(track, result)
github PixarAnimationStudios / OpenTimelineIO / tests / baselines / example.py View on Github external
def read_from_file(filepath, suffix=""):
    """Baseline adapter hook: build a stub timeline named after *filepath*.

    Nothing is actually read from disk — the returned Timeline (named
    ``filepath + suffix``) contains a single track holding one clip whose
    name is the filepath with "_clip" appended.
    """
    timeline = otio.schema.Timeline(name=filepath + str(suffix))
    track = otio.schema.Track()
    track.append(otio.schema.Clip(name=filepath + "_clip"))
    timeline.tracks.append(track)
    return timeline
github PixarAnimationStudios / OpenTimelineIO / tests / test_composition.py View on Github external
)
                ),
                otio.schema.Track(
                    name="body",
                    source_range=otio.opentime.TimeRange(
                        start_time=otio.opentime.RationalTime(
                            value=9,
                            rate=24
                        ),
                        duration=otio.opentime.RationalTime(
                            value=12,
                            rate=24
                        )
                    ),
                    children=[
                        otio.schema.Clip(
                            name="clip1",
                            source_range=otio.opentime.TimeRange(
                                start_time=otio.opentime.RationalTime(
                                    value=100,
                                    rate=24
                                ),
                                duration=otio.opentime.RationalTime(
                                    value=10,
                                    rate=24
                                )
                            )
                        ),
                        otio.schema.Clip(
                            name="clip2",
                            source_range=otio.opentime.TimeRange(
                                start_time=otio.opentime.RationalTime(
github PixarAnimationStudios / OpenTimelineIO / tests / test_clip.py View on Github external
def test_cons(self):
    """Construct a Clip with a media reference and round-trip it via JSON."""
    clip_name = "test"
    five_frames = otio.opentime.RationalTime(5, 24)
    src_range = otio.opentime.TimeRange(five_frames, five_frames)
    media_ref = otio.schema.ExternalReference(
        target_url="/var/tmp/test.mov",
        available_range=otio.opentime.TimeRange(
            five_frames,
            otio.opentime.RationalTime(10, 24)
        )
    )

    clip = otio.schema.Clip(
        name=clip_name,
        media_reference=media_ref,
        source_range=src_range,
        # transition_in / transition_out are intentionally not exercised
    )
    self.assertEqual(clip.name, clip_name)
    self.assertEqual(clip.source_range, src_range)
    self.assertIsOTIOEquivalentTo(clip.media_reference, media_ref)
    self.assertEqual(clip.source_range, src_range)

    # A serialize/deserialize round trip must preserve OTIO equivalence.
    serialized = otio.adapters.otio_json.write_to_string(clip)
    roundtrip = otio.adapters.otio_json.read_from_string(serialized)
    self.assertIsOTIOEquivalentTo(clip, roundtrip)
github PixarAnimationStudios / OpenTimelineIO / tests / test_cmx_3600_adapter.py View on Github external
"comments": ["OTIO TRUNCATED REEL NAME FROM: test.mov"]
            }
        }

        tr = otio.opentime.TimeRange(
            start_time=otio.opentime.RationalTime(0.0, 24.0),
            duration=rt
        )

        cl = otio.schema.Clip(
            name="test clip1",
            media_reference=mr,
            source_range=tr,
            metadata=md
        )
        cl2 = otio.schema.Clip(
            name="test clip2",
            media_reference=mr.clone(),
            source_range=tr,
            metadata=md
        )
        cl3 = otio.schema.Clip(
            name="test clip3",
            media_reference=mr.clone(),
            source_range=tr,
            metadata=md
        )
        cl4 = otio.schema.Clip(
            name="test clip3_ff",
            media_reference=mr.clone(),
            source_range=tr,
            metadata=md
github PixarAnimationStudios / OpenTimelineIO / contrib / opentimelineio_contrib / adapters / extern_maya_sequencer.py View on Github external
def _read_shot(shot):
    rate = FPS.get(cmds.currentUnit(q=True, time=True), 25)
    start = int(cmds.shot(shot, q=True, startTime=True))
    end = int(cmds.shot(shot, q=True, endTime=True)) + 1

    video_reference = otio.schema.ExternalReference(
        target_url=_video_url_for_shot(shot),
        available_range=otio.opentime.TimeRange(
            otio.opentime.RationalTime(value=start, rate=rate),
            otio.opentime.RationalTime(value=end - start, rate=rate)
        )
    )

    return otio.schema.Clip(
        name=cmds.shot(shot, q=True, shotName=True),
        media_reference=video_reference,
        source_range=otio.opentime.TimeRange(
            otio.opentime.RationalTime(value=start, rate=rate),
            otio.opentime.RationalTime(value=end - start, rate=rate)
        )
github PixarAnimationStudios / OpenTimelineIO / contrib / opentimelineio_contrib / adapters / fcpx_xml.py View on Github external
def _build_composable(self, element, default_format):
        timing_clip = self._timing_clip(element)
        source_range = self._time_range(
            timing_clip,
            self._format_id_for_clip(element, default_format)
        )

        if element.tag != "ref-clip":
            otio_composable = otio.schema.Clip(
                name=timing_clip.get("name"),
                media_reference=self._reference_from_id(
                    element.get("ref"),
                    default_format
                ),
                source_range=source_range
            )
        else:
            media_element = self._compound_clip_by_id(element.get("ref"))
            otio_composable = self._squence_to_stack(
                media_element.find("./sequence"),
                name=media_element.get("name"),
                source_range=source_range
            )

        for marker in timing_clip.findall(".//marker"):
github PixarAnimationStudios / OpenTimelineIO / contrib / opentimelineio_contrib / adapters / fcpx_xml.py View on Github external
def _clips(self):
    """Return an iterator over every Clip found under the parsed timeline."""
    clip_type = otio.schema.Clip
    return self.otio_timeline.each_child(descended_from_type=clip_type)