How to use av - common examples

To help you get started, we’ve selected a few av examples based on popular ways the library is used in public projects.

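Before diving into the examples, here is a minimal sketch of the pattern most of them build on: open a container, pick a stream, and iterate decoded frames. The input path is a placeholder; the calls themselves (av.open, Container.decode) are standard PyAV.

import av

# 'input.mp4' is a placeholder path for this sketch.
container = av.open('input.mp4')

# Decode frames from the first video stream.
for frame in container.decode(video=0):
    print(frame.pts, frame.width, frame.height)

container.close()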

Example from mikeboers/PyAV: tests/test_logging.py
def test_adapt_level(self):
        self.assertEqual(
            av.logging.adapt_level(av.logging.ERROR),
            logging.ERROR
        )
        self.assertEqual(
            av.logging.adapt_level(av.logging.WARNING),
            logging.WARNING
        )
        self.assertEqual(
            av.logging.adapt_level((av.logging.WARNING + av.logging.ERROR) // 2),
            logging.WARNING
        )
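The adapt_level() helper exercised above maps libav's log-level constants onto the standard logging module's levels. As a hedged sketch of how that mapping is typically put to use, PyAV's av.logging.set_level() can raise the amount of FFmpeg output forwarded to Python's loggers:

import logging
import av

logging.basicConfig(level=logging.INFO)

# Ask libav for verbose messages; PyAV forwards them to Python loggers
# (named under 'libav') at the adapted logging level.
av.logging.set_level(av.logging.VERBOSE)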
Example from soft-matter/pims: pims/display.py
else:
                stream.width = width
                stream.height = (height or
                                 width * img.shape[0] // img.shape[1])

            if bitrate is not None:
                stream.bit_rate = int(bitrate)
            elif quality is not None and codec == 'wmv2':
                bitrate = quality * _estimate_bitrate([stream.height,
                                                       stream.width],
                                                      export_rate)
                stream.bit_rate = int(bitrate)

        # Ensure correct memory layout
        img = img.astype(img.dtype, order='C', copy=False)
        frame = av.VideoFrame.from_ndarray(img, format='rgb24')
        packet = stream.encode(frame)
        if packet is not None:
            output.mux(packet)

    # Finish encoding the stream
    while True:
        try:
            packet = stream.encode()
        except av.AVError:  # End of file raises AVError since after av 0.4
            break
        if packet is None:
            break
        output.mux(packet)

    output.close()
Example from mikeboers/PyAV: tests/test_seek.py
def test_stream_seek(self, use_deprecated_api=False):

        container = av.open(fate_suite('h264/interlaced_crop.mp4'))

        video_stream = next(s for s in container.streams if s.type == 'video')
        total_frame_count = 0

        # Count number of frames in video
        for packet in container.demux(video_stream):
            for frame in packet.decode():
                total_frame_count += 1

        target_frame = int(total_frame_count / 2.0)
        time_base = float(video_stream.time_base)

        rate = float(video_stream.average_rate)
        target_sec = target_frame / rate

        target_timestamp = int(target_sec / time_base) + video_stream.start_time
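The excerpt stops once target_timestamp has been computed. A hedged continuation, reusing the container and video_stream from above, would pass that value to container.seek() and then decode forward to the wanted position:

        # Seek to the nearest keyframe at or before the target; because stream=
        # is given, the offset is interpreted in that stream's time_base.
        container.seek(target_timestamp, stream=video_stream)

        # Decode forward until we reach (or pass) the target timestamp.
        for frame in container.decode(video_stream):
            if frame.pts is not None and frame.pts >= target_timestamp:
                print('reached', float(frame.pts * video_stream.time_base), 'seconds')
                break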
Example from mikeboers/PyAV: tests/test_python_io.py
def test_buffer_read_write(self):

        buffer_ = BytesIO()  # MP4 output is binary, so use a bytes buffer
        wrapped = MethodLogger(buffer_)
        write_rgb_rotate(av.open(wrapped, 'w', 'mp4'))

        # Make sure it did actually write.
        writes = wrapped._filter('write')
        self.assertTrue(writes)

        self.assertTrue(buffer_.tell())

        # Standard assertions.
        buffer_.seek(0)
        assert_rgb_rotate(self, av.open(buffer_))
Example from aiortc/aiortc: tests/codecs.py
def create_video_frame(
        self, width, height, pts, format="yuv420p", time_base=VIDEO_TIME_BASE
    ):
        """
        Create a single blank video frame.
        """
        frame = VideoFrame(width=width, height=height, format=format)
        for p in frame.planes:
            p.update(bytes(p.buffer_size))
        frame.pts = pts
        frame.time_base = time_base
        return frame
Example from mikeboers/PyAV: tests/test_filters.py
def test_haldclut_graph(self):

        # Disabled: everything below is unreachable until this skip is removed.
        raise SkipTest()

        graph = Graph()

        img = Image.open(fate_suite('png1/lena-rgb24.png'))
        frame = VideoFrame.from_image(img)
        img_source = graph.add_buffer(frame)

        hald_img = Image.open('hald_7.png')
        hald_frame = VideoFrame.from_image(hald_img)
        hald_source = graph.add_buffer(hald_frame)

        try:
            hald_filter = graph.add('haldclut')
        except ValueError:
            # Not in Libav.
            raise SkipTest()

        sink = graph.add('buffersink')

        img_source.link(0, hald_filter, 0)
        hald_source.link(0, hald_filter, 1)
        hald_filter.link(0, sink, 0)
        graph.config()

        self.assertIs(img_source.outputs[0].linked_to, hald_filter.inputs[0])
Example from mikeboers/PyAV: tests/test_encode.py
def test_encoding_with_pts(self):

        path = self.sandboxed('video_with_pts.mov')
        output = av.open(path, 'w')

        stream = output.add_stream('libx264', 24)
        stream.width = WIDTH
        stream.height = HEIGHT
        stream.pix_fmt = "yuv420p"

        for i in range(DURATION):
            frame = VideoFrame(WIDTH, HEIGHT, 'rgb24')
            frame.pts = i * 2000
            frame.time_base = Fraction(1, 48000)

            for packet in stream.encode(frame):
                self.assertEqual(packet.time_base, Fraction(1, 24))
                output.mux(packet)

        for packet in stream.encode(None):
            self.assertEqual(packet.time_base, Fraction(1, 24))
            output.mux(packet)

        output.close()
Example from mikeboers/PyAV: tests/test_videoframe.py
def test_roundtrip(self):
        image = Image.open(fate_png())
        frame = VideoFrame.from_image(image)
        img = frame.to_image()
        img.save(self.sandboxed('roundtrip-high.jpg'))
        self.assertImagesAlmostEqual(image, img)
Example from mikeboers/PyAV: tests/test_videoframe.py
def test_ndarray_rgb(self):
        array = numpy.random.randint(0, 256, size=(480, 640, 3), dtype=numpy.uint8)
        for format in ['rgb24', 'bgr24']:
            frame = VideoFrame.from_ndarray(array, format=format)
            self.assertEqual(frame.width, 640)
            self.assertEqual(frame.height, 480)
            self.assertEqual(frame.format.name, format)
            self.assertTrue((frame.to_ndarray() == array).all())