使用pygst创建程序化MPEG-4视频
我希望找到如何使用pygst创建MPEG-4视频文件的方法(也可以推荐其他框架)。
这个问题分为三个部分:
如何从Python的帧缓冲区将生成的视频数据输入到pygst的处理流程中
如何将这个视频流保存为MPEG-4文件
如何将这个视频流与MP3音频源混合在一起
下面是伪代码:
# Pseudocode: render frames, then hand each one to GStreamer.
for frame in range(0, 10000):  # let's render 10000 frames of video
    # Pre-size the buffer: indexing into an empty list raises IndexError.
    data = [0] * (width * height)
    for y in range(0, height):
        for x in range(0, width):
            data[y*width+x] = random.randint(0, 2**31)  # rgba pixel
    # XXX: how to feed the video frame generated above into a GStreamer
    # pipeline and save it as an MPEG-4 file
    pass
更多信息:
http://lists.freedesktop.org/archives/gstreamer-devel/2011-August/032609.html
1 个回答
2
你可能想要创建一个 appsrc
元素，然后对于每一帧，创建一个新的 GstBuffer，并通过 gst_app_src_push_buffer() 将它推送到管道中。
这里有一些示例代码,它并不能正常工作 - ffmpeg 报告帧长度的问题(?), 但我想你能理解要点和一些提示。
# Module setup: dump pipeline graphs (DEBUG_BIN_TO_DOT_FILE) into /tmp.
import os
os.putenv('GST_DEBUG_DUMP_DOT_DIR', '/tmp')
import gst
import gobject
# Must be called before GStreamer invokes our callbacks from its threads.
gobject.threads_init()
import logging
import random
import pdb
# Module-level logger; DEBUG so every per-frame push result is visible.
_log = logging.getLogger(__name__)
_log.setLevel(logging.DEBUG)
logging.basicConfig()
def framegenerator(width=1920, height=1080, num_frames=10000):
    '''
    Yield one frame of raw video per iteration.

    Each frame is a flat list of width*height integers, one packed RGBA
    pixel per entry, filled with random noise.

    :param width: frame width in pixels (default 1920)
    :param height: frame height in pixels (default 1080)
    :param num_frames: number of frames to generate (default 10000)
    '''
    for frame in range(0, num_frames):
        # Build the frame in one comprehension instead of pre-allocating
        # a list with list(range(...)) and overwriting every slot.
        yield [random.randint(0, 2**31)  # rgba pixel
               for _ in range(height * width)]

# Shared generator instance consumed by the appsrc need-data callback.
GENERATOR = framegenerator()
def feed_appsrc(bus, message):
    '''
    need-data callback: feed the appsrc element with one new frame.

    Pulls the next frame from GENERATOR, packs it into a GstBuffer and
    pushes it into the pipeline; emits EOS once the generator is
    exhausted.  (For need-data the two positional arguments are really
    the appsrc element and the requested byte count; both are ignored.)
    '''
    global appsrc, pipeline
    import struct

    # Dump a fresh graph of the running pipeline for debugging.
    dotfile = "/tmp/debug-graph.dot"
    pngfile = "/tmp/pipeline.png"
    if os.access(dotfile, os.F_OK):
        os.remove(dotfile)
    if os.access(pngfile, os.F_OK):
        os.remove(pngfile)
    gst.DEBUG_BIN_TO_DOT_FILE(
        pipeline,
        gst.DEBUG_GRAPH_SHOW_ALL,
        'debug-graph')
    dot = '/usr/bin/dot'
    os.system(dot + " -Tpng -o " + pngfile + " " + dotfile)

    try:
        frame = next(GENERATOR)  # works on Python 2 and 3, unlike .next()
        # Pack the pixel list into raw 32-bit words.  The original code
        # pushed str(frame) -- the textual repr of a Python list -- which
        # is exactly why downstream complained about frame lengths.
        raw = struct.pack('<%dI' % len(frame), *frame)
        buf = gst.Buffer(raw)
        buf.set_caps(
            gst.caps_from_string('video/x-raw-rgb,framerate=30/1'))
        # NOTE(review): buffers carry no timestamp/duration; appsrc may
        # need them (or do-timestamp=TRUE) for the muxer -- confirm.
        res = appsrc.emit('push-buffer', buf)
        _log.debug('Result: {0}'.format(res))
    except StopIteration:
        res = appsrc.emit('eos')
        _log.info('EOS')
def _on_message(bus, message):
    """Bus watch: log every GStreamer bus message at DEBUG level."""
    _log.log(logging.DEBUG, message)
# Build the pipeline:
#   appsrc ! ffmpegcolorspace ! videorate ! videoscale
#          ! vp8enc ! webmmux ! filesink
pipeline = gst.Pipeline('pipeline')

appsrc = gst.element_factory_make('appsrc', 'appsrc')
# Pull-mode feeding: GStreamer emits need-data whenever its queue runs dry.
appsrc.connect('need-data', feed_appsrc)
# Raw 32-bit RGBA caps describing the buffers feed_appsrc pushes.
appsrc.set_property('caps',
    gst.caps_from_string(','.join([
        'video/x-raw-rgb',
        'framerate=30/1',
        'width=1920',
        'height=1080',
        'bpp=32',
        'depth=32',
        'green_mask=65280',      # 0x0000ff00
        'red_mask=255',          # 0x000000ff
        'blue_mask=16711680',    # 0x00ff0000
        'alpha_mask=-16777216',  # 0xff000000 as a signed 32-bit int
        'endianness=4321'])))
pipeline.add(appsrc)

ffmpegcolorspace = gst.element_factory_make('ffmpegcolorspace')
pipeline.add(ffmpegcolorspace)
videorate = gst.element_factory_make('videorate')
pipeline.add(videorate)
ffvideoscale = gst.element_factory_make('videoscale')
pipeline.add(ffvideoscale)
vp8enc = gst.element_factory_make('vp8enc', 'vp8enc')
pipeline.add(vp8enc)
webmmux = gst.element_factory_make('webmmux', 'webmmux')
pipeline.add(webmmux)
filesink = gst.element_factory_make('filesink', 'filesink')
filesink.set_property('location', '/tmp/generated-video.webm')
pipeline.add(filesink)

gst.element_link_many(
    appsrc,
    ffmpegcolorspace,
    videorate,
    ffvideoscale,
    vp8enc,
    webmmux,
    filesink)

# Watch the bus so errors and EOS show up in the log.
bus = pipeline.get_bus()
bus.add_signal_watch()
bus.connect('message', _on_message)

pipeline.set_state(gst.STATE_PLAYING)
# Removed the leftover pdb.set_trace(): it parked the script in the
# debugger before the main loop ever started.
gobject.MainLoop().run()