// Initialize the AVFoundation state
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:someUrl options:nil];
[asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:^{
    NSError *error = nil;
    AVKeyValueStatus status = [asset statusOfValueForKey:@"tracks" error:&error];
    if (status == AVKeyValueStatusLoaded)
    {
        NSDictionary *settings = @{ (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] };
        AVPlayerItemVideoOutput *output = [[[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:settings] autorelease];
        AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:asset];
        [playerItem addOutput:output];
        AVPlayer *player = [AVPlayer playerWithPlayerItem:playerItem];
        // Assume the corresponding instance variables exist. You'll need them to control
        // playback of the video (via the AVPlayer) and to copy sample buffers (via the
        // AVPlayerItemVideoOutput).
        [self setPlayer:player];
        [self setPlayerItem:playerItem];
        [self setOutput:output];
    }
    else
    {
        NSLog(@"%@ Failed to load the tracks.", self);
    }
}];

// Now at any later point in time, you can get a pixel buffer
// that corresponds to the current AVPlayer state like this.
// (The "copy" in the method name means you own the returned buffer;
// release it with CVBufferRelease() when you're done with it.)
CVPixelBufferRef buffer = [[self output] copyPixelBufferForItemTime:[[self playerItem] currentTime] itemTimeForDisplay:nil];
2 Answers
ss2ws0br 1#
iOS 6 shipped some APIs that I was able to use to make this process much simpler. It doesn't use AVAssetReader at all, and instead relies on a class called AVPlayerItemVideoOutput. An instance of this class can be added to any AVPlayerItem via the new -addOutput: method. Unlike AVAssetReader, this class works fine with an AVPlayerItem that is backed by a remote AVURLAsset, and it also has the benefit of allowing a more sophisticated playback interface that supports non-linear playback via -copyPixelBufferForItemTime:itemTimeForDisplay: (instead of AVAssetReader's severely limiting -copyNextSampleBuffer method). For sample code, see the snippet at the top of this page.
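To illustrate the non-linear part, here is a minimal seek-and-sample sketch. It assumes the player, playerItem, and output properties set up in the sample code above; the 42-second target time is arbitrary.

// Jump to an arbitrary time, then pull the pixel buffer for that time.
[[self player] seekToTime:CMTimeMakeWithSeconds(42.0, 600)
        completionHandler:^(BOOL finished) {
    CMTime itemTime = [[self playerItem] currentTime];
    if ([[self output] hasNewPixelBufferForItemTime:itemTime]) {
        CVPixelBufferRef buf = [[self output] copyPixelBufferForItemTime:itemTime
                                                      itemTimeForDisplay:NULL];
        // ... read pixels out of buf, then release it (Copy rule) ...
        CVBufferRelease(buf);
    }
}];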
Once you have a buffer, you can upload it to OpenGL however you like. I recommend the horribly documented CVOpenGLESTextureCacheCreateTextureFromImage() function, because you get hardware acceleration on all the newer devices and it is much faster than glTexSubImage2D(). See Apple's GLCameraRipple and RosyWriter demos for examples.
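For reference, a minimal sketch of that texture-cache path, assuming an OpenGL ES 2.0 context on iOS; eaglContext stands in for your own EAGLContext, and buffer is the pixel buffer copied above:

#import <CoreVideo/CoreVideo.h>
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>   // for GL_BGRA

// One-time setup: create a texture cache tied to your GL context.
CVOpenGLESTextureCacheRef textureCache = NULL;
CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, eaglContext, NULL, &textureCache);

// Per frame: wrap the pixel buffer in a GL texture (no glTexSubImage2D copy).
CVOpenGLESTextureRef texture = NULL;
CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, buffer, NULL,
                                             GL_TEXTURE_2D, GL_RGBA,
                                             (GLsizei)CVPixelBufferGetWidth(buffer),
                                             (GLsizei)CVPixelBufferGetHeight(buffer),
                                             GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
glBindTexture(CVOpenGLESTextureGetTarget(texture), CVOpenGLESTextureGetName(texture));
// ... set texture parameters and draw ...

CFRelease(texture);
CVBufferRelease(buffer);                       // release the buffer we copied
CVOpenGLESTextureCacheFlush(textureCache, 0);  // let the cache recycle its textures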
k5ifujac 2#

As of 2023, CGLTexImageIOSurface2D is much faster than CVOpenGLESTextureCacheCreateTextureFromImage() at getting CVPixelBuffer data into an OpenGL texture. Make sure your CVPixelBuffers are IOSurface-backed and in the right format:
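A sketch of what that setup might look like on the macOS/CGL side. The attribute keys are standard CoreVideo keys; pixelBuffer is assumed to come from -copyPixelBufferForItemTime:itemTimeForDisplay:, and the GL_RGBA / GL_BGRA / GL_UNSIGNED_INT_8_8_8_8_REV combination is the one matching kCVPixelFormatType_32BGRA:

#import <CoreVideo/CoreVideo.h>
#import <IOSurface/IOSurface.h>
#import <OpenGL/OpenGL.h>        // CGL
#import <OpenGL/CGLIOSurface.h>  // CGLTexImageIOSurface2D()
#import <OpenGL/gl3.h>           // GL_TEXTURE_RECTANGLE

// Ask the video output for IOSurface-backed BGRA buffers.
NSDictionary *settings = @{
    (id)kCVPixelBufferPixelFormatTypeKey     : @(kCVPixelFormatType_32BGRA),
    (id)kCVPixelBufferIOSurfacePropertiesKey : @{}  // empty dict = IOSurface-backed, default options
};
AVPlayerItemVideoOutput *output = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:settings];

// Per frame: bind the buffer's IOSurface directly to a rectangle texture.
IOSurfaceRef surface = CVPixelBufferGetIOSurface(pixelBuffer);
GLuint tex;
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_RECTANGLE, tex);
CGLTexImageIOSurface2D(CGLGetCurrentContext(), GL_TEXTURE_RECTANGLE, GL_RGBA,
                       (GLsizei)IOSurfaceGetWidth(surface), (GLsizei)IOSurfaceGetHeight(surface),
                       GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, surface, 0);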
To get every frame:
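A minimal sketch of a per-frame loop under those assumptions: it polls the output from a display-synchronized callback (e.g. a CVDisplayLink), using AVPlayerItemVideoOutput's itemTimeForHostTime:, hasNewPixelBufferForItemTime:, and copyPixelBufferForItemTime:itemTimeForDisplay:.

#import <QuartzCore/QuartzCore.h>   // CACurrentMediaTime()

// Called once per display refresh (e.g. from a CVDisplayLink callback).
CMTime itemTime = [output itemTimeForHostTime:CACurrentMediaTime()];
if ([output hasNewPixelBufferForItemTime:itemTime]) {
    CVPixelBufferRef pixelBuffer = [output copyPixelBufferForItemTime:itemTime
                                                    itemTimeForDisplay:NULL];
    if (pixelBuffer) {
        // ... bind via CGLTexImageIOSurface2D as above and draw ...
        CVBufferRelease(pixelBuffer);   // Copy rule: we own this buffer
    }
}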