From Stephan Huber, "attached you'll find the latest versions of the QTKit and AVFoundation plugins, some changes to osgPresentation, and a small enhancement for ImageIO.
I fixed some bugs and did some more tests with both video plugins. I integrated CoreVideo with osgPresentation: ImageStream has a new virtual method called createSuitableTexture, which returns NULL in the default implementation. Specialized implementations like the QTKit plugin return a CoreVideo texture instead. I refactored the code in SlideShowConstructor::createTexturedQuad to use the texture returned by ImageStream::createSuitableTexture. I did not use osgDB::readObjectFile to get the texture object, as a lot of image-related code in SlideShowConstructor would have had to be refactored to work with a texture. My changes are minimal and should not break existing code. There's one minor issue with CoreVideo in general: as the implementation is asynchronous, there may be no texture available when the first frame of the video is shown. I am a bit unsure how to tackle this problem; any input on this is appreciated. Back to the AVFoundation plugin: the current implementation does not support CoreVideo the way the QTKit plugin does. There is no way to get decoded frames from AVFoundation that are already stored on the GPU, which is kind of sad. I added some CoreVideo support to transfer decoded frames back to the GPU, but in my tests the performance was worse than the normal approach using glTexSubImage. This is why I disabled CoreVideo for AVFoundation. You can still request a CoreVideoTexture via readObjectFile, though."
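For reference, a minimal sketch (not part of the patch) of how application code might use the new hook. The helper names createVideoTexture/readVideoObject and the file name "movie.mov" are placeholders; the fallback texture setup just mirrors what SlideShowConstructor::createTexturedQuadGeometry does in the diff below, and the "disableCoreVideo" option string is the one registered by the plugins.

#include <osg/ImageStream>
#include <osg/Texture2D>
#include <osgDB/Options>
#include <osgDB/ReadFile>

// Prefer a plugin-provided texture (e.g. a CoreVideo-backed one from the
// QTKit plugin); fall back to a regular Texture2D wrapping the ImageStream.
osg::ref_ptr<osg::Texture> createVideoTexture(const std::string& fileName)
{
    osg::ref_ptr<osg::Image> image = osgDB::readImageFile(fileName);
    if (!image) return NULL;

    osg::ImageStream* stream = dynamic_cast<osg::ImageStream*>(image.get());

    osg::ref_ptr<osg::Texture> texture;
    if (stream)
        texture = stream->createSuitableTexture(); // NULL unless the plugin supports it

    if (!texture)
    {
        // default path: wrap the image stream in a standard Texture2D
        osg::ref_ptr<osg::Texture2D> tex2D = new osg::Texture2D(image.get());
        tex2D->setResizeNonPowerOfTwoHint(false);
        tex2D->setFilter(osg::Texture::MIN_FILTER, osg::Texture::LINEAR);
        tex2D->setFilter(osg::Texture::MAG_FILTER, osg::Texture::LINEAR);
        texture = tex2D;
    }
    return texture;
}

// Alternatively, ask the plugin for the texture object directly via
// readObjectFile; setting the "disableCoreVideo" option switches this
// back to a plain ImageStream instead of a CoreVideo texture.
osg::ref_ptr<osg::Object> readVideoObject()
{
    osg::ref_ptr<osgDB::Options> options = new osgDB::Options;
    options->setPluginStringData("disableCoreVideo", "true");
    return osgDB::readObjectFile("movie.mov", options.get());
}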
@@ -19,6 +19,9 @@

namespace osg {

// forward declare of osg::Texture
class Texture;

/**
 * Image Stream class.
*/

@@ -98,8 +101,13 @@ class OSG_EXPORT ImageStream : public Image

        void setAudioStreams(const AudioStreams& asl) { _audioStreams = asl; }
        AudioStreams& getAudioStreams() { return _audioStreams; }
        const AudioStreams& getAudioStreams() const { return _audioStreams; }

        /** create a suitable texture for this imagestream, return NULL, if not supported
          * implement this method in subclasses to use special technologies like CoreVideo
          * or similar.
          */
        virtual osg::Texture* createSuitableTexture() { return NULL; }

    protected:
        virtual void applyLoopingMode() {}

@@ -138,9 +138,9 @@ void OSXCoreVideoTexture::apply(osg::State& state) const {
        else
            _adapter = new OSXCoreVideoAdapter(state, _image.get());
    }

    _adapter->getFrame();
    _textureTarget = _adapter->getTextureTarget();

    glBindTexture(_textureTarget, _adapter->getTextureName());
}

@@ -81,6 +81,10 @@ public:
        return _waitForFirstFrame || getNeedsDispatching();
    }

    static void initializeQTKit();

    virtual osg::Texture* createSuitableTexture();

protected:

    virtual void applyLoopingMode();

@@ -19,6 +19,7 @@

#include "OSXQTKitVideo.h"
#include "OSXCoreVideoAdapter.h"
#include "OSXCoreVideoTexture.h"

namespace {

@@ -88,12 +89,37 @@ struct OSXQTKitVideo::Data {
};


void OSXQTKitVideo::initializeQTKit()
{
    static bool inited(false);
    if (!inited)
    {
        inited = true;
        // force initialization of QTKit on the main-thread!
        if (![NSThread isMainThread]) {
            dispatch_apply(1, dispatch_get_main_queue(), ^(size_t n) {
                EnterMovies();
                QTMovie* movie = [QTMovie movie];
                // release missing by intent, gets released by the block!
            });
        }
        else
        {
            EnterMovies();
            QTMovie* movie = [QTMovie movie];
            [movie release];
        }
    }
}


OSXQTKitVideo::OSXQTKitVideo()
    : osgVideo::VideoImageStream()
    , _rate(0.0)
    , _coreVideoAdapter(NULL)
{
    initializeQTKit();

    _status = INVALID;
    _data = new Data();
    _data->notificationHandler = [[NotificationHandler alloc] init];

@@ -211,9 +237,9 @@ void OSXQTKitVideo::open(const std::string& file_name)

    applyLoopingMode();

    _waitForFirstFrame = true;
    _waitForFirstFrame = true;
    requestNewFrame(true);

    _fileName = file_name;
    _status = (valid) ? PAUSED : INVALID;
}

@@ -367,4 +393,9 @@ void OSXQTKitVideo::decodeFrame(bool force)
        dirty();
    }
}


osg::Texture* OSXQTKitVideo::createSuitableTexture()
{
    return new OSXCoreVideoTexture(this);
}

@@ -67,7 +67,7 @@ class ReaderWriterQTKit : public osgDB::ReaderWriter

            supportsOption("disableCoreVideo", "disable the usage of coreVideo when using readObjectFile, returns an ImageStream instead");
            supportsOption("disableMultiThreadedFrameDispatching", "disable the usage of the multithreade VideoFrameDispatcher to decode video frames");

        }

@@ -94,6 +94,9 @@ class ReaderWriterQTKit : public osgDB::ReaderWriter
                fileName = osgDB::findDataFile( fileName, options );
                if (fileName.empty()) return ReadResult::FILE_NOT_FOUND;
            }

            static OpenThreads::Mutex mutex;
            OpenThreads::ScopedLock<OpenThreads::Mutex> lock(mutex);

            OSG_INFO<<"ReaderWriterQTKit::readImage "<< fileName<< std::endl;

@@ -57,6 +57,10 @@ public:
    /// jumps to a specific position
    virtual void seek(double pos);

    virtual void rewind() {
        seek(0);
    }

    /// returns the current playing position
    virtual double getCurrentTime () const;

@@ -91,6 +95,9 @@ public:
    bool isCoreVideoUsed() const { return _useCoreVideo; }
    void lazyInitCoreVideoTextureCache(osg::State& state);
    bool getCurrentCoreVideoTexture(GLenum& target, GLint& name, int& width, int& height) const;

    virtual osg::Texture* createSuitableTexture();

protected:

    virtual bool needsDispatching() const;

@@ -3,10 +3,12 @@
#include <osgdB/FileNameUtils>
#include <osgViewer/api/Cocoa/GraphicsWindowCocoa>
#include <iostream>
#include <deque>

#import <AVFoundation/AVFoundation.h>
#import <Cocoa/Cocoa.h>

#include "OSXAVFoundationCoreVideoTexture.h"


namespace {

@@ -99,7 +101,6 @@ private:
class OSXAVFoundationVideo::Data {
public:
    AVPlayer* avplayer;
    AVPlayerItem* avplayeritem;
    AVPlayerItemVideoOutput* output;
    OSXAVFoundationVideoDelegate* delegate;
    std::vector<CVBufferRef> lastFrames;

@@ -108,7 +109,6 @@ public:

    Data()
        : avplayer(NULL)
        , avplayeritem(NULL)
        , output(NULL)
        , delegate(NULL)
        , lastFrames(3)

@@ -117,12 +117,35 @@ public:
        , coreVideoTextureCache(0)
    {
    }
    ~Data() {

    void clear()
    {
        if (delegate) {
            [[NSNotificationCenter defaultCenter] removeObserver: delegate
                name:AVPlayerItemDidPlayToEndTimeNotification
                object:avplayer.currentItem
            ];
            [delegate release];
        }

        if (avplayer) {
            [avplayer cancelPendingPrerolls];
            [avplayer.currentItem.asset cancelLoading];
            [avplayer.currentItem removeOutput:output];
        }

        [output release];
        [avplayeritem release];
        [avplayer release];

        [delegate release];

        avplayer = NULL;
        output = NULL;
        delegate = NULL;
    }

    ~Data() {

        clear();

        for(unsigned int i=0; i< lastFrames.size(); ++i)
        {

@@ -137,10 +160,6 @@ public:
            CVOpenGLTextureCacheRelease(coreVideoTextureCache);
            coreVideoTextureCache = NULL;
        }
        output = NULL;
        avplayer = NULL;
        avplayeritem = NULL;
        delegate = NULL;
    }

    void addFrame(CVBufferRef frame)

@@ -188,12 +207,16 @@ OSXAVFoundationVideo::OSXAVFoundationVideo()
    _data = new Data();
    _status = INVALID;
    setOrigin(TOP_LEFT);

    // std::cout << " OSXAVFoundationVideo " << this << std::endl;
}


OSXAVFoundationVideo::~OSXAVFoundationVideo()
{
    // std::cout << "~OSXAVFoundationVideo " << this << " " << _data->avplayer << std::endl;
    quit();
    clear();
    if (_data)
        delete _data;
}

@@ -226,33 +249,21 @@ double OSXAVFoundationVideo::getTimeMultiplier() const

void OSXAVFoundationVideo::pause()
{
    setNeedsDispatching(StopUpdate);

    NSAutoreleasePoolHelper helper;

    if (_data->avplayer) {
        [_data->avplayer pause];
        _status = PAUSED;
        setNeedsDispatching(StopUpdate);
    }
}


void OSXAVFoundationVideo::clear()
{
    [_data->output release];
    [_data->avplayeritem release];
    [_data->avplayer release];

    if (_data->delegate) {
        [[NSNotificationCenter defaultCenter] removeObserver: _data->delegate
            name:AVPlayerItemDidPlayToEndTimeNotification
            object:[_data->avplayer currentItem]
        ];
    }

    [_data->delegate release];

    _data->output = NULL;
    _data->avplayer = NULL;
    _data->avplayeritem = NULL;
    _data->delegate = NULL;
    if (_data)
        _data->clear();
}


@@ -277,6 +288,8 @@ double OSXAVFoundationVideo::getCurrentTime () const

void OSXAVFoundationVideo::open(const std::string& filename)
{
    NSAutoreleasePoolHelper helper;

    clear();

    _data->delegate = [[OSXAVFoundationVideoDelegate alloc] init];

@@ -303,21 +316,24 @@ void OSXAVFoundationVideo::open(const std::string& filename)
        _data->output.suppressesPlayerRendering = YES;
    }

    _data->avplayeritem = [[AVPlayerItem alloc] initWithURL: url];
    _data->avplayer = [AVPlayer playerWithPlayerItem: _data->avplayeritem];
    _data->avplayer = [AVPlayer playerWithURL: url]; // AVPlayerFactory::instance()->getOrCreate(url);
    [_data->avplayer retain];

    _data->avplayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;

    [[_data->avplayer currentItem] addOutput:_data->output];
    [_data->avplayer.currentItem addOutput:_data->output];

    [[NSNotificationCenter defaultCenter] addObserver: _data->delegate
        selector:@selector(playerItemDidReachEnd:)
        name:AVPlayerItemDidPlayToEndTimeNotification
        object:[_data->avplayer currentItem]];
        object:_data->avplayer.currentItem];

    _videoDuration = CMTimeGetSeconds([[_data->avplayer currentItem] duration]);

    _videoDuration = CMTimeGetSeconds([_data->avplayer.currentItem duration]);

    // get the max size of the video-tracks
    NSArray* tracks = [_data->avplayeritem.asset tracksWithMediaType: AVMediaTypeVideo];
    NSArray* tracks = [_data->avplayer.currentItem.asset tracksWithMediaType: AVMediaTypeVideo];
    CGSize size;
    for(unsigned int i=0; i < [tracks count]; ++i)
    {

@@ -329,7 +345,11 @@ void OSXAVFoundationVideo::open(const std::string& filename)
    _s = size.width;
    _t = size.height;
    _r = 1;
    unsigned char* buffer = (unsigned char*)calloc(_s*_t*4, 1);
    setImage(_s, _t, 1, GL_RGBA, GL_BGRA, GL_UNSIGNED_BYTE, buffer, USE_MALLOC_FREE);

    _fileName = filename;

    requestNewFrame();

    _status = PAUSED;

@@ -344,6 +364,7 @@ float OSXAVFoundationVideo::getVolume() const

void OSXAVFoundationVideo::setVolume(float v)
{
    NSAutoreleasePoolHelper helper;
    _volume = v;
    if (_data->avplayer)
        [_data->avplayer setVolume: v];

@@ -511,4 +532,11 @@ void OSXAVFoundationVideo::lazyInitCoreVideoTextureCache(osg::State& state)
        }
    }
}


osg::Texture* OSXAVFoundationVideo::createSuitableTexture()
{
    return NULL; // new OSXAVFoundationCoreVideoTexture(this);
}

@@ -59,13 +59,16 @@ class ReaderWriterAVFoundation : public osgDB::ReaderWriter
                fileName = osgDB::findDataFile( fileName, options );
                if (fileName.empty()) return ReadResult::FILE_NOT_FOUND;
            }

            static OpenThreads::Mutex mutex;
            OpenThreads::ScopedLock<OpenThreads::Mutex> lock(mutex);

            OSG_INFO<<"ReaderWriterAVFoundation::readImage "<< fileName<< std::endl;

            osg::ref_ptr<OSXAVFoundationVideo> video = new OSXAVFoundationVideo();

            bool disable_multi_threaded_frame_dispatching = options ? (options->getPluginStringData("disableMultiThreadedFrameDispatching") == "true") : false;
            bool disable_core_video = true; // options ? (options->getPluginStringData("disableCoreVideo") == "true") : false;
            bool disable_core_video = options ? (options->getPluginStringData("disableCoreVideo") == "true") : false;
            OSG_INFO << "disableMultiThreadedFrameDispatching: " << disable_multi_threaded_frame_dispatching << std::endl;
            OSG_INFO << "disableCoreVideo : " << disable_core_video << std::endl;

@@ -336,6 +336,7 @@ osg::Image* CreateOSGImageFromCGImage(CGImageRef image_ref)
        //
        case 16:
        case 32:
        case 64:
        {

            internal_format = GL_RGBA8;

@@ -59,6 +59,7 @@
using namespace osgPresentation;

#define USE_CLIENT_STORAGE_HINT 0
#define USE_TEXTURE_FROM_VIDEO_PLUGIN 1

class SetToTransparentBin : public osg::NodeVisitor
{

@@ -770,7 +771,7 @@ void SlideShowConstructor::findImageStreamsAndAddCallbacks(osg::Node* node)
osg::Geometry* SlideShowConstructor::createTexturedQuadGeometry(const osg::Vec3& pos, const osg::Vec4& rotation, float width, float height, osg::Image* image, bool& usedTextureRectangle)
{
    osg::Geometry* pictureQuad = 0;
    osg::Texture* texture = 0;
    osg::ref_ptr<osg::Texture> texture = 0;
    osg::StateSet* stateset = 0;

    osg::Vec3 positionVec = pos;

@@ -782,7 +783,15 @@ osg::Geometry* SlideShowConstructor::createTexturedQuadGeometry(const osg::Vec3&
    heightVec = heightVec*rotationMatrix;

    osg::ImageStream* imageStream = dynamic_cast<osg::ImageStream*>(image);

    // let the video-plugin create a texture for us, if supported
#if USE_TEXTURE_FROM_VIDEO_PLUGIN
    if(imageStream)
    {
        texture = imageStream->createSuitableTexture();
    }
#endif

    bool flipYAxis = image->getOrigin()==osg::Image::TOP_LEFT;

#if 1

@@ -798,50 +807,47 @@ osg::Geometry* SlideShowConstructor::createTexturedQuadGeometry(const osg::Vec3&
    // pass back info on wether texture 2D is used.
    usedTextureRectangle = useTextureRectangle;

    if (useTextureRectangle)
    if (!texture)
    {
        pictureQuad = osg::createTexturedQuadGeometry(positionVec,
            widthVec,
            heightVec,
            0.0f, flipYAxis ? image->t() : 0.0f,
            image->s(), flipYAxis ? 0.0f : image->t());

        stateset = pictureQuad->getOrCreateStateSet();

        texture = new osg::TextureRectangle(image);
        stateset->setTextureAttributeAndModes(0,
            texture,
            osg::StateAttribute::ON);

        if (useTextureRectangle)
        {
            texture = new osg::TextureRectangle(image);
        }
        else
        {
            texture = new osg::Texture2D(image);

            texture->setResizeNonPowerOfTwoHint(false);
            texture->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
            texture->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
#if USE_CLIENT_STORAGE_HINT
            texture->setClientStorageHint(true);
#endif
        }
    }
    else
    if (texture)
    {
        float t(0), l(0);
        float r = (texture->getTextureTarget() == GL_TEXTURE_RECTANGLE) ? image->s() : 1;
        float b = (texture->getTextureTarget() == GL_TEXTURE_RECTANGLE) ? image->t() : 1;

        if (flipYAxis)
            std::swap(t,b);

        pictureQuad = osg::createTexturedQuadGeometry(positionVec,
            widthVec,
            heightVec,
            0.0f, flipYAxis ? 1.0f : 0.0f,
            1.0f, flipYAxis ? 0.0f : 1.0f);

            widthVec,
            heightVec,
            l, t, r, b);

        stateset = pictureQuad->getOrCreateStateSet();

        texture = new osg::Texture2D(image);

        texture->setResizeNonPowerOfTwoHint(false);
        texture->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
        texture->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
#if USE_CLIENT_STORAGE_HINT
        texture->setClientStorageHint(true);
#endif
        stateset->setTextureAttributeAndModes(0,
            texture,
            osg::StateAttribute::ON);

            texture,
            osg::StateAttribute::ON);
    }


    if (!pictureQuad) return 0;


    if (imageStream)
    {
        imageStream->pause();