From f9fd4342bafb54c834b4e02a93d3f0469e45fd30 Mon Sep 17 00:00:00 2001
From: Robert Osfield
Date: Wed, 24 Oct 2012 10:43:01 +0000
Subject: [PATCH] =?UTF-8?q?From=20Stephan=20Huber,=20"attached=20you'll=20?=
 =?UTF-8?q?find=20the=20latest=20versions=20of=20the=20QTKit=20+=20the=20A?=
 =?UTF-8?q?VFoundation-plugin,=20some=20changes=20to=20osgPresentation=20a?=
 =?UTF-8?q?nd=20a=20small=20enhancement=20for=20ImageIO.?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

I fixed some bugs and did some more tests with both of the video-plugins. I integrated CoreVideo with osgPresentation: ImageStream has a new virtual method called createSuitableTexture, which returns NULL in the default implementation. Specialized implementations like the QTKit-plugin return a CoreVideo-texture. I refactored the code in SlideShowConstructor::createTexturedQuad to use a texture returned from ImageStream::createSuitableTexture. I did not use osgDB::readObjectFile to get the texture-object, as a lot of image-related code in SlideShowConstructor would have had to be refactored to use a texture. My changes are minimal and should not break existing code.

There's one minor issue with CoreVideo in general: as the implementation is asynchronous, there might be no texture available for the first frame when the video is first shown. I am a bit unsure how to tackle this problem; any input on this is appreciated.

Back to the AVFoundation-plugin: the current implementation does not support CoreVideo in the way the QTKit-plugin does. There's no way to get decoded frames from AVFoundation that are already stored on the GPU, which is kind of sad. I added some support for CoreVideo to transfer decoded frames back to the GPU, but in my tests the performance was worse than with the normal glTexSubImage approach. This is why I disabled CoreVideo for AVFoundation. You can still request a CoreVideoTexture via readObjectFile, though. "
---
 include/osg/ImageStream | 12 ++-
 src/osgPlugins/QTKit/OSXCoreVideoTexture.cpp | 2 +-
 src/osgPlugins/QTKit/OSXQTKitVideo.h | 4 +
 src/osgPlugins/QTKit/OSXQTKitVideo.mm | 37 +++++++-
 src/osgPlugins/QTKit/ReaderWriterQTKit.cpp | 5 +-
 .../avfoundation/OSXAVFoundationVideo.h | 7 ++
 .../avfoundation/OSXAVFoundationVideo.mm | 94 ++++++++++++-------
 .../avfoundation/ReaderWriterAVFoundation.cpp | 7 +-
 .../imageio/ReaderWriterImageIO.cpp | 1 +
 src/osgPresentation/SlideShowConstructor.cpp | 80 ++++++++--------
 10 files changed, 170 insertions(+), 79 deletions(-)

diff --git a/include/osg/ImageStream b/include/osg/ImageStream index 4257ab7d3..c805e4e43 100644 --- a/include/osg/ImageStream +++ b/include/osg/ImageStream @@ -19,6 +19,9 @@ namespace osg { +// forward declare of osg::Texture +class Texture; + /** * Image Stream class. */ @@ -98,8 +101,13 @@ class OSG_EXPORT ImageStream : public Image void setAudioStreams(const AudioStreams& asl) { _audioStreams = asl; } AudioStreams& getAudioStreams() { return _audioStreams; } const AudioStreams& getAudioStreams() const { return _audioStreams; } - - + + /** create a suitable texture for this imagestream, return NULL, if not supported * implement this method in subclasses to use special technologies like CoreVideo * or similar.
+ */ + virtual osg::Texture* createSuitableTexture() { return NULL; } + protected: virtual void applyLoopingMode() {} diff --git a/src/osgPlugins/QTKit/OSXCoreVideoTexture.cpp b/src/osgPlugins/QTKit/OSXCoreVideoTexture.cpp index a97485f31..acb2965fc 100644 --- a/src/osgPlugins/QTKit/OSXCoreVideoTexture.cpp +++ b/src/osgPlugins/QTKit/OSXCoreVideoTexture.cpp @@ -138,9 +138,9 @@ void OSXCoreVideoTexture::apply(osg::State& state) const { else _adapter = new OSXCoreVideoAdapter(state, _image.get()); } + _adapter->getFrame(); _textureTarget = _adapter->getTextureTarget(); - glBindTexture(_textureTarget, _adapter->getTextureName()); } diff --git a/src/osgPlugins/QTKit/OSXQTKitVideo.h b/src/osgPlugins/QTKit/OSXQTKitVideo.h index 2cf6ec728..3719e2917 100644 --- a/src/osgPlugins/QTKit/OSXQTKitVideo.h +++ b/src/osgPlugins/QTKit/OSXQTKitVideo.h @@ -81,6 +81,10 @@ public: return _waitForFirstFrame || getNeedsDispatching(); } + static void initializeQTKit(); + + virtual osg::Texture* createSuitableTexture(); + protected: virtual void applyLoopingMode(); diff --git a/src/osgPlugins/QTKit/OSXQTKitVideo.mm b/src/osgPlugins/QTKit/OSXQTKitVideo.mm index 6be2d9539..e5f9ba4a7 100644 --- a/src/osgPlugins/QTKit/OSXQTKitVideo.mm +++ b/src/osgPlugins/QTKit/OSXQTKitVideo.mm @@ -19,6 +19,7 @@ #include "OSXQTKitVideo.h" #include "OSXCoreVideoAdapter.h" +#include "OSXCoreVideoTexture.h" namespace { @@ -88,12 +89,37 @@ struct OSXQTKitVideo::Data { }; +void OSXQTKitVideo::initializeQTKit() +{ + static bool inited(false); + if (!inited) + { + inited = true; + // force initialization of QTKit on the main-thread! + if (![NSThread isMainThread]) { + dispatch_apply(1, dispatch_get_main_queue(), ^(size_t n) { + EnterMovies(); + QTMovie* movie = [QTMovie movie]; + // release missing by intent, gets released by the block! + }); + } + else + { + EnterMovies(); + QTMovie* movie = [QTMovie movie]; + [movie release]; + } + } +} + OSXQTKitVideo::OSXQTKitVideo() : osgVideo::VideoImageStream() , _rate(0.0) , _coreVideoAdapter(NULL) { + initializeQTKit(); + _status = INVALID; _data = new Data(); _data->notificationHandler = [[NotificationHandler alloc] init]; @@ -211,9 +237,9 @@ void OSXQTKitVideo::open(const std::string& file_name) applyLoopingMode(); - _waitForFirstFrame = true; + _waitForFirstFrame = true; requestNewFrame(true); - + _fileName = file_name; _status = (valid) ? 
PAUSED : INVALID; } @@ -367,4 +393,9 @@ void OSXQTKitVideo::decodeFrame(bool force) dirty(); } } - + + +osg::Texture* OSXQTKitVideo::createSuitableTexture() +{ + return new OSXCoreVideoTexture(this); +} diff --git a/src/osgPlugins/QTKit/ReaderWriterQTKit.cpp b/src/osgPlugins/QTKit/ReaderWriterQTKit.cpp index 6e367a4dd..e1f8553ae 100644 --- a/src/osgPlugins/QTKit/ReaderWriterQTKit.cpp +++ b/src/osgPlugins/QTKit/ReaderWriterQTKit.cpp @@ -67,7 +67,7 @@ class ReaderWriterQTKit : public osgDB::ReaderWriter supportsOption("disableCoreVideo", "disable the usage of coreVideo when using readObjectFile, returns an ImageStream instead"); supportsOption("disableMultiThreadedFrameDispatching", "disable the usage of the multithreade VideoFrameDispatcher to decode video frames"); - + } @@ -94,6 +94,9 @@ class ReaderWriterQTKit : public osgDB::ReaderWriter fileName = osgDB::findDataFile( fileName, options ); if (fileName.empty()) return ReadResult::FILE_NOT_FOUND; } + + static OpenThreads::Mutex mutex; + OpenThreads::ScopedLock lock(mutex); OSG_INFO<<"ReaderWriterQTKit::readImage "<< fileName<< std::endl; diff --git a/src/osgPlugins/avfoundation/OSXAVFoundationVideo.h b/src/osgPlugins/avfoundation/OSXAVFoundationVideo.h index 24ecec585..d48349c99 100644 --- a/src/osgPlugins/avfoundation/OSXAVFoundationVideo.h +++ b/src/osgPlugins/avfoundation/OSXAVFoundationVideo.h @@ -57,6 +57,10 @@ public: /// jumps to a specific position virtual void seek(double pos); + virtual void rewind() { + seek(0); + } + /// returns the current playing position virtual double getCurrentTime () const; @@ -91,6 +95,9 @@ public: bool isCoreVideoUsed() const { return _useCoreVideo; } void lazyInitCoreVideoTextureCache(osg::State& state); bool getCurrentCoreVideoTexture(GLenum& target, GLint& name, int& width, int& height) const; + + virtual osg::Texture* createSuitableTexture(); + protected: virtual bool needsDispatching() const; diff --git a/src/osgPlugins/avfoundation/OSXAVFoundationVideo.mm b/src/osgPlugins/avfoundation/OSXAVFoundationVideo.mm index c3d0bbf82..c647ad93c 100644 --- a/src/osgPlugins/avfoundation/OSXAVFoundationVideo.mm +++ b/src/osgPlugins/avfoundation/OSXAVFoundationVideo.mm @@ -3,10 +3,12 @@ #include #include #include +#include #import #import +#include "OSXAVFoundationCoreVideoTexture.h" namespace { @@ -99,7 +101,6 @@ private: class OSXAVFoundationVideo::Data { public: AVPlayer* avplayer; - AVPlayerItem* avplayeritem; AVPlayerItemVideoOutput* output; OSXAVFoundationVideoDelegate* delegate; std::vector lastFrames; @@ -108,7 +109,6 @@ public: Data() : avplayer(NULL) - , avplayeritem(NULL) , output(NULL) , delegate(NULL) , lastFrames(3) @@ -117,12 +117,35 @@ public: , coreVideoTextureCache(0) { } - ~Data() { + + void clear() + { + if (delegate) { + [[NSNotificationCenter defaultCenter] removeObserver: delegate + name:AVPlayerItemDidPlayToEndTimeNotification + object:avplayer.currentItem + ]; + [delegate release]; + } + + if (avplayer) { + [avplayer cancelPendingPrerolls]; + [avplayer.currentItem.asset cancelLoading]; + [avplayer.currentItem removeOutput:output]; + } + [output release]; - [avplayeritem release]; [avplayer release]; - [delegate release]; + + avplayer = NULL; + output = NULL; + delegate = NULL; + } + + ~Data() { + + clear(); for(unsigned int i=0; i< lastFrames.size(); ++i) { @@ -137,10 +160,6 @@ public: CVOpenGLTextureCacheRelease(coreVideoTextureCache); coreVideoTextureCache = NULL; } - output = NULL; - avplayer = NULL; - avplayeritem = NULL; - delegate = NULL; } void addFrame(CVBufferRef frame) @@ 
-188,12 +207,16 @@ OSXAVFoundationVideo::OSXAVFoundationVideo() _data = new Data(); _status = INVALID; setOrigin(TOP_LEFT); + + // std::cout << " OSXAVFoundationVideo " << this << std::endl; } OSXAVFoundationVideo::~OSXAVFoundationVideo() { + // std::cout << "~OSXAVFoundationVideo " << this << " " << _data->avplayer << std::endl; quit(); + clear(); if (_data) delete _data; } @@ -226,33 +249,21 @@ double OSXAVFoundationVideo::getTimeMultiplier() const void OSXAVFoundationVideo::pause() { + setNeedsDispatching(StopUpdate); + + NSAutoreleasePoolHelper helper; + if (_data->avplayer) { [_data->avplayer pause]; _status = PAUSED; - setNeedsDispatching(StopUpdate); } } void OSXAVFoundationVideo::clear() { - [_data->output release]; - [_data->avplayeritem release]; - [_data->avplayer release]; - - if (_data->delegate) { - [[NSNotificationCenter defaultCenter] removeObserver: _data->delegate - name:AVPlayerItemDidPlayToEndTimeNotification - object:[_data->avplayer currentItem] - ]; - } - - [_data->delegate release]; - - _data->output = NULL; - _data->avplayer = NULL; - _data->avplayeritem = NULL; - _data->delegate = NULL; + if (_data) + _data->clear(); } @@ -277,6 +288,8 @@ double OSXAVFoundationVideo::getCurrentTime () const void OSXAVFoundationVideo::open(const std::string& filename) { + NSAutoreleasePoolHelper helper; + clear(); _data->delegate = [[OSXAVFoundationVideoDelegate alloc] init]; @@ -303,21 +316,24 @@ void OSXAVFoundationVideo::open(const std::string& filename) _data->output.suppressesPlayerRendering = YES; } - _data->avplayeritem = [[AVPlayerItem alloc] initWithURL: url]; - _data->avplayer = [AVPlayer playerWithPlayerItem: _data->avplayeritem]; + _data->avplayer = [AVPlayer playerWithURL: url]; // AVPlayerFactory::instance()->getOrCreate(url); + [_data->avplayer retain]; + _data->avplayer.actionAtItemEnd = AVPlayerActionAtItemEndNone; - [[_data->avplayer currentItem] addOutput:_data->output]; + [_data->avplayer.currentItem addOutput:_data->output]; + [[NSNotificationCenter defaultCenter] addObserver: _data->delegate selector:@selector(playerItemDidReachEnd:) name:AVPlayerItemDidPlayToEndTimeNotification - object:[_data->avplayer currentItem]]; + object:_data->avplayer.currentItem]; - _videoDuration = CMTimeGetSeconds([[_data->avplayer currentItem] duration]); + + _videoDuration = CMTimeGetSeconds([_data->avplayer.currentItem duration]); // get the max size of the video-tracks - NSArray* tracks = [_data->avplayeritem.asset tracksWithMediaType: AVMediaTypeVideo]; + NSArray* tracks = [_data->avplayer.currentItem.asset tracksWithMediaType: AVMediaTypeVideo]; CGSize size; for(unsigned int i=0; i < [tracks count]; ++i) { @@ -329,7 +345,11 @@ void OSXAVFoundationVideo::open(const std::string& filename) _s = size.width; _t = size.height; _r = 1; + unsigned char* buffer = (unsigned char*)calloc(_s*_t*4, 1); + setImage(_s, _t, 1, GL_RGBA, GL_BGRA, GL_UNSIGNED_BYTE, buffer, USE_MALLOC_FREE); + _fileName = filename; + requestNewFrame(); _status = PAUSED; @@ -344,6 +364,7 @@ float OSXAVFoundationVideo::getVolume() const void OSXAVFoundationVideo::setVolume(float v) { + NSAutoreleasePoolHelper helper; _volume = v; if (_data->avplayer) [_data->avplayer setVolume: v]; @@ -511,4 +532,11 @@ void OSXAVFoundationVideo::lazyInitCoreVideoTextureCache(osg::State& state) } } } + + +osg::Texture* OSXAVFoundationVideo::createSuitableTexture() +{ + return NULL; // new OSXAVFoundationCoreVideoTexture(this); +} + diff --git a/src/osgPlugins/avfoundation/ReaderWriterAVFoundation.cpp 
b/src/osgPlugins/avfoundation/ReaderWriterAVFoundation.cpp index 7dc5a36db..0933b7dda 100644 --- a/src/osgPlugins/avfoundation/ReaderWriterAVFoundation.cpp +++ b/src/osgPlugins/avfoundation/ReaderWriterAVFoundation.cpp @@ -59,13 +59,16 @@ class ReaderWriterAVFoundation : public osgDB::ReaderWriter fileName = osgDB::findDataFile( fileName, options ); if (fileName.empty()) return ReadResult::FILE_NOT_FOUND; } - + + static OpenThreads::Mutex mutex; + OpenThreads::ScopedLock lock(mutex); + OSG_INFO<<"ReaderWriterAVFoundation::readImage "<< fileName<< std::endl; osg::ref_ptr video = new OSXAVFoundationVideo(); bool disable_multi_threaded_frame_dispatching = options ? (options->getPluginStringData("disableMultiThreadedFrameDispatching") == "true") : false; - bool disable_core_video = true; // options ? (options->getPluginStringData("disableCoreVideo") == "true") : false; + bool disable_core_video = options ? (options->getPluginStringData("disableCoreVideo") == "true") : false; OSG_INFO << "disableMultiThreadedFrameDispatching: " << disable_multi_threaded_frame_dispatching << std::endl; OSG_INFO << "disableCoreVideo : " << disable_core_video << std::endl; diff --git a/src/osgPlugins/imageio/ReaderWriterImageIO.cpp b/src/osgPlugins/imageio/ReaderWriterImageIO.cpp index fd9a686fe..fe4025547 100644 --- a/src/osgPlugins/imageio/ReaderWriterImageIO.cpp +++ b/src/osgPlugins/imageio/ReaderWriterImageIO.cpp @@ -336,6 +336,7 @@ osg::Image* CreateOSGImageFromCGImage(CGImageRef image_ref) // case 16: case 32: + case 64: { internal_format = GL_RGBA8; diff --git a/src/osgPresentation/SlideShowConstructor.cpp b/src/osgPresentation/SlideShowConstructor.cpp index cdb91cbd4..f58387742 100644 --- a/src/osgPresentation/SlideShowConstructor.cpp +++ b/src/osgPresentation/SlideShowConstructor.cpp @@ -59,6 +59,7 @@ using namespace osgPresentation; #define USE_CLIENT_STORAGE_HINT 0 +#define USE_TEXTURE_FROM_VIDEO_PLUGIN 1 class SetToTransparentBin : public osg::NodeVisitor { @@ -770,7 +771,7 @@ void SlideShowConstructor::findImageStreamsAndAddCallbacks(osg::Node* node) osg::Geometry* SlideShowConstructor::createTexturedQuadGeometry(const osg::Vec3& pos, const osg::Vec4& rotation, float width, float height, osg::Image* image, bool& usedTextureRectangle) { osg::Geometry* pictureQuad = 0; - osg::Texture* texture = 0; + osg::ref_ptr texture = 0; osg::StateSet* stateset = 0; osg::Vec3 positionVec = pos; @@ -782,7 +783,15 @@ osg::Geometry* SlideShowConstructor::createTexturedQuadGeometry(const osg::Vec3& heightVec = heightVec*rotationMatrix; osg::ImageStream* imageStream = dynamic_cast(image); - + + // let the video-plugin create a texture for us, if supported + #if USE_TEXTURE_FROM_VIDEO_PLUGIN + if(imageStream) + { + texture = imageStream->createSuitableTexture(); + } + #endif + bool flipYAxis = image->getOrigin()==osg::Image::TOP_LEFT; #if 1 @@ -798,50 +807,47 @@ osg::Geometry* SlideShowConstructor::createTexturedQuadGeometry(const osg::Vec3& // pass back info on wether texture 2D is used. usedTextureRectangle = useTextureRectangle; - if (useTextureRectangle) + if (!texture) { - pictureQuad = osg::createTexturedQuadGeometry(positionVec, - widthVec, - heightVec, - 0.0f, flipYAxis ? image->t() : 0.0f, - image->s(), flipYAxis ? 
0.0f : image->t()); - - stateset = pictureQuad->getOrCreateStateSet(); - - texture = new osg::TextureRectangle(image); - stateset->setTextureAttributeAndModes(0, - texture, - osg::StateAttribute::ON); - - + if (useTextureRectangle) + { + texture = new osg::TextureRectangle(image); + } + else + { + texture = new osg::Texture2D(image); + texture->setResizeNonPowerOfTwoHint(false); + texture->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR); + texture->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR); + #if USE_CLIENT_STORAGE_HINT + texture->setClientStorageHint(true); + #endif + + } } - else + if (texture) { + float t(0), l(0); + float r = (texture->getTextureTarget() == GL_TEXTURE_RECTANGLE) ? image->s() : 1; + float b = (texture->getTextureTarget() == GL_TEXTURE_RECTANGLE) ? image->t() : 1; + + if (flipYAxis) + std::swap(t,b); + pictureQuad = osg::createTexturedQuadGeometry(positionVec, - widthVec, - heightVec, - 0.0f, flipYAxis ? 1.0f : 0.0f, - 1.0f, flipYAxis ? 0.0f : 1.0f); - + widthVec, + heightVec, + l, t, r, b); + stateset = pictureQuad->getOrCreateStateSet(); - - texture = new osg::Texture2D(image); - - texture->setResizeNonPowerOfTwoHint(false); - texture->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR); - texture->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR); -#if USE_CLIENT_STORAGE_HINT - texture->setClientStorageHint(true); -#endif stateset->setTextureAttributeAndModes(0, - texture, - osg::StateAttribute::ON); - + texture, + osg::StateAttribute::ON); } - + if (!pictureQuad) return 0; - + if (imageStream) { imageStream->pause();
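
Usage note (not part of the patch): the sketch below shows roughly how application code can take advantage of the new ImageStream::createSuitableTexture() hook, with the same fallback to an ordinary osg::Texture2D that SlideShowConstructor::createTexturedQuadGeometry now uses when the plugin returns NULL. The helper function name is illustrative only.

    #include <osg/ImageStream>
    #include <osg/Texture2D>

    // Illustrative helper (not in the patch): prefer a plugin-provided texture,
    // e.g. the CoreVideo-backed texture returned by the QTKit plugin, and fall
    // back to a plain Texture2D the way SlideShowConstructor now does.
    osg::Texture* createTextureForImage(osg::Image* image)
    {
        osg::ImageStream* imageStream = dynamic_cast<osg::ImageStream*>(image);
        if (imageStream)
        {
            osg::Texture* texture = imageStream->createSuitableTexture();
            if (texture) return texture;   // e.g. OSXCoreVideoTexture from QTKit
        }

        // default path: ordinary 2D texture around the image
        osg::Texture2D* texture2D = new osg::Texture2D(image);
        texture2D->setResizeNonPowerOfTwoHint(false);
        texture2D->setFilter(osg::Texture::MIN_FILTER, osg::Texture::LINEAR);
        texture2D->setFilter(osg::Texture::MAG_FILTER, osg::Texture::LINEAR);
        return texture2D;
    }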
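
Similarly, the "disableCoreVideo" option handled by the readers above is passed as plugin string data, so requesting a plain ImageStream (rather than a CoreVideo texture) from osgDB::readObjectFile looks roughly like this minimal sketch; "movie.mov" is a placeholder file name.

    #include <osg/ImageStream>
    #include <osg/Texture>
    #include <osgDB/Options>
    #include <osgDB/ReadFile>

    // Rough sketch: ask the video plugin for an ImageStream instead of a
    // CoreVideo-backed texture by setting the "disableCoreVideo" option.
    void loadMovie()
    {
        osg::ref_ptr<osgDB::Options> options = new osgDB::Options;
        options->setPluginStringData("disableCoreVideo", "true");

        osg::ref_ptr<osg::Object> object = osgDB::readObjectFile("movie.mov", options.get());

        if (osg::ImageStream* stream = dynamic_cast<osg::ImageStream*>(object.get()))
        {
            stream->play();   // ordinary image-stream path
        }
        else if (osg::Texture* texture = dynamic_cast<osg::Texture*>(object.get()))
        {
            // without the option, the QTKit plugin can return a CoreVideo-backed
            // texture here, ready to be attached to a StateSet
            (void)texture;
        }
    }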