From Stephan Huber: OSX and iOS video support, via a QTKit plugin for OSX 10.7 and earlier, and an AVFoundation plugin for iOS and OSX 10.8 and later.

This commit is contained in:
Robert Osfield
2012-10-02 14:07:12 +00:00
parent 0dbafcc316
commit 1796d55bea
30 changed files with 3202 additions and 8 deletions

View File

@@ -27,6 +27,7 @@
#include <osg/TextureCubeMap>
#include <osg/Light>
#include <algorithm>
#include <string.h>
#include <stdlib.h>
@@ -213,7 +214,8 @@ Image::Image()
_packing(4),
_pixelAspectRatio(1.0),
_allocationMode(USE_NEW_DELETE),
_data(0L)
_data(0L),
_dimensionsChangedCallbacks()
{
setDataVariance(STATIC);
}
@@ -232,7 +234,8 @@ Image::Image(const Image& image,const CopyOp& copyop):
_pixelAspectRatio(image._pixelAspectRatio),
_allocationMode(USE_NEW_DELETE),
_data(0L),
_mipmapData(image._mipmapData)
_mipmapData(image._mipmapData),
_dimensionsChangedCallbacks(image._dimensionsChangedCallbacks)
{
if (image._data)
{
@@ -853,6 +856,8 @@ void Image::allocateImage(int s,int t,int r,
int packing)
{
_mipmapData.clear();
bool callback_needed(false);
unsigned int previousTotalSize = 0;
@@ -870,6 +875,7 @@ void Image::allocateImage(int s,int t,int r,
if (_data)
{
callback_needed = (_s != s) || (_t != t) || (_r != r);
_s = s;
_t = t;
_r = r;
@@ -884,7 +890,8 @@ void Image::allocateImage(int s,int t,int r,
}
else
{
callback_needed = (_s != 0) || (_t != 0) || (_r != 0);
// failed to allocate memory, for now, will simply set values to 0.
_s = 0;
_t = 0;
@@ -898,7 +905,10 @@ void Image::allocateImage(int s,int t,int r,
// policy so that allocateImage honours previous settings of _internalTextureFormat.
//_internalTextureFormat = 0;
}
if (callback_needed)
handleDimensionsChangedCallbacks();
dirty();
}
@@ -911,7 +921,9 @@ void Image::setImage(int s,int t,int r,
int rowLength)
{
_mipmapData.clear();
bool callback_needed = (_s != s) || (_t != t) || (_r != r);
_s = s;
_t = t;
_r = r;
@@ -926,6 +938,9 @@ void Image::setImage(int s,int t,int r,
_rowLength = rowLength;
dirty();
if (callback_needed)
handleDimensionsChangedCallbacks();
}
@@ -1758,3 +1773,14 @@ Vec4 Image::getColor(const Vec3& texcoord) const
//OSG_NOTICE<<"getColor("<<texcoord<<")="<<getColor(s,t,r)<<std::endl;
return getColor(s,t,r);
}
/// Registers a callback to be invoked whenever the image's dimensions
/// (s, t or r) change via allocateImage()/setImage().
/// Note: no duplicate check is performed; adding the same callback twice
/// will invoke it twice per change.
void Image::addDimensionsChangedCallback(DimensionsChangedCallback* cb)
{
    _dimensionsChangedCallbacks.push_back(cb);
}
/// Removes a previously registered dimensions-changed callback.
/// Only the first matching entry is erased; a no-op if cb was never added.
void Image::removeDimensionsChangedCallback(DimensionsChangedCallback* cb)
{
    DimensionsChangedCallbackVector::iterator itr = std::find(_dimensionsChangedCallbacks.begin(), _dimensionsChangedCallbacks.end(), cb);
    if (itr!=_dimensionsChangedCallbacks.end()) _dimensionsChangedCallbacks.erase(itr);
}

View File

@@ -317,7 +317,8 @@ void Referenced::signalObserversAndDelete(bool signalDelete, bool doDelete) cons
if (doDelete)
{
if (_refCount!=0) OSG_NOTICE<<"Warning Referenced::signalObserversAndDelete(,,) doing delete with _refCount="<<_refCount<<std::endl;
if (_refCount!=0)
OSG_NOTICE<<"Warning Referenced::signalObserversAndDelete(,,) doing delete with _refCount="<<_refCount<<std::endl;
if (getDeleteHandler()) deleteUsingDeleteHandler();
else delete this;

View File

@@ -137,6 +137,9 @@ ENDIF()
IF(QUICKTIME_FOUND)
ADD_DEFINITIONS(-DUSE_QUICKTIME)
ENDIF()
IF(AV_FOUNDATION_FOUND)
ADD_DEFINITIONS(-DUSE_AV_FOUNDATION)
ENDIF()
IF(XINE_FOUND)
ADD_DEFINITIONS(-DUSE_XINE)

View File

@@ -389,6 +389,14 @@ Registry::Registry()
#endif
#endif
#if defined(USE_AV_FOUNDATION)
addFileExtensionAlias("mov", "AVFoundation");
addFileExtensionAlias("mpg", "AVFoundation");
addFileExtensionAlias("mpv", "AVFoundation");
addFileExtensionAlias("mp4", "AVFoundation");
addFileExtensionAlias("m4v", "AVFoundation");
#endif
// remove geo to lwo alias as the new Carbon Graphics GEO format
// also uses the .geo. It is still possible to load light wave .geo
// files via loading the lwo plugin explicitly and then doing a readNodeFile.

View File

@@ -212,6 +212,10 @@ IF(APPLE AND NOT ANDROID)
ADD_SUBDIRECTORY(imageio)
ENDIF()
IF(AV_FOUNDATION_FOUND)
ADD_SUBDIRECTORY(avfoundation)
ENDIF()
IF(QUICKTIME_FOUND)
ADD_SUBDIRECTORY(quicktime)
ENDIF()

View File

@@ -1,7 +1,15 @@
INCLUDE_DIRECTORIES( ${QTKIT_INCLUDE_DIR} )
SET(TARGET_SRC
ReaderWriterQTKit.mm
ReaderWriterQTKit.cpp
OSXQTKitVideo.h
OSXQTKitVideo.mm
OSXCoreVideoAdapter.h
OSXCoreVideoAdapter.mm
OSXCoreVideoTexture.cpp
OSXCoreVideoTexture.h
VideoFrameDispatcher.h
VideoFrameDispatcher.cpp
)
SET(TARGET_LIBRARIES_VARS QTKIT_LIBRARY COCOA_LIBRARY QUICKTIME_LIBRARY COREVIDEO_LIBRARY)

View File

@@ -0,0 +1,50 @@
/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2008 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#pragma once
#include <CoreVideo/CoreVideo.h>
#include <Quicktime/Quicktime.h>
#include "OSXQTKitVideo.h"
/** Adapter that binds an OSXQTKitVideo's QuickTime visual context to a
 *  CoreVideo OpenGL texture, so decoded movie frames can be used directly
 *  as GL textures without a CPU-side pixel copy. */
class OSXCoreVideoAdapter : public osg::Referenced {
public:
    /// Creates the QT OpenGL texture context from the state's Cocoa graphics
    /// window and attaches the given video (must be an OSXQTKitVideo).
    OSXCoreVideoAdapter(osg::State& state, osg::Image* image);

    /// Attaches a video, detaching any previously attached one first.
    void setVideo(osg::Image* image);

    /// Stores the timestamp used for frame queries and immediately pulls a frame.
    void setTimeStamp(const CVTimeStamp* ts) {_timestamp = ts; getFrame();}

    /// Pulls the next frame from the visual context if one is available.
    bool getFrame();

    // NOTE(review): returns the GL texture *name* (an id) but is typed GLenum
    // while the backing member is GLint — confirm callers treat it as a texture id.
    inline GLenum getTextureName() { return _currentTexName; }
    inline GLenum getTextureTarget() { return _currentTexTarget; }

    QTVisualContextRef getVisualContext() { return _context; }

    virtual ~OSXCoreVideoAdapter();

private:
    osg::ref_ptr<OSXQTKitVideo> _video;   // currently attached movie stream
    QTVisualContextRef _context;          // QT OpenGL texture context
    const CVTimeStamp* _timestamp;        // timestamp used for frame queries
    CVOpenGLTextureRef _currentFrame;     // CoreVideo texture of the current frame
    GLint _currentTexName;                // GL texture id of the current frame
    GLenum _currentTexTarget;             // GL texture target of the current frame
};

View File

@@ -0,0 +1,127 @@
/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2008 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#include "OSXCoreVideoAdapter.h"
#include <osg/GL>
#include <osg/State>
#include <osgViewer/api/Cocoa/GraphicsWindowCocoa>
#import <Cocoa/Cocoa.h>
/// Creates the adapter: resolves the CGL context/pixel format from the
/// Cocoa graphics window of `state`, builds a QT OpenGL texture context,
/// then attaches `image` (must be an OSXQTKitVideo) to it.
/// On any failure _context stays NULL and the adapter is inert.
OSXCoreVideoAdapter::OSXCoreVideoAdapter(osg::State& state, osg::Image* image) :
    osg::Referenced(),
    _context(NULL),
    _timestamp(NULL),
    _currentFrame(NULL),
    _currentTexName(0),                       // was left uninitialized
    _currentTexTarget(GL_TEXTURE_RECTANGLE_EXT)
{
    setVideo(image);
    if (!_video.valid())
        return;

    CGLContextObj cglcntx(NULL);
    CGLPixelFormatObj cglPixelFormat(NULL);   // was read uninitialized when no Cocoa window was found

    osgViewer::GraphicsWindowCocoa* win = dynamic_cast<osgViewer::GraphicsWindowCocoa*>(state.getGraphicsContext());
    if (win)
    {
        NSOpenGLContext* context = win->getContext();
        cglcntx = (CGLContextObj)[context CGLContextObj];
        cglPixelFormat = (CGLPixelFormatObj)[ win->getPixelFormat() CGLPixelFormatObj];
    }

    if ((cglcntx == NULL) || (cglPixelFormat == NULL)) {
        OSG_WARN <<"CoreVideoTexture: could not get Context/Pixelformat " << std::endl;
        return;
    }

    // Request a device-RGB working colour space for the visual context.
    CFTypeRef keys[] = { kQTVisualContextWorkingColorSpaceKey };
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CFDictionaryRef textureContextAttributes = CFDictionaryCreate(kCFAllocatorDefault,
        (const void **)keys,
        (const void **)&colorSpace, 1,
        &kCFTypeDictionaryKeyCallBacks,
        &kCFTypeDictionaryValueCallBacks);

    OSStatus err = QTOpenGLTextureContextCreate(kCFAllocatorDefault, cglcntx, cglPixelFormat, textureContextAttributes, &_context);

    // Both objects were created by "Create" calls and are retained by the
    // texture context as needed — release our references (were leaked).
    CFRelease(textureContextAttributes);
    CGColorSpaceRelease(colorSpace);

    if (err != noErr)   // result was previously ignored
    {
        OSG_WARN <<"CoreVideoTexture: could not create texture context " << err << std::endl;
        _context = NULL;
        return;
    }

    // Re-attach now that _context exists, and fetch the first frame.
    setVideo(_video.get());
    setTimeStamp(NULL);
}
/// Teardown order matters: first detach the video (which redirects the
/// movie back to its own pixel-buffer context), then release the current
/// frame, and only then the texture context that produced it.
OSXCoreVideoAdapter::~OSXCoreVideoAdapter()
{
    setVideo(NULL);

    if (_currentFrame) {
        CVOpenGLTextureRelease(_currentFrame);
        _currentFrame = NULL;
    }

    // release the OpenGL Texture Context
    if (_context) {
        CFRelease(_context);
        _context = NULL;
    }
}
/// Attaches `image` (must actually be an OSXQTKitVideo) to this adapter,
/// detaching any previously attached video first. The new video is only
/// wired up once a QuickTime visual context exists.
void OSXCoreVideoAdapter::setVideo(osg::Image* image)
{
    // Detach whatever was attached before.
    if (_video.valid())
        _video->setCoreVideoAdapter(NULL);

    OSXQTKitVideo* video = dynamic_cast<OSXQTKitVideo*>(image);
    _video = video;

    // Without a texture context the adapter cannot serve frames yet;
    // the constructor re-invokes setVideo() once _context is created.
    if (video && _context)
    {
        video->setCoreVideoAdapter(this);
        setTimeStamp(NULL);
    }
}
/// Pulls the newest frame (for _timestamp) out of the visual context and
/// updates the cached GL texture name/target.
/// @return true when a new frame was fetched, false otherwise.
bool OSXCoreVideoAdapter::getFrame()
{
    // Give QuickTime a chance to do housekeeping on the context.
    QTVisualContextTask(_context);

    bool b = QTVisualContextIsNewImageAvailable(_context, _timestamp);
    if (b){
        CVOpenGLTextureRef newFrame = NULL;
        // The copy can fail; the result was previously used unchecked.
        OSStatus err = QTVisualContextCopyImageForTime(_context, kCFAllocatorDefault, _timestamp, &newFrame);
        if ((err != noErr) || (newFrame == NULL))
            return false;

        // Swap in the new frame, releasing the old one.
        if (_currentFrame)
            CVOpenGLTextureRelease(_currentFrame);
        _currentFrame = newFrame;

        _currentTexTarget = CVOpenGLTextureGetTarget(_currentFrame);
        _currentTexName = CVOpenGLTextureGetName(_currentFrame);
    }

    return b;
}

View File

@@ -0,0 +1,147 @@
/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2008 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#include "OSXCoreVideoTexture.h"
/// Default constructor. _textureWidth/_textureHeight are zeroed explicitly:
/// they were left uninitialized before, but compare() reads them.
OSXCoreVideoTexture::OSXCoreVideoTexture()
    : osg::Texture()
    , _textureTarget(GL_TEXTURE_RECTANGLE_EXT)
    , _textureWidth(0)
    , _textureHeight(0)
    , _inited(false)
    , _adapter(NULL)
{
}
/// Constructs a texture around an image (usually an OSXQTKitVideo).
/// _textureWidth/_textureHeight are zeroed explicitly: they were left
/// uninitialized before, but compare() reads them.
OSXCoreVideoTexture::OSXCoreVideoTexture(osg::Image* image):
    osg::Texture(),
    _textureTarget(GL_TEXTURE_RECTANGLE_EXT),
    _textureWidth(0),
    _textureHeight(0),
    _inited(false),
    _adapter(NULL)
{
    setImage(image);
}
/// Copy constructor. Now also copies _textureWidth/_textureHeight, which
/// were previously left uninitialized in the copy although compare()
/// reads them.
OSXCoreVideoTexture::OSXCoreVideoTexture(const OSXCoreVideoTexture& text,const osg::CopyOp& copyop) :
    osg::Texture(text, copyop),
    _textureTarget(text._textureTarget),
    _textureWidth(text._textureWidth),
    _textureHeight(text._textureHeight),
    _inited(text._inited),
    _adapter(text._adapter),
    _image(text._image)
{
}
/// Destructor. Members are ref_ptrs and clean themselves up; the GL
/// texture itself is owned by the CoreVideo adapter, not by this object.
OSXCoreVideoTexture::~OSXCoreVideoTexture() {
}
/// StateAttribute ordering: compares type, then image contents, then
/// texture-object state and texture parameters. Returns <0, 0 or >0.
/// Mirrors the osg::Texture2D::compare() implementation.
int OSXCoreVideoTexture::compare(const osg::StateAttribute& sa) const {
    // Bails out with a type-based ordering unless sa is an OSXCoreVideoTexture;
    // on success the macro declares `rhs` as a typed reference.
    COMPARE_StateAttribute_Types(OSXCoreVideoTexture,sa)

    if (_image!=rhs._image) // smart pointer comparison.
    {
        if (_image.valid())
        {
            if (rhs._image.valid())
            {
                // Both sides have images: order by image contents.
                int result = _image->compare(*rhs._image);
                if (result!=0) return result;
            }
            else
            {
                return 1; // valid lhs._image is greater than null.
            }
        }
        else if (rhs._image.valid())
        {
            return -1; // valid rhs._image is greater than null.
        }
    }

    if (!_image && !rhs._image)
    {
        // no image attached to either Texture2D
        // but could these textures already be downloaded?
        // check the _textureObjectBuffer to see if they have been
        // downloaded
        int result = compareTextureObjects(rhs);
        if (result!=0) return result;
    }

    int result = compareTexture(rhs);
    if (result!=0) return result;

    // compare each paramter in turn against the rhs.
#if 1
    // Only compare dimensions when both sides have a concrete size;
    // a 0 width/height means "not yet determined".
    if (_textureWidth != 0 && rhs._textureWidth != 0)
    {
        COMPARE_StateAttribute_Parameter(_textureWidth)
    }
    if (_textureHeight != 0 && rhs._textureHeight != 0)
    {
        COMPARE_StateAttribute_Parameter(_textureHeight)
    }
#endif
    return 0; // passed all the above comparison macro's, must be equal.
}
/// Replaces the attached image. Streams that need per-frame updates
/// (requiresUpdateCall()) get an Image::UpdateCallback and DYNAMIC data
/// variance; anything else reverts the texture to STATIC with no callback.
/// Any existing CoreVideo adapter is dropped so apply() re-creates it.
void OSXCoreVideoTexture::setImage(osg::Image* image)
{
    if (_image == image)
        return;

    // Undo the update wiring installed for the previous streaming image.
    const bool hadStreamingImage = _image.valid() && _image->requiresUpdateCall();
    if (hadStreamingImage)
    {
        setUpdateCallback(0);
        setDataVariance(osg::Object::STATIC);
    }

    _image = image;
    _modifiedCount.setAllElementsTo(0);

    // Wire up per-frame updates for the new image if it needs them.
    const bool hasStreamingImage = _image.valid() && _image->requiresUpdateCall();
    if (hasStreamingImage)
    {
        setUpdateCallback(new osg::Image::UpdateCallback());
        setDataVariance(osg::Object::DYNAMIC);
    }

    // Force apply() to build a fresh adapter for the new image.
    _adapter = NULL;
}
/// Binds the current movie frame as the active GL texture.
/// Lazily creates (or reuses) the CoreVideo adapter on first use — the
/// adapter and texture target are `mutable`, which is why this const
/// method can assign them.
void OSXCoreVideoTexture::apply(osg::State& state) const {
    if (!_image.valid())
        return;

    if (!_adapter.valid()) {
        // Prefer an adapter already attached to the video; otherwise build one
        // for this graphics context.
        OSXQTKitVideo* m = dynamic_cast<OSXQTKitVideo*>(_image.get());
        if ((m) && (m->getCoreVideoAdapter()))
            _adapter = m->getCoreVideoAdapter();
        else
            _adapter = new OSXCoreVideoAdapter(state, _image.get());
    }

    // Pull the newest frame and bind its texture.
    _adapter->getFrame();
    _textureTarget = _adapter->getTextureTarget();
    glBindTexture(_textureTarget, _adapter->getTextureName());
}

View File

@@ -0,0 +1,80 @@
/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2008 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#pragma once
#include <osg/Texture>
#include "OSXCoreVideoAdapter.h"
/** Texture state-attribute that renders an OSXQTKitVideo via CoreVideo:
 *  each apply() binds the movie's current frame directly as a GL texture,
 *  bypassing the usual osg::Image pixel upload path. */
class OSXCoreVideoTexture : public osg::Texture {
public:
    OSXCoreVideoTexture();

    /// Creates a texture for the given image (usually an OSXQTKitVideo).
    OSXCoreVideoTexture(osg::Image* image);

    /// Copy constructor using CopyOp to manage deep vs shallow copy.
    OSXCoreVideoTexture(const OSXCoreVideoTexture& text,const osg::CopyOp& copyop=osg::CopyOp::SHALLOW_COPY);

    META_StateAttribute( , OSXCoreVideoTexture, TEXTURE);

    /// Orders textures for state sorting; see the .cpp for the rules.
    virtual int compare(const osg::StateAttribute& rhs) const;

    /// Target is mutable: it tracks whatever CoreVideo hands back per frame.
    virtual GLenum getTextureTarget() const { return _textureTarget; }

    // Single-image texture: the indexed accessors ignore their index.
    virtual void setImage(unsigned int, osg::Image* image) { setImage(image); }
    void setImage(osg::Image* image);
    osg::Image* getImage() { return _image.get(); }
    const osg::Image* getImage() const { return _image.get(); }
    virtual osg::Image* getImage(unsigned int) { return _image.get(); }
    virtual const osg::Image* getImage(unsigned int) const { return _image.get(); }
    virtual unsigned int getNumImages() const { return 1; }

    virtual int getTextureWidth() const { return _textureWidth; }
    virtual int getTextureHeight() const { return _textureHeight; }
    virtual int getTextureDepth() const { return 1; }

    /// Binds the current movie frame; creates the adapter lazily.
    virtual void apply(osg::State& state) const;

    /// Mipmaps are not applicable to CoreVideo-backed textures.
    virtual void allocateMipmap(osg::State& state) const {}

    inline unsigned int& getModifiedCount(unsigned int contextID) const
    {
        return _modifiedCount[contextID];
    }

protected:
    /// Internal format is dictated by CoreVideo, nothing to compute.
    virtual void computeInternalFormat() const {}
    virtual ~OSXCoreVideoTexture();

    mutable GLenum _textureTarget;                     // target reported by the adapter
    int _textureWidth;                                 // last known frame width (0 = unknown)
    int _textureHeight;                                // last known frame height (0 = unknown)
    bool _inited;
    mutable osg::ref_ptr<OSXCoreVideoAdapter> _adapter; // lazily created in apply()
    osg::ref_ptr<osg::Image> _image;                   // the video stream

    typedef osg::buffered_value<unsigned int> ImageModifiedCount;
    mutable ImageModifiedCount _modifiedCount;         // per-context image revision
};

View File

@@ -0,0 +1,99 @@
/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2008 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#pragma once
#include <osg/ImageStream>
#include "VideoFrameDispatcher.h"
#ifdef __OBJC__
@class QTMovie;
#else
class QTMovie;
#endif
class QTVisualContext;
class OSXCoreVideoAdapter;
/** QTKit-backed movie stream. Frames are decoded either
 *  - on demand via update()/the VideoFrameDispatcher into the osg::Image
 *    pixel buffer, or
 *  - straight into GL textures when an OSXCoreVideoAdapter is attached
 *    (in which case the image data stays empty). */
class OSXQTKitVideo : public osgVideo::VideoImageStream {
public:
    OSXQTKitVideo();
    ~OSXQTKitVideo();

    /// Playback rate; 0 pauses, 1 is normal speed.
    virtual void setTimeMultiplier(double r);
    virtual double getTimeMultiplier() const;

    /// Current playback position in seconds.
    virtual double getCurrentTime() const;

    virtual bool isPlaying() const { return (getStatus() == PLAYING); }
    virtual bool valid() const { return (getStatus() != INVALID); }

    /// Opens a movie from a local path or an http/rtsp URL.
    void open(const std::string& file_name);

    virtual void setVolume (float);
    virtual float getVolume () const;

    /// Audio balance, -1 (left) .. +1 (right).
    virtual float getAudioBalance();
    virtual void setAudioBalance(float b);

    /// Movie duration in seconds, cached by open().
    virtual double getLength() const { return _duration; }

    virtual void seek (double t);
    virtual void play ();
    virtual void pause ();

    /// Attach/detach a CoreVideo adapter; while attached, decodeFrame() is a no-op.
    void setCoreVideoAdapter(OSXCoreVideoAdapter* adapter);
    OSXCoreVideoAdapter* getCoreVideoAdapter() const { return _coreVideoAdapter; }

    /// Copies the newest frame into the image buffer; `force` bypasses the
    /// "is a new frame available" check.
    void decodeFrame(bool force);
    virtual void decodeFrame() { decodeFrame(_waitForFirstFrame); }

    /// Needs the scenegraph update traversal only when neither CoreVideo nor
    /// the threaded dispatcher is driving decoding.
    virtual bool requiresUpdateCall () const { return (!getCoreVideoAdapter() && !getVideoFrameDispatcher() ); }

    virtual void update(osg::NodeVisitor *)
    {
        requestNewFrame(_waitForFirstFrame);
    }

    /// Either queues a single decode on the dispatcher or decodes inline.
    void requestNewFrame(bool force)
    {
        if (!setNeedsDispatching(RequestSingleUpdate))
            decodeFrame(force);
        else
            _waitForFirstFrame = true;
    }

    virtual bool needsDispatching() const
    {
        return _waitForFirstFrame || getNeedsDispatching();
    }

protected:
    virtual void applyLoopingMode();
    struct Data;   // Obj-C state hidden from C++ includers (pimpl)

private:
    bool _isActive, _isValid;
    double _duration;              // movie length in seconds
    QTMovie* _movie;               // the underlying QTKit movie
    Data* _data;                   // pimpl: visual context, last frame, notification handler
    mutable double _rate;          // cached playback rate
    bool _waitForFirstFrame;       // true until the first frame has been decoded
    OSXCoreVideoAdapter* _coreVideoAdapter; // non-owning back pointer
};

View File

@@ -0,0 +1,370 @@
/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2008 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#include <osgDB/FileNameUtils>
#include <QTKit/QTKit.h>
#include <QTKit/QTTime.h>
#include <Quicktime/Quicktime.h>
#include "OSXQTKitVideo.h"
#include "OSXCoreVideoAdapter.h"
namespace {

    /// Converts a std::string (assumed UTF-8) to an autoreleased NSString.
    static NSString* toNSString(const std::string& str)
    {
        return [NSString stringWithUTF8String: str.c_str()];
    }

    /// Converts an NSString to a std::string; nil maps to "".
    static std::string toString(NSString* str)
    {
        return str ? std::string([str UTF8String]) : "";
    }

    /// RAII wrapper around NSAutoreleasePool so Cocoa calls made from C++
    /// threads/scopes get a pool for the duration of the enclosing scope.
    class NSAutoreleasePoolHelper {
    public:
        NSAutoreleasePoolHelper()
        {
            _pool = [[NSAutoreleasePool alloc] init];
        }
        ~NSAutoreleasePoolHelper()
        {
            [_pool release];
        }
    private:
        NSAutoreleasePool* _pool;
    };
}
/// Obj-C bridge receiving QTMovie notifications and forwarding them to the
/// C++ OSXQTKitVideo as a forced frame request.
/// NOTE(review): `video` is a raw assign pointer and is dereferenced without
/// a NULL check — the owner must clear/remove the observer before the video
/// is destroyed (the OSXQTKitVideo destructor does remove the observers).
@interface NotificationHandler : NSObject {
    OSXQTKitVideo* video;
}

@property (readwrite,assign) OSXQTKitVideo* video;

- (void) movieNaturalSizeDidChange:(NSNotification*)the_notification;
- (void) movieLoadStateDidChange:(NSNotification*)the_notification;

@end

@implementation NotificationHandler

@synthesize video;

// Movie dimensions changed: force a decode so the image is re-allocated.
- (void) movieNaturalSizeDidChange:(NSNotification*)the_notification
{
    video->requestNewFrame(true);
}

// Load state changed (e.g. streaming data arrived): force a decode.
- (void) movieLoadStateDidChange:(NSNotification*)the_notification
{
    video->requestNewFrame(true);
}

@end
/// Pimpl holding the Obj-C/QuickTime state of an OSXQTKitVideo.
struct OSXQTKitVideo::Data {
    QTVisualContextRef visualContext;          // pixel-buffer context frames are rendered into
    CVPixelBufferRef lastFrame;                // keeps the pixels of the last decoded frame alive
    NotificationHandler* notificationHandler;  // QTMovie notification bridge

    // notificationHandler was previously left uninitialized by this ctor.
    Data() : visualContext(NULL), lastFrame(NULL), notificationHandler(NULL) {}
};
/// Constructs an empty, INVALID stream; open() does the real work.
/// All scalar members are now initialized — _movie in particular was left
/// uninitialized, and the destructor messages it even if open() was never
/// called (messaging nil is safe; messaging garbage is not).
OSXQTKitVideo::OSXQTKitVideo()
    : osgVideo::VideoImageStream()
    , _isActive(false)
    , _isValid(false)
    , _duration(0.0)
    , _movie(NULL)
    , _rate(0.0)
    , _waitForFirstFrame(false)
    , _coreVideoAdapter(NULL)
{
    _status = INVALID;
    _data = new Data();
    _data->notificationHandler = [[NotificationHandler alloc] init];
    _data->notificationHandler.video = this;

    // QuickTime delivers frames top-down.
    setOrigin(osg::Image::TOP_LEFT);
}
/// Tears the movie down: unregisters notification observers, stops and
/// releases the movie, then the visual context, last frame and pimpl.
OSXQTKitVideo::~OSXQTKitVideo()
{
    _status = INVALID;

    NSAutoreleasePoolHelper autorelease_pool_helper;

    // Remove both observers registered in open(); messaging with a nil
    // _movie is a harmless no-op.
    [[NSNotificationCenter defaultCenter] removeObserver:_data->notificationHandler
        name:QTMovieLoadStateDidChangeNotification object:_movie];

    [[NSNotificationCenter defaultCenter] removeObserver:_data->notificationHandler
#if defined(MAC_OS_X_VERSION_10_6) && (MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_6)
        name:QTMovieNaturalSizeDidChangeNotification
#else
        name:QTMovieSizeDidChangeNotification
#endif
        object:_movie];

    [_movie stop];
    [_movie invalidate];
    [_movie release];

    if (_data->visualContext)
        QTVisualContextRelease(_data->visualContext);

    if (_data->lastFrame)
    {
        // NOTE(review): lastFrame intentionally carries two references (the
        // copy plus an extra CFRetain in decodeFrame()), so it is released
        // twice here — keep this paired with decodeFrame() when changing it.
        CFRelease(_data->lastFrame);
        CVPixelBufferRelease(_data->lastFrame);
    }

    [_data->notificationHandler release];

    delete _data;
}
/// Opens a movie from a local file path or an http/rtsp URL, creates the
/// pixel-buffer visual context, allocates the image storage to the movie's
/// natural size, registers size/load-state observers and requests the
/// first frame. On failure the stream's status becomes INVALID.
void OSXQTKitVideo::open(const std::string& file_name)
{
    bool valid = true;

    NSAutoreleasePoolHelper autorelease_pool_helper;

    // Must be initialized: QTMovie only writes it on failure, but it is
    // tested below even on success (was read uninitialized).
    NSError* error = nil;

    NSMutableDictionary* movieAttributes = [NSMutableDictionary dictionaryWithObjectsAndKeys:
        [NSNumber numberWithBool:NO], QTMovieOpenAsyncOKAttribute,
        nil];

    // URLs stream; local paths open as files.
    if (osgDB::containsServerAddress(file_name))
        [movieAttributes setObject:[NSURL URLWithString: toNSString(file_name)] forKey: QTMovieURLAttribute];
    else
        [movieAttributes setObject:[NSURL fileURLWithPath: toNSString(file_name)] forKey: QTMovieURLAttribute];

    _movie = [[QTMovie alloc] initWithAttributes:movieAttributes
        error: &error];

    if(error || _movie == NULL)
    {
        OSG_WARN << "OSXQTKitVideo: could not load movie from " << file_name << std::endl;
        valid = false;
    }

    NSSize movie_size = [[_movie attributeForKey:QTMovieNaturalSizeAttribute] sizeValue];
    QTGetTimeInterval([_movie duration], &_duration);

    NSDictionary *pixelBufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
        //in general this shouldn't be forced. but in order to ensure we get good pixels use this one
        [NSNumber numberWithInt: kCVPixelFormatType_32BGRA], (NSString*)kCVPixelBufferPixelFormatTypeKey,
        [NSNumber numberWithInteger:1], kCVPixelBufferBytesPerRowAlignmentKey,
        [NSNumber numberWithBool:YES], kCVPixelBufferOpenGLCompatibilityKey,
        //specifying width and height can't hurt since we know
        nil];

    NSMutableDictionary *ctxAttributes = [NSMutableDictionary dictionaryWithObject:pixelBufferAttributes
        forKey:(NSString*)kQTVisualContextPixelBufferAttributesKey];

    OSStatus err = QTPixelBufferContextCreate(kCFAllocatorDefault, (CFDictionaryRef)ctxAttributes, &_data->visualContext);
    if(err)
    {
        OSG_WARN << "OSXQTKitVideo: could not create Pixel Buffer: " << err << std::endl;
        valid = false;
    }

    // Size the image to the movie's natural size; pixels arrive as BGRA.
    allocateImage((int)movie_size.width,(int)movie_size.height,1,GL_BGRA,GL_UNSIGNED_INT_8_8_8_8_REV,1);
    setInternalTextureFormat(GL_RGBA8);

    // Route decoded frames into our pixel-buffer context.
    SetMovieVisualContext([_movie quickTimeMovie], _data->visualContext);

    [[NSNotificationCenter defaultCenter] addObserver:_data->notificationHandler
        selector:@selector(movieNaturalSizeDidChange:)
#if defined(MAC_OS_X_VERSION_10_6) && (MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_6)
        name:QTMovieNaturalSizeDidChangeNotification
#else
        name:QTMovieSizeDidChangeNotification
#endif
        object:_movie];

    [[NSNotificationCenter defaultCenter] addObserver:_data->notificationHandler
        selector:@selector(movieLoadStateDidChange:)
        name:QTMovieLoadStateDidChangeNotification
        object:_movie];

    applyLoopingMode();

    _waitForFirstFrame = true;
    requestNewFrame(true);

    _status = (valid) ? PAUSED : INVALID;
}
/// Sets the playback rate: 0 pauses, any other value plays at that speed.
/// Also switches the dispatcher between continuous updates (playing) and
/// no updates (paused).
void OSXQTKitVideo::setTimeMultiplier(double r)
{
    // Ignore rate changes on a movie that failed to open.
    if (!valid())
        return;

    NSAutoreleasePoolHelper pool;

    _rate = r;
    [_movie setRate: _rate];

    const bool now_playing = (_rate != 0);
    _status = now_playing ? PLAYING : PAUSED;
    setNeedsDispatching(now_playing ? RequestContinuousUpdate : StopUpdate);
}
/// Returns the movie's current playback rate, refreshing the cached
/// _rate (mutable, hence assignable from this const method).
double OSXQTKitVideo::getTimeMultiplier() const
{
    NSAutoreleasePoolHelper pool;
    _rate = [_movie rate];
    return _rate;
}
/// Sets the movie's audio volume (QTKit range 0.0 - 1.0).
void OSXQTKitVideo::setVolume (float f)
{
    NSAutoreleasePoolHelper pool;
    [_movie setVolume: f];
}
/// Returns the movie's audio volume; 0 when no movie is loaded
/// (messaging nil returns zero).
float OSXQTKitVideo::getVolume () const
{
    NSAutoreleasePoolHelper pool;
    return [_movie volume];
}
/// Returns the audio balance (-1 left .. +1 right) via the legacy
/// QuickTime API.
float OSXQTKitVideo::getAudioBalance()
{
    // Initialize so a failed call returns a defined value (was read
    // uninitialized when GetMovieAudioBalance did not write it).
    float balance = 0.0f;
    GetMovieAudioBalance([_movie quickTimeMovie], &balance, 0);
    return balance;
}
/// Sets the audio balance (-1 left .. +1 right) via the legacy
/// QuickTime API.
void OSXQTKitVideo::setAudioBalance(float b)
{
    SetMovieAudioBalance([_movie quickTimeMovie], b, 0);
}
/// Jumps to time t (seconds). When paused, forces a decode so the image
/// shows the frame at the new position.
void OSXQTKitVideo::seek (double t)
{
    NSAutoreleasePoolHelper pool;
    [_movie setCurrentTime: QTMakeTimeWithTimeInterval(t)];
    if (!isPlaying())
        requestNewFrame(true);
}
/// Starts playback at normal speed (rate 1.0).
void OSXQTKitVideo::play ()
{
    setTimeMultiplier(1.0);
}
/// Pauses playback (rate 0.0).
void OSXQTKitVideo::pause ()
{
    setTimeMultiplier(0.0);
}
/// Pushes the ImageStream looping mode down to the QTMovie.
void OSXQTKitVideo::applyLoopingMode()
{
    NSAutoreleasePoolHelper pool;
    [_movie setAttribute:[NSNumber numberWithBool:(getLoopingMode() == LOOPING) ] forKey:QTMovieLoopsAttribute];
}
/// Returns the current playback position in seconds.
double OSXQTKitVideo::getCurrentTime() const
{
    // Initialize so a failed conversion returns a defined value (was read
    // uninitialized when QTGetTimeInterval did not write it).
    double t = 0.0;
    QTGetTimeInterval([_movie currentTime], &t);
    return t;
}
/// Attaches/detaches a CoreVideo adapter. While attached, the movie renders
/// into the adapter's OpenGL texture context; on detach (NULL) it falls back
/// to this stream's own pixel-buffer context.
void OSXQTKitVideo::setCoreVideoAdapter(OSXCoreVideoAdapter* adapter)
{
    _coreVideoAdapter = adapter;
    SetMovieVisualContext([_movie quickTimeMovie], _coreVideoAdapter ? _coreVideoAdapter->getVisualContext() : _data->visualContext );
}
/// Copies the newest decoded frame into this osg::Image.
/// @param force when true, decode even if QuickTime reports no new frame.
void OSXQTKitVideo::decodeFrame(bool force)
{
    // With a CoreVideo adapter attached, frames go straight to GL textures;
    // there is nothing to copy on the CPU side.
    if(getCoreVideoAdapter())
        return;

    // Give QuickTime a chance to do housekeeping on the context.
    QTVisualContextTask(_data->visualContext);

    CVPixelBufferRef currentFrame(NULL);
    const CVTimeStamp* in_output_time(NULL);

    if(!force && !QTVisualContextIsNewImageAvailable(_data->visualContext, in_output_time))
        return;

    OSStatus error_status = QTVisualContextCopyImageForTime(_data->visualContext, kCFAllocatorDefault, in_output_time, &currentFrame);

    if ((noErr == error_status) && (NULL != currentFrame))
    {
        if (_waitForFirstFrame) {
            _waitForFirstFrame = false;
        }

        // NOTE: lastFrame carries two references (the copy plus the CFRetain
        // below), so it is released twice here and in the destructor — keep
        // the pairs balanced when changing this.
        if (_data->lastFrame) {
            CFRelease(_data->lastFrame);
            CVPixelBufferRelease(_data->lastFrame);
        }

        size_t buffer_width = CVPixelBufferGetWidth(currentFrame);
        size_t buffer_height = CVPixelBufferGetHeight(currentFrame);

        CVPixelBufferLockBaseAddress( currentFrame, kCVPixelBufferLock_ReadOnly );
        void* raw_pixel_data = CVPixelBufferGetBaseAddress(currentFrame);

        // Zero-copy: hand CoreVideo's buffer to osg::Image with NO_DELETE;
        // the buffer is kept alive via lastFrame below.
        setImage(buffer_width,buffer_height,1,
                 GL_RGBA8,
                 GL_BGRA,
                 GL_UNSIGNED_INT_8_8_8_8_REV,
                 (unsigned char *)raw_pixel_data,
                 osg::Image::NO_DELETE,1);

        // Unlock flags must match the flags used when locking (was 0, which
        // mismatches the kCVPixelBufferLock_ReadOnly lock above).
        CVPixelBufferUnlockBaseAddress( currentFrame, kCVPixelBufferLock_ReadOnly );

        _data->lastFrame = currentFrame;
        CFRetain(_data->lastFrame);

        dirty();
    }
}

View File

@@ -0,0 +1,150 @@
/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2008 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
/* README:
*
* This code is loosely based on the QTKit implementation of Eric Wing, I removed
* some parts and added other parts.
*
* What's new:
* - it can handle URLs currently http and rtsp
* - it supports OS X's CoreVideo-technology, this will render the movie-frames
* into a bunch of textures. If you load your movie via readImageFile you'll
* get the standard behaviour, an ImageStream, where the data gets updated on
* every new video-frame. This may be slow.
* To get CoreVideo, you'll need to use readObjectFile and cast the result (if any)
* to an osg::Texture and use that as your video-texture. If you need access to the
* imagestream, just cast getImage to an image-stream. Please note, the data-
* property of the image-stream does NOT store the current frame, instead it's empty.
*
*/
#include <osg/ImageStream>
#include <osg/Notify>
#include <osg/Geode>
#include <osgDB/Registry>
#include <osgDB/FileNameUtils>
#include <osgDB/FileUtils>
#include "OSXQTKitVideo.h"
#include "OSXCoreVideoTexture.h"
#include "VideoFrameDispatcher.h"
/** osgDB plugin reading movies via QTKit. readImageFile returns an
 *  ImageStream (CPU-side frames); readObjectFile returns an
 *  OSXCoreVideoTexture that binds frames directly as GL textures, unless
 *  the "disableCoreVideo" option is set to "true". */
class ReaderWriterQTKit : public osgDB::ReaderWriter
{
    public:

        ReaderWriterQTKit()
        {
            supportsExtension("mov","Quicktime movie format");
            supportsExtension("mpg","Mpeg movie format");
            supportsExtension("mpv","Mpeg movie format");
            supportsExtension("mp4","Mpeg movie format");
            supportsExtension("m4v","Mpeg movie format");
            supportsExtension("flv","Flash video file (if Perian is installed)");
            supportsExtension("dv","dv movie format");
            supportsExtension("avi","avi movie format (if Perian/WMV is installed)");
            supportsExtension("sdp","sdp movie format");
            supportsExtension("swf","swf movie format (if Perian is installed)");
            supportsExtension("3gp","3gp movie format");
            // Pseudo extension to force this plugin (e.g. "movie.mov.qtkit");
            // without registering it acceptsExtension() rejected such names.
            supportsExtension("qtkit","Pseudo extension selecting the QTKit plugin");

            supportsProtocol("http", "streaming media per http");
            supportsProtocol("rtsp", "streaming media per rtsp");

            supportsOption("disableCoreVideo", "disable the usage of coreVideo when using readObjectFile, returns an ImageStream instead");
            supportsOption("disableMultiThreadedFrameDispatching", "disable the usage of the multithreaded VideoFrameDispatcher to decode video frames");
        }

        virtual ~ReaderWriterQTKit()
        {
            OSG_INFO<<"~ReaderWriterQTKit()"<<std::endl;
        }

        virtual const char* className() const { return "QTKit ImageStream Reader"; }

        /// Opens the movie as an OSXQTKitVideo image stream, optionally
        /// hooking it up to the shared multi-threaded frame dispatcher.
        virtual ReadResult readImage(const std::string& file, const osgDB::ReaderWriter::Options* options) const
        {
            std::string ext = osgDB::getLowerCaseFileExtension(file);
            if (!acceptsExtension(ext)) return ReadResult::FILE_NOT_HANDLED;

            std::string fileName(file);
            // getLowerCaseFileExtension() lower-cases ext, so the previous
            // comparison against "QTKit" never matched and the pseudo
            // extension was never stripped.
            if (ext=="qtkit")
            {
                fileName = osgDB::getNameLessExtension(fileName);
                OSG_INFO<<"ReaderWriterQTKit stripped filename = "<<fileName<<std::endl;
            }

            // Only resolve local files through the data-file search paths.
            if (!osgDB::containsServerAddress(fileName))
            {
                fileName = osgDB::findDataFile( fileName, options );
                if (fileName.empty()) return ReadResult::FILE_NOT_FOUND;
            }

            OSG_INFO<<"ReaderWriterQTKit::readImage "<< fileName<< std::endl;

            osg::ref_ptr<OSXQTKitVideo> video = new OSXQTKitVideo();

            bool disable_multi_threaded_frame_dispatching = options ? (options->getPluginStringData("disableMultiThreadedFrameDispatching") == "true"): false;
            bool disable_core_video = options ? (options->getPluginStringData("disableCoreVideo") == "true") : false;
            OSG_INFO << "disableMultiThreadedFrameDispatching: " << disable_multi_threaded_frame_dispatching << std::endl;
            OSG_INFO << "disableCoreVideo                    : " << disable_core_video << std::endl;

            // The dispatcher only decodes CPU-side frames, so it is used when
            // CoreVideo is disabled and threaded dispatching is not.
            if (!options
                || (!disable_multi_threaded_frame_dispatching
                && disable_core_video))
            {
                // One process-wide dispatcher, created lazily on first use.
                static osg::ref_ptr<osgVideo::VideoFrameDispatcher> video_frame_dispatcher(NULL);
                if (!video_frame_dispatcher) {
                    std::string num_threads_str = options ? options->getPluginStringData("numFrameDispatchThreads") : "0";
                    video_frame_dispatcher = new osgVideo::VideoFrameDispatcher(atoi(num_threads_str.c_str()));
                }
                video_frame_dispatcher->addVideo(video);
            }

            video->open(fileName);

            return video->valid() ? video.release() : NULL;
        }

        /// Like readImage, but wraps the stream in an OSXCoreVideoTexture
        /// unless CoreVideo is disabled via options.
        virtual ReadResult readObject (const std::string &file, const osgDB::ReaderWriter::Options* options) const
        {
            ReadResult rr = readImage(file, options);
            if (!rr.validImage())
                return rr;

            // Consistent with readImage: only the value "true" disables
            // CoreVideo (previously any non-empty value did).
            bool use_core_video = true;
            if (options && (options->getPluginStringData("disableCoreVideo") == "true"))
                use_core_video = false;

            osg::ref_ptr<OSXQTKitVideo> video = dynamic_cast<OSXQTKitVideo*>(rr.getImage());
            if (!video || !use_core_video)
                return rr;

            osg::ref_ptr<OSXCoreVideoTexture> texture = new OSXCoreVideoTexture(video);
            return texture.release();
        }

    protected:
};
// now register with Registry to instantiate the above
// reader/writer.
REGISTER_OSGPLUGIN(QTKit, ReaderWriterQTKit)

View File

@@ -0,0 +1,187 @@
/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2008 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#include "VideoFrameDispatcher.h"
#include <iostream>
#include <osg/Timer>
namespace osgVideo {
// Default constructor: stream starts with no dispatcher and no queue;
// a VideoFrameDispatcher assigns itself via addVideo() later.
VideoImageStream::VideoImageStream()
    : osg::ImageStream()
    , _needsDispatching(false)
    , _dispatcher(NULL)
    , _queue(NULL)
{
}
// Copy constructor: shares the source's dispatcher but deliberately NOT
// its dispatch queue — the copy must be queued on its own.
VideoImageStream::VideoImageStream(const VideoImageStream& image,const osg::CopyOp& copyop)
    : osg::ImageStream(image, copyop)
    , _needsDispatching(image._needsDispatching)
    , _dispatcher(image._dispatcher)
    , _queue(NULL)
{
}
// Destructor: removes the stream from its dispatch queue (StopUpdate)
// before dropping the dispatcher reference, so no worker thread keeps
// decoding into a dying object.
VideoImageStream::~VideoImageStream()
{
    setNeedsDispatching(StopUpdate);
    _dispatcher = NULL;
}
/** Updates the "wants background decoding" latch and (de)registers this
 *  stream with its dispatcher.
 *
 *  - RequestContinuousUpdate: latches _needsDispatching to true.
 *  - RequestSingleUpdate:     leaves the latch unchanged (one-shot decode).
 *  - StopUpdate:              clears the latch and leaves the queue.
 *
 *  @return true when a dispatcher is attached and the request was forwarded,
 *          false when this stream is not managed by a dispatcher. */
bool VideoImageStream::setNeedsDispatching(RequestMode request_mode)
{
    _needsDispatching = (_needsDispatching || (request_mode == RequestContinuousUpdate)) && (request_mode != StopUpdate);

    if (!_dispatcher)
        return false;

    if (request_mode == StopUpdate) {
        _dispatcher->removeFromQueue(this);
    }
    else
    {
        _dispatcher->addToQueue(this);
    }
    // _dispatcher is known non-NULL here; the original re-tested it redundantly.
    return true;
}
#pragma mark
// Sets up an empty dispatch queue; the worker thread is started later by the
// owning VideoFrameDispatcher via start().
VideoFrameDispatchQueue::VideoFrameDispatchQueue()
    : OpenThreads::Thread()
    , osg::Referenced()
    , _queue()
    , _numItems(0)
    , _block()
    , _mutex()
    , _finished(false)
{
}
// Worker-thread loop: decodes one frame for every registered stream per pass,
// throttles to ~120 passes/s, and parks on _block while the queue is empty.
void VideoFrameDispatchQueue::run()
{
    osg::Timer t;
    // Minimum duration of one pass over the queue, in microseconds (~1/120 s).
    static unsigned int frame_delay = 1000 * 1000 / 120;
    _block.reset();
    _block.block();
    while(!_finished)
    {
        unsigned int num_items(0);
        {
            osg::Timer_t last_tick(t.tick());
            OpenThreads::ScopedLock<OpenThreads::Mutex> lock(_mutex);
            for(Queue::iterator i = _queue.begin(); i != _queue.end(); )
            {
                // observer_ptr: streams may be deleted elsewhere at any time.
                osg::observer_ptr<VideoImageStream> stream(*i);
                if (stream.valid() && stream->needsDispatching())
                {
                    if (stream.valid())
                        stream->decodeFrame();
                    ++num_items;
                    ++i;
                }
                else
                {
                    // Dead or idle stream: detach it and drop it from the set.
                    if (stream.valid())
                        stream->setDispatchQueue(NULL);
                    _queue.erase(i++);
                }
            }
            _numItems = num_items;
            if (_numItems > 0)
            {
                // Sleep away the remainder of the 120 Hz time slice.
                // NOTE(review): this sleep happens while _mutex is still held,
                // which delays addItem()/removeItem() on other threads — confirm
                // this is acceptable or move the sleep outside the lock scope.
                unsigned int dt = t.delta_u(last_tick, t.tick());
                if (dt < frame_delay) {
                    OpenThreads::Thread::microSleep(frame_delay - dt);
                }
            }
        }
        if (_numItems == 0)
        {
            // Queue ran empty: park until addItem() releases the block.
            // std::cout << this << " blocking" << std::endl;
            _block.reset();
            _block.block();
        }
    }
}
// Registers a stream with this queue and wakes the worker thread.
// No-op once the queue is shutting down.
void VideoFrameDispatchQueue::addItem(osgVideo::VideoImageStream *stream)
{
    if (_finished) return;

    OpenThreads::ScopedLock<OpenThreads::Mutex> lock(_mutex);
    _queue.insert(stream);
    stream->setDispatchQueue(this);
    _numItems = _queue.size();
    _block.release();
    // std::cout << this << " release" << std::endl;
}

// Detaches a stream from this queue.
// NOTE(review): setDispatchQueue(NULL) happens before _mutex is taken, so a
// concurrent run() pass may still reset it after us — confirm this is benign.
void VideoFrameDispatchQueue::removeItem(osgVideo::VideoImageStream* stream)
{
    stream->setDispatchQueue(NULL);
    OpenThreads::ScopedLock<OpenThreads::Mutex> lock(_mutex);
    _queue.erase(stream);
    _numItems = _queue.size();
}
// Stops the worker: raise the finished flag, unblock the thread so it can
// observe the flag, then join before members are destroyed.
VideoFrameDispatchQueue::~VideoFrameDispatchQueue()
{
    _finished = true;
    _block.release();
    join();
}
#pragma mark
// Creates num_threads dispatch queues (one worker thread each);
// 0 means "one queue per CPU core".
VideoFrameDispatcher::VideoFrameDispatcher(unsigned int num_threads)
    : osg::Referenced()
    , _queues()
{
    num_threads = num_threads ? num_threads : OpenThreads::GetNumberOfProcessors();
    OSG_ALWAYS << "VideoFrameDispatcher: creating " << num_threads << " queues." << std::endl;

    for(unsigned int i = 0; i < num_threads; ++i)
    {
        VideoFrameDispatchQueue* q = new VideoFrameDispatchQueue();
        q->start();
        _queues.push_back(q);
    }
}
// Assigns the stream to the currently least-loaded queue (load balancing by
// item count). Streams already on a queue are left where they are.
void VideoFrameDispatcher::addToQueue(VideoImageStream *stream)
{
    // The stream will be ref/unref'ed from worker threads.
    stream->setThreadSafeRefUnref(true);

    if (stream->getDispatchQueue())
        return;
    VideoFrameDispatchQueue* queue = *std::min_element(_queues.begin(), _queues.end(), VideoFrameDispatchQueueComparator());
    queue->addItem(stream);
}

// Removes the stream from whatever queue it currently belongs to, if any.
void VideoFrameDispatcher::removeFromQueue(VideoImageStream* stream)
{
    if (stream->getDispatchQueue())
        stream->getDispatchQueue()->removeItem(stream);
}
}

View File

@@ -0,0 +1,108 @@
/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2008 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#pragma once
#include <set>
#include <vector>

#include <OpenThreads/Thread>
#include <OpenThreads/Mutex>
#include <OpenThreads/ScopedLock>
#include <OpenThreads/Block>

#include <osg/ImageStream>
#include <osg/observer_ptr>
namespace osgVideo {
class VideoFrameDispatchQueue;
class VideoFrameDispatcher;
/** Base class for video streams whose frames can be decoded on background
 *  threads by a VideoFrameDispatcher. Subclasses implement decodeFrame() and
 *  call setNeedsDispatching() to start/stop background decoding. */
class VideoImageStream : public osg::ImageStream {
public:
    /// How a subclass asks for background decoding.
    enum RequestMode { RequestContinuousUpdate, RequestSingleUpdate, StopUpdate };

    VideoImageStream();
    VideoImageStream(const VideoImageStream& image,const osg::CopyOp& copyop=osg::CopyOp::SHALLOW_COPY);
    ~VideoImageStream();

    /// true while a dispatch queue should keep calling decodeFrame().
    virtual bool needsDispatching() const { return _needsDispatching; }

    /// Decodes the next frame; called from a dispatch-queue worker thread.
    virtual void decodeFrame() = 0;

protected:
    bool setNeedsDispatching(RequestMode request_mode);

    VideoFrameDispatcher* getVideoFrameDispatcher() const { return _dispatcher; }
    void setVideoFrameDispatcher(VideoFrameDispatcher* dispatcher) { _dispatcher = dispatcher; }

    bool getNeedsDispatching() const { return _needsDispatching; }

    // Back-pointer to the queue currently decoding this stream; maintained by
    // the queue itself, NULL while unqueued.
    void setDispatchQueue(VideoFrameDispatchQueue* queue) { _queue = queue; }
    VideoFrameDispatchQueue* getDispatchQueue() const { return _queue; }

private:
    bool _needsDispatching;
    VideoFrameDispatcher* _dispatcher;   // raw pointer, not ref-counted; owned elsewhere
    VideoFrameDispatchQueue* _queue;     // raw pointer; owned by the dispatcher

    friend class VideoFrameDispatcher;
    friend class VideoFrameDispatchQueue;
};
/** One worker thread plus the set of streams it decodes. Streams are held as
 *  observer_ptrs so queue membership never keeps a stream alive. */
class VideoFrameDispatchQueue: public OpenThreads::Thread, public osg::Referenced {
public:
    typedef std::set< osg::observer_ptr<VideoImageStream> > Queue;

    VideoFrameDispatchQueue();
    ~VideoFrameDispatchQueue();

    /// Load metric used by the dispatcher to pick the emptiest queue.
    unsigned int getNumItemsInQueue() const { return _numItems; }

    void addItem(VideoImageStream* stream);
    void removeItem(osgVideo::VideoImageStream* stream);

    /// Thread entry point: decode loop, see the .cpp.
    virtual void run();

private:
    Queue _queue;                 // guarded by _mutex
    unsigned int _numItems;       // cached size, read without the lock by the dispatcher
    OpenThreads::Block _block;    // parks the worker while the queue is empty
    OpenThreads::Mutex _mutex;
    bool _finished;               // set by the destructor to stop run()
};

/// Orders queues by current load; used with std::min_element.
struct VideoFrameDispatchQueueComparator {
    bool operator() (const osg::ref_ptr<VideoFrameDispatchQueue>& lhs, const osg::ref_ptr<VideoFrameDispatchQueue>& rhs) const {
        return lhs->getNumItemsInQueue() < rhs->getNumItemsInQueue();
    }
};
/** Owns a pool of VideoFrameDispatchQueues and load-balances video streams
 *  across them. */
class VideoFrameDispatcher : public osg::Referenced {
public:
    typedef std::vector< osg::ref_ptr<VideoFrameDispatchQueue> > DispatchQueues;

    /// @param num_threads number of worker queues; 0 = one per CPU core.
    VideoFrameDispatcher(unsigned int num_threads = 0);

    /// Associates the stream with this dispatcher; the stream is only put on
    /// an actual queue later, via addToQueue()/setNeedsDispatching().
    void addVideo(VideoImageStream* stream)
    {
        stream->setVideoFrameDispatcher(this);
    }

    void addToQueue(VideoImageStream* stream);
    void removeFromQueue(VideoImageStream* stream);

private:
    DispatchQueues _queues;
};
}

View File

@@ -0,0 +1,17 @@
INCLUDE_DIRECTORIES( ${AV_FOUNDATION_INCLUDE_DIR} )

SET(TARGET_SRC
    OSXAVFoundationVideo.mm
    OSXAVFoundationVideo.h
    ../QTKit/VideoFrameDispatcher.h
    # BUGFIX: directory was spelled "QTKIt"; the build failed to find the
    # source on case-sensitive file systems.
    ../QTKit/VideoFrameDispatcher.cpp
    OSXAVFoundationCoreVideoTexture.h
    OSXAVFoundationCoreVideoTexture.cpp
    ReaderWriterAVFoundation.cpp
)

SET(TARGET_LIBRARIES_VARS AV_FOUNDATION_LIBRARY COCOA_LIBRARY COREVIDEO_LIBRARY COREMEDIA_LIBRARY QUARTZCORE_LIBRARY)
SET(TARGET_ADDED_LIBRARIES osgViewer )

#### end var setup ###
SETUP_PLUGIN(AVFoundation)

View File

@@ -0,0 +1,112 @@
//
// OSXAVFoundationVideo.h
// cefix_presentation_ios
//
// Created by Stephan Maximilian Huber on 25.07.12.
// Copyright (c) 2012 stephanmaximilianhuber.com. All rights reserved.
//
#pragma once
#include <osg/Timer>
#include "../QTKit/VideoFrameDispatcher.h"
/** osg::ImageStream playing movies through AVFoundation (OSX 10.8+ / iOS).
 *  Frames arrive from an AVPlayerItemVideoOutput either as BGRA pixel buffers
 *  (copied into this image in update()) or, in CoreVideo mode, as GL textures
 *  consumed directly by OSXAVFoundationCoreVideoTexture. */
class OSXAVFoundationVideo : public osgVideo::VideoImageStream {

public:
    OSXAVFoundationVideo();

    /// Destructor
    ~OSXAVFoundationVideo();

    // NOTE(review): returns a fresh, unopened instance, not a copy of this
    // stream's state — confirm this clone semantics is intended.
    virtual Object* clone() const { return new OSXAVFoundationVideo(); }
    virtual bool isSameKindAs(const Object* obj) const {
        return dynamic_cast<const OSXAVFoundationVideo*>(obj) != NULL;
    }
    virtual const char* className() const { return "OSXAVFoundationVideo"; }

    /// Start or continue stream.
    virtual void play();

    /** @return true, if a movie is playing */
    bool isPlaying() const { return (getStatus() == PLAYING); }

    /// sets the movierate
    void setTimeMultiplier(double rate);

    /// gets the movierate
    double getTimeMultiplier() const;

    /// Pause stream at current position.
    virtual void pause();

    /// stop playing
    virtual void quit(bool /*waitForThreadToExit*/ = true);

    /// Get total length in seconds.
    virtual double getLength() const { return _videoDuration; }

    /// jumps to a specific position
    virtual void seek(double pos);

    /// returns the current playing position
    virtual double getCurrentTime () const;

    /// Opens a local file or http/rtsp URL; leaves the movie paused.
    void open(const std::string& filename);

    /** @return the current volume as float */
    virtual float getVolume() const;

    /** sets the volume of this quicktime to v*/
    virtual void setVolume(float v);

    /** @return the current balance-setting (0 = neutral, -1 = left, 1 = right */
    virtual float getAudioBalance();

    /** sets the current balance-setting (0 = neutral, -1 = left, 1 = right */
    virtual void setAudioBalance(float b);

    virtual double getFrameRate () const { return _framerate; }

    /// Decodes the next frame; called from a dispatch-queue worker thread.
    virtual void decodeFrame();

    virtual bool valid() const { return (getStatus() != INVALID); }

    /// Requires an update() call every frame to publish decoded frames.
    virtual bool requiresUpdateCall () const { return true; }
    virtual void update(osg::NodeVisitor *);

    virtual void applyLoopingMode();

    /// CoreVideo mode: frames become GL textures instead of image data.
    void setUseCoreVideo(bool b) { _useCoreVideo = b; }
    bool isCoreVideoUsed() const { return _useCoreVideo; }

    void lazyInitCoreVideoTextureCache(osg::State& state);
    bool getCurrentCoreVideoTexture(GLenum& target, GLint& name, int& width, int& height) const;

protected:
    virtual bool needsDispatching() const;

    /// Requests a single background decode (used after open()/seek()).
    void requestNewFrame();

private:
    // Pimpl hiding the ObjC/AVFoundation state from this C++ header.
    class Data;

    void clear();

    // NOTE(review): stored as float although getLength() returns double.
    float _videoDuration;
    double _volume;
    bool _fileOpened, _waitForFrame;
    Data* _data;
    bool _useCoreVideo, _dimensionsChangedCallbackNeeded;
    double _framerate;
};

View File

@@ -0,0 +1,514 @@
#include "OSXAVFoundationVideo.h"

#include <iostream>

#include <osgDB/FileNameUtils>
#include <osgViewer/api/Cocoa/GraphicsWindowCocoa>

#import <AVFoundation/AVFoundation.h>
#import <Cocoa/Cocoa.h>
namespace {

    /// std::string -> autoreleased NSString.
    static NSString* toNSString(const std::string& str)
    {
        return [NSString stringWithUTF8String: str.c_str()];
    }

    /// NSString -> std::string ("" for nil).
    static std::string toString(NSString* str)
    {
        return str ? std::string([str UTF8String]) : "";
    }

    /// RAII wrapper draining an NSAutoreleasePool at scope exit
    /// (this file is compiled with manual reference counting).
    class NSAutoreleasePoolHelper {
    public:
        NSAutoreleasePoolHelper()
        {
            _pool = [[NSAutoreleasePool alloc] init];
        }
        ~NSAutoreleasePoolHelper()
        {
            [_pool release];
        }
    private:
        NSAutoreleasePool* _pool;
    };
}
// Convenience additions on AVPlayer.
// NOTE(review): category methods on a framework class should carry a prefix;
// an SDK that later declares -currentURL/-setVolume: on AVPlayer would
// collide with these definitions with undefined results.
@interface AVPlayer (MOAdditions)
- (NSURL *)currentURL;
- (void)setVolume:(CGFloat)volume;
@end;

@implementation AVPlayer (MOAdditions)

// URL of the asset currently playing, or nil when the asset is not URL-based.
- (NSURL *)currentURL {
    AVAsset *asset = self.currentItem.asset;
    if ([asset isMemberOfClass:[AVURLAsset class]])
        return ((AVURLAsset *)asset).URL;
    return nil;
}

// Applies a uniform volume to every audio track via an AVAudioMix.
- (void)setVolume:(CGFloat)volume {
    NSArray *audioTracks = [self.currentItem.asset tracksWithMediaType:AVMediaTypeAudio];
    NSMutableArray *allAudioParams = [NSMutableArray array];
    for (AVAssetTrack *track in audioTracks) {
        AVMutableAudioMixInputParameters *audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
        [audioInputParams setVolume:volume atTime:kCMTimeZero];
        [audioInputParams setTrackID:[track trackID]];
        [allAudioParams addObject:audioInputParams];
    }
    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    [audioMix setInputParameters:allAudioParams];
    [self.currentItem setAudioMix:audioMix];
}

@end
// Receives AVPlayerItemDidPlayToEndTimeNotification and implements looping.
@interface OSXAVFoundationVideoDelegate : NSObject {
    OSXAVFoundationVideo* video;
}
// Raw back-pointer (assign): the video owns the delegate, not vice versa.
@property (readwrite,assign) OSXAVFoundationVideo* video;

- (void) playerItemDidReachEnd:(NSNotification*)the_notification;
@end;

@implementation OSXAVFoundationVideoDelegate

@synthesize video;

// End of movie: rewind when looping, otherwise pause.
- (void) playerItemDidReachEnd:(NSNotification*)the_notification
{
    if (video->getLoopingMode() == osg::ImageStream::LOOPING) {
        video->seek(0);
    }
    else {
        video->pause();
    }
}

@end
/** Pimpl: the AVFoundation objects plus a small ring buffer handing decoded
 *  frames from the decode thread (writer) to update()/draw (reader).
 *  All ObjC members are owned references (this file uses MRC). */
class OSXAVFoundationVideo::Data {
public:
    AVPlayer* avplayer;
    AVPlayerItem* avplayeritem;
    AVPlayerItemVideoOutput* output;
    OSXAVFoundationVideoDelegate* delegate;
    // Ring buffer of 3 retained CVPixelBuffer/CVOpenGLTexture refs.
    std::vector<CVBufferRef> lastFrames;
    int readFrameNdx, writeFrameNdx;
    CVOpenGLTextureCacheRef coreVideoTextureCache;

    Data()
        : avplayer(NULL)
        , avplayeritem(NULL)
        , output(NULL)
        , delegate(NULL)
        , lastFrames(3)
        , readFrameNdx(0)
        , writeFrameNdx(0)
        , coreVideoTextureCache(0)
    {
    }

    // Releases all ObjC objects, buffered frames and the texture cache.
    ~Data() {
        [output release];
        [avplayeritem release];
        [avplayer release];
        [delegate release];

        for(unsigned int i=0; i< lastFrames.size(); ++i)
        {
            if (lastFrames[i])
            {
                CVBufferRelease(lastFrames[i]);
            }
        }

        if (coreVideoTextureCache)
        {
            CVOpenGLTextureCacheRelease(coreVideoTextureCache);
            coreVideoTextureCache = NULL;
        }

        output = NULL;
        avplayer = NULL;
        avplayeritem = NULL;
        delegate = NULL;
    }

    // Writer side: stores a new frame, releasing whatever the target slot
    // held; never overwrites the slot the reader last returned. Takes
    // ownership of 'frame'.
    void addFrame(CVBufferRef frame)
    {
        unsigned int new_ndx = writeFrameNdx + 1;
        if (new_ndx >= lastFrames.size())
            new_ndx = 0;

        if (new_ndx == readFrameNdx) {
            new_ndx = readFrameNdx+1;
            if (new_ndx >= lastFrames.size())
                new_ndx = 0;
        }

        if (lastFrames[new_ndx])
        {
            CVBufferRelease(lastFrames[new_ndx]);
        }

        lastFrames[new_ndx] = frame;
        writeFrameNdx = new_ndx;

        //std::cout << "new frame: " << writeFrameNdx << std::endl;
    }

    // Reader side: true while the writer has produced a newer frame.
    bool hasNewFrame() const {
        return readFrameNdx != writeFrameNdx;
    }

    // Reader side: advances to the newest frame and returns it; ownership
    // stays with the ring buffer.
    CVBufferRef getLastFrame() {
        readFrameNdx = writeFrameNdx;
        // std::cout << "get frame: " << readFrameNdx << std::endl;
        return lastFrames[readFrameNdx];
    }
};
/** Constructs an unopened, invalid video stream.
 *  BUGFIX: _videoDuration, _waitForFrame and _framerate were left
 *  uninitialized — needsDispatching()/getLength()/getFrameRate() could read
 *  garbage before open() completed. */
OSXAVFoundationVideo::OSXAVFoundationVideo()
    : osgVideo::VideoImageStream()
    , _videoDuration(0)
    , _volume(1.0)
    , _fileOpened(false)
    , _waitForFrame(false)
    , _useCoreVideo(false)
    , _dimensionsChangedCallbackNeeded(false)
    , _framerate(0)
{
    _data = new Data();
    _status = INVALID;
    setOrigin(TOP_LEFT);
}
// Stops playback and frees the platform-specific data.
OSXAVFoundationVideo::~OSXAVFoundationVideo()
{
    quit();
    if (_data)
        delete _data;
}

// Starts (or resumes) playback and requests continuous background decoding.
void OSXAVFoundationVideo::play()
{
    if (_data->avplayer) {
        [_data->avplayer play];
        _status = PLAYING;
        setNeedsDispatching(RequestContinuousUpdate);
    }
}
// Sets the playback rate (0 pauses, 1 = normal speed) and adjusts background
// decoding to match.
void OSXAVFoundationVideo::setTimeMultiplier(double rate)
{
    if (_data->avplayer)
    {
        _data->avplayer.rate = rate;
        _status = (rate != 0.0) ? PLAYING : PAUSED;
        setNeedsDispatching(_status == PLAYING ? RequestContinuousUpdate: StopUpdate);
    }
}

// Current playback rate; 0 when no movie is open.
double OSXAVFoundationVideo::getTimeMultiplier() const
{
    return _data->avplayer ? _data->avplayer.rate : 0.0f;
}

// Pauses playback and stops background decoding.
void OSXAVFoundationVideo::pause()
{
    if (_data->avplayer) {
        [_data->avplayer pause];
        _status = PAUSED;
        setNeedsDispatching(StopUpdate);
    }
}
/** Releases the current movie's AVFoundation objects.
 *  BUGFIX: the original released avplayer first and then messaged it
 *  ([_data->avplayer currentItem]) to identify the notification object —
 *  a use-after-release. The observer is now removed BEFORE any release. */
void OSXAVFoundationVideo::clear()
{
    if (_data->delegate) {
        [[NSNotificationCenter defaultCenter] removeObserver: _data->delegate
            name:AVPlayerItemDidPlayToEndTimeNotification
            object:[_data->avplayer currentItem]
        ];
    }

    [_data->output release];
    [_data->avplayeritem release];
    [_data->avplayer release];
    [_data->delegate release];

    _data->output = NULL;
    _data->avplayer = NULL;
    _data->avplayeritem = NULL;
    _data->delegate = NULL;
}
// Stops playback. The wait flag is unused: there is no per-video thread to
// join, decode threads belong to the dispatcher.
void OSXAVFoundationVideo::quit(bool t)
{
    pause();
}

// Seeks with a 10 ms tolerance and requests one decoded frame at the new
// position so a paused movie still updates its image.
void OSXAVFoundationVideo::seek(double pos)
{
    // 600 is the conventional AV timescale (divides 24/25/30 fps evenly).
    static CMTime tolerance = CMTimeMakeWithSeconds(0.01, 600);
    if(_data->avplayer)
        [_data->avplayer seekToTime: CMTimeMakeWithSeconds(pos, 600) toleranceBefore: tolerance toleranceAfter: tolerance];
    requestNewFrame();
}

// Current playback position in seconds (0 when no movie is open).
double OSXAVFoundationVideo::getCurrentTime () const
{
    return _data->avplayer ? CMTimeGetSeconds([_data->avplayer currentTime]) : 0;
}
/** Opens a movie from a local path or an http/rtsp URL and builds the
 *  AVPlayer / AVPlayerItemVideoOutput pipeline. Leaves the movie paused
 *  with one frame requested. */
void OSXAVFoundationVideo::open(const std::string& filename)
{
    // Tear down any previously opened movie first.
    clear();

    _data->delegate = [[OSXAVFoundationVideoDelegate alloc] init];
    _data->delegate.video = this;

    NSURL* url(NULL);
    if (osgDB::containsServerAddress(filename))
    {
        url = [NSURL URLWithString: toNSString(filename)];
    }
    else
    {
        url = [NSURL fileURLWithPath: toNSString(filename)];
    }

    // BGRA output, OpenGL-compatible, tightly packed rows.
    _data->output = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:
        [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
        [NSNumber numberWithInteger:1], kCVPixelBufferBytesPerRowAlignmentKey,
        [NSNumber numberWithBool:YES], kCVPixelBufferOpenGLCompatibilityKey,
        nil]];
    if (_data->output)
    {
        _data->output.suppressesPlayerRendering = YES;
    }

    _data->avplayeritem = [[AVPlayerItem alloc] initWithURL: url];

    // BUGFIX: +playerWithPlayerItem: returns an autoreleased instance under
    // MRC, but clear() and ~Data send it -release — retain here so ownership
    // is balanced, otherwise the player is over-released.
    _data->avplayer = [[AVPlayer playerWithPlayerItem: _data->avplayeritem] retain];
    _data->avplayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;

    [[_data->avplayer currentItem] addOutput:_data->output];

    [[NSNotificationCenter defaultCenter] addObserver: _data->delegate
        selector:@selector(playerItemDidReachEnd:)
        name:AVPlayerItemDidPlayToEndTimeNotification
        object:[_data->avplayer currentItem]];

    // NOTE(review): for streamed assets the duration may still be indefinite
    // (NaN) at this point — confirm callers of getLength() tolerate that.
    _videoDuration = CMTimeGetSeconds([[_data->avplayer currentItem] duration]);

    // get the max size of the video-tracks
    NSArray* tracks = [_data->avplayeritem.asset tracksWithMediaType: AVMediaTypeVideo];

    // BUGFIX: size was uninitialized; an asset without video tracks left
    // _s/_t as garbage.
    CGSize size = CGSizeZero;
    for(unsigned int i=0; i < [tracks count]; ++i)
    {
        AVAssetTrack* track = [tracks objectAtIndex:i];
        size = track.naturalSize;
        _framerate = track.nominalFrameRate;
    }
    _s = size.width;
    _t = size.height;
    _r = 1;

    requestNewFrame();

    _status = PAUSED;
    _fileOpened = true;
}
// Returns the cached volume; AVPlayer itself is not queried.
float OSXAVFoundationVideo::getVolume() const
{
    return _volume;
}

// Applies the volume through the AVPlayer(MOAdditions) audio-mix category.
void OSXAVFoundationVideo::setVolume(float v)
{
    _volume = v;
    if (_data->avplayer)
        [_data->avplayer setVolume: v];
}

// Audio balance is not supported by this backend; always neutral.
float OSXAVFoundationVideo::getAudioBalance()
{
    return 0.0f;
}

void OSXAVFoundationVideo::setAudioBalance(float b)
{
    OSG_WARN << "OSXAVFoundationVideo: setAudioBalance not supported!" << std::endl;
}
// Pulls the next decoded frame from the AVPlayerItemVideoOutput; called from
// a dispatch-queue worker thread (or inline from update() when no dispatcher
// is attached). In CoreVideo mode the pixel buffer is turned into a GL
// texture via the texture cache; otherwise the BGRA buffer is kept for
// update() to copy.
void OSXAVFoundationVideo::decodeFrame()
{
    // std::cout << this << " decodeFrame: " << _waitForFrame << std::endl;

    if (!_fileOpened)
        return;

    NSAutoreleasePoolHelper helper;

    bool is_valid = (_data && (_data->avplayer.status != AVPlayerStatusFailed));
    if (!is_valid)
    {
        _waitForFrame = false;
        pause();
        OSG_WARN << "OSXAVFoundationVideo: " << toString([_data->avplayer.error localizedFailureReason]) << std::endl;
        // NOTE(review): execution still falls through and queries the output
        // below even after a player failure — confirm an early return is not
        // wanted here.
    }

    bool is_playing = is_valid && (getTimeMultiplier() != 0);

    CMTime outputItemTime = [_data->output itemTimeForHostTime:CACurrentMediaTime()];
    if (_waitForFrame || [_data->output hasNewPixelBufferForItemTime:outputItemTime])
    {
        CVPixelBufferRef newframe = [_data->output copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
        if (newframe)
        {
            if (isCoreVideoUsed())
            {
                CVPixelBufferLockBaseAddress(newframe, kCVPixelBufferLock_ReadOnly);
                CVOpenGLTextureRef texture = NULL;
                CVReturn err = CVOpenGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _data->coreVideoTextureCache, newframe, 0, &texture);
                if (err)
                {
                    OSG_WARN << "OSXAVFoundationVideo :: could not create texture from image, err: " << err << std::endl;
                }
                int w = CVPixelBufferGetWidth(newframe);
                int h = CVPixelBufferGetHeight(newframe);
                // Defer the dimension-changed callbacks to the next update()
                // on the main thread.
                _dimensionsChangedCallbackNeeded = (_s != w) || (_t != h);
                _s = w; _t = h; _r = 1;

                // The ring buffer takes ownership of the texture ref; the
                // pixel buffer itself is released here.
                _data->addFrame(texture);
                CVPixelBufferUnlockBaseAddress(newframe, kCVPixelBufferLock_ReadOnly);
                CVPixelBufferRelease(newframe);
            }
            else
            {
                // The ring buffer takes ownership of the copied pixel buffer.
                _data->addFrame(newframe);
            }
            _waitForFrame = false;
        }
    }

    _status = is_valid ? is_playing ? PLAYING : PAUSED : INVALID;
}
// Per-frame hook (update traversal): decodes inline when no dispatcher is
// attached, fires deferred dimension-changed callbacks in CoreVideo mode, and
// otherwise copies the newest BGRA buffer into this image.
void OSXAVFoundationVideo::update(osg::NodeVisitor *)
{
    if (!getVideoFrameDispatcher())
        decodeFrame();

    if (isCoreVideoUsed())
    {
        if (_dimensionsChangedCallbackNeeded)
            handleDimensionsChangedCallbacks();
        _dimensionsChangedCallbackNeeded = false;

        return;
    }

    if (_data->hasNewFrame())
    {
        CVPixelBufferRef newframe = _data->getLastFrame();

        CVPixelBufferLockBaseAddress(newframe,kCVPixelBufferLock_ReadOnly);

        size_t width = CVPixelBufferGetWidth(newframe);
        size_t height = CVPixelBufferGetHeight(newframe);

        // Get the base address of the pixel buffer
        void *baseAddress = CVPixelBufferGetBaseAddress(newframe);
        // NO_DELETE: pixel data stays owned by the Data ring buffer.
        // NOTE(review): the base address is only guaranteed valid while the
        // buffer is locked — confirm the GL upload happens before the ring
        // buffer recycles this slot.
        setImage(width, height, 1, GL_RGBA, GL_BGRA, GL_UNSIGNED_BYTE, (unsigned char*)baseAddress, NO_DELETE);
        // std::cout << this << " new frame: " << width << "x" << height << " " << baseAddress << std::endl;

        CVPixelBufferUnlockBaseAddress(newframe, kCVPixelBufferLock_ReadOnly);
    }
}
// A decode pass is wanted while playing continuously or while a single
// requested frame (after open()/seek()) is still outstanding.
bool OSXAVFoundationVideo::needsDispatching() const
{
    // std::cout << this << " needs dispatching: " << (_waitForFrame || getNeedsDispatching()) << std::endl;
    return _waitForFrame || getNeedsDispatching();
}

void OSXAVFoundationVideo::applyLoopingMode()
{
    // looping is handled by the delegate
}

// Asks the dispatcher for a one-shot decode and marks the frame as pending.
void OSXAVFoundationVideo::requestNewFrame()
{
    setNeedsDispatching(RequestSingleUpdate);
    _waitForFrame = true;
}
// Returns target/name/size of the most recently decoded CoreVideo texture.
// @return false when no frame has been decoded yet.
bool OSXAVFoundationVideo::getCurrentCoreVideoTexture(GLenum& target, GLint& name, int& width, int& height) const
{
    // Let the cache recycle textures that are no longer in flight.
    CVOpenGLTextureCacheFlush(_data->coreVideoTextureCache, 0);
    CVOpenGLTextureRef texture = _data->getLastFrame();
    if (texture)
    {
        target = CVOpenGLTextureGetTarget(texture);
        name = CVOpenGLTextureGetName(texture);
        width = _s;
        height = _t;
    }
    return (texture != NULL);
}

// Creates the CVOpenGLTextureCache for this context on first use; requires a
// Cocoa graphics window to reach the CGL context and pixel format.
void OSXAVFoundationVideo::lazyInitCoreVideoTextureCache(osg::State& state)
{
    if (_data->coreVideoTextureCache)
        return;

    osgViewer::GraphicsWindowCocoa* win = dynamic_cast<osgViewer::GraphicsWindowCocoa*>(state.getGraphicsContext());
    if (win)
    {
        NSOpenGLContext* context = win->getContext();
        CGLContextObj cglcntx = (CGLContextObj)[context CGLContextObj];
        CGLPixelFormatObj cglPixelFormat = (CGLPixelFormatObj)[ win->getPixelFormat() CGLPixelFormatObj];
        CVReturn cvRet = CVOpenGLTextureCacheCreate(kCFAllocatorDefault, 0, cglcntx, cglPixelFormat, 0, &_data->coreVideoTextureCache);
        if (cvRet != kCVReturnSuccess)
        {
            OSG_WARN << "OSXAVFoundationVideo : could not create texture cache :" << cvRet << std::endl;
        }
    }
}

View File

@@ -0,0 +1,152 @@
/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2008 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#include "OSXAVFoundationCoreVideoTexture.h"
#include "OSXAVFoundationVideo.h"

#include <CoreVideo/CoreVideo.h>
// Default: rectangle-texture target, no image attached yet.
OSXAVFoundationCoreVideoTexture::OSXAVFoundationCoreVideoTexture()
    : osg::Texture()
    , _textureTarget(GL_TEXTURE_RECTANGLE_EXT)
    , _textureWidth(0)
    , _textureHeight(0)
    , _inited(false)
{
}

// Convenience constructor attaching a video image straight away.
OSXAVFoundationCoreVideoTexture::OSXAVFoundationCoreVideoTexture(osg::Image* image)
    : osg::Texture()
    , _textureTarget(GL_TEXTURE_RECTANGLE_EXT)
    , _textureWidth(0)
    , _textureHeight(0)
    , _inited(false)
{
    setImage(image);
}

// Copy constructor; shares the image pointer (shallow copy).
OSXAVFoundationCoreVideoTexture::OSXAVFoundationCoreVideoTexture(const OSXAVFoundationCoreVideoTexture& text,const osg::CopyOp& copyop)
    : osg::Texture(text, copyop)
    , _textureTarget(text._textureTarget)
    , _textureWidth(text._textureWidth)
    , _textureHeight(text._textureHeight)
    , _inited(text._inited)
    , _image(text._image)
{
}

OSXAVFoundationCoreVideoTexture::~OSXAVFoundationCoreVideoTexture() {
}
// Standard osg::StateAttribute ordering (modelled on osg::Texture2D):
// compares by image, then texture parameters, then the cached dimensions.
int OSXAVFoundationCoreVideoTexture::compare(const osg::StateAttribute& sa) const {
    COMPARE_StateAttribute_Types(OSXAVFoundationCoreVideoTexture,sa)

    if (_image!=rhs._image) // smart pointer comparison.
    {
        if (_image.valid())
        {
            if (rhs._image.valid())
            {
                int result = _image->compare(*rhs._image);
                if (result!=0) return result;
            }
            else
            {
                return 1; // valid lhs._image is greater than null.
            }
        }
        else if (rhs._image.valid())
        {
            return -1; // valid rhs._image is greater than null.
        }
    }

    if (!_image && !rhs._image)
    {
        // no image attached to either Texture2D
        // but could these textures already be downloaded?
        // check the _textureObjectBuffer to see if they have been
        // downloaded

        int result = compareTextureObjects(rhs);
        if (result!=0) return result;
    }

    int result = compareTexture(rhs);
    if (result!=0) return result;

    // compare each paramter in turn against the rhs.

#if 1
    // Only compare dimensions when both sides have been resolved (non-zero).
    if (_textureWidth != 0 && rhs._textureWidth != 0)
    {
        COMPARE_StateAttribute_Parameter(_textureWidth)
    }
    if (_textureHeight != 0 && rhs._textureHeight != 0)
    {
        COMPARE_StateAttribute_Parameter(_textureHeight)
    }
#endif
    return 0; // passed all the above comparison macro's, must be equal.
}
// Attaches an image: manages the update callback needed by streaming images
// and switches OSXAVFoundationVideo instances into CoreVideo mode.
void OSXAVFoundationCoreVideoTexture::setImage(osg::Image* image)
{
    if (_image == image) return;

    // Detach the previous streaming image's per-frame update callback.
    if (_image.valid() && _image->requiresUpdateCall())
    {
        setUpdateCallback(0);
        setDataVariance(osg::Object::STATIC);
    }

    _image = image;
    _modifiedCount.setAllElementsTo(0);

    if (_image.valid() && _image->requiresUpdateCall())
    {
        setUpdateCallback(new osg::Image::UpdateCallback());
        setDataVariance(osg::Object::DYNAMIC);
    }

    // Tell the video to decode into GL textures instead of image data.
    OSXAVFoundationVideo* m = dynamic_cast<OSXAVFoundationVideo*>(_image.get());
    if (m)
        m->setUseCoreVideo(true);
}
// Binds the most recently decoded CoreVideo GL texture. No texture data is
// uploaded here — the video decodes straight into GL textures. Images that
// are not CoreVideo-backed are silently ignored (nothing gets bound).
void OSXAVFoundationCoreVideoTexture::apply(osg::State& state) const
{
    if (!_image.valid())
        return;

    OSXAVFoundationVideo* m = dynamic_cast<OSXAVFoundationVideo*>(_image.get());
    if ((m) && (m->isCoreVideoUsed()))
    {
        m->lazyInitCoreVideoTextureCache(state);

        GLint texture_name;
        if (m->getCurrentCoreVideoTexture(_textureTarget, texture_name, _textureWidth, _textureHeight))
        {
            glBindTexture(_textureTarget, texture_name);
        }
    }
}

View File

@@ -0,0 +1,76 @@
/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2008 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#pragma once
#include <osg/Texture>
/** osg::Texture that binds the GL textures produced by an
 *  OSXAVFoundationVideo in CoreVideo mode, bypassing the normal
 *  osg::Image upload path. */
class OSXAVFoundationCoreVideoTexture : public osg::Texture {

public:
    OSXAVFoundationCoreVideoTexture();

    /// Attaches the image immediately; see setImage().
    OSXAVFoundationCoreVideoTexture(osg::Image* image);

    OSXAVFoundationCoreVideoTexture(const OSXAVFoundationCoreVideoTexture& text,const osg::CopyOp& copyop=osg::CopyOp::SHALLOW_COPY);

    META_StateAttribute( , OSXAVFoundationCoreVideoTexture, TEXTURE);

    virtual int compare(const osg::StateAttribute& rhs) const;

    /// Target is mutable: apply() adopts whatever target CoreVideo hands out.
    virtual GLenum getTextureTarget() const { return _textureTarget; }

    virtual void setImage(unsigned int, osg::Image* image) { setImage(image); }

    void setImage(osg::Image* image);

    osg::Image* getImage() { return _image.get(); }

    const osg::Image* getImage() const { return _image.get(); }

    virtual osg::Image* getImage(unsigned int) { return _image.get(); }

    virtual const osg::Image* getImage(unsigned int) const { return _image.get(); }

    virtual unsigned int getNumImages() const { return 1; }

    virtual int getTextureWidth() const { return _textureWidth; }
    virtual int getTextureHeight() const { return _textureHeight; }
    virtual int getTextureDepth() const { return 1; }

    virtual void apply(osg::State& state) const;

    /// Mipmaps are never generated for CoreVideo textures.
    virtual void allocateMipmap(osg::State& state) const {}

    inline unsigned int& getModifiedCount(unsigned int contextID) const
    {
        return _modifiedCount[contextID];
    }

protected:
    virtual void computeInternalFormat() const {}

    virtual ~OSXAVFoundationCoreVideoTexture();

    // mutable: updated from const apply() with the current frame's data.
    mutable GLenum _textureTarget;
    mutable int _textureWidth;
    mutable int _textureHeight;
    bool _inited;

    osg::ref_ptr<osg::Image> _image;

    typedef osg::buffered_value<unsigned int> ImageModifiedCount;
    mutable ImageModifiedCount _modifiedCount;
};

View File

@@ -0,0 +1,117 @@
#include <osg/ImageStream>
#include <osg/Notify>
#include <osg/Geode>
#include <osgDB/Registry>
#include <osgDB/FileNameUtils>
#include <osgDB/FileUtils>
#include "OSXAVFoundationVideo.h"
#include "OSXAVFoundationCoreVideoTexture.h"
/** OSG plugin reader for movies played back through AVFoundation.
 *  readImage() returns an OSXAVFoundationVideo image stream; readObject()
 *  optionally wraps the stream in an OSXAVFoundationCoreVideoTexture so
 *  decoded frames are bound as GL textures directly. */
class ReaderWriterAVFoundation : public osgDB::ReaderWriter
{
    public:

        ReaderWriterAVFoundation()
        {
            supportsExtension("mov","Quicktime movie format");
            supportsExtension("mpg","Mpeg movie format");
            supportsExtension("mp4","Mpeg movie format");
            supportsExtension("m4v","Mpeg movie format");
            supportsExtension("mpeg","Mpeg movie format");
            supportsExtension("avfoundation","AVFoundation movie format");

            supportsProtocol("http", "streaming media per http");
            supportsProtocol("rtsp", "streaming media per rtsp");
        }

        /// Accepts exactly the extensions registered in the constructor.
        /// BUGFIX: the original tested "mpv" here although the constructor
        /// registers "m4v" — .m4v files were advertised but then refused.
        virtual bool acceptsExtension(const std::string& extension) const
        {
            return
                osgDB::equalCaseInsensitive(extension,"mov") ||
                osgDB::equalCaseInsensitive(extension,"mpg") ||
                osgDB::equalCaseInsensitive(extension,"mp4") ||
                osgDB::equalCaseInsensitive(extension,"m4v") ||
                osgDB::equalCaseInsensitive(extension,"mpeg")||
                osgDB::equalCaseInsensitive(extension,"avfoundation");
        }

        virtual ~ReaderWriterAVFoundation()
        {
            OSG_INFO<<"~ReaderWriterAVFoundation()"<<std::endl;
        }

        virtual const char* className() const { return "AVFoundation ImageStream Reader"; }

        /// Opens a movie file/URL and returns it as an osg::Image stream.
        virtual ReadResult readImage(const std::string& file, const osgDB::ReaderWriter::Options* options) const
        {
            std::string ext = osgDB::getLowerCaseFileExtension(file);
            if (!acceptsExtension(ext)) return ReadResult::FILE_NOT_HANDLED;

            // The pseudo-extension ".avfoundation" only selects this plugin.
            std::string fileName(file);
            if (ext=="avfoundation")
            {
                fileName = osgDB::getNameLessExtension(fileName);
                OSG_INFO<<"AVFoundation stripped filename = "<<fileName<<std::endl;
            }

            if (!osgDB::containsServerAddress(fileName))
            {
                fileName = osgDB::findDataFile( fileName, options );
                if (fileName.empty()) return ReadResult::FILE_NOT_FOUND;
            }

            OSG_INFO<<"ReaderWriterAVFoundation::readImage "<< fileName<< std::endl;

            osg::ref_ptr<OSXAVFoundationVideo> video = new OSXAVFoundationVideo();

            bool disable_multi_threaded_frame_dispatching = options ? (options->getPluginStringData("disableMultiThreadedFrameDispatching") == "true") : false;
            // Core video is currently forced off for readImage() results.
            bool disable_core_video = true; // options ? (options->getPluginStringData("disableCoreVideo") == "true") : false;

            OSG_INFO << "disableMultiThreadedFrameDispatching: " << disable_multi_threaded_frame_dispatching << std::endl;
            OSG_INFO << "disableCoreVideo : " << disable_core_video << std::endl;

            if (!options
                || (!disable_multi_threaded_frame_dispatching
                    && disable_core_video))
            {
                // Shared dispatcher for all videos opened via this plugin.
                // NOTE(review): initialization of this function-local static
                // is not synchronized pre-C++11 — confirm readImage is only
                // called serially, or add a mutex.
                static osg::ref_ptr<osgVideo::VideoFrameDispatcher> video_frame_dispatcher(NULL);
                if (!video_frame_dispatcher) {
                    std::string num_threads_str = options ? options->getPluginStringData("numFrameDispatchThreads") : "0";
                    video_frame_dispatcher = new osgVideo::VideoFrameDispatcher(atoi(num_threads_str.c_str()));
                }
                video_frame_dispatcher->addVideo(video);
            }

            video->open(fileName);
            return video->valid() ? video.release() : NULL;
        }

        /// Like readImage(), but wraps the stream in a CoreVideo texture
        /// unless the "disableCoreVideo" option is present.
        virtual ReadResult readObject (const std::string &file, const osgDB::ReaderWriter::Options* options) const
        {
            ReadResult rr = readImage(file, options);
            if (!rr.validImage())
                return rr;

            // NOTE(review): any non-empty value disables core video here,
            // while readImage compares against "true" — unify the convention.
            bool use_core_video = true;
            if (options && !options->getPluginStringData("disableCoreVideo").empty())
                use_core_video = false;

            osg::ref_ptr<OSXAVFoundationVideo> video = dynamic_cast<OSXAVFoundationVideo*>(rr.getImage());
            if (!video || !use_core_video)
                return rr;

            osg::ref_ptr<OSXAVFoundationCoreVideoTexture> texture = new OSXAVFoundationCoreVideoTexture(video);
            return texture.release();
        }

    protected:
};
// now register with Registry to instantiate the above
// reader/writer.
REGISTER_OSGPLUGIN(AVFoundation, ReaderWriterAVFoundation)