2.8 branch: Add up-to-date ffmpeg plugin. This commit merges the following revisions from trunk: 9910, 9912, 9933, 9948, 9960, 9965, 9968-9969, 9990, 10030, 10053, 10082, 10161, 10391-10392, 10408, 10414, 10422, 10538, 10809, 10851, 10892, 10925, 10961, 11165, and 11177.

This commit is contained in:
Paul MARTZ
2010-03-19 15:56:12 +00:00
parent 4563195a18
commit 1163ebb25b
24 changed files with 752 additions and 257 deletions

View File

@@ -1,44 +1,165 @@
# Locate ffmpeg
# This module defines
# FFMPEG_LIBRARIES
# FFMPEG_FOUND, if false, do not try to link to ffmpeg
# FFMPEG_FOUND, if false, do not try to link to ffmpeg
# FFMPEG_INCLUDE_DIR, where to find the headers
#
# $FFMPEG_DIR is an environment variable that would
# correspond to the ./configure --prefix=$FFMPEG_DIR
#
# Created by Robert Osfield.
# Created by Robert Osfield.
#use pkg-config to find various modes
INCLUDE(FindPkgConfig OPTIONAL)
IF(PKG_CONFIG_FOUND)
#In ffmpeg code, old version use "#include <header.h>" and newer use "#include <libname/header.h>"
#In OSG ffmpeg plugin, we use "#include <header.h>" for compatibility with old version of ffmpeg
INCLUDE(FindPkgConfig)
#We have to search the path which contains header.h (useful for old versions)
#and the path which contains libname/header.h (useful for new versions)
pkg_check_modules(FFMPEG_LIBAVFORMAT libavformat)
pkg_check_modules(FFMPEG_LIBAVDEVICE libavdevice)
pkg_check_modules(FFMPEG_LIBAVCODEC libavcodec)
pkg_check_modules(FFMPEG_LIBAVUTIL libavutil)
pkg_check_modules(FFMPEG_LIBSWSCALE libswscale)
#Then we need to include ${FFMPEG_libname_INCLUDE_DIRS} (in the old-version case, used by the ffmpeg headers and osg plugin code)
# (in the new-version case, used by the ffmpeg headers)
#and ${FFMPEG_libname_INCLUDE_DIRS/libname} (in the new-version case, used by the osg plugin code)
ENDIF(PKG_CONFIG_FOUND)
# Macro to find header and lib directories
# example: FFMPEG_FIND(AVFORMAT avformat avformat.h)
# Locate the header and library of a single FFmpeg component.
# Example: FFMPEG_FIND(AVFORMAT avformat avformat.h)
# Defines FFMPEG_<varname>_INCLUDE_DIRS and FFMPEG_<varname>_LIBRARIES,
# and sets FFMPEG_<varname>_FOUND when both were located.
macro(FFMPEG_FIND varname shortname headername)
    # Newer FFmpeg releases namespace their headers as lib<name>/<header>.h,
    # so look for that layout first.
    find_path(FFMPEG_${varname}_INCLUDE_DIRS lib${shortname}/${headername}
        PATHS
            ${FFMPEG_ROOT}/include
            $ENV{FFMPEG_DIR}/include
            ~/Library/Frameworks
            /Library/Frameworks
            /usr/local/include
            /usr/include
            /sw/include        # Fink
            /opt/local/include # DarwinPorts
            /opt/csw/include   # Blastwave
            /opt/include
            /usr/freeware/include
        PATH_SUFFIXES ffmpeg
        DOC "Location of FFMPEG Headers"
    )
    # Older releases placed the header directly in the include directory
    # (optionally under an "ffmpeg" suffix).  This second search only runs
    # the lookup again if the first one left the cache entry at NOTFOUND.
    find_path(FFMPEG_${varname}_INCLUDE_DIRS ${headername}
        PATHS
            ${FFMPEG_ROOT}/include
            $ENV{FFMPEG_DIR}/include
            ~/Library/Frameworks
            /Library/Frameworks
            /usr/local/include
            /usr/include
            /sw/include        # Fink
            /opt/local/include # DarwinPorts
            /opt/csw/include   # Blastwave
            /opt/include
            /usr/freeware/include
        PATH_SUFFIXES ffmpeg
        DOC "Location of FFMPEG Headers"
    )
    # Locate the matching library.
    find_library(FFMPEG_${varname}_LIBRARIES
        NAMES ${shortname}
        PATHS
            ${FFMPEG_ROOT}/lib
            $ENV{FFMPEG_DIR}/lib
            ~/Library/Frameworks
            /Library/Frameworks
            /usr/local/lib
            /usr/local/lib64
            /usr/lib
            /usr/lib64
            /sw/lib
            /opt/local/lib
            /opt/csw/lib
            /opt/lib
            /usr/freeware/lib64
        DOC "Location of FFMPEG Libraries"
    )
    # The component counts as found only when both header and library exist.
    if(FFMPEG_${varname}_LIBRARIES AND FFMPEG_${varname}_INCLUDE_DIRS)
        set(FFMPEG_${varname}_FOUND 1)
    endif()
endmacro()
# Cache the FFmpeg install prefix, seeded from the FFMPEG_DIR environment
# variable (the prefix passed to FFmpeg's ./configure --prefix=$FFMPEG_DIR).
set(FFMPEG_ROOT "$ENV{FFMPEG_DIR}" CACHE PATH "Location of FFMPEG")

# FFmpeg needs stdint.h.  On Windows the compiler may not ship one, so look
# for the copy bundled with FFmpeg; elsewhere the system provides it, and the
# check is skipped.  STDINT_OK records the outcome for the found-check below.
if(WIN32)
    find_path(FFMPEG_STDINT_INCLUDE_DIR stdint.h
        PATHS
            ${FFMPEG_ROOT}/include
            $ENV{FFMPEG_DIR}/include
            ~/Library/Frameworks
            /Library/Frameworks
            /usr/local/include
            /usr/include
            /sw/include        # Fink
            /opt/local/include # DarwinPorts
            /opt/csw/include   # Blastwave
            /opt/include
            /usr/freeware/include
        PATH_SUFFIXES ffmpeg
        DOC "Location of FFMPEG stdint.h Header"
    )
    if(FFMPEG_STDINT_INCLUDE_DIR)
        set(STDINT_OK TRUE)
    endif()
else()
    set(STDINT_OK TRUE)
endif()
FFMPEG_FIND(LIBAVFORMAT avformat avformat.h)
FFMPEG_FIND(LIBAVDEVICE avdevice avdevice.h)
FFMPEG_FIND(LIBAVCODEC avcodec avcodec.h)
FFMPEG_FIND(LIBAVUTIL avutil avutil.h)
FFMPEG_FIND(LIBSWSCALE swscale swscale.h) # not sure about the header to look for here.
SET(FFMPEG_FOUND "NO")
IF (FFMPEG_LIBAVFORMAT_FOUND AND FFMPEG_LIBAVDEVICE_FOUND AND FFMPEG_LIBAVCODEC_FOUND AND FFMPEG_LIBAVUTIL_FOUND)
# Note we don't check FFMPEG_LIBSWSCALE_FOUND here, it's optional.
IF (FFMPEG_LIBAVFORMAT_FOUND AND FFMPEG_LIBAVDEVICE_FOUND AND FFMPEG_LIBAVCODEC_FOUND AND FFMPEG_LIBAVUTIL_FOUND AND STDINT_OK)
SET(FFMPEG_FOUND "YES")
SET(FFMPEG_INCLUDE_DIRS ${FFMPEG_LIBAVFORMAT_INCLUDE_DIRS})
SET(FFMPEG_INCLUDE_DIRS
${FFMPEG_LIBAVFORMAT_INCLUDE_DIRS} ${FFMPEG_LIBAVFORMAT_INCLUDE_DIRS}/libavformat
${FFMPEG_LIBAVDEVICE_INCLUDE_DIRS} ${FFMPEG_LIBAVDEVICE_INCLUDE_DIRS}/libavdevice
${FFMPEG_LIBAVCODEC_INCLUDE_DIRS} ${FFMPEG_LIBAVCODEC_INCLUDE_DIRS}/libavcodec
${FFMPEG_LIBAVUTIL_INCLUDE_DIRS} ${FFMPEG_LIBAVUTIL_INCLUDE_DIRS}/libavutil
)
IF (${FFMPEG_STDINT_INCLUDE_DIR})
SET(FFMPEG_INCLUDE_DIRS
${FFMPEG_INCLUDE_DIRS}
${FFMPEG_STDINT_INCLUDE_DIR}
${FFMPEG_STDINT_INCLUDE_DIR}/libavformat
${FFMPEG_STDINT_INCLUDE_DIR}/libavdevice
${FFMPEG_STDINT_INCLUDE_DIR}/libavcodec
${FFMPEG_STDINT_INCLUDE_DIR}/libavutil
)
ENDIF()
SET(FFMPEG_LIBRARY_DIRS ${FFMPEG_LIBAVFORMAT_LIBRARY_DIRS})
# Note we don't add FFMPEG_LIBSWSCALE_LIBRARIES here, it will be added if found later.
SET(FFMPEG_LIBRARIES
${FFMPEG_LIBAVFORMAT_LIBRARIES}
${FFMPEG_LIBAVDEVICE_LIBRARIES}
${FFMPEG_LIBAVCODEC_LIBRARIES}
${FFMPEG_LIBAVUTIL_LIBRARIES})
ENDIF(FFMPEG_LIBAVFORMAT_FOUND AND FFMPEG_LIBAVDEVICE_FOUND AND FFMPEG_LIBAVCODEC_FOUND AND FFMPEG_LIBAVUTIL_FOUND)
ELSE ()
# MESSAGE(STATUS "Could not find FFMPEG")
ENDIF()

View File

@@ -119,6 +119,7 @@ protected:
bool _playToggle;
bool _trackMouse;
ImageStreamList _imageStreamList;
unsigned int _seekIncr;
};
@@ -239,6 +240,23 @@ bool MovieEventHandler::handle(const osgGA::GUIEventAdapter& ea,osgGA::GUIAction
}
return true;
}
else if (ea.getKey()=='>')
{
for(ImageStreamList::iterator itr=_imageStreamList.begin();
itr!=_imageStreamList.end();
++itr)
{
std::cout<<"Seeking"<<std::endl;
if(_seekIncr > 3) _seekIncr = 0;
double length = (*itr)->getLength();
double t_pos = (length/4.0f)*_seekIncr;
//(*itr)->rewind();
(*itr)->seek(t_pos);
(*itr)->play();
_seekIncr++;
}
return true;
}
else if (ea.getKey()=='L')
{
for(ImageStreamList::iterator itr=_imageStreamList.begin();
@@ -272,6 +290,7 @@ void MovieEventHandler::getUsage(osg::ApplicationUsage& usage) const
usage.addKeyboardMouseBinding("p","Play/Pause movie");
usage.addKeyboardMouseBinding("r","Restart movie");
usage.addKeyboardMouseBinding("l","Toggle looping of movie");
usage.addKeyboardMouseBinding(">","Advance the movie using seek");
}
@@ -325,7 +344,8 @@ class CustomAudioSink : public osg::AudioSink
public:
CustomAudioSink(osg::AudioStream* audioStream):
_playing(false),
_started(false),
_paused(false),
_audioStream(audioStream) {}
virtual void startPlaying()
@@ -340,7 +360,9 @@ class CustomAudioSink : public osg::AudioSink
}
virtual bool playing() const { return _playing; }
bool _playing;
bool _started;
bool _paused;
osg::observer_ptr<osg::AudioStream> _audioStream;
};

View File

@@ -25,14 +25,20 @@ public:
AudioSink();
virtual void startPlaying() = 0;
virtual const char * libraryName() const { return "osg"; }
virtual const char * className() const { return "AudioSinkInterface"; }
virtual void play() = 0;
virtual void pause() = 0;
virtual void stop() = 0;
virtual bool playing() const = 0;
virtual double getDelay() const { return _delay; }
virtual void setDelay(const double delay) { _delay = delay; }
virtual const char * libraryName() const { return "osgFFmpeg"; }
virtual const char * className() const { return "AudioSinkInterface"; }
virtual void setVolume(float) {}
virtual float getVolume() const { return 0.0f; }
private:
@@ -59,7 +65,6 @@ class OSG_EXPORT AudioStream : public osg::Object
virtual void consumeAudioBuffer(void * const buffer, const size_t size) = 0;
virtual bool audioStream() const = 0;
virtual int audioFrequency() const = 0;
virtual int audioNbChannels() const = 0;

View File

@@ -185,6 +185,9 @@ class OSG_EXPORT Image : public Object
void setPacking(unsigned int packing) { _packing = packing; }
inline unsigned int getPacking() const { return _packing; }
inline void setPixelAspectRatio(float pixelAspectRatio) { _pixelAspectRatio = pixelAspectRatio; }
inline float getPixelAspectRatio() const { return _pixelAspectRatio; }
/** Return the number of bits required for each pixel. */
inline unsigned int getPixelSizeInBits() const { return computePixelSizeInBits(_pixelFormat,_dataType); }
@@ -346,6 +349,7 @@ class OSG_EXPORT Image : public Object
GLenum _pixelFormat;
GLenum _dataType;
unsigned int _packing;
float _pixelAspectRatio;
AllocationMode _allocationMode;
unsigned char* _data;

View File

@@ -78,7 +78,8 @@ class OSG_EXPORT ImageStream : public Image
virtual double getLength() const { return 0.0; }
virtual double getFrameRate() const { return 0.0; }
virtual void setReferenceTime(double) {}
virtual double getReferenceTime() const { return 0.0; }
@@ -88,7 +89,6 @@ class OSG_EXPORT ImageStream : public Image
virtual void setVolume(float) {}
virtual float getVolume() const { return 0.0f; }
typedef std::vector< osg::ref_ptr<osg::AudioStream> > AudioStreams;
void setAudioStreams(const AudioStreams& asl) { _audioStreams = asl; }
AudioStreams& getAudioStreams() { return _audioStreams; }

View File

@@ -449,6 +449,12 @@ class DrawElements : public PrimitiveSet
{
if (_ebo.valid()) _ebo->releaseGLObjects(state);
}
virtual void reserveElements(unsigned int numIndices) = 0;
virtual void setElement(unsigned int, unsigned int) = 0;
virtual unsigned int getElement(unsigned int) = 0;
virtual void addElement(unsigned int) = 0;
protected:
@@ -526,6 +532,11 @@ class OSG_EXPORT DrawElementsUByte : public DrawElements, public VectorGLubyte
_rangeModifiedCount = _modifiedCount;
}
virtual void reserveElements(unsigned int numIndices) { reserve(numIndices); }
virtual void setElement(unsigned int i, unsigned int v) { (*this)[i] = v; }
virtual unsigned int getElement(unsigned int i) { return (*this)[i]; }
virtual void addElement(unsigned int v) { push_back(GLubyte(v)); }
protected:
virtual ~DrawElementsUByte();
@@ -601,6 +612,11 @@ class OSG_EXPORT DrawElementsUShort : public DrawElements, public VectorGLushort
_rangeModifiedCount = _modifiedCount;
}
virtual void reserveElements(unsigned int numIndices) { reserve(numIndices); }
virtual void setElement(unsigned int i, unsigned int v) { (*this)[i] = v; }
virtual unsigned int getElement(unsigned int i) { return (*this)[i]; }
virtual void addElement(unsigned int v) { push_back(GLushort(v)); }
protected:
virtual ~DrawElementsUShort();
@@ -676,6 +692,11 @@ class OSG_EXPORT DrawElementsUInt : public DrawElements, public VectorGLuint
_rangeModifiedCount = _modifiedCount;
}
virtual void reserveElements(unsigned int numIndices) { reserve(numIndices); }
virtual void setElement(unsigned int i, unsigned int v) { (*this)[i] = v; }
virtual unsigned int getElement(unsigned int i) { return (*this)[i]; }
virtual void addElement(unsigned int v) { push_back(GLuint(v)); }
protected:
virtual ~DrawElementsUInt();

View File

@@ -33,23 +33,21 @@ using namespace osg;
using namespace std;
Image::Image()
:Object(true)
:Object(true),
_fileName(""),
_writeHint(NO_PREFERENCE),
_origin(BOTTOM_LEFT),
_s(0), _t(0), _r(0),
_internalTextureFormat(0),
_pixelFormat(0),
_dataType(0),
_packing(4),
_pixelAspectRatio(1.0),
_allocationMode(USE_NEW_DELETE),
_data(0L),
_modifiedCount(0)
{
setDataVariance(STATIC);
_fileName = "";
_writeHint = NO_PREFERENCE;
_origin = BOTTOM_LEFT;
_s = _t = _r = 0;
_internalTextureFormat = 0;
_pixelFormat = (unsigned int)0;
_dataType = (unsigned int)0;
_packing = 4;
_allocationMode = USE_NEW_DELETE;
_data = (unsigned char *)0L;
_modifiedCount = 0;
}
Image::Image(const Image& image,const CopyOp& copyop):
@@ -62,6 +60,7 @@ Image::Image(const Image& image,const CopyOp& copyop):
_pixelFormat(image._pixelFormat),
_dataType(image._dataType),
_packing(image._packing),
_pixelAspectRatio(image._pixelAspectRatio),
_data(0L),
_modifiedCount(image._modifiedCount),
_mipmapData(image._mipmapData)

View File

@@ -1,34 +1,16 @@
# INCLUDE_DIRECTORIES( ${FFMPEG_INCLUDE_DIRS} )
INCLUDE_DIRECTORIES( ${FFMPEG_INCLUDE_DIRS} )
LINK_DIRECTORIES(${FFMPEG_LIBRARY_DIRS})
SET(TARGET_EXTERNAL_LIBRARIES ${FFMPEG_LIBRARIES} )
IF (FFMPEG_LIBSWSCALE_FOUND)
INCLUDE_DIRECTORIES(
${FFMPEG_LIBAVFORMAT_INCLUDE_DIRS} ${FFMPEG_LIBAVFORMAT_INCLUDE_DIRS}/libavformat ${FFMPEG_LIBAVFORMAT_INCLUDE_DIRS}/ffmpeg
${FFMPEG_LIBAVDEVICE_INCLUDE_DIRS} ${FFMPEG_LIBAVDEVICE_INCLUDE_DIRS}/libavdevice ${FFMPEG_LIBAVDEVICE_INCLUDE_DIRS}/ffmpeg
${FFMPEG_LIBAVCODEC_INCLUDE_DIRS} ${FFMPEG_LIBAVCODEC_INCLUDE_DIRS}/libavcodec ${FFMPEG_LIBAVCODEC_INCLUDE_DIRS}/ffmpeg
${FFMPEG_LIBAVUTIL_INCLUDE_DIRS} ${FFMPEG_LIBAVUTIL_INCLUDE_DIRS}/libavcodec ${FFMPEG_LIBAVUTIL_INCLUDE_DIRS}/ffmpeg
${FFMPEG_LIBSWSCALE_INCLUDE_DIRS} ${FFMPEG_LIBSWSCALE_INCLUDE_DIRS}/libswscale ${FFMPEG_LIBSWSCALE_INCLUDE_DIRS}/ffmpeg
)
IF(FFMPEG_LIBSWSCALE_FOUND)
INCLUDE_DIRECTORIES( ${FFMPEG_LIBSWSCALE_INCLUDE_DIRS} ${FFMPEG_LIBSWSCALE_INCLUDE_DIRS}/libswscale )
ADD_DEFINITIONS(-DUSE_SWSCALE)
LINK_DIRECTORIES(${FFMPEG_LIBRARY_DIRS})
SET(TARGET_EXTERNAL_LIBRARIES ${FFMPEG_LIBRARIES} ${FFMPEG_LIBSWSCALE_LIBRARIES})
ELSE(FFMPEG_LIBSWSCALE_FOUND)
INCLUDE_DIRECTORIES(
${FFMPEG_LIBAVFORMAT_INCLUDE_DIRS} ${FFMPEG_LIBAVFORMAT_INCLUDE_DIRS}/libavformat ${FFMPEG_LIBAVFORMAT_INCLUDE_DIRS}/ffmpeg
${FFMPEG_LIBAVDEVICE_INCLUDE_DIRS} ${FFMPEG_LIBAVDEVICE_INCLUDE_DIRS}/libavdevice ${FFMPEG_LIBAVDEVICE_INCLUDE_DIRS}/ffmpeg
${FFMPEG_LIBAVCODEC_INCLUDE_DIRS} ${FFMPEG_LIBAVCODEC_INCLUDE_DIRS}/libavcodec ${FFMPEG_LIBAVCODEC_INCLUDE_DIRS}/ffmpeg
${FFMPEG_LIBAVUTIL_INCLUDE_DIRS} ${FFMPEG_LIBAVUTIL_INCLUDE_DIRS}/libavcodec ${FFMPEG_LIBAVUTIL_INCLUDE_DIRS}/ffmpeg
)
LINK_DIRECTORIES(${FFMPEG_LIBRARY_DIRS})
SET(TARGET_EXTERNAL_LIBRARIES ${FFMPEG_LIBRARIES} )
ENDIF()
# MESSAGE("FFMPEG_LIBAVFORMAT_INCLUDE_DIRS = " ${FFMPEG_LIBAVFORMAT_INCLUDE_DIRS} )
@@ -60,6 +42,10 @@ SET(TARGET_H
MessageQueue.hpp
)
IF(CMAKE_COMPILER_IS_GNUCXX)
# Remove -pedantic flag as it barfs on ffmpeg headers
STRING(REGEX REPLACE "-pedantic" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
ENDIF()
#### end var setup ###

View File

@@ -51,12 +51,6 @@ double FFmpegAudioStream::duration() const
}
bool FFmpegAudioStream::audioStream() const
{
return m_decoder->audio_decoder().validContext();
}
int FFmpegAudioStream::audioFrequency() const
{

View File

@@ -21,7 +21,6 @@ namespace osgFFmpeg
void consumeAudioBuffer(void * const buffer, const size_t size);
bool audioStream() const;
int audioFrequency() const;
int audioNbChannels() const;
osg::AudioStream::SampleFormat audioSampleFormat() const;

View File

@@ -30,6 +30,8 @@ namespace
FFmpegClocks::FFmpegClocks() :
m_video_clock(0),
m_start_time(0),
m_pause_time(0),
m_seek_time(0),
m_last_frame_delay(0.040),
m_last_frame_pts(0),
m_last_actual_delay(0),
@@ -37,7 +39,8 @@ FFmpegClocks::FFmpegClocks() :
m_audio_buffer_end_pts(0),
m_audio_delay(0.0),
m_audio_disabled(false),
m_rewind(false)
m_paused(false),
m_last_current_time(0.0)
{
}
@@ -55,40 +58,53 @@ void FFmpegClocks::reset(const double start_time)
m_last_frame_pts = start_time - m_last_frame_delay;
m_frame_time = start_time;
m_pause_time = 0;
m_seek_time = 0;
m_audio_buffer_end_pts = start_time;
m_audio_timer.setStartTick();
}
void FFmpegClocks::rewindAudio()
void FFmpegClocks::pause(bool pause)
{
ScopedLock lock(m_mutex);
m_audio_buffer_end_pts = m_start_time;
m_audio_timer.setStartTick();
m_rewind = ! m_rewind;
if(pause)
m_paused = true;
else
{
m_paused = false;
if(!m_audio_disabled) m_audio_timer.setStartTick();
}
}
void FFmpegClocks::rewindVideo()
void FFmpegClocks::rewind()
{
ScopedLock lock(m_mutex);
m_pause_time = 0;
m_seek_time = 0;
m_audio_buffer_end_pts = m_start_time;
m_audio_timer.setStartTick();
m_last_frame_delay = 0.040;
m_frame_time = m_start_time;
if (m_audio_disabled)
return;
m_video_clock = m_start_time;
m_last_frame_delay = 0.040;
m_last_frame_pts = m_start_time - m_last_frame_delay;
m_frame_time = m_start_time;
m_rewind = ! m_rewind;
m_video_clock = m_start_time;
}
void FFmpegClocks::seek(double seek_time)
{
ScopedLock lock(m_mutex);
m_video_clock = seek_time;
m_last_frame_delay = 0.040;
m_frame_time = seek_time;
}
void FFmpegClocks::audioSetBufferEndPts(const double pts)
@@ -167,8 +183,13 @@ double FFmpegClocks::videoRefreshSchedule(const double pts)
// If incorrect delay, use previous one
if (delay <= 0.0 || delay >= 1.0)
{
delay = m_last_frame_delay;
if(!m_audio_disabled) m_frame_time = pts - delay;
}
// Save for next time
m_last_frame_delay = delay;
@@ -181,9 +202,7 @@ double FFmpegClocks::videoRefreshSchedule(const double pts)
m_frame_time += delay;
const double audio_time = getAudioTime();
const double actual_delay = (! m_rewind) ?
clamp(m_frame_time - audio_time, -0.5*delay, 2.5*delay) :
m_last_actual_delay; // when rewinding audio or video (but the other has yet to be), get the last used delay
const double actual_delay = clamp(m_frame_time - audio_time, -0.5*delay, 2.5*delay);
//m_frame_time += delay;
@@ -205,13 +224,33 @@ double FFmpegClocks::getStartTime() const
return m_start_time;
}
double FFmpegClocks::getAudioTime() const
void FFmpegClocks::setPauseTime(double pause_time)
{
return m_audio_buffer_end_pts + m_audio_timer.time_s() - m_audio_delay;
m_pause_time += pause_time;
}
void FFmpegClocks::setSeekTime(double seek_time)
{
m_seek_time += getAudioTime() - seek_time;
}
double FFmpegClocks::getAudioTime() const
{
if(m_audio_disabled)
return m_audio_buffer_end_pts + m_audio_timer.time_s() - m_pause_time - m_audio_delay - m_seek_time;
else
return m_audio_buffer_end_pts + m_audio_timer.time_s() - m_audio_delay;
}
double FFmpegClocks::getCurrentTime()
{
if(!m_paused)
m_last_current_time = getAudioTime();
return m_last_current_time;
}
} // namespace osgFFmpeg

View File

@@ -22,18 +22,23 @@ public:
FFmpegClocks();
void reset(double start_time);
void rewindAudio();
void rewindVideo();
void pause(bool pause);
void seek(double seek_time);
void rewind();
void audioSetBufferEndPts(double pts);
void audioAdjustBufferEndPts(double increment);
void audioSetDelay(double delay);
void audioDisable();
bool audioDisabled() const { return m_audio_disabled; }
double videoSynchClock(const AVFrame * frame, double time_base, double pts);
double videoRefreshSchedule(double pts);
double getStartTime() const;
double getCurrentTime();
void setPauseTime(double pause_time);
void setSeekTime(double seek_time);
private:
@@ -48,15 +53,19 @@ private:
double m_video_clock;
double m_start_time;
double m_pause_time;
double m_seek_time;
double m_last_frame_delay;
double m_last_frame_pts;
double m_last_actual_delay;
double m_frame_time;
double m_audio_buffer_end_pts;
double m_audio_delay;
Timer m_audio_timer;
bool m_audio_disabled;
bool m_rewind;
Timer m_audio_timer;
bool m_audio_disabled;
bool m_paused;
double m_last_current_time;
};

View File

@@ -2,12 +2,13 @@
#include "FFmpegDecoder.hpp"
#include <osg/Notify>
#include <osgDB/FileNameUtils>
#include <cassert>
#include <limits>
#include <stdexcept>
#include <string.h>
#include <iostream>
namespace osgFFmpeg {
@@ -54,24 +55,48 @@ bool FFmpegDecoder::open(const std::string & filename)
formatParams.channel = 0;
formatParams.standard = 0;
#if 1
formatParams.width = 320;
formatParams.height = 240;
#else
formatParams.width = 640;
formatParams.height = 480;
#endif
formatParams.time_base.num = 1;
formatParams.time_base.den = 50;
formatParams.time_base.den = 30;
iformat = av_find_input_format("video4linux2");
std::string format = "video4linux2";
iformat = av_find_input_format(format.c_str());
if (iformat)
{
osg::notify(osg::NOTICE)<<"Found input format"<<std::endl;
osg::notify(osg::NOTICE)<<"Found input format: "<<format<<std::endl;
}
else
{
osg::notify(osg::NOTICE)<<"Failed to find input_format"<<std::endl;
osg::notify(osg::NOTICE)<<"Failed to find input format: "<<format<<std::endl;
}
if (av_open_input_file(&p_format_context, filename.c_str(), iformat, 0, &formatParams) != 0)
throw std::runtime_error("av_open_input_file() failed");
int error = av_open_input_file(&p_format_context, filename.c_str(), iformat, 0, &formatParams);
if (error != 0)
{
std::string error_str;
switch (error)
{
//case AVERROR_UNKNOWN: error_str = "AVERROR_UNKNOWN"; break; // same value as AVERROR_INVALIDDATA
case AVERROR_IO: error_str = "AVERROR_IO"; break;
case AVERROR_NUMEXPECTED: error_str = "AVERROR_NUMEXPECTED"; break;
case AVERROR_INVALIDDATA: error_str = "AVERROR_INVALIDDATA"; break;
case AVERROR_NOMEM: error_str = "AVERROR_NOMEM"; break;
case AVERROR_NOFMT: error_str = "AVERROR_NOFMT"; break;
case AVERROR_NOTSUPP: error_str = "AVERROR_NOTSUPP"; break;
case AVERROR_NOENT: error_str = "AVERROR_NOENT"; break;
case AVERROR_PATCHWELCOME: error_str = "AVERROR_PATCHWELCOME"; break;
default: error_str = "Unknown error"; break;
}
throw std::runtime_error("av_open_input_file() failed : " + error_str);
}
}
else
{
@@ -141,12 +166,18 @@ bool FFmpegDecoder::readNextPacket()
case NORMAL:
return readNextPacketNormal();
case PAUSE:
return false;
case END_OF_STREAM:
return readNextPacketEndOfStream();
case REWINDING:
return readNextPacketRewinding();
case SEEKING:
return readNextPacketSeeking();
default:
assert(false);
return false;
@@ -164,8 +195,23 @@ void FFmpegDecoder::rewind()
rewindButDontFlushQueues();
}
void FFmpegDecoder::seek(double time)
{
m_pending_packet.clear();
flushAudioQueue();
flushVideoQueue();
seekButDontFlushQueues(time);
}
void FFmpegDecoder::pause()
{
m_pending_packet.clear();
flushAudioQueue();
flushVideoQueue();
m_state = PAUSE;
}
void FFmpegDecoder::findAudioStream()
{
@@ -239,7 +285,10 @@ bool FFmpegDecoder::readNextPacketNormal()
{
// If we reach the end of the stream, change the decoder state
if (loop())
{
m_clocks.reset(m_start);
rewindButDontFlushQueues();
}
else
m_state = END_OF_STREAM;
@@ -312,15 +361,42 @@ void FFmpegDecoder::rewindButDontFlushQueues()
{
const AVRational AvTimeBaseQ = { 1, AV_TIME_BASE }; // = AV_TIME_BASE_Q
const int64_t pos = m_clocks.getStartTime() * AV_TIME_BASE;
const int64_t pos = int64_t(m_clocks.getStartTime() * double(AV_TIME_BASE));
const int64_t seek_target = av_rescale_q(pos, AvTimeBaseQ, m_video_stream->time_base);
if (av_seek_frame(m_format_context.get(), m_video_index, seek_target, 0/*AVSEEK_FLAG_BYTE |*/ /*AVSEEK_FLAG_BACKWARD*/) < 0)
throw std::runtime_error("av_seek_frame failed()");
m_clocks.rewind();
m_state = REWINDING;
}
bool FFmpegDecoder::readNextPacketSeeking()
{
const FFmpegPacket packet(FFmpegPacket::PACKET_FLUSH);
if (m_audio_queue.timedPush(packet, 10) && m_video_queue.timedPush(packet, 10))
m_state = NORMAL;
return false;
}
void FFmpegDecoder::seekButDontFlushQueues(double time)
{
const AVRational AvTimeBaseQ = { 1, AV_TIME_BASE }; // = AV_TIME_BASE_Q
const int64_t pos = int64_t(m_clocks.getStartTime()+time * double(AV_TIME_BASE));
const int64_t seek_target = av_rescale_q(pos, AvTimeBaseQ, m_video_stream->time_base);
m_clocks.setSeekTime(time);
if (av_seek_frame(m_format_context.get(), m_video_index, seek_target, 0/*AVSEEK_FLAG_BYTE |*/ /*AVSEEK_FLAG_BACKWARD*/) < 0)
throw std::runtime_error("av_seek_frame failed()");
m_clocks.seek(time);
m_state = SEEKING;
}
} // namespace osgFFmpeg

View File

@@ -68,11 +68,14 @@ public:
bool readNextPacket();
void rewind();
void seek(double time);
void pause();
void loop(bool loop);
bool loop() const;
double duration() const;
double reference();
FFmpegDecoderAudio & audio_decoder();
FFmpegDecoderVideo & video_decoder();
@@ -84,8 +87,10 @@ protected:
enum State
{
NORMAL,
PAUSE,
END_OF_STREAM,
REWINDING
REWINDING,
SEEKING
};
typedef BoundedMessageQueue<FFmpegPacket> PacketQueue;
@@ -97,27 +102,30 @@ protected:
bool readNextPacketNormal();
bool readNextPacketEndOfStream();
bool readNextPacketRewinding();
bool readNextPacketSeeking();
bool readNextPacketPause();
void rewindButDontFlushQueues();
void seekButDontFlushQueues(double time);
FormatContextPtr m_format_context;
AVStream * m_audio_stream;
AVStream * m_video_stream;
AVStream * m_audio_stream;
AVStream * m_video_stream;
unsigned int m_audio_index;
unsigned int m_video_index;
int m_audio_index;
int m_video_index;
FFmpegClocks m_clocks;
FFmpegPacket m_pending_packet;
PacketQueue m_audio_queue;
PacketQueue m_video_queue;
PacketQueue m_audio_queue;
PacketQueue m_video_queue;
FFmpegDecoderAudio m_audio_decoder;
FFmpegDecoderVideo m_video_decoder;
FFmpegDecoderAudio m_audio_decoder;
FFmpegDecoderVideo m_video_decoder;
double m_duration;
double m_start;
double m_duration;
double m_start;
State m_state;
State m_state;
bool m_loop;
};
@@ -142,6 +150,11 @@ inline double FFmpegDecoder::duration() const
return double(m_format_context->duration) / AV_TIME_BASE;
}
inline double FFmpegDecoder::reference()
{
return m_clocks.getCurrentTime();
}
inline FFmpegDecoderAudio & FFmpegDecoder::audio_decoder()
{

View File

@@ -12,6 +12,25 @@
namespace osgFFmpeg {
static int decode_audio(AVCodecContext *avctx, int16_t *samples,
int *frame_size_ptr,
const uint8_t *buf, int buf_size)
{
#if LIBAVCODEC_VERSION_MAJOR >= 53 || (LIBAVCODEC_VERSION_MAJOR==52 && LIBAVCODEC_VERSION_MINOR>=32)
// following code segment copied from ffmpeg's avcodec_decode_audio2()
// implementation to avoid warnings about deprecated function usage.
AVPacket avpkt;
av_init_packet(&avpkt);
avpkt.data = const_cast<uint8_t *>(buf);
avpkt.size = buf_size;
return avcodec_decode_audio3(avctx, samples, frame_size_ptr, &avpkt);
#else
// fallback for older versions of ffmpeg that don't have avcodec_decode_audio3.
return avcodec_decode_audio2(avctx, samples, frame_size_ptr, buf, buf_size);
#endif
}
FFmpegDecoderAudio::FFmpegDecoderAudio(PacketQueue & packets, FFmpegClocks & clocks) :
@@ -25,6 +44,7 @@ FFmpegDecoderAudio::FFmpegDecoderAudio(PacketQueue & packets, FFmpegClocks & clo
m_audio_buf_size(0),
m_audio_buf_index(0),
m_end_of_stream(false),
m_paused(true),
m_exit(false)
{
@@ -88,6 +108,18 @@ void FFmpegDecoderAudio::open(AVStream * const stream)
}
}
void FFmpegDecoderAudio::pause(bool pause)
{
if (pause != m_paused)
{
m_paused = pause;
if (m_audio_sink.valid())
{
if (m_paused) m_audio_sink->pause();
else m_audio_sink->play();
}
}
}
void FFmpegDecoderAudio::close(bool waitForThreadToExit)
{
@@ -99,6 +131,22 @@ void FFmpegDecoderAudio::close(bool waitForThreadToExit)
}
}
void FFmpegDecoderAudio::setVolume(float volume)
{
if (m_audio_sink.valid())
{
m_audio_sink->setVolume(volume);
}
}
float FFmpegDecoderAudio::getVolume() const
{
if (m_audio_sink.valid())
{
return m_audio_sink->getVolume();
}
return 0.0f;
}
void FFmpegDecoderAudio::run()
{
@@ -130,7 +178,6 @@ void FFmpegDecoderAudio::setAudioSink(osg::ref_ptr<osg::AudioSink> audio_sink)
void FFmpegDecoderAudio::fillBuffer(void * const buffer, size_t size)
{
size_t filled = 0;
uint8_t * dst_buffer = reinterpret_cast<uint8_t*>(buffer);
while (size != 0)
@@ -176,7 +223,7 @@ void FFmpegDecoderAudio::decodeLoop()
if (! skip_audio && ! m_audio_sink->playing())
{
m_clocks.audioSetDelay(m_audio_sink->getDelay());
m_audio_sink->startPlaying();
m_audio_sink->play();
}
else
{
@@ -185,6 +232,21 @@ void FFmpegDecoderAudio::decodeLoop()
while (! m_exit)
{
if(m_paused)
{
m_clocks.pause(true);
m_pause_timer.setStartTick();
while(m_paused)
{
microSleep(10000);
}
m_clocks.setPauseTime(m_pause_timer.time_s());
m_clocks.pause(false);
}
// If skipping audio, make sure the audio stream is still consumed.
if (skip_audio)
{
@@ -194,7 +256,6 @@ void FFmpegDecoderAudio::decodeLoop()
if (packet.valid())
packet.clear();
}
// Else, just idle in this thread.
// Note: If m_audio_sink has an audio callback, this thread will still be awakened
// from time to time to refill the audio buffer.
@@ -252,7 +313,7 @@ size_t FFmpegDecoderAudio::decodeFrame(void * const buffer, const size_t size)
{
int data_size = size;
const int bytes_decoded = avcodec_decode_audio2(m_context, reinterpret_cast<int16_t*>(buffer), &data_size, m_packet_data, m_bytes_remaining);
const int bytes_decoded = decode_audio(m_context, reinterpret_cast<int16_t*>(buffer), &data_size, m_packet_data, m_bytes_remaining);
if (bytes_decoded < 0)
{
@@ -285,7 +346,7 @@ size_t FFmpegDecoderAudio::decodeFrame(void * const buffer, const size_t size)
if (m_packet.type == FFmpegPacket::PACKET_DATA)
{
if (m_packet.packet.pts != AV_NOPTS_VALUE)
if (m_packet.packet.pts != int64_t(AV_NOPTS_VALUE))
{
const double pts = av_q2d(m_stream->time_base) * m_packet.packet.pts;
m_clocks.audioSetBufferEndPts(pts);
@@ -301,7 +362,6 @@ size_t FFmpegDecoderAudio::decodeFrame(void * const buffer, const size_t size)
else if (m_packet.type == FFmpegPacket::PACKET_FLUSH)
{
avcodec_flush_buffers(m_context);
m_clocks.rewindAudio();
}
// just output silence when we reached the end of stream

View File

@@ -4,6 +4,8 @@
#include <OpenThreads/Thread>
#include <osg/Timer>
#include "FFmpegClocks.hpp"
#include "FFmpegPacket.hpp"
@@ -29,8 +31,12 @@ public:
~FFmpegDecoderAudio();
void open(AVStream * stream);
void pause(bool pause);
void close(bool waitForThreadToExit);
void setVolume(float volume);
float getVolume() const;
virtual void run();
void setAudioSink(osg::ref_ptr<osg::AudioSink> audio_sink);
@@ -51,26 +57,29 @@ private:
size_t decodeFrame(void * buffer, size_t size);
PacketQueue & m_packets;
FFmpegClocks & m_clocks;
AVStream * m_stream;
AVCodecContext * m_context;
FFmpegPacket m_packet;
const uint8_t * m_packet_data;
int m_bytes_remaining;
PacketQueue & m_packets;
FFmpegClocks & m_clocks;
AVStream * m_stream;
AVCodecContext * m_context;
FFmpegPacket m_packet;
const uint8_t * m_packet_data;
int m_bytes_remaining;
Buffer m_audio_buffer;
size_t m_audio_buf_size;
size_t m_audio_buf_index;
Buffer m_audio_buffer;
size_t m_audio_buf_size;
size_t m_audio_buf_index;
int m_frequency;
int m_nb_channels;
osg::AudioStream::SampleFormat m_sample_format;
int m_frequency;
int m_nb_channels;
osg::AudioStream::SampleFormat m_sample_format;
SinkPtr m_audio_sink;
SinkPtr m_audio_sink;
bool m_end_of_stream;
volatile bool m_exit;
osg::Timer m_pause_timer;
bool m_end_of_stream;
bool m_paused;
volatile bool m_exit;
};

View File

@@ -8,6 +8,28 @@
namespace osgFFmpeg {
static int decode_video(AVCodecContext *avctx, AVFrame *picture,
int *got_picture_ptr,
const uint8_t *buf, int buf_size)
{
#if LIBAVCODEC_VERSION_MAJOR >= 53 || (LIBAVCODEC_VERSION_MAJOR==52 && LIBAVCODEC_VERSION_MINOR>=32)
// following code segment copied from ffmpeg avcodec_decode_video() implementation
// to avoid warnings about deprecated function usage.
AVPacket avpkt;
av_init_packet(&avpkt);
avpkt.data = const_cast<uint8_t *>(buf);
avpkt.size = buf_size;
// HACK for CorePNG to decode as normal PNG by default
avpkt.flags = AV_PKT_FLAG_KEY;
return avcodec_decode_video2(avctx, picture, got_picture_ptr, &avpkt);
#else
// fallback for older versions of ffmpeg that don't have avcodec_decode_video2.
return avcodec_decode_video(avctx, picture, got_picture_ptr, buf, buf_size);
#endif
}
FFmpegDecoderVideo::FFmpegDecoderVideo(PacketQueue & packets, FFmpegClocks & clocks) :
m_packets(packets),
m_clocks(clocks),
@@ -20,6 +42,7 @@ FFmpegDecoderVideo::FFmpegDecoderVideo(PacketQueue & packets, FFmpegClocks & clo
m_writeBuffer(0),
m_user_data(0),
m_publish_func(0),
m_paused(true),
m_exit(false)
#ifdef USE_SWSCALE
,m_swscale_ctx(0)
@@ -32,7 +55,7 @@ FFmpegDecoderVideo::FFmpegDecoderVideo(PacketQueue & packets, FFmpegClocks & clo
FFmpegDecoderVideo::~FFmpegDecoderVideo()
{
osg::notify(osg::NOTICE)<<"Destructing FFmpegDecoderVideo..."<<std::endl;
osg::notify(osg::INFO)<<"Destructing FFmpegDecoderVideo..."<<std::endl;
if (isRunning())
@@ -53,7 +76,7 @@ FFmpegDecoderVideo::~FFmpegDecoderVideo()
}
#endif
osg::notify(osg::NOTICE)<<"Destructed FFmpegDecoderVideo"<<std::endl;
osg::notify(osg::INFO)<<"Destructed FFmpegDecoderVideo"<<std::endl;
}
@@ -117,6 +140,13 @@ void FFmpegDecoderVideo::close(bool waitForThreadToExit)
}
}
void FFmpegDecoderVideo::pause(bool pause)
{
if(pause)
m_paused = true;
else
m_paused = false;
}
void FFmpegDecoderVideo::run()
{
@@ -157,7 +187,7 @@ void FFmpegDecoderVideo::decodeLoop()
int frame_finished = 0;
const int bytes_decoded = avcodec_decode_video(m_context, m_frame.get(), &frame_finished, m_packet_data, m_bytes_remaining);
const int bytes_decoded = decode_video(m_context, m_frame.get(), &frame_finished, m_packet_data, m_bytes_remaining);
if (bytes_decoded < 0)
throw std::runtime_error("avcodec_decode_video failed()");
@@ -167,13 +197,13 @@ void FFmpegDecoderVideo::decodeLoop()
// Find out the frame pts
if (packet.packet.dts == AV_NOPTS_VALUE &&
if (packet.packet.dts == int64_t(AV_NOPTS_VALUE) &&
m_frame->opaque != 0 &&
*reinterpret_cast<const int64_t*>(m_frame->opaque) != AV_NOPTS_VALUE)
*reinterpret_cast<const int64_t*>(m_frame->opaque) != int64_t(AV_NOPTS_VALUE))
{
pts = *reinterpret_cast<const int64_t*>(m_frame->opaque);
}
else if (packet.packet.dts != AV_NOPTS_VALUE)
else if (packet.packet.dts != int64_t(AV_NOPTS_VALUE))
{
pts = packet.packet.dts;
}
@@ -190,10 +220,15 @@ void FFmpegDecoderVideo::decodeLoop()
const double synched_pts = m_clocks.videoSynchClock(m_frame.get(), av_q2d(m_stream->time_base), pts);
const double frame_delay = m_clocks.videoRefreshSchedule(synched_pts);
publishFrame(frame_delay);
publishFrame(frame_delay, m_clocks.audioDisabled());
}
}
while(m_paused && !m_exit)
{
microSleep(10000);
}
// Get the next packet
pts = 0;
@@ -214,7 +249,6 @@ void FFmpegDecoderVideo::decodeLoop()
else if (packet.type == FFmpegPacket::PACKET_FLUSH)
{
avcodec_flush_buffers(m_context);
m_clocks.rewindVideo();
}
}
}
@@ -224,61 +258,69 @@ void FFmpegDecoderVideo::decodeLoop()
void FFmpegDecoderVideo::findAspectRatio()
{
double ratio = 0.0;
float ratio = 0.0f;
if (m_context->sample_aspect_ratio.num != 0)
ratio = (av_q2d(m_context->sample_aspect_ratio) * m_width) / m_height;
ratio = float(av_q2d(m_context->sample_aspect_ratio));
if (ratio <= 0.0)
ratio = double(m_width) / double(m_height);
if (ratio <= 0.0f)
ratio = 1.0f;
m_aspect_ratio = ratio;
m_pixel_aspect_ratio = ratio;
}
int FFmpegDecoderVideo::convert(AVPicture *dst, int dst_pix_fmt, const AVPicture *src,
int FFmpegDecoderVideo::convert(AVPicture *dst, int dst_pix_fmt, AVPicture *src,
int src_pix_fmt, int src_width, int src_height)
{
osg::Timer_t startTick = osg::Timer::instance()->tick();
#ifdef USE_SWSCALE
if (m_swscale_ctx==0)
{
m_swscale_ctx = sws_getContext(src_width, src_height, src_pix_fmt,
src_width, src_height, dst_pix_fmt,
m_swscale_ctx = sws_getContext(src_width, src_height, (PixelFormat) src_pix_fmt,
src_width, src_height, (PixelFormat) dst_pix_fmt,
/*SWS_BILINEAR*/ SWS_BICUBIC, NULL, NULL, NULL);
}
osg::notify(osg::NOTICE)<<"Using sws_scale ";
osg::notify(osg::INFO)<<"Using sws_scale ";
int result = sws_scale(m_swscale_ctx,
src->data, src->linesize, 0, src_height,
dst->data, dst->linesize);
(src->data), (src->linesize), 0, src_height,
(dst->data), (dst->linesize));
#else
osg::notify(osg::NOTICE)<<"Using img_convert ";
osg::notify(osg::INFO)<<"Using img_convert ";
int result = img_convert(dst, dst_pix_fmt, src,
src_pix_fmt, src_width, src_height);
#endif
osg::Timer_t endTick = osg::Timer::instance()->tick();
osg::notify(osg::NOTICE)<<" time = "<<osg::Timer::instance()->delta_m(startTick,endTick)<<"ms"<<std::endl;
osg::notify(osg::INFO)<<" time = "<<osg::Timer::instance()->delta_m(startTick,endTick)<<"ms"<<std::endl;
return result;
}
void FFmpegDecoderVideo::publishFrame(const double delay)
void FFmpegDecoderVideo::publishFrame(const double delay, bool audio_disabled)
{
// If no publishing function, just ignore the frame
if (m_publish_func == 0)
return;
#if 1
// new code from Jean-Sebasiten Guay - needs testing as we're unclear on the best solution
// If the display delay is too small, we better skip the frame.
if (!audio_disabled && delay < -0.010)
return;
#else
// original solution that hung on video stream over web.
// If the display delay is too small, we better skip the frame.
if (delay < -0.010)
return;
const AVPicture * const src = (const AVPicture *) m_frame.get();
#endif
AVPicture * const src = (AVPicture *) m_frame.get();
AVPicture * const dst = (AVPicture *) m_frame_rgba.get();
// Assign appropriate parts of the buffer to image planes in m_frame_rgba
@@ -314,7 +356,7 @@ void FFmpegDecoderVideo::publishFrame(const double delay)
void FFmpegDecoderVideo::yuva420pToRgba(AVPicture * const dst, const AVPicture * const src, int width, int height)
void FFmpegDecoderVideo::yuva420pToRgba(AVPicture * const dst, AVPicture * const src, int width, int height)
{
convert(dst, PIX_FMT_RGB32, src, m_context->pix_fmt, width, height);

View File

@@ -65,6 +65,7 @@ public:
~FFmpegDecoderVideo();
void open(AVStream * stream);
void pause(bool pause);
void close(bool waitForThreadToExit);
virtual void run();
@@ -74,7 +75,7 @@ public:
int width() const;
int height() const;
double aspectRatio() const;
float pixelAspectRatio() const;
bool alphaChannel() const;
double frameRate() const;
const uint8_t * image() const;
@@ -85,11 +86,11 @@ private:
void decodeLoop();
void findAspectRatio();
void publishFrame(double delay);
void publishFrame(double delay, bool audio_disabled);
double synchronizeVideo(double pts);
void yuva420pToRgba(AVPicture *dst, const AVPicture *src, int width, int height);
void yuva420pToRgba(AVPicture *dst, AVPicture *src, int width, int height);
int convert(AVPicture *dst, int dst_pix_fmt, const AVPicture *src,
int convert(AVPicture *dst, int dst_pix_fmt, AVPicture *src,
int src_pix_fmt, int src_width, int src_height);
@@ -114,12 +115,13 @@ private:
PublishFunc m_publish_func;
double m_frame_rate;
double m_aspect_ratio;
float m_pixel_aspect_ratio;
int m_width;
int m_height;
size_t m_next_frame_index;
bool m_alpha_channel;
bool m_paused;
volatile bool m_exit;
#ifdef USE_SWSCALE
@@ -155,9 +157,9 @@ inline int FFmpegDecoderVideo::height() const
}
inline double FFmpegDecoderVideo::aspectRatio() const
inline float FFmpegDecoderVideo::pixelAspectRatio() const
{
return m_aspect_ratio;
return m_pixel_aspect_ratio;
}

View File

@@ -6,6 +6,7 @@
extern "C"
{
#define __STDC_CONSTANT_MACROS
#include <errno.h> // for error codes defined in avformat.h
#include <stdint.h>
#include <avcodec.h>
#include <avformat.h>

View File

@@ -18,7 +18,7 @@ FFmpegImageStream::FFmpegImageStream() :
m_commands(0),
m_frame_published_flag(false)
{
setOrigin(osg::Image::BOTTOM_LEFT);
setOrigin(osg::Image::TOP_LEFT);
std::auto_ptr<FFmpegDecoder> decoder(new FFmpegDecoder);
std::auto_ptr<CommandQueue> commands(new CommandQueue);
@@ -39,11 +39,11 @@ FFmpegImageStream::FFmpegImageStream(const FFmpegImageStream & image, const osg:
FFmpegImageStream::~FFmpegImageStream()
{
osg::notify(osg::NOTICE)<<"Destructing FFMpegImageStream..."<<std::endl;
osg::notify(osg::INFO)<<"Destructing FFmpegImageStream..."<<std::endl;
quit(true);
osg::notify(osg::NOTICE)<<"Have done quit"<<std::endl;
osg::notify(osg::INFO)<<"Have done quit"<<std::endl;
// release athe audio streams to make sure that the decoder doesn't retain any external
// refences.
@@ -55,7 +55,7 @@ FFmpegImageStream::~FFmpegImageStream()
delete m_commands;
osg::notify(osg::NOTICE)<<"Destructed FFMpegImageStream."<<std::endl;
osg::notify(osg::INFO)<<"Destructed FFMpegImageStream."<<std::endl;
}
@@ -71,16 +71,24 @@ bool FFmpegImageStream::open(const std::string & filename)
m_decoder->video_decoder().width(), m_decoder->video_decoder().height(), 1, GL_RGBA, GL_BGRA, GL_UNSIGNED_BYTE,
const_cast<unsigned char *>(m_decoder->video_decoder().image()), NO_DELETE
);
setOrigin(osg::Image::TOP_LEFT);
setPixelAspectRatio(m_decoder->video_decoder().pixelAspectRatio());
osg::notify(osg::NOTICE)<<"ffmpeg::open("<<filename<<") size("<<s()<<", "<<t()<<") aspect ratio "<<m_decoder->video_decoder().pixelAspectRatio()<<std::endl;
#if 1
// swscale is reported errors and then crashing when rescaling video of size less than 10 by 10.
if (s()<=10 || t()<=10) return false;
#endif
m_decoder->video_decoder().setUserData(this);
m_decoder->video_decoder().setPublishCallback(publishNewFrame);
if (m_decoder->audio_decoder().validContext())
{
osg::notify(osg::NOTICE)<<"Attaching FFmpegAudioStream"<<std::endl;
getAudioStreams().push_back(new FFmpegAudioStream(m_decoder.get()));
}
@@ -122,6 +130,11 @@ void FFmpegImageStream::rewind()
m_commands->push(CMD_REWIND);
}
void FFmpegImageStream::seek(double time) {
m_seek_time = time;
m_commands->push(CMD_SEEK);
}
void FFmpegImageStream::quit(bool waitForThreadToExit)
@@ -139,34 +152,43 @@ void FFmpegImageStream::quit(bool waitForThreadToExit)
m_decoder->close(waitForThreadToExit);
}
void FFmpegImageStream::setVolume(float volume)
{
m_decoder->audio_decoder().setVolume(volume);
}
double FFmpegImageStream::duration() const
float FFmpegImageStream::getVolume() const
{
return m_decoder->audio_decoder().getVolume();
}
double FFmpegImageStream::getLength() const
{
return m_decoder->duration();
}
double FFmpegImageStream::getReferenceTime () const
{
return m_decoder->reference();
}
bool FFmpegImageStream::videoAlphaChannel() const
double FFmpegImageStream::getFrameRate() const
{
return m_decoder->video_decoder().frameRate();
}
bool FFmpegImageStream::isImageTranslucent() const
{
return m_decoder->video_decoder().alphaChannel();
}
double FFmpegImageStream::videoAspectRatio() const
{
return m_decoder->video_decoder().aspectRatio();
}
double FFmpegImageStream::videoFrameRate() const
{
return m_decoder->video_decoder().frameRate();
}
void FFmpegImageStream::run()
{
try
@@ -232,6 +254,10 @@ bool FFmpegImageStream::handleCommand(const Command cmd)
cmdRewind();
return true;
case CMD_SEEK:
cmdSeek(m_seek_time);
return true;
case CMD_STOP:
return false;
@@ -252,6 +278,9 @@ void FFmpegImageStream::cmdPlay()
if (! m_decoder->video_decoder().isRunning())
m_decoder->video_decoder().start();
m_decoder->video_decoder().pause(false);
m_decoder->audio_decoder().pause(false);
}
_status = PLAYING;
@@ -263,7 +292,8 @@ void FFmpegImageStream::cmdPause()
{
if (_status == PLAYING)
{
m_decoder->video_decoder().pause(true);
m_decoder->audio_decoder().pause(true);
}
_status = PAUSED;
@@ -276,6 +306,10 @@ void FFmpegImageStream::cmdRewind()
m_decoder->rewind();
}
void FFmpegImageStream::cmdSeek(double time)
{
m_decoder->seek(time);
}
void FFmpegImageStream::publishNewFrame(const FFmpegDecoderVideo &, void * user_data)

View File

@@ -30,14 +30,17 @@ namespace osgFFmpeg
virtual void play();
virtual void pause();
virtual void rewind();
virtual void seek(double time);
virtual void quit(bool waitForThreadToExit = true);
double duration() const;
virtual void setVolume(float volume);
virtual float getVolume() const;
bool videoAlphaChannel() const;
double videoAspectRatio() const;
double videoFrameRate() const;
virtual double getLength() const;
virtual double getReferenceTime () const;
virtual double getFrameRate() const;
virtual bool isImageTranslucent() const;
private:
@@ -46,7 +49,8 @@ namespace osgFFmpeg
CMD_PLAY,
CMD_PAUSE,
CMD_STOP,
CMD_REWIND
CMD_REWIND,
CMD_SEEK
};
typedef MessageQueue<Command> CommandQueue;
@@ -62,6 +66,7 @@ namespace osgFFmpeg
void cmdPlay();
void cmdPause();
void cmdRewind();
void cmdSeek(double time);
static void publishNewFrame(const FFmpegDecoderVideo &, void * user_data);
@@ -70,7 +75,8 @@ namespace osgFFmpeg
Mutex m_mutex;
Condition m_frame_published_cond;
bool m_frame_published_flag;
bool m_frame_published_flag;
double m_seek_time;
};
}

View File

@@ -29,14 +29,22 @@ public:
ReaderWriterFFmpeg()
{
supportsProtocol("http","Read video/audio from http using ffmpeg.");
supportsProtocol("rtsp","Read video/audio from rtsp using ffmpeg.");
supportsExtension("avi", "");
supportsExtension("flv", "");
supportsExtension("mov", "");
supportsExtension("ogg", "Theora movie format");
supportsExtension("mpg", "Mpeg movie format");
supportsExtension("mpv", "Mpeg movie format");
supportsExtension("wmv", "");
supportsExtension("ffmpeg", "");
supportsExtension("avi", "");
supportsExtension("flv", "Flash video");
supportsExtension("mov", "Quicktime");
supportsExtension("ogg", "Theora movie format");
supportsExtension("mpg", "Mpeg movie format");
supportsExtension("mpv", "Mpeg movie format");
supportsExtension("wmv", "Windows Media Video format");
supportsExtension("mkv", "Matroska");
supportsExtension("mjpeg", "Motion JPEG");
supportsExtension("mp4", "MPEG-4");
supportsExtension("sav", "MPEG-4");
supportsExtension("3gp", "MPEG-4");
supportsExtension("sdp", "MPEG-4");
// Register all FFmpeg formats/codecs
av_register_all();
@@ -54,12 +62,14 @@ public:
virtual ReadResult readImage(const std::string & filename, const osgDB::ReaderWriter::Options * options) const
{
const std::string ext = osgDB::getLowerCaseFileExtension(filename);
if (ext=="ffmpeg") return readImage(osgDB::getNameLessExtension(filename),options);
if (filename.compare(0, 5, "/dev/")==0)
{
return readImageStream(filename, options);
}
const std::string ext = osgDB::getLowerCaseFileExtension(filename);
if (! acceptsExtension(ext))
return ReadResult::FILE_NOT_HANDLED;
@@ -70,7 +80,7 @@ public:
if (path.empty())
return ReadResult::FILE_NOT_FOUND;
return readImageStream(filename, options);
return readImageStream(path, options);
}
ReadResult readImageStream(const std::string& filename, const osgDB::ReaderWriter::Options * options) const

View File

@@ -416,9 +416,16 @@ void GeometryTechnique::generateGeometry(Locator* masterLocator, const osg::Vec3
// populate primitive sets
// bool optimizeOrientations = elevations!=0;
bool swapOrientation = !(masterLocator->orientationOpenGL());
bool smallTile = numVertices <= 16384;
// osg::notify(osg::NOTICE)<<"smallTile = "<<smallTile<<std::endl;
osg::ref_ptr<osg::DrawElementsUInt> elements = new osg::DrawElementsUInt(GL_TRIANGLES);
elements->reserve((numRows-1) * (numColumns-1) * 6);
osg::ref_ptr<osg::DrawElements> elements = smallTile ?
static_cast<osg::DrawElements*>(new osg::DrawElementsUShort(GL_TRIANGLES)) :
static_cast<osg::DrawElements*>(new osg::DrawElementsUInt(GL_TRIANGLES));
elements->reserveElements((numRows-1) * (numColumns-1) * 6);
geometry->addPrimitiveSet(elements.get());
@@ -463,31 +470,31 @@ void GeometryTechnique::generateGeometry(Locator* masterLocator, const osg::Vec3
if (fabsf(e00-e11)<fabsf(e01-e10))
{
elements->push_back(i01);
elements->push_back(i00);
elements->push_back(i11);
elements->addElement(i01);
elements->addElement(i00);
elements->addElement(i11);
elements->push_back(i00);
elements->push_back(i10);
elements->push_back(i11);
elements->addElement(i00);
elements->addElement(i10);
elements->addElement(i11);
}
else
{
elements->push_back(i01);
elements->push_back(i00);
elements->push_back(i10);
elements->addElement(i01);
elements->addElement(i00);
elements->addElement(i10);
elements->push_back(i01);
elements->push_back(i10);
elements->push_back(i11);
elements->addElement(i01);
elements->addElement(i10);
elements->addElement(i11);
}
}
else if (numValid==3)
{
if (i00>=0) elements->push_back(i00);
if (i01>=0) elements->push_back(i01);
if (i11>=0) elements->push_back(i11);
if (i10>=0) elements->push_back(i10);
if (i00>=0) elements->addElement(i00);
if (i01>=0) elements->addElement(i01);
if (i11>=0) elements->addElement(i11);
if (i10>=0) elements->addElement(i10);
}
}
@@ -506,7 +513,9 @@ void GeometryTechnique::generateGeometry(Locator* masterLocator, const osg::Vec3
if (createSkirt)
{
osg::ref_ptr<osg::DrawElementsUShort> skirtDrawElements = new osg::DrawElementsUShort(GL_QUAD_STRIP);
osg::ref_ptr<osg::DrawElements> skirtDrawElements = smallTile ?
static_cast<osg::DrawElements*>(new osg::DrawElementsUShort(GL_QUAD_STRIP)) :
static_cast<osg::DrawElements*>(new osg::DrawElementsUInt(GL_QUAD_STRIP));
// create bottom skirt vertices
int r,c;
@@ -528,24 +537,28 @@ void GeometryTechnique::generateGeometry(Locator* masterLocator, const osg::Vec3
itr->second.first->push_back((*itr->second.first)[orig_i]);
}
skirtDrawElements->push_back(orig_i);
skirtDrawElements->push_back(new_i);
skirtDrawElements->addElement(orig_i);
skirtDrawElements->addElement(new_i);
}
else
{
if (!skirtDrawElements->empty())
if (skirtDrawElements->getNumIndices()!=0)
{
geometry->addPrimitiveSet(skirtDrawElements.get());
skirtDrawElements = new osg::DrawElementsUShort(GL_QUAD_STRIP);
skirtDrawElements = smallTile ?
static_cast<osg::DrawElements*>(new osg::DrawElementsUShort(GL_QUAD_STRIP)) :
static_cast<osg::DrawElements*>(new osg::DrawElementsUInt(GL_QUAD_STRIP));
}
}
}
if (!skirtDrawElements->empty())
if (skirtDrawElements->getNumIndices()!=0)
{
geometry->addPrimitiveSet(skirtDrawElements.get());
skirtDrawElements = new osg::DrawElementsUShort(GL_QUAD_STRIP);
skirtDrawElements = smallTile ?
static_cast<osg::DrawElements*>(new osg::DrawElementsUShort(GL_QUAD_STRIP)) :
static_cast<osg::DrawElements*>(new osg::DrawElementsUInt(GL_QUAD_STRIP));
}
// create right skirt vertices
@@ -566,24 +579,28 @@ void GeometryTechnique::generateGeometry(Locator* masterLocator, const osg::Vec3
itr->second.first->push_back((*itr->second.first)[orig_i]);
}
skirtDrawElements->push_back(orig_i);
skirtDrawElements->push_back(new_i);
skirtDrawElements->addElement(orig_i);
skirtDrawElements->addElement(new_i);
}
else
{
if (!skirtDrawElements->empty())
if (skirtDrawElements->getNumIndices()!=0)
{
geometry->addPrimitiveSet(skirtDrawElements.get());
skirtDrawElements = new osg::DrawElementsUShort(GL_QUAD_STRIP);
skirtDrawElements = smallTile ?
static_cast<osg::DrawElements*>(new osg::DrawElementsUShort(GL_QUAD_STRIP)) :
static_cast<osg::DrawElements*>(new osg::DrawElementsUInt(GL_QUAD_STRIP));
}
}
}
if (!skirtDrawElements->empty())
if (skirtDrawElements->getNumIndices()!=0)
{
geometry->addPrimitiveSet(skirtDrawElements.get());
skirtDrawElements = new osg::DrawElementsUShort(GL_QUAD_STRIP);
skirtDrawElements = smallTile ?
static_cast<osg::DrawElements*>(new osg::DrawElementsUShort(GL_QUAD_STRIP)) :
static_cast<osg::DrawElements*>(new osg::DrawElementsUInt(GL_QUAD_STRIP));
}
// create top skirt vertices
@@ -604,24 +621,28 @@ void GeometryTechnique::generateGeometry(Locator* masterLocator, const osg::Vec3
itr->second.first->push_back((*itr->second.first)[orig_i]);
}
skirtDrawElements->push_back(orig_i);
skirtDrawElements->push_back(new_i);
skirtDrawElements->addElement(orig_i);
skirtDrawElements->addElement(new_i);
}
else
{
if (!skirtDrawElements->empty())
if (skirtDrawElements->getNumIndices()!=0)
{
geometry->addPrimitiveSet(skirtDrawElements.get());
skirtDrawElements = new osg::DrawElementsUShort(GL_QUAD_STRIP);
skirtDrawElements = smallTile ?
static_cast<osg::DrawElements*>(new osg::DrawElementsUShort(GL_QUAD_STRIP)) :
static_cast<osg::DrawElements*>(new osg::DrawElementsUInt(GL_QUAD_STRIP));
}
}
}
if (!skirtDrawElements->empty())
if (skirtDrawElements->getNumIndices()!=0)
{
geometry->addPrimitiveSet(skirtDrawElements.get());
skirtDrawElements = new osg::DrawElementsUShort(GL_QUAD_STRIP);
skirtDrawElements = smallTile ?
static_cast<osg::DrawElements*>(new osg::DrawElementsUShort(GL_QUAD_STRIP)) :
static_cast<osg::DrawElements*>(new osg::DrawElementsUInt(GL_QUAD_STRIP));
}
// create left skirt vertices
@@ -642,12 +663,12 @@ void GeometryTechnique::generateGeometry(Locator* masterLocator, const osg::Vec3
itr->second.first->push_back((*itr->second.first)[orig_i]);
}
skirtDrawElements->push_back(orig_i);
skirtDrawElements->push_back(new_i);
skirtDrawElements->addElement(orig_i);
skirtDrawElements->addElement(new_i);
}
else
{
if (!skirtDrawElements->empty())
if (skirtDrawElements->getNumIndices()!=0)
{
geometry->addPrimitiveSet(skirtDrawElements.get());
skirtDrawElements = new osg::DrawElementsUShort(GL_QUAD_STRIP);
@@ -656,10 +677,12 @@ void GeometryTechnique::generateGeometry(Locator* masterLocator, const osg::Vec3
}
}
if (!skirtDrawElements->empty())
if (skirtDrawElements->getNumIndices()!=0)
{
geometry->addPrimitiveSet(skirtDrawElements.get());
skirtDrawElements = new osg::DrawElementsUShort(GL_QUAD_STRIP);
smallTile ?
static_cast<osg::DrawElements*>(new osg::DrawElementsUShort(GL_QUAD_STRIP)) :
static_cast<osg::DrawElements*>(new osg::DrawElementsUInt(GL_QUAD_STRIP));
}
}

View File

@@ -197,6 +197,26 @@ BEGIN_ABSTRACT_OBJECT_REFLECTOR(osg::DrawElements)
__void__releaseGLObjects__State_P1,
"If State is non-zero, this function releases OpenGL objects for the specified graphics context. ",
"Otherwise, releases OpenGL objects for all graphics contexts. ");
I_Method1(void, reserveElements, IN, unsigned int, numIndices,
Properties::PURE_VIRTUAL,
__void__reserveElements__unsigned_int,
"",
"");
I_Method2(void, setElement, IN, unsigned, int, IN, unsigned, int,
Properties::PURE_VIRTUAL,
__void__setElement__unsigned__unsigned,
"",
"");
I_Method1(unsigned int, getElement, IN, unsigned, int,
Properties::PURE_VIRTUAL,
__unsigned_int__getElement__unsigned,
"",
"");
I_Method1(void, addElement, IN, unsigned, int,
Properties::PURE_VIRTUAL,
__void__addElement__unsigned,
"",
"");
I_SimpleProperty(osg::DrawElements *, DrawElements,
__DrawElements_P1__getDrawElements,
0);