Removed the cached matrices from osg::Camera, so that

osg::Camera::getModelViewMatrix() and osg::Camera::getProjectionMatrix() are
calculated on the fly.  Removed various redundant methods, including the
project and unproject methods, which are better supported within osgUtil::SceneView.

Added a computeWindowMatrix() method to Viewport, to make it easier to construct
an MV*P*W matrix for converting local coords into screen coords and vice versa.
Converted SceneView and CullVisitor to use this new method; a usage sketch follows below.
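
As a minimal sketch of the new usage (assuming placeholder pointers camera (osg::Camera*)
and viewport (osg::Viewport*) plus a local-coordinate osg::Vec3 named local are already set
up), this mirrors what SceneView::computeMVPW() now does internally:

    // build the combined ModelView * Projection * Window matrix
    osg::Matrix MVPW = camera->getModelViewMatrix() *
                       camera->getProjectionMatrix() *
                       viewport->computeWindowMatrix();

    osg::Vec3 window = local * MVPW;          // local coords -> window coords

    osg::Matrix inverseMVPW;
    inverseMVPW.invert(MVPW);                 // invert to go the other way
    osg::Vec3 object = window * inverseMVPW;  // window coords -> local coords
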
Robert Osfield
2002-04-16 11:41:32 +00:00
parent 43fa577566
commit f8340f9ef5
8 changed files with 174 additions and 399 deletions

View File

@@ -106,12 +106,12 @@ class SG_EXPORT Camera: public osg::Referenced
* width/height.*/
void adjustAspectRatio(const double newAspectRatio, const AdjustAspectRatioMode aa);
const double left() const;
const double right() const;
const double top() const;
const double bottom() const;
const double zNear() const;
const double zFar() const;
const double left() const { return _left; }
const double right() const { return _right; }
const double bottom() const { return _bottom; }
const double top() const { return _top; }
const double zNear() const { return _zNear; }
const double zFar() const { return _zFar; }
/** Calculate and return the equivalent fovx for the current projection setting.
* This value is only valid when a symmetric perspective projection exists.
@@ -127,7 +127,6 @@ class SG_EXPORT Camera: public osg::Referenced
* Aspect ratio is defined as width/height.*/
const double calc_aspectRatio() const;
const Matrix& getProjectionMatrix() const;
@@ -222,38 +221,13 @@ class SG_EXPORT Camera: public osg::Referenced
* allow the OSG to update matrices accordingly.*/
void attachTransform(const TransformMode mode, Matrix* modelTransform=0);
/** must be called after you modify an attachedTransform. */
void dirtyTransform();
Matrix* getTransform(const TransformMode mode);
const Matrix* getTransform(const TransformMode mode) const;
const Vec3 getEyePoint_Model() const;
const Vec3 getCenterPoint_Model() const;
const Vec3 getLookVector_Model() const;
const Vec3 getUpVector_Model() const;
const Vec3 getSideVector_Model() const;
/** Get the ModelView matrix.
* If a ModelTransform is supplied then the ModelView matrix is
* created by multiplying the current LookAt by ModelTransform.
* Otherwise it is simply created by using the current LookAt,
* equivalent to using gluLookAt.*/
const Matrix& getModelViewMatrix() const;
/** Map object coordinates into windows coordinates.
* Equivalent to gluProject(...). */
const bool project(const Vec3& obj,const Viewport& viewport,Vec3& win) const;
/** Map window coordinates into object coordinates.
* Equivalent to gluUnProject(...). */
const bool unproject(const Vec3& win,const Viewport& viewport,Vec3& obj) const;
enum FusionDistanceMode
{
PROPORTIONAL_TO_LOOK_DISTANCE,
@@ -263,7 +237,7 @@ class SG_EXPORT Camera: public osg::Referenced
/** Set the mode of the fusion distance function which is used to calculate the
* fusion distance used in stereo rendering. Default value is
* PROPORTIONAL_TO_LOOK_DISTANCE. Use in conjunction with setFusionDistanceRatio(float).*/
void setFusionDistanceMode(FusionDistanceMode mode) { _fusionDistanceMode = mode; _dirty = true; }
void setFusionDistanceMode(FusionDistanceMode mode) { _fusionDistanceMode = mode; }
/** Get the mode of the fusion distance function.*/
FusionDistanceMode getFusionDistanceMode() const { return _fusionDistanceMode; }
@@ -271,7 +245,7 @@ class SG_EXPORT Camera: public osg::Referenced
/** Set the ratio of the fusion distance function which is used to calculate the
* fusion distance used in stereo rendering. Default value is 1.0f.
* Use in conjunction with setFusionDistanceMode(..).*/
void setFusionDistanceRatio(float ratio) { _fusionDistanceRatio = ratio; _dirty = true; }
void setFusionDistanceRatio(float ratio) { _fusionDistanceRatio = ratio; }
/** Get the ratio of the fusion distance function.*/
float getFusionDistanceRatio() const { return _fusionDistanceRatio; }
@@ -282,12 +256,30 @@ class SG_EXPORT Camera: public osg::Referenced
/** Set the physical distance between the viewer's eyes and the display system.
* Note, only used when rendering in stereo.*/
void setScreenDistance(float screenDistance) { _screenDistance = screenDistance; _dirty = true; }
void setScreenDistance(float screenDistance) { _screenDistance = screenDistance; }
/** Get the physical distance between the viewer's eyes and the display system.*/
const float getScreenDistance() const { return _screenDistance; }
/** Get the Projection Matrix.*/
const Matrix getProjectionMatrix() const;
/** Get the ModelView matrix.
* If a ModelTransform is supplied then the ModelView matrix is
* created by multiplying the current LookAt by ModelTransform.
* Otherwise it is simply created by using the current LookAt,
* equivalent to using gluLookAt.*/
const Matrix getModelViewMatrix() const;
protected:
void copy(const Camera&);
@@ -320,20 +312,6 @@ class SG_EXPORT Camera: public osg::Referenced
TransformMode _attachedTransformMode;
ref_ptr<Matrix> _eyeToModelTransform;
ref_ptr<Matrix> _modelToEyeTransform;
// flag to determine if near and far clipping planes are required.
bool _useNearAndFarClippingPlanes;
// cached matrix and clipping volume derived from above settings.
mutable bool _dirty;
mutable ref_ptr<Matrix> _projectionMatrix;
mutable ref_ptr<Matrix> _modelViewMatrix;
mutable ClippingVolume _clippingVolume;
mutable ref_ptr<Matrix> _mp;
mutable ref_ptr<Matrix> _inversemp;
void computeMatrices() const;
float _screenDistance;

View File

@@ -8,6 +8,7 @@
#include <osg/StateAttribute>
#include <osg/StateSet>
#include <osg/Types>
#include <osg/Matrix>
namespace osg {
@@ -72,6 +73,16 @@ class SG_EXPORT Viewport : public StateAttribute
/** Return the aspect ratio of the viewport, which is equal to width/height.
* If height is zero, the potential division by zero is avoided by simply returning 1.0f.*/
inline const float aspectRatio() const { if (_height!=0) return (float)_width/(float)_height; else return 1.0f; }
/** Compute the Window Matrix which takes projected coords into Window coordinates.
* To convert local coordinates into window coordinates use v_window = v_local * MVPW matrix,
* where the MVPW matrix is ModelViewMatrix * ProjectionMatrix * WindowMatrix, the latter supplied by
* Viewport::computeWindowMatrix(); the ModelView and Projection matrices can be sourced from the
* current osg::State object, via osgUtil::SceneView or CullVisitor.*/
inline const osg::Matrix computeWindowMatrix() const
{
return osg::Matrix::translate(1.0f,1.0f,1.0f)*osg::Matrix::scale(0.5f*width(),0.5f*height(),0.5f);
}
virtual void apply(State& state) const;

View File

@@ -329,7 +329,7 @@ class OSGUTIL_EXPORT CullVisitor : public osg::NodeVisitor
if (!_viewportStack.empty())
{
osg::Viewport* viewport = _viewportStack.back().get();
return osg::Matrix::scale(0.5f*viewport->width(),viewport->height(),0.5f)*osg::Matrix::translate(0.5f,0.5f,0.5f);
return viewport->computeWindowMatrix();
}
else
{

View File

@@ -248,6 +248,8 @@ class OSGUTIL_EXPORT SceneView : public osg::Referenced
virtual void cullStage(osg::Matrix* projection,osg::Matrix* modelview,osgUtil::CullVisitor* cullVisitor, osgUtil::RenderGraph* rendergraph, osgUtil::RenderStage* renderStage);
virtual void drawStage(osgUtil::RenderStage* renderStage);
const osg::Matrix computeMVPW() const;
osg::ref_ptr<osg::Node> _sceneData;
osg::ref_ptr<osg::StateSet> _globalState;
osg::ref_ptr<osg::Light> _light;

View File

@@ -28,8 +28,6 @@ Camera::Camera(DisplaySettings* ds)
_center.set(0.0f,0.0f,-1.0f);
_up.set(0.0f,1.0f,0.0f);
_useNearAndFarClippingPlanes = false;
_attachedTransformMode = NO_ATTACHED_TRANSFORM;
if (ds) _screenDistance = ds->getScreenDistance();
@@ -83,14 +81,6 @@ void Camera::copy(const Camera& camera)
_eyeToModelTransform = camera._eyeToModelTransform;
_modelToEyeTransform = camera._modelToEyeTransform;
// cached matrix and clipping volume derived from above settings.
_dirty = false;// camera._dirty;
_projectionMatrix = NULL; //camera._projectionMatrix;
_modelViewMatrix = NULL; //camera._modelViewMatrix;
_mp = NULL;
_inversemp = NULL;
_screenDistance = camera._screenDistance;
_fusionDistanceMode = camera._fusionDistanceMode;
_fusionDistanceRatio = camera._fusionDistanceRatio;
@@ -115,8 +105,6 @@ void Camera::setOrtho(const double left, const double right,
_top = top;
_zNear = zNear;
_zFar = zFar;
_dirty = true;
}
@@ -131,8 +119,6 @@ void Camera::setOrtho2D(const double left, const double right,
_top = top;
_zNear = -1.0;
_zFar = 1.0;
_dirty = true;
}
@@ -145,13 +131,12 @@ void Camera::setFrustum(const double left, const double right,
// note, in Frustum/Perspective mode these values are scaled
// by the zNear from when they were initialised to ensure that
// subsequent changes in zNear do not affect them.
_left = left/zNear;
_right = right/zNear;
_bottom = bottom/zNear;
_top = top/zNear;
_left = left;
_right = right;
_bottom = bottom;
_top = top;
_zNear = zNear;
_zFar = zFar;
_dirty = true;
}
@@ -168,15 +153,13 @@ void Camera::setPerspective(const double fovy,const double aspectRatio,
// calculate the appropriate left, right etc.
double tan_fovy = tan(DegreesToRadians(fovy*0.5));
_right = tan_fovy * aspectRatio;
_right = tan_fovy * aspectRatio * zNear;
_left = -_right;
_top = tan_fovy;
_top = tan_fovy * zNear;
_bottom = -_top;
_zNear = zNear;
_zFar = zFar;
_dirty = true;
}
/** Set a symmetrical perspective projection using field of view.*/
@@ -192,29 +175,35 @@ void Camera::setFOV(const double fovx,const double fovy,
// calculate the appropriate left, right etc.
double tan_fovx = tan(DegreesToRadians(fovx*0.5));
double tan_fovy = tan(DegreesToRadians(fovy*0.5));
_right = tan_fovx;
_right = tan_fovx * zNear;
_left = -_right;
_top = tan_fovy;
_top = tan_fovy * zNear;
_bottom = -_top;
_zNear = zNear;
_zFar = zFar;
_dirty = true;
}
/** Set the near and far clipping planes.*/
void Camera::setNearFar(const double zNear, const double zFar)
{
if (_projectionType==FRUSTUM || _projectionType==PERSPECTIVE)
{
double adjustRatio = zNear/_zNear;
_left *= adjustRatio;
_right *= adjustRatio;
_bottom *= adjustRatio;
_top *= adjustRatio;
}
_zNear = zNear;
_zFar = zFar;
_dirty = true;
if (_projectionType==ORTHO2D)
{
if (_zNear!=-1.0 || _zFar!=1.0) _projectionType = ORTHO;
}
_dirty = true;
}
/** Adjust the clipping planes to account for a new window aspect ratio.
@@ -242,57 +231,6 @@ void Camera::adjustAspectRatio(const double newAspectRatio, const AdjustAspectRa
_top /= deltaRatio;
}
}
_dirty = true;
}
const double Camera::left() const
{
switch(_projectionType)
{
case(FRUSTUM):
case(PERSPECTIVE): return _left * _zNear;
default: return _left;
}
}
const double Camera::right() const
{
switch(_projectionType)
{
case(FRUSTUM):
case(PERSPECTIVE): return _right * _zNear;
default: return _right;
}
}
const double Camera::top() const
{
switch(_projectionType)
{
case(FRUSTUM):
case(PERSPECTIVE): return _top * _zNear;
default: return _top;
}
}
const double Camera::bottom() const
{
switch(_projectionType)
{
case(FRUSTUM):
case(PERSPECTIVE): return _bottom * _zNear;
default: return _bottom;
}
}
const double Camera::zNear() const
{
return _zNear;
}
const double Camera::zFar() const
{
return _zFar;
}
/** Calculate and return the equivalent fovx for the current projection setting.
@@ -302,7 +240,7 @@ const double Camera::calc_fovy() const
{
// note, _right & _left are prescaled by znear so
// no need to account for it.
return RadiansToDegrees(atan(_top)-atan(_bottom));
return RadiansToDegrees(atan(_top/_zNear)-atan(_bottom/_zNear));
}
@@ -313,7 +251,7 @@ const double Camera::calc_fovx() const
{
// note, _right & _left are prescaled by znear so
// no need to account for it.
return RadiansToDegrees(atan(_right)-atan(_left));
return RadiansToDegrees(atan(_right/_zNear)-atan(_left/_zNear));
}
@@ -325,10 +263,29 @@ const double Camera::calc_aspectRatio() const
return delta_x/delta_y;
}
const Matrix& Camera::getProjectionMatrix() const
const Matrix Camera::getProjectionMatrix() const
{
if (_dirty) computeMatrices();
return *_projectionMatrix;
// set up the projection matrix.
switch(_projectionType)
{
case(ORTHO):
case(ORTHO2D):
{
return Matrix::ortho(_left,_right,_bottom,_top,_zNear,_zFar);
}
break;
case(FRUSTUM):
case(PERSPECTIVE):
{
return Matrix::frustum(_left,_right,_bottom,_top,_zNear,_zFar);
}
break;
}
// shouldn't get here if camera is set up properly.
// return identity.
return Matrix();
}
void Camera::home()
@@ -338,8 +295,6 @@ void Camera::home()
_eye.set(0.0f,0.0f,0.0f);
_center.set(0.0f,0.0f,-1.0f);
_up.set(0.0f,1.0f,0.0f);
_dirty = true;
}
void Camera::setView(const Vec3& eyePoint, const Vec3& lookPoint, const Vec3& upVector)
@@ -348,7 +303,6 @@ void Camera::setView(const Vec3& eyePoint, const Vec3& lookPoint, const Vec3& up
}
void Camera::setLookAt(const Vec3& eye,
const Vec3& center,
const Vec3& up)
@@ -359,8 +313,6 @@ void Camera::setLookAt(const Vec3& eye,
_up = up;
ensureOrthogonalUpVector();
_dirty = true;
}
@@ -374,8 +326,6 @@ void Camera::setLookAt(const double eyeX, const double eyeY, const double eyeZ,
_up.set(upX,upY,upZ);
ensureOrthogonalUpVector();
_dirty = true;
}
@@ -390,8 +340,6 @@ void Camera::transformLookAt(const Matrix& matrix)
_up.normalize();
_lookAtType=USE_EYE_CENTER_AND_UP;
_dirty = true;
}
const Vec3 Camera::getLookVector() const
@@ -464,37 +412,6 @@ void Camera::attachTransform(const TransformMode mode, Matrix* matrix)
notify(WARN)<<" setting Camera to NO_ATTACHED_TRANSFORM."<<std::endl;
break;
}
_dirty = true;
}
void Camera::dirtyTransform()
{
_dirty = true;
switch(_attachedTransformMode)
{
case(EYE_TO_MODEL):
// should be safe to assume that these matrices are valid
// as attachTransform will ensure it.
if (!_modelToEyeTransform->invert(*_eyeToModelTransform))
{
notify(WARN)<<"Warning: Camera::dirtyTransform() failed to invert _modelToEyeTransform"<<std::endl;
}
break;
case(MODEL_TO_EYE):
// should be safe to assume that these matrices are valid
// as attachTransform will ensure it.
if (!_eyeToModelTransform->invert(*_modelToEyeTransform))
{
notify(WARN)<<"Warning: Camera::dirtyTransform() failed to invert _eyeToModelTransform"<<std::endl;
}
break;
default: // NO_ATTACHED_TRANSFORM
break;
}
}
Matrix* Camera::getTransform(const TransformMode mode)
@@ -518,62 +435,39 @@ const Matrix* Camera::getTransform(const TransformMode mode) const
}
const Vec3 Camera::getEyePoint_Model() const
const Matrix Camera::getModelViewMatrix() const
{
if (_eyeToModelTransform.valid()) return _eye*(*_eyeToModelTransform);
else return _eye;
}
Matrix modelViewMatrix;
const Vec3 Camera::getCenterPoint_Model() const
{
if (_eyeToModelTransform.valid()) return _center*(*_eyeToModelTransform);
else return _center;
}
const Vec3 Camera::getLookVector_Model() const
{
if (_eyeToModelTransform.valid())
// set up the model view matrix.
switch(_lookAtType)
{
Vec3 zero_transformed = Vec3(0.0f,0.0f,0.0f)*(*_eyeToModelTransform);
Vec3 look_transformed = getLookVector()*(*_eyeToModelTransform);
look_transformed -= zero_transformed;
look_transformed.normalize();
return look_transformed;
case(USE_HOME_POSITON):
if (_modelToEyeTransform.valid())
{
modelViewMatrix = *_modelToEyeTransform;
}
else
{
modelViewMatrix.makeIdentity();
}
break;
case(USE_EYE_AND_QUATERNION): // not implemented yet, default to eye,center,up.
case(USE_EYE_CENTER_AND_UP):
default:
{
modelViewMatrix.makeLookAt(_eye,_center,_up);
if (_modelToEyeTransform.valid())
{
modelViewMatrix.preMult(*_modelToEyeTransform);
}
}
break;
}
else return getLookVector();
}
const Vec3 Camera::getUpVector_Model() const
{
if (_eyeToModelTransform.valid())
{
Vec3 zero_transformed = Vec3(0.0f,0.0f,0.0f)*(*_eyeToModelTransform);
Vec3 up_transformed = getUpVector()*(*_eyeToModelTransform);
up_transformed -= zero_transformed;
up_transformed.normalize();
return up_transformed;
}
else return getUpVector();
}
const Vec3 Camera::getSideVector_Model() const
{
if (_eyeToModelTransform.valid())
{
Vec3 zero_transformed = Vec3(0.0f,0.0f,0.0f)*(*_eyeToModelTransform);
Vec3 side_transformed = getSideVector()*(*_eyeToModelTransform);
side_transformed -= zero_transformed;
side_transformed.normalize();
return side_transformed;
}
else return getSideVector();
}
const Matrix& Camera::getModelViewMatrix() const
{
if (_dirty) computeMatrices();
return *_modelViewMatrix;
return modelViewMatrix;
}
const float Camera::getFusionDistance() const
@@ -586,126 +480,6 @@ const float Camera::getFusionDistance() const
}
}
void Camera::computeMatrices() const
{
float left = _left;
float right = _right;
float top = _top;
float bottom = _bottom;
// set up the projection matrix.
switch(_projectionType)
{
case(ORTHO):
case(ORTHO2D):
{
float A = 2.0/(right-left);
float B = 2.0/(top-bottom);
float C = -2.0 / (_zFar-_zNear);
float tx = -(right+left)/(right-left);
float ty = -(top+bottom)/(top-bottom);
float tz = -(_zFar+_zNear)/(_zFar-_zNear);
_projectionMatrix = osgNew Matrix(
A, 0.0f, 0.0f, 0.0f,
0.0f, B, 0.0f, 0.0f,
0.0f, 0.0f, C, 0.0f,
tx, ty, tz, 1.0f );
}
break;
case(FRUSTUM):
case(PERSPECTIVE):
{
// note, in Frustum/Perspective mode these values are scaled
// by the zNear from when they were initialised to ensure that
// subsequent changes in zNear do not affect them.
float A = (2.0)/(right-left);
float B = (2.0)/(top-bottom);
float C = (right+left) / (right-left);
float D = (top+bottom) / (top-bottom);
float E = -(_zFar+_zNear) / (_zFar-_zNear);
float F = -(2.0*_zFar*_zNear) / (_zFar-_zNear);
_projectionMatrix = osgNew Matrix(
A, 0.0f, 0.0f, 0.0f,
0.0f, B, 0.0f, 0.0f,
C, D, E, -1.0f,
0.0f, 0.0f, F, 0.0f );
}
break;
}
// set up the model view matrix.
switch(_lookAtType)
{
case(USE_HOME_POSITON):
if (_modelToEyeTransform.valid())
{
_modelViewMatrix = _modelToEyeTransform;
}
else
{
_modelViewMatrix = osgNew Matrix;
_modelViewMatrix->makeIdentity();
}
break;
case(USE_EYE_AND_QUATERNION): // not implemented yet, default to eye,center,up.
case(USE_EYE_CENTER_AND_UP):
default:
{
Vec3 f(_center-_eye);
f.normalize();
Vec3 s(f^_up);
s.normalize();
Vec3 u(s^f);
u.normalize();
ref_ptr<Matrix> matrix = osgNew Matrix(
s[0], u[0], -f[0], 0.0f,
s[1], u[1], -f[1], 0.0f,
s[2], u[2], -f[2], 0.0f,
0.0f, 0.0f, 0.0f, 1.0f);
(*matrix) = Matrix::translate(-_eye[0], -_eye[1], -_eye[2]) * (*matrix);
if (_modelToEyeTransform.valid())
{
_modelViewMatrix = osgNew Matrix;
(*_modelViewMatrix) = (*matrix) * (*_modelToEyeTransform);
}
else
{
_modelViewMatrix = matrix;
}
}
break;
}
if (!_mp.valid()) _mp = osgNew Matrix;
_mp->mult(*_modelViewMatrix,*_projectionMatrix);
if (!_inversemp.valid()) _inversemp = osgNew Matrix;
if (!_inversemp->invert(*_mp))
{
notify(WARN)<<"Warning: Camera::computeMatrices() failed to invert _mp"<<std::endl;
}
_dirty = false;
}
void Camera::ensureOrthogonalUpVector()
{
Vec3 lv = _center-_eye;
@@ -713,39 +487,3 @@ void Camera::ensureOrthogonalUpVector()
_up = sv^lv;
_up.normalize();
}
const bool Camera::project(const Vec3& obj,const Viewport& viewport,Vec3& win) const
{
if (_mp.valid())
{
Vec3 v = obj * (*_mp);
win.set(
(float)viewport.x() + (float)viewport.width()*(v[0]+1.0f)*0.5f,
(float)viewport.y() + (float)viewport.height()*(v[1]+1.0f)*0.5f,
(v[2]+1.0f)*0.5f
);
return true;
}
else
return false;
}
const bool Camera::unproject(const Vec3& win,const Viewport& viewport,Vec3& obj) const
{
if (_inversemp.valid())
{
Vec3 v(
2.0f*(win[0]-(float)viewport.x())/viewport.width() - 1.0f,
2.0f*(win[1]-(float)viewport.y())/viewport.height() - 1.0f,
2.0f*(win[2]) - 1.0f
);
obj = v * (*_inversemp);
return true;
}
else
return false;
}

View File

@@ -354,6 +354,20 @@ void Matrix::makePerspective(const double fovy,const double aspectRatio,
void Matrix::makeLookAt(const Vec3& eye,const Vec3& center,const Vec3& up)
{
Vec3 f(center-eye);
f.normalize();
Vec3 s(f^up);
s.normalize();
Vec3 u(s^f);
u.normalize();
set(
s[0], u[0], -f[0], 0.0f,
s[1], u[1], -f[1], 0.0f,
s[2], u[2], -f[2], 0.0f,
0.0f, 0.0f, 0.0f, 1.0f);
preMult(Matrix::translate(-eye));
}
#undef SET_ROW

View File

@@ -203,10 +203,19 @@ static unsigned int reservoirBufferSize = 0;
// ---------------------------------------------------------------------------------------------------------------------------------
static void dumpLeakReport();
static void doCleanupLogOnFirstRun();
static void activateStressTest();
#ifdef OSG_USE_MEMORY_MANAGER
static void activateStressTest()
{
randomWipe = true;
alwaysValidateAll = true;
alwaysLogAll = true;
alwaysWipeAll = true;
cleanupLogOnFirstRun = true;
}
class MemStaticTimeTracker
{
public:
@@ -247,15 +256,6 @@ static MemStaticTimeTracker mstt;
// Local functions only
// ---------------------------------------------------------------------------------------------------------------------------------
static void activateStressTest()
{
randomWipe = true;
alwaysValidateAll = true;
alwaysLogAll = true;
alwaysWipeAll = true;
cleanupLogOnFirstRun = true;
}
static void doCleanupLogOnFirstRun()
{

View File

@@ -551,7 +551,12 @@ void SceneView::drawStage(osgUtil::RenderStage* renderStage)
window coordinates are calculated relative to the bottom left of the window.*/
bool SceneView::projectWindowIntoObject(const osg::Vec3& window,osg::Vec3& object) const
{
return _camera->unproject(window,*_viewport,object);
osg::Matrix inverseMVPW;
inverseMVPW.invert(computeMVPW());
object = window*inverseMVPW;
return true;
}
@@ -562,9 +567,13 @@ bool SceneView::projectWindowIntoObject(const osg::Vec3& window,osg::Vec3& objec
window coordinates are calculated relative to the bottom left of the window.*/
bool SceneView::projectWindowXYIntoObject(int x,int y,osg::Vec3& near_point,osg::Vec3& far_point) const
{
bool result_near = _camera->unproject(Vec3(x,y,0.0f),*_viewport,near_point);
bool result_far = _camera->unproject(Vec3(x,y,1.0f),*_viewport,far_point);
return result_near & result_far;
osg::Matrix inverseMVPW;
inverseMVPW.invert(computeMVPW());
near_point = osg::Vec3(x,y,0.0f)*inverseMVPW;
far_point = osg::Vec3(x,y,1.0f)*inverseMVPW;
return true;
}
@@ -574,5 +583,28 @@ bool SceneView::projectWindowXYIntoObject(int x,int y,osg::Vec3& near_point,osg:
window coordinates are calculated relative to the bottom left of the window.*/
bool SceneView::projectObjectIntoWindow(const osg::Vec3& object,osg::Vec3& window) const
{
return _camera->project(object,*_viewport,window);
window = object*computeMVPW();
return true;
}
const osg::Matrix SceneView::computeMVPW() const
{
osg::Matrix matrix;
if (_modelviewMatrix.valid())
matrix = (*_modelviewMatrix);
else if (_camera.valid())
matrix = _camera->getModelViewMatrix();
if (_projectionMatrix.valid())
matrix.postMult(*_projectionMatrix);
else if (_camera.valid())
matrix.postMult(_camera->getProjectionMatrix());
if (_viewport.valid())
matrix.postMult(_viewport->computeWindowMatrix());
else
osg::notify(osg::WARN)<<"osg::Matrix SceneView::computeMVPW() - error no viewport attached to SceneView, coords will be computed incorrectly."<<std::endl;
return matrix;
}