[ filter_virtual_range_scan ]

- last commit before integration
This commit is contained in:
Paolo Cignoni cignoni 2010-09-02 14:54:18 +00:00
parent eb355d1889
commit e0e6abebe1
7 changed files with 127 additions and 43 deletions

View File

@ -75,13 +75,19 @@ void FilterVirtualRangeScan::initParameterSet(QAction* filter,MeshDocument &md,
"The mesh will be sampled uniformly from a texture of v x v pixels, where v is the chosen value.") );
par.addParam( new RichInt( "features_side", 512, "Feature sampling resolution:",
"The filter performs feature detection from a texture of v x v pixels, where v is the chosen value.") );
par.addParam( new RichDynamicFloat( "frontFacingCone", 40, 0.0f, 180.0f,
"Front facing cone:",
par.addParam( new RichDynamicFloat( "frontFacingConeU", 40, 0.0f, 180.0f,
"Front facing cone (uniform):",
QString("Pixels whose normal is directed towards the viewer are considered front-facing.<br />") +
"To be front-facing, these normals must reside within a given cone of directions, whose angle is set with this parameter."));
"To be front-facing, these normals must reside within a given cone of directions, whose angle is set with this parameter.<br />" +
"Only the front-facing pixels form the uniform samples cloud" ) );
par.addParam( new RichDynamicFloat( "bigJump", 0.1, 0.0, 1.0, "Big depth jump threshold:",
QString("The filter detects mesh borders and big offsets within the mesh by testing the depth of neighbours pixels.<br />") +
"This parameter controls the (normalized) minimum depth offset for a depth jump to be recognized.") );
par.addParam( new RichDynamicFloat( "frontFacingConeF", 40, 0.0f, 180.0f,
"Front facing cone (features):",
QString("Look at the <i>Front facing cone (uniform)</i> parameter description to understand when a pixel is") +
"said <i>front-facing</i>. In the feature sensitive sampling step, border pixels are recognized as features if " +
"they are facing the observer within a given cone of direction, whose gap is specified (in degrees) with this parameter." ) );
par.addParam( new RichDynamicFloat( "smallJump", 0.01f, 0.001f, 0.1f, "Small depth jump threshold:",
QString("To be considered on the same mesh patch, neighbours pixels must be within this depth range.<br />") +
"For example, if the max depth value is 0.6 and the min depth value is 0.4, then a value of 0.01 "+
@ -112,13 +118,17 @@ bool FilterVirtualRangeScan::applyFilter( QAction* filter,
vrsParams.coneGap = par.getDynamicFloat( "coneGap" );
vrsParams.uniformResolution = par.getInt( "uniform_side" );
vrsParams.featureResolution = par.getInt( "features_side" );
vrsParams.frontFacingCone = par.getDynamicFloat( "frontFacingCone" );
vrsParams.frontFacingConeU = par.getDynamicFloat( "frontFacingConeU" );
vrsParams.frontFacingConeF = par.getDynamicFloat( "frontFacingConeF" );
vrsParams.bigDepthJump = par.getDynamicFloat( "bigJump" );
vrsParams.smallDepthJump = par.getDynamicFloat( "smallJump" );
vrsParams.angleThreshold = par.getDynamicFloat( "normalsAngle" );
vrsParams.attributeMask = VRSParameters::POSITION |
VRSParameters::NORMAL;
// we don't want to use custom povs
vrsParams.useCustomPovs = false;
bool oneMesh = par.getBool( "oneMesh" );
MeshModel* curMeshModel = md.mm();
CMeshO* startMesh = &( curMeshModel->cm );

View File

@ -50,7 +50,7 @@ public:
virtual QString filterInfo(FilterIDType filter) const;
virtual int getRequirements(QAction *);
virtual void initParameterSet(QAction*, MeshModel&, RichParameterSet &){assert(0);}
virtual void initParameterSet(QAction*, MeshModel&, RichParameterSet &){ ;}
virtual void initParameterSet(QAction *, MeshDocument &, RichParameterSet &);
virtual bool applyFilter (QAction* filter, MeshDocument &md, RichParameterSet & par, vcg::CallBackPos *cb);

View File

@ -5,8 +5,13 @@
#include "utils.h"
#include "configurations.h"
#include <wrap/gl/shot.h>
#include <vcg/space/box2.h>
namespace vrs
{
typedef pair< vcg::Shot< float >, vcg::Box2i > Pov;
class VRSParameters
{
public:
@ -20,7 +25,11 @@ namespace vrs
float smallDepthJump; // the maximum allowable depth jump to recognize mesh patches continuity
float angleThreshold; // the threshold angle (degrees) used in feature detection step
float bigDepthJump; // used to detect big depth offset
float frontFacingCone; // the angle (in degrees) used to determine whether a pixel "is facing" the observer
float frontFacingConeU; // the angle (in degrees) used to determine whether a pixel "is facing" the observer (uniform sampling)
float frontFacingConeF; // the facing cone gap used in the feature sensitive sampling step
vector< Pov > customPovs; // custom point-of-views
bool useCustomPovs;
// the available attributes of each output sample
enum SampleAttributes

View File

@ -61,7 +61,7 @@ namespace vrs
// builds stages
Start< MeshType > start( srcMesh, params->povs, &res );
lastViews = start.views;
ConeFilter coneFilter( params->frontFacingCone, &res );
ConeFilter coneFilter( params->frontFacingConeU, &res );
Compactor inputCompactor( &res );
inputCompactor.setParameters( "mask_pyramid", "outMask", "mask_pyramid",
"input", "best", "best", "input_compactor" );

View File

@ -350,7 +350,7 @@ namespace vrs
const string fragmentProgram = STRINGFY(
uniform sampler2D outMask;
uniform sampler2D eyeNormalBest;
uniform sampler2D inputEyeNormal;
varying vec2 sample_coords;
varying vec3 sample_eye_normal;
@ -362,7 +362,7 @@ namespace vrs
if( mask_val > 0.5 )
{
vec3 current_eye_normal = normalize( texelFetch( eyeNormalBest, frag_coords, 0 ) );
vec3 current_eye_normal = normalize( texelFetch( inputEyeNormal, frag_coords, 0 ) );
vec3 norm_sample_eye_normal = normalize( sample_eye_normal );
vec3 perfect_view = vec3( 0.0, 0.0, -1.0 );
@ -614,12 +614,31 @@ namespace vrs
return ( isValid( first ) && isValid( second ) );
}
bool smallDepthJumpTest( ivec2 first, ivec2 second )
// the three pixels must lie on the same mesh patch, and the central
// one must be the nearest
bool smallDepthJumpTest( ivec2 first, ivec2 second, ivec2 center )
{
float centralDepth = texelFetch( inputDepth, center, 0 ).x;
float firstDepth = texelFetch( inputDepth, first, 0 ).x;
float secondDepth = texelFetch( inputDepth, second, 0 ).x;
float diff = abs( firstDepth - secondDepth );
return ( diff < smallDepthJump );
bool ok = false;
if( firstDepth < centralDepth )
{
ok = ( secondDepth < centralDepth );
}
else
{
ok = ( secondDepth >= centralDepth );
}
if( !ok )
{
return false;
}
float jump = max( abs( firstDepth - centralDepth ), abs( secondDepth - centralDepth ) );
return ( jump <= smallDepthJump );
}
bool attributeTest( ivec2 first, ivec2 second )
@ -636,7 +655,7 @@ namespace vrs
ivec2 n2 = my_coords + neighbours[ pairIndex ][ 1 ];
if( !validTest( n1, n2 ) ) return false;
if( !smallDepthJumpTest( n1, n2 ) ) return false;
if( !smallDepthJumpTest( n1, n2, my_coords ) ) return false;
return ( attributeTest( n1, n2 ) );
}

View File

@ -54,12 +54,12 @@ namespace vrs
PixelData* tmpData = (*(res->pdm))[ "bestPosition" ];
on( "killer", "killer_shader" );
string samplers[] = { "bestPosition", "bestNormal", "outMask", "eyeNormalBest" };
string samplers[] = { "bestPosition", "bestNormal", "outMask", "inputEyeNormal" };
bindPixelData( samplers, 4, samplers );
glClear( GL_COLOR_BUFFER_BIT );
glPointSize( 3.0f );
//glPointSize( 3.0f );
Utils::feed_coords( tmpData->side, tmpData->side, tmpData->length );
glPointSize( 1.0f );
//glPointSize( 1.0f );
off();
// reset matrices
@ -289,7 +289,7 @@ namespace vrs
float bigJump = depthRange * res->params->bigDepthJump;
float angleInRadians = ( res->params->angleThreshold * PI ) / 180.0;
float thresholdCosine = cos( angleInRadians );
angleInRadians = ( (res->params->frontFacingCone/2.0) * PI ) / 180.0;
angleInRadians = ( (res->params->frontFacingConeF/2.0) * PI ) / 180.0;
float frontFacingCosine = cos( angleInRadians );
on( "detection", "detector_shader" );

View File

@ -2,6 +2,7 @@
#define START_H
#include <vcg/space/point3.h>
#include <wrap/gl/shot.h>
#include <vector>
#include <map>
#include "stage.h"
@ -19,21 +20,31 @@ namespace vrs
public:
typedef typename Mesh::ScalarType ScalarType;
typedef typename vcg::Point3< ScalarType > MyPoint;
typedef typename vcg::Shot< ScalarType > ShotType;
Start( Mesh* src, int povs, SamplerResources* res )
:Stage( res )
{
m = src;
this->povs = povs;
radius = ( m->bbox.Diag() / 2 ) * 1.2;
center = m->bbox.Center();
MyPoint coneAxis( res->params->coneAxis[0],
res->params->coneAxis[1],
res->params->coneAxis[2] );
coneAxis.Normalize();
float coneGap = ( res->params->coneGap / 180.0f ) * PI;
generatePovs( povs, radius, center, views, coneAxis, coneGap );
generateUpVectors( views, center, upVector );
if( res->params->useCustomPovs )
{
assert( res->params->customPovs.size() > 0 );
}
else
{ // distribute povs uniformly in a cone of directions
this->povs = povs;
radius = ( m->bbox.Diag() / 2 ) * 1.2;
center = m->bbox.Center();
MyPoint coneAxis( res->params->coneAxis[0],
res->params->coneAxis[1],
res->params->coneAxis[2] );
coneAxis.Normalize();
float coneGap = ( res->params->coneGap / 180.0f ) * PI;
generatePovs( povs, radius, center, views, coneAxis, coneGap );
generateUpVectors( views, center, upVector );
}
currentPov = 0;
}
@ -41,30 +52,57 @@ namespace vrs
virtual void go( void )
{
static int i=0;
char buf[100];
sprintf( buf, "start%d", i++ );
// generates attributes screenshots
on( "start", "start_shader" );
Point3< ScalarType >& p = views[ currentPov ];
Point3< ScalarType >& up = upVector[ currentPov ];
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
glOrtho( -radius, radius, -radius, radius, -radius, radius );
if( res->params->useCustomPovs )
{
// there's no need to call UnsetView(), since the modelview
// and projection matrices are reset in the killer stage
/*
if( currentPov > 0 )
{
GlShot< ShotType >::UnsetView();
}
*/
glMatrixMode( GL_MODELVIEW );
glLoadIdentity();
gluLookAt( center.X(), center.Y(), center.Z(),
p.X(), p.Y(), p.Z(),
up.X(), up.Y(), up.Z() );
Pov& p = res->params->customPovs[ currentPov ];
ScalarType nearPlane, farPlane;
GlShot< ShotType >::GetNearFarPlanes( p.first, m->bbox, nearPlane, farPlane );
GlShot< ShotType >::SetView( p.first, nearPlane, farPlane );
glEnable( GL_SCISSOR_TEST );
Box2i& scissorBox = p.second;
vcg::Point2i min = scissorBox.min;
int width = scissorBox.DimX();
int height = scissorBox.DimY();
glScissor( (GLint)min.X(), (GLint)min.Y(), (GLsizei)width, (GLsizei)height );
}
else
{
Point3< ScalarType >& p = views[ currentPov ];
Point3< ScalarType >& up = upVector[ currentPov ];
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
glOrtho( -radius, radius, -radius, radius, -radius, radius );
glMatrixMode( GL_MODELVIEW );
glLoadIdentity();
gluLookAt( center.X(), center.Y(), center.Z(),
p.X(), p.Y(), p.Z(),
up.X(), up.Y(), up.Z() );
}
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
render();
//res->fbo->screenshots( buf );
off();
if( res->params->useCustomPovs )
{
glDisable( GL_SCISSOR_TEST );
}
Utils::saveMatrices();
// inverts depth background to white
@ -77,7 +115,15 @@ namespace vrs
bool nextPov( void )
{
return ( ++currentPov < povs );
currentPov++;
if( res->params->useCustomPovs )
{
return ( currentPov < res->params->customPovs.size() );
}
else
{
return ( currentPov < povs );
}
}
//private:
@ -266,7 +312,7 @@ namespace vrs
void generateUpVectors( vector< MyPoint >& povs, MyPoint& center,
vector< MyPoint >& target )
{
srand( time(NULL) );
srand( 12345 );
target.clear();
MyPoint dir;
MyPoint up;