首页 诗词 字典 板报 句子 名言 友答 励志 学校 网站地图
当前位置: 首页 > 教程频道 > JAVA > J2SE开发 >

初学者!很奇怪的一段代码

2012-11-08 
菜鸟求助!很奇怪的一段代码!public void run( String args ) { if ( IJ.versionLessThan( "1.37i" ) ) return; … }

菜鸟求助!很奇怪的一段代码!
public void run( String args )
{
if ( IJ.versionLessThan( "1.37i" ) ) return;

final ImagePlus imp = WindowManager.getCurrentImage();
if ( imp == null ) { System.err.println( "There are no images open" ); return; }

GenericDialog gd = new GenericDialog( "Align stack" );
gd.addNumericField( "steps_per_scale_octave :", steps, 0 );
gd.addNumericField( "initial_gaussian_blur :", initial_sigma, 2 );
gd.addNumericField( "feature_descriptor_size :", fdsize, 0 );
gd.addNumericField( "feature_descriptor_orientation_bins :", fdbins, 0 );
gd.addNumericField( "minimum_image_size :", min_size, 0 );
gd.addNumericField( "maximum_image_size :", max_size, 0 );
gd.addNumericField( "minimal_alignment_error :", min_epsilon, 2 );
gd.addNumericField( "maximal_alignment_error :", max_epsilon, 2 );
gd.addNumericField( "inlier_ratio :", min_inlier_ratio, 2 );
gd.addNumericField( "background_color :", bg, 2 );
gd.addChoice( "interpolation_scheme :", schemes, schemes[ scheme ] );
gd.addCheckbox( "upscale_image_first", upscale );
gd.addCheckbox( "display_correspondences", show_info );
gd.showDialog();
if (gd.wasCanceled()) return;

steps = ( int )gd.getNextNumber();
initial_sigma = ( float )gd.getNextNumber();
fdsize = ( int )gd.getNextNumber();
fdbins = ( int )gd.getNextNumber();
min_size = ( int )gd.getNextNumber();
max_size = ( int )gd.getNextNumber();
min_epsilon = ( float )gd.getNextNumber();
max_epsilon = ( float )gd.getNextNumber();
min_inlier_ratio = ( float )gd.getNextNumber();
bg = ( double )gd.getNextNumber();
scheme = gd.getNextChoiceIndex();
upscale = gd.getNextBoolean();
if ( upscale ) scale = 2.0f;
else scale = 1.0f;
show_info = gd.getNextBoolean();

Affine a = new Affine();

int ischeme = Affine.NEAREST;
switch ( scheme )
{
case 0:
ischeme = Affine.NEAREST;
break;
case 1:
ischeme = Affine.LINEAR;
break;
case 2:
ischeme = Affine.CUBIC;
break;
case 3:
ischeme = Affine.BSPLINE3;
break;
case 4:
ischeme = Affine.OMOMS3;
break;
case 5:
ischeme = Affine.BSPLINE5;
break;
}

ImageStack stack = imp.getStack();
ImageStack stackAligned = new ImageStack( stack.getWidth(), stack.getHeight() );

float vis_scale = 256.0f / imp.getWidth();
//float vis_scale = 1024.0f / imp.getWidth();
ImageStack stackInfo = null;
ImagePlus impInfo = null;

if ( show_info )
stackInfo = new ImageStack(
Math.round( vis_scale * stack.getWidth() ),
Math.round( vis_scale * stack.getHeight() ) );

stackAligned.addSlice( null, stack.getProcessor( 1 ) );
ImagePlus impAligned = new ImagePlus( "Aligned 1 of " + stack.getSize(), stackAligned );
impAligned.show();

ImageProcessor ip1;
ImageProcessor ip2;
ImageProcessor ip3 = null;

Vector< Feature > fs1;
Vector< Feature > fs2;

ip2 = stack.getProcessor( 1 ).convertToFloat();

AffineTransform at = new AffineTransform();

FloatArray2DSIFT sift = new FloatArray2DSIFT( fdsize, fdbins );

FloatArray2D fa = ImageArrayConverter.ImageToFloatArray2D( ip2 );
Filter.enhance( fa, 1.0f );

if ( upscale )
{
FloatArray2D fat = new FloatArray2D( fa.width * 2 - 1, fa.height * 2 - 1 ); 
FloatArray2DScaleOctave.upsample( fa, fat );
fa = fat;
fa = Filter.computeGaussianFastMirror( fa, ( float )Math.sqrt( initial_sigma * initial_sigma - 1.0 ) );


}
else
fa = Filter.computeGaussianFastMirror( fa, ( float )Math.sqrt( initial_sigma * initial_sigma - 0.25 ) );

long start_time = System.currentTimeMillis();
System.out.print( "processing SIFT ..." );
sift.init( fa, steps, initial_sigma, min_size, max_size );
fs2 = sift.run( max_size );
Collections.sort( fs2 );
System.out.println( " took " + ( System.currentTimeMillis() - start_time ) + "ms" );

System.out.println( fs2.size() + " features identified and processed" );

// downscale ip2 for visualisation purposes
if ( show_info )
ip2 = downScale( ( FloatProcessor )ip2, vis_scale );

for ( int i = 1; i < stack.getSize(); ++i )
{
ip1 = ip2;
ip2 = stack.getProcessor( i + 1 ).convertToFloat();
fa = ImageArrayConverter.ImageToFloatArray2D( ip2 );
Filter.enhance( fa, 1.0f );

if ( upscale )
{
FloatArray2D fat = new FloatArray2D( fa.width * 2 - 1, fa.height * 2 - 1 ); 
FloatArray2DScaleOctave.upsample( fa, fat );
fa = fat;
fa = Filter.computeGaussianFastMirror( fa, ( float )Math.sqrt( initial_sigma * initial_sigma - 1.0 ) );
}
else
fa = Filter.computeGaussianFastMirror( fa, ( float )Math.sqrt( initial_sigma * initial_sigma - 0.25 ) );

fs1 = fs2;

start_time = System.currentTimeMillis();
System.out.print( "processing SIFT ..." );
sift.init( fa, steps, initial_sigma, min_size, max_size );
fs2 = sift.run( max_size);
Collections.sort( fs2 );
System.out.println( " took " + ( System.currentTimeMillis() - start_time ) + "ms" );

System.out.println( fs2.size() + " features identified and processed");

start_time = System.currentTimeMillis();
System.out.print( "identifying correspondences using brute force ..." );
Vector< PointMatch > candidates = 
FloatArray2DSIFT.createMatches( fs2, fs1, 1.5f, null, Float.MAX_VALUE );
System.out.println( " took " + ( System.currentTimeMillis() - start_time ) + "ms" );

IJ.log( candidates.size() + " potentially corresponding features identified" );

/**
* draw all correspondence candidates
*/
if ( show_info )
{
ip2 = downScale( ( FloatProcessor )ip2, vis_scale );

ip1 = ip1.convertToRGB();
ip3 = ip2.convertToRGB();
ip1.setColor( Color.red );
ip3.setColor( Color.red );

ip1.setLineWidth( 2 );
ip3.setLineWidth( 2 );
for ( PointMatch m : candidates )
{
float[] m_p1 = m.getP1().getL(); 
float[] m_p2 = m.getP2().getL(); 

ip1.drawDot( ( int )Math.round( vis_scale / scale * m_p2[ 0 ] ), ( int )Math.round( vis_scale / scale * m_p2[ 1 ] ) );
ip3.drawDot( ( int )Math.round( vis_scale / scale * m_p1[ 0 ] ), ( int )Math.round( vis_scale / scale * m_p1[ 1 ] ) );
}
}

Vector< PointMatch > inliers = new Vector< PointMatch >();

TRModel2D model = TRModel2D.estimateBestModel(
candidates,
inliers,
min_epsilon,
max_epsilon,
min_inlier_ratio );

if ( model != null )
{
if ( show_info )
{
ip1.setColor( Color.green );
ip3.setColor( Color.green );
ip1.setLineWidth( 2 );
ip3.setLineWidth( 2 );
for ( PointMatch m : inliers )
{
float[] m_p1 = m.getP1().getL(); 
float[] m_p2 = m.getP2().getL(); 

ip1.drawDot( ( int )Math.round( vis_scale / scale * m_p2[ 0 ] ), ( int )Math.round( vis_scale / scale * m_p2[ 1 ] ) );
ip3.drawDot( ( int )Math.round( vis_scale / scale * m_p1[ 0 ] ), ( int )Math.round( vis_scale / scale * m_p1[ 1 ] ) );


}
}

/**
* append the estimated transformation model

* TODO the current rotation assumes the origin (0,0) of the
* image in the image's "center"
* ( width / 2 - 1.0, height / 2 - 1.0 ). This is, because we
* use imagescience.jar for transformation and they do so...
* Think about using an other transformation class, focusing on
* better interpolation schemes ( Lanczos would be great ).
*/
AffineTransform at_current = new AffineTransform( model.getAffine() );
double[] m = new double[ 6 ];
at_current.getMatrix( m );
m[ 4 ] /= scale;
m[ 5 ] /= scale;
at_current.setTransform( m[ 0 ], m[ 1 ], m[ 2 ], m[ 3 ], m[ 4 ], m[ 5 ] );

double hw = ( double )imp.getWidth() / 2.0 - 1.0;
double hh = ( double )imp.getHeight() / 2.0 - 1.0;

at.translate(
-hw,
-hh );
at.concatenate( at_current );
at.translate(
hw,
hh );
}

double[] m = new double[ 6 ];
at.getMatrix( m );

Image img = Image.wrap( new ImagePlus( "new_layer", stack.getProcessor( i + 1 ) ) );

Image imgAligned = a.run(
img,
new double[][]
{ { m[ 0 ], m[ 2 ], 0, m[ 4 ] },
{ m[ 1 ], m[ 3 ], 0, m[ 5 ] },
{ 0, 0, 1, 0 },
{ 0, 0, 0, 1 } },
ischeme,
adjust,
antialias );
ImagePlus impAlignedSlice = imgAligned.imageplus();
stackAligned.addSlice( null, impAlignedSlice.getProcessor() );
if ( show_info )
{
ImageProcessor tmp;
tmp = ip1.createProcessor( stackInfo.getWidth(), stackInfo.getHeight() );
tmp.insert( ip1, 0, 0 );
stackInfo.addSlice( null, tmp ); // fixing silly 1 pixel size missmatches
tmp = ip3.createProcessor( stackInfo.getWidth(), stackInfo.getHeight() );
tmp.insert( ip3, 0, 0 );
stackInfo.addSlice( null, tmp );
if ( i == 1 )
{
impInfo = new ImagePlus( "Alignment info", stackInfo );
impInfo.show();
}
impInfo.setStack( "Alignment info", stackInfo );
impInfo.updateAndDraw();
}
impAligned.setStack( "Aligned " + stackAligned.getSize() + " of " + stack.getSize(), stackAligned );
impAligned.updateAndDraw();
}
}


  在网上下载的一个关于SIFT算法的Java源码,上面是其中一段,错误出在run上面。
  eclipse说The method run(Image, Transform, int, boolean, boolean) in the type Affine is not applicable for the arguments (Image, double[][], int, boolean, boolean)

  前面定义的run是 public void run( String args ),后面又来了一个
  Image imgAligned = a.run(
img,
new double[][]
{ { m[ 0 ], m[ 2 ], 0, m[ 4 ] },
{ m[ 1 ], m[ 3 ], 0, m[ 5 ] },
{ 0, 0, 1, 0 },
{ 0, 0, 0, 1 } },
ischeme,
adjust,
antialias );
  这是怎么回事?

[解决办法]
Image imgAligned = a.run(
img,
new double[][]
{ { m[ 0 ], m[ 2 ], 0, m[ 4 ] },
{ m[ 1 ], m[ 3 ], 0, m[ 5 ] },
{ 0, 0, 1, 0 },
{ 0, 0, 0, 1 } },
ischeme,
adjust,
antialias );
这个 run 是对象 a 调用的。对象 a 是这样初始化的：Affine a = new Affine();，所以 a 调用的 run 方法定义在类 Affine 里。从你的报错信息看，Affine 类中 run 方法的第二个参数应当是 Transform 类型，而不是 double[][] 类型，所以直接传二维数组会报错。解决办法是把矩阵包装成 Transform 对象再传入，例如：a.run( img, new Transform( new double[][]{ { m[0], m[2], 0, m[4] }, { m[1], m[3], 0, m[5] }, { 0, 0, 1, 0 }, { 0, 0, 0, 1 } } ), ischeme, adjust, antialias );（Transform 位于 imagescience.transform 包中，需要相应的 import）。

热点排行