I am creating a program that takes the width and length of rectangles drawn over a full-body picture of the user. I cannot seem to get the scaleFactor, minNeighbors, and size right. What should I do, or how should I choose the correct scaleFactor, minNeighbors, and size when using Haar cascades to detect the upper body and lower body? (I have put a rough sketch of the values I am guessing at after the code below.)
Here is my code:
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using Emgu.CV;
using Emgu.CV.Structure;
using Emgu.Util;
namespace fitting
{
public partial class Form1 : Form
{
HaarCascade UpperBody = new HaarCascade("haarcascade_mcs_upperbody.xml");
HaarCascade LowerBody = new HaarCascade("haarcascade_lowerbody.xml");
Capture camera;
bool captureProcess = false;
Image<Bgr, Byte> img;
public Form1()
{
InitializeComponent();
}
void viewImage(object sender, EventArgs e)
{
img = camera.QueryFrame();
if (img == null)
return;
CamImageBox.Image = img;
}
private void btnCapture_Click(object sender, EventArgs e)
{
if (captureProcess == true)
{
string data;
Application.Idle -= viewImage;
captureProcess = false;
SaveFileDialog dlg = new SaveFileDialog();
dlg.Filter = "Image|*.jpg;*.png";
if (dlg.ShowDialog() == DialogResult.OK)
{
img.ToBitmap().Save(dlg.FileName + ".jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
data = dlg.FileName + ".jpg";
}
measureImage();
}
}
void measureImage()
{
OpenFileDialog dlg2 = new OpenFileDialog();
dlg2.Filter = "Image|*.jpg;*.png";
if (dlg2.ShowDialog() == DialogResult.OK)
{
Image<Bgr, Byte> frame = new Image<Bgr, byte>(dlg2.FileName);
Image<Gray, Byte> Gray_Frame = frame.Convert<Gray, Byte>();
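// DetectHaarCascade(cascade, scaleFactor, minNeighbors, flags, minSize):
// scaleFactor is the step between search-window scales, and minNeighbors is how many
// overlapping candidate rectangles are required before a detection is reported.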
MCvAvgComp[][] LowerBodyDetect = Gray_Frame.DetectHaarCascade(
LowerBody,
1.985603925968,
0,
Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
new Size());
MCvAvgComp[][] UpperBodyDetect = Gray_Frame.DetectHaarCascade(
UpperBody,
1.3,
5,
Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
new Size());
//foreach (MCvAvgComp Upp_Body in UpperBodyDetect[0])
//{
// frame.Draw(Upp_Body.rect, new Bgr(Color.Red), 2);
// double width = (Upp_Body.rect.Width * 0.264583333);
// textBox1.Text = (Convert.ToString(width));
//}
try
{
frame.Draw(UpperBodyDetect[0][0].rect, new Bgr(Color.Red), 2);
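// rect.Width is in pixels; the commented-out factor 0.264583333 (= 25.4 / 96)
// would convert pixels to millimetres assuming the image is 96 DPI.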
double width = (UpperBodyDetect[0][0].rect.Width);
textBox1.Text = (Convert.ToString(width));
}
catch (Exception e)
{
MessageBox.Show(e.Message);
}
//foreach (MCvAvgComp Low_Body in LowerBodyDetect[0])
//{
// frame.Draw(Low_Body.rect, new Bgr(Color.Green), 2);
//}
try
{
frame.Draw(LowerBodyDetect[0][0].rect, new Bgr(Color.Green), 2);
}
catch (Exception e)
{
MessageBox.Show(e.Message);
}
CamImageBox.Image = frame;
}
}
private void Form1_Load(object sender, EventArgs e)
{
bool useCam = false;
if (!useCam)
measureImage();
else {
try
{
camera = new Capture();
}
catch (Exception exc)
{
MessageBox.Show(exc.Message);
return;
}
Application.Idle += viewImage;
captureProcess = true;
}
}
}
}
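From what I have read about OpenCV's Haar detectors, scaleFactor is usually kept close to 1.1 to 1.2, minNeighbors around 3 to 5, and the minimum size is chosen relative to the frame so that very small false hits are skipped. Is something like the sketch below, which reuses Gray_Frame and frame from measureImage, closer to what is normally recommended? The exact numbers are only my guess, not values I have verified for these cascades.

// Rough sketch with commonly suggested starting values (unverified guesses on my part).
MCvAvgComp[][] upperGuess = Gray_Frame.DetectHaarCascade(
    UpperBody,
    1.1,    // scaleFactor: grow the search window by 10% between passes
    4,      // minNeighbors: require several overlapping candidates
    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
    new Size(frame.Width / 5, frame.Height / 5));   // minSize: skip very small windows
MCvAvgComp[][] lowerGuess = Gray_Frame.DetectHaarCascade(
    LowerBody,
    1.1,
    4,
    Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
    new Size(frame.Width / 5, frame.Height / 5));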
Thank you for your answer and your time, sir. – newbie07