This article looks at how to deal with a frame rate that is too slow, and should be a useful reference for anyone running into the same problem.
Problem description
How can I speed up the frame rate? I am using Emgu CV to process the Kinect depth frame and detect a violin bow in the image with HoughLines, but the frame rate is very slow, about 3 frames per second. I have attached the code; any suggestions for increasing the speed would be appreciated.
Many thanks.
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
BitmapSource depthBmp = null;
blobCount = 0;
bool depthFrameProcessed = false;
MultiSourceFrameReference frameReference = e.FrameReference;
MultiSourceFrame multiSourceFrame = null;
DepthFrame depthFrame = null;
ColorFrame colorFrame = null;
BodyIndexFrame bodyIndexFrame = null;
BodyFrame bodyFrame = null;
try
{
multiSourceFrame = frameReference.AcquireFrame();
if (multiSourceFrame != null)
{
DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
BodyIndexFrameReference bodyIndexFrameReference = multiSourceFrame.BodyIndexFrameReference;
BodyFrameReference bodyFrameReference = multiSourceFrame.BodyFrameReference;
if (this.startTime.Ticks == 0)
{
this.startTime = depthFrameReference.RelativeTime;
// this.startTimeDT = DateTime.Now;
}
this.framesSinceUpdate++;
if (DateTime.Now >= this.nextStatusUpdate)
{
// calculate fps from the frames received since the last status update
double fps = 0.0;
if (this.stopwatch.IsRunning)
{
this.stopwatch.Stop();
fps = this.framesSinceUpdate / this.stopwatch.Elapsed.TotalSeconds;
this.stopwatch.Reset();
}
this.nextStatusUpdate = DateTime.Now + TimeSpan.FromSeconds(1);
this.CurrentFPS = fps;
this.txtFrameRateCount.Text = "frames per second: " + fps.ToString();
}
if (!this.stopwatch.IsRunning)
{
this.framesSinceUpdate = 0;
this.stopwatch.Start();
}
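// Acquire all four frames; the processing below only runs when every one of them is available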
depthFrame = depthFrameReference.AcquireFrame();
colorFrame = colorFrameReference.AcquireFrame();
bodyIndexFrame = bodyIndexFrameReference.AcquireFrame();
bodyFrame = bodyFrameReference.AcquireFrame();
// if ((depthFrame != null) && (colorFrame != null) && (bodyIndexFrame != null))
if ((depthFrame != null) && (colorFrame != null) && (bodyIndexFrame != null) && (bodyFrame != null))
{
if (this.bodies == null)
{
this.bodies = new Body[bodyFrame.BodyCount];
}
bodyFrame.GetAndRefreshBodyData(this.bodies);
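// Take the first tracked body and map its right hand, left hand, spine-shoulder and head joints into depth space; these bound the region searched for the bow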
CameraSpacePoint rightHandCSP = this.bodies.Where(b => b.IsTracked).First().Joints[JointType.HandRight].Position;
CameraSpacePoint spineShoulderCSP = this.bodies.Where(b => b.IsTracked).First().Joints[JointType.SpineShoulder].Position;
spineShoulderCSP.Y = spineShoulderCSP.Y + 0.05f;
DepthSpacePoint spineShoulderdDSP = coordinateMapper.MapCameraPointToDepthSpace(spineShoulderCSP);
// DepthSpacePoint spineShoulderdDSP = coordinateMapper.MapCameraPointToDepthSpace(this.bodies.Where(b => b.IsTracked).First().Joints[JointType.SpineShoulder].Position);
CameraSpacePoint headCSP = this.bodies.Where(b => b.IsTracked).First().Joints[JointType.Head].Position;
headCSP.X = headCSP.X - 0.12f;
DepthSpacePoint rightHanddp = this.coordinateMapper.MapCameraPointToDepthSpace(rightHandCSP);
DepthSpacePoint leftHanddp = this.coordinateMapper.MapCameraPointToDepthSpace(this.bodies.Where(b => b.IsTracked).First().Joints[JointType.HandLeft].Position);
DepthSpacePoint headdp = this.coordinateMapper.MapCameraPointToDepthSpace(headCSP);
FrameDescription bodyIndexFrameDescription = bodyIndexFrame.FrameDescription;
bodyIndexFrame.CopyFrameDataToArray(this.bodyIndexFrameData);
depthFrameData = new ushort[depthFrame.FrameDescription.LengthInPixels];
depthFrame.CopyFrameDataToArray(depthFrameData);
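// SliceDepthImage (custom extension) takes the body-index mask, the depth data and the slider min/max values, and returns the scroll point ([0]) and a BitmapSource of the sliced depth image ([1])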
object[] ob = depthFrame.SliceDepthImage(this.bodyIndexFrameData, depthFrameData, (int)sliderMin.Value, (int)sliderMax.Value);
depthBmp = (BitmapSource)ob[1];
this.scrollDepthSpacePoint = (DepthSpacePoint)ob[0];
System.Drawing.Point ScrollDrawingPoint = new System.Drawing.Point((int)this.scrollDepthSpacePoint.X, (int)this.scrollDepthSpacePoint.Y);
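// Convert the sliced depth bitmap to an Emgu CV image, grayscale it and run Canny edge detection ahead of the Hough transform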
Image<Bgr, Byte> openCVImg = new Image<Bgr, byte>(depthBmp.ToBitmap());
Image<Gray, byte> gray_image = openCVImg.Convert<Gray, byte>();
Gray cannyThreshold = new Gray((int)sliderThreshold.Value);
// Gray cannyThresholdLinking = new Gray(120);
Gray cannyThresholdLinking = new Gray((int)sliderThresholdLinking.Value);
Image<Gray, Byte> cannyEdges = gray_image.Canny(cannyThreshold, cannyThresholdLinking);
double tolerance = 4.0f;
LineSegment2D[] lines = cannyEdges.HoughLinesBinary(
1, // distance resolution in pixels
Math.PI / 45.0f, // angle resolution in radians (4 degrees)
20, // accumulator threshold (minimum vote count)
10, // minimum line length
10 // maximum gap between segments on the same line
)[0]; // take the lines from the first channel
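// Keep only lines that lie between the left hand and the head in X, above the right hand in Y, and within 65-90 degrees of vertical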
lines = lines.Where(l => l.P1.X > leftHanddp.X && l.P2.X > leftHanddp.X
&& l.P1.Y < rightHanddp.Y && l.P2.Y < rightHanddp.Y
&& l.P1.X < headdp.X && l.P2.X < headdp.X
&& (l.Direction.X == 0
|| (l.Direction.X != 0
&& ImageHelpers.Radians2Degrees(Math.Atan(Math.Abs(l.Direction.Y / l.Direction.X))) > 65.0f
&& ImageHelpers.Radians2Degrees(Math.Atan(Math.Abs(l.Direction.Y / l.Direction.X))) < 90.0f))).ToArray();
for (int i = 0; i < lines.Count(); i++)
{
LineSegment2D line = lines[i];
openCVImg.Draw(line, new Bgr(System.Drawing.Color.Purple), 2);
}
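// Pair up lines that are nearly parallel and horizontally close together; such a pair is a candidate for the two edges of the bow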
var combinations = from item in lines
from item2 in lines
where Array.IndexOf(lines, item) < Array.IndexOf(lines, item2) && Math.Abs(item.GetExteriorAngleDegree(item2)) < tolerance && Math.Abs((item.P1.X - item2.P1.X)) < tolerance
select new[] { item, item2 };
int combinationCount = 0;
foreach (LineSegment2D[] linepair in combinations)
{
double degree = linepair[0].GetExteriorAngleDegree(linepair[1]);
double distanceAppart = Math.Abs(linepair[0].P1.X - linepair[1].P1.X);
openCVImg.Draw(linepair[0], new Bgr(System.Drawing.Color.Green), 1);
openCVImg.Draw(linepair[1], new Bgr(System.Drawing.Color.Green), 1);
combinationCount++;
}
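// Estimate how far above the right hand, and how far beyond it in depth, the bow tip can plausibly be, based on the head-to-hand geometry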
float zdiff = Math.Abs(rightHandCSP.Z - headCSP.Z);
float ydiff = Math.Abs(rightHandCSP.Y - headCSP.Y);
CameraSpacePoint maxHeightCSP = rightHandCSP;
float headHandLength = (float)Math.Sqrt(ydiff*ydiff + zdiff* zdiff);
float maxHeight = rightHandCSP.Y + (ydiff * 0.75f / headHandLength);
maxHeightCSP.Y = maxHeightCSP.Y + (ydiff * 0.75f / headHandLength);
DepthSpacePoint maxHeightDSP = this.coordinateMapper.MapCameraPointToDepthSpace(maxHeightCSP);
float maxDepth = rightHandCSP.Z +(zdiff *0.75f/headHandLength);
ushort maxDepthShort = ushort.Parse(Math.Truncate(maxDepth * 1000.0f).ToString());
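// From the candidate pairs, take the longest line (length > 10) as the bow line; note the OrderByDescending on length overrides the earlier ordering by depth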
LineSegment2D bowLine = combinations.Where(c => c[0].Length>10).OrderBy(c =>this.depthFrameData[(int)( c[0].P1.Y * depthFrame.FrameDescription.Width + c[0].P1.X)]).OrderByDescending(c => c[0].Length).First()[0];
System.Drawing.Point ButtonPoint = new System.Drawing.Point();
System.Drawing.Point BowTipPoint = new System.Drawing.Point();
if (bowLine.P1.Y > bowLine.P2.Y)
{
double gradient = bowLine.Direction.Y/bowLine.Direction.X;
ButtonPoint = findButtonPoint(bowLine.P1,gradient,depthFrame.FrameDescription.Width,depthFrame.FrameDescription.Height);
BowTipPoint = findBowTipPoint(bowLine.P2, gradient, depthFrame.FrameDescription.Width, depthFrame.FrameDescription.Height, maxDepthShort,(int) maxHeightDSP.Y);
}
else
{
double gradient = bowLine.Direction.Y / bowLine.Direction.X;
ButtonPoint = findButtonPoint(bowLine.P2, gradient, depthFrame.FrameDescription.Width, depthFrame.FrameDescription.Height);
BowTipPoint = findBowTipPoint(bowLine.P1, gradient, depthFrame.FrameDescription.Width, depthFrame.FrameDescription.Height, maxDepthShort,(int) maxHeightDSP.Y);
}
openCVImg.Draw(new CircleF(bowLine.P1, 3.0f), new Bgr(System.Drawing.Color.DarkTurquoise), 4);
openCVImg.Draw(new CircleF(bowLine.P2, 3.0f), new Bgr(System.Drawing.Color.Orange), 4);
openCVImg.Draw(new CircleF(ButtonPoint, 3.0f), new Bgr(System.Drawing.Color.Yellow), 4);
openCVImg.Draw(new CircleF(BowTipPoint, 3.0f), new Bgr(System.Drawing.Color.LimeGreen), 4);
openCVImg.Draw(new CircleF(ScrollDrawingPoint, 3.0f), new Bgr(System.Drawing.Color.Aquamarine), 4);
System.Drawing.Point ViolinButtonPoint = new System.Drawing.Point((int)spineShoulderdDSP.X, (int)spineShoulderdDSP.Y);
openCVImg.Draw(new CircleF(ViolinButtonPoint, 3.0f), new Bgr(System.Drawing.Color.Red), 4);
System.Drawing.Point contactPoint = ImageHelpers.GetIntersection(bowLine,new LineSegment2D(ScrollDrawingPoint,ViolinButtonPoint));
openCVImg.Draw(new CircleF(contactPoint, 3.0f), new Bgr(System.Drawing.Color.DarkOliveGreen), 4);
this.txtRectangleCount.Text = combinationCount.ToString() + " parallel lines " + ImageHelpers.Radians2Degrees(Math.Atan(Math.Abs(bowLine.Direction.Y / bowLine.Direction.X))).ToString() + " degrees";
//}
this.outImg.Source = ImageHelpers.ToBitmapSource(openCVImg);
}
// }
//if (colorFrame != null)
//{
if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
{
colorFrame.CopyRawFrameDataToArray(this.colorPixels);
}
else
{
colorFrame.CopyConvertedFrameDataToArray(this.colorPixels, ColorImageFormat.Bgra);
}
this.colorBitmap.WritePixels(
new Int32Rect(0, 0, colorFrame.FrameDescription.Width, colorFrame.FrameDescription.Height),
this.colorPixels,
colorFrame.FrameDescription.Width * this.cbytesPerPixel,
0);
}
}
catch (Exception ex)
{
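// Exceptions are swallowed here; the current frame is simply skipped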
string exception = ex.ToString();
}
finally
{
if (depthFrame != null)
{
depthFrame.Dispose();
depthFrame = null;
}
if (colorFrame != null)
{
colorFrame.Dispose();
colorFrame = null;
}
if (bodyIndexFrame != null)
{
bodyIndexFrame.Dispose();
bodyIndexFrame = null;
}
if (bodyFrame != null)
{
bodyFrame.Dispose();
bodyFrame = null;
}
if (multiSourceFrame != null)
{
multiSourceFrame = null;
}
}
}
Recommended answer
You need to make sure you release the frames as quickly as possible: acquire them, copy out the data you need, and dispose them; do not start processing until you have released all of the frames. From there you can run the Kinect processing on a separate thread and prepare the data for your draw loop.
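Below is a minimal sketch of that pattern, using the same Kinect SDK types as the question. The ProcessFrame method and the processingBusy flag are hypothetical stand-ins (ProcessFrame would hold the existing Canny/Hough/drawing logic), and the color frame is left out to keep the sketch short.

// Sketch only: copy the frame data as quickly as possible, dispose the frames,
// then hand the copies to a background task so the event handler returns immediately.
// Requires: using System.Threading; using System.Threading.Tasks;
private int processingBusy; // 0 = idle, 1 = a frame is being processed (assumed field)

private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
    if (multiSourceFrame == null)
    {
        return;
    }

    ushort[] depthData;
    byte[] bodyIndexData;
    Body[] bodies;
    int depthWidth, depthHeight;

    // Acquire, copy and dispose; no image processing while the frames are held.
    using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
    using (BodyIndexFrame bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame())
    using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
    {
        if (depthFrame == null || bodyIndexFrame == null || bodyFrame == null)
        {
            return;
        }

        depthWidth = depthFrame.FrameDescription.Width;
        depthHeight = depthFrame.FrameDescription.Height;

        depthData = new ushort[depthFrame.FrameDescription.LengthInPixels];
        depthFrame.CopyFrameDataToArray(depthData);

        bodyIndexData = new byte[bodyIndexFrame.FrameDescription.LengthInPixels];
        bodyIndexFrame.CopyFrameDataToArray(bodyIndexData);

        bodies = new Body[bodyFrame.BodyCount];
        bodyFrame.GetAndRefreshBodyData(bodies);
    } // frames are released here, so the sensor can deliver the next one

    // Drop this frame if the previous one is still being processed, so work never piles up.
    if (Interlocked.CompareExchange(ref this.processingBusy, 1, 0) == 0)
    {
        Task.Run(() =>
        {
            try
            {
                // Hypothetical method holding the existing Canny/Hough/drawing logic.
                this.ProcessFrame(depthData, bodyIndexData, bodies, depthWidth, depthHeight);
            }
            finally
            {
                Interlocked.Exchange(ref this.processingBusy, 0);
            }
        });
    }
}

Note that the background task cannot touch WPF controls directly: updating outImg or the text boxes from it needs Dispatcher.Invoke, or the prepared result can be handed back to the UI thread for the draw loop.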
This concludes the article on dealing with a frame rate that is too slow. Hopefully the recommended answer is helpful.