This article looks at how distortion coefficients can be computed and fed to GStreamer's cameraundistort plugin; hopefully it is a useful reference for anyone facing the same problem.
Problem description
How can I use distortion coefficients that have already been computed with some specific Python code (dist [[7.33183864e+03 1.52395233e-01 2.53983049e+00 2.20587897e+00 2.61869010e-06]]) together with the cameraundistort plugin?
I have seen the following pipeline example in the documentation:
gst-launch-1.0 -v v4l2src ! videoconvert ! cameraundistort settings= ! autovideosink
But I don't know what argument I should give to settings. Is that where I should put the computed distortion coefficients, or the camera matrix?
Recommended answer
I was also struggling to find the answer to this.
There is this piece of code (in the gst-plugins-bad sources):
/* set settings property */
g_free (calib->settings);
calib->settings =
    camera_serialize_undistort_settings (calib->cameraMatrix,
    calib->distCoeffs);

gchar *
camera_serialize_undistort_settings (cv::Mat & cameraMatrix,
    cv::Mat & distCoeffs)
{
  cv::FileStorage fs (".xml", cv::FileStorage::WRITE + cv::FileStorage::MEMORY);
  fs << "cameraMatrix" << cameraMatrix;
  fs << "distCoeffs" << distCoeffs;
  std::string buf = fs.releaseAndGetString ();
  return g_strdup (buf.c_str ());
}
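The settings string is presumably parsed back by the matching camera_deserialize_undistort_settings() in the same camerautils.cpp. I have not copied that function here, but here is a minimal sketch of what the inverse could look like, assuming the same FileStorage keys; the function name and return convention below are my own placeholders, not the plugin's actual code:
#include <glib.h>
#include <opencv2/opencv.hpp>

/* Sketch only: the real camera_deserialize_undistort_settings() in
 * camerautils.cpp may differ in details. READ + MEMORY tells FileStorage
 * to parse the string itself instead of treating it as a file name. */
static gboolean
deserialize_undistort_settings_sketch (const gchar * settings,
    cv::Mat & cameraMatrix, cv::Mat & distCoeffs)
{
  cv::FileStorage fs (settings, cv::FileStorage::READ + cv::FileStorage::MEMORY);

  if (!fs.isOpened ())
    return FALSE;
  fs["cameraMatrix"] >> cameraMatrix;
  fs["distCoeffs"] >> distCoeffs;
  fs.release ();
  return !cameraMatrix.empty () && !distCoeffs.empty ();
}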
To find out what the serializer's output looks like, I wrote the following code:
//
// g++ -o out main.cpp `pkg-config --cflags --libs opencv4 glib-2.0`
//
// https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/blob/324e55a3cdd7c6cef24356fd626deee5fba343df/ext/opencv/camerautils.cpp
//
#include <glib.h>
#include <opencv2/opencv.hpp>
#include <iostream>

gchar *
camera_serialize_undistort_settings (cv::Mat & cameraMatrix,
    cv::Mat & distCoeffs)
{
  cv::FileStorage fs (".xml", cv::FileStorage::WRITE + cv::FileStorage::MEMORY);
  fs << "cameraMatrix" << cameraMatrix;
  fs << "distCoeffs" << distCoeffs;
  std::string buf = fs.releaseAndGetString ();
  return g_strdup (buf.c_str ());
}

int main() {
  float camera_matrix_data[9] = { 2.8576236805989838e+03, 0.0, 1.9392296211154603e+03,
      0.0, 2.8456611938133528e+03, 1.1219585111073625e+03,
      0.0, 0.0, 1.0 };
  cv::Mat camera_matrix = cv::Mat(3, 3, CV_32F, camera_matrix_data);

  float dist_coefficients_data[5] = { -6.1403941690071129e-01, 4.0004545823187132e-01,
      1.4713297080556295e-03, 2.4677208656274745e-04,
      -1.2040756970288242e-01 };
  cv::Mat dist_coefficients = cv::Mat(5, 1, CV_32F, dist_coefficients_data);

  std::cout << camera_serialize_undistort_settings(camera_matrix, dist_coefficients);
  return 0;
}
I got this as the output, which as far as I can tell is what the camera_deserialize_undistort_settings() function can consume:
<?xml version="1.0"?>
<opencv_storage>
<cameraMatrix type_id="opencv-matrix">
<rows>3</rows>
<cols>3</cols>
<dt>f</dt>
<data>
2.85762378e+03 0. 1.93922961e+03 0. 2.84566113e+03 1.12195850e+03 0.
0. 1.</data></cameraMatrix>
<distCoeffs type_id="opencv-matrix">
<rows>5</rows>
<cols>1</cols>
<dt>f</dt>
<data>
-6.14039421e-01 4.00045455e-01 1.47132971e-03 2.46772077e-04
-1.20407566e-01</data></distCoeffs>
</opencv_storage>
Unfortunately, I could not get over the last stumbling block of feeding this string in on the command line.
Edit:
Non-command-line solution:
Hopefully the following helps someone; it is a bit rough around the edges due to time constraints. Basically, I gave up on the command line and found an example of how to run a GStreamer pipeline from code. I hacked into the pipeline and was able to feed the parameters in.
Calibration
//
// g++ play.cpp -o play `pkg-config --cflags --libs gstreamer-1.0 opencv4`
//
#include <gst/gst.h>
#include <opencv2/opencv.hpp>

//
// USB Webcam: undistort + calibrate pipeline (gst_parse_launch takes the
// pipeline description only, without the "gst-launch-1.0 -v" prefix)
//
const char *string2 =
    "v4l2src device=/dev/video0 ! videoconvert ! cameraundistort name=undist ! cameracalibrate name=cal ! ximagesink";

gchar *
camera_serialize_undistort_settings (cv::Mat & cameraMatrix,
    cv::Mat & distCoeffs)
{
  cv::FileStorage fs (".xml", cv::FileStorage::WRITE + cv::FileStorage::MEMORY);
  fs << "cameraMatrix" << cameraMatrix;
  fs << "distCoeffs" << distCoeffs;
  std::string buf = fs.releaseAndGetString ();
  return g_strdup (buf.c_str ());
}

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GstElement *undistort;
  GstMessage *msg;
  GstBus *bus;
  GError *error = NULL;

  gst_init (&argc, &argv);

  pipeline = gst_parse_launch (string2, &error);
  if (!pipeline) {
    g_print ("Parse error: %s\n", error->message);
    exit (1);
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  bus = gst_element_get_bus (pipeline);

  /* wait until we either get an EOS or an ERROR message. Note that in a real
   * program you would probably not use gst_bus_poll(), but rather set up an
   * async signal watch on the bus and run a main loop and connect to the
   * bus's signals to catch certain messages or all messages */
  msg = gst_bus_poll (bus, (GstMessageType) (GST_MESSAGE_EOS | GST_MESSAGE_ERROR), -1);

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:{
      g_print ("EOS\n");
      break;
    }
    case GST_MESSAGE_ERROR:{
      GError *err = NULL;       /* error to show to users */
      gchar *dbg = NULL;        /* additional debug string for developers */

      gst_message_parse_error (msg, &err, &dbg);
      if (err) {
        /* read the serialized settings back from the cameraundistort element */
        undistort = gst_bin_get_by_name (GST_BIN (pipeline), "undist");
        gchar *val;
        g_object_get (undistort, "settings", &val, NULL);
        g_print ("Data:\n");
        g_print ("%s", val);
        g_free (val);
        g_object_unref (undistort);
        g_print ("\nDone\n");
        g_printerr ("ERROR: %s\n", err->message);
        g_error_free (err);
      }
      if (dbg) {
        g_printerr ("[Debug details: %s]\n", dbg);
        g_free (dbg);
      }
      break;
    }
    default:
      g_printerr ("Unexpected message of type %d", GST_MESSAGE_TYPE (msg));
      break;
  }
  gst_message_unref (msg);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  gst_object_unref (bus);
  return 0;
}
You can do a normal calibration with a chessboard, the way OpenCV recommends.
When you close it down, it prints output like this:
<?xml version="1.0"?>
<opencv_storage>
<cameraMatrix type_id="opencv-matrix">
<rows>3</rows>
<cols>3</cols>
<dt>d</dt>
<data>
3.3860583405334381e+02 0. 2.9082938038777263e+02 0.
3.3860583405334381e+02 1.5389814447354675e+02 0. 0. 1.</data></cameraMatrix>
<distCoeffs type_id="opencv-matrix">
<rows>5</rows>
<cols>1</cols>
<dt>d</dt>
<data>
-7.3120392823523372e-01 6.2624904277006888e-01
-1.0205399762451621e-02 5.1857216532169093e-03
-2.0130033675444331e-01</data></distCoeffs>
</opencv_storage>
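Rather than copying this XML into the next program by hand, the same string could be written to a file at the point where the calibration program prints it (right after the g_print ("%s", val) call). This is not part of the original answer, just a small hypothetical helper; "calibration.xml" is an assumed file name:
#include <glib.h>

/* Hypothetical helper, not from the original answer: persist the
 * serialized settings string read from the "settings" property so
 * another program can load it later. */
static void
save_settings (const gchar * settings_xml, const gchar * path)
{
  GError *err = NULL;

  if (!g_file_set_contents (path, settings_xml, -1, &err)) {
    g_printerr ("Could not write %s: %s\n", path, err->message);
    g_error_free (err);
  }
}
In the calibration program this would be called as save_settings (val, "calibration.xml");.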
You can then put these values into another program and load them as follows:
Run
//
// g++ play.cpp -o play `pkg-config --cflags --libs gstreamer-1.0 opencv4`
//
#include <gst/gst.h>
#include <opencv2/opencv.hpp>

const char *string2 =
    "v4l2src device=/dev/video0 ! videoconvert ! cameraundistort name=undist1 ! ximagesink";

gchar *
camera_serialize_undistort_settings (cv::Mat & cameraMatrix,
    cv::Mat & distCoeffs)
{
  cv::FileStorage fs (".xml", cv::FileStorage::WRITE + cv::FileStorage::MEMORY);
  fs << "cameraMatrix" << cameraMatrix;
  fs << "distCoeffs" << distCoeffs;
  std::string buf = fs.releaseAndGetString ();
  return g_strdup (buf.c_str ());
}

int
main (int argc, char *argv[])
{
  /* calibration values printed by the calibration program above */
  float camera_matrix_data[9] = { 3.3860583405334381e+02, 0., 2.9082938038777263e+02,
      0., 3.3860583405334381e+02, 1.5389814447354675e+02,
      0., 0., 1. };
  cv::Mat camera_matrix = cv::Mat (3, 3, CV_32F, camera_matrix_data);

  float dist_coefficients_data[5] = { -7.3120392823523372e-01, 6.2624904277006888e-01,
      -1.0205399762451621e-02, 5.1857216532169093e-03,
      -2.0130033675444331e-01 };
  cv::Mat dist_coefficients = cv::Mat (5, 1, CV_32F, dist_coefficients_data);

  gchar *dist = camera_serialize_undistort_settings (camera_matrix, dist_coefficients);

  GstElement *pipeline;
  GstElement *undistort;
  GstMessage *msg;
  GstBus *bus;
  GError *error = NULL;

  gst_init (&argc, &argv);

  pipeline = gst_parse_launch (string2, &error);
  if (!pipeline) {
    g_print ("Parse error: %s\n", error->message);
    exit (1);
  }

  // Feed in our settings
  undistort = gst_bin_get_by_name (GST_BIN (pipeline), "undist1");
  g_object_set (undistort, "settings", dist, NULL);
  g_object_unref (undistort);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  bus = gst_element_get_bus (pipeline);

  /* wait until we either get an EOS or an ERROR message. Note that in a real
   * program you would probably not use gst_bus_poll(), but rather set up an
   * async signal watch on the bus and run a main loop and connect to the
   * bus's signals to catch certain messages or all messages */
  msg = gst_bus_poll (bus, (GstMessageType) (GST_MESSAGE_EOS | GST_MESSAGE_ERROR), -1);

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:{
      g_print ("EOS\n");
      break;
    }
    case GST_MESSAGE_ERROR:{
      GError *err = NULL;       /* error to show to users */
      gchar *dbg = NULL;        /* additional debug string for developers */

      gst_message_parse_error (msg, &err, &dbg);
      if (err) {
        g_printerr ("ERROR: %s\n", err->message);
        g_error_free (err);
      }
      if (dbg) {
        g_printerr ("[Debug details: %s]\n", dbg);
        g_free (dbg);
      }
      break;
    }
    default:
      g_printerr ("Unexpected message of type %d", GST_MESSAGE_TYPE (msg));
      break;
  }
  gst_message_unref (msg);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  gst_object_unref (bus);
  return 0;
}
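If the calibration string was saved to a file (for example the hypothetical calibration.xml from the helper above), the hardcoded matrices and the re-serialization step could be skipped; here is a minimal sketch of loading the file contents and handing them straight to the settings property, assuming the file holds exactly the XML the calibration run printed:
#include <gst/gst.h>

/* Hypothetical alternative to hardcoding the matrices: read previously
 * saved settings from disk and apply them to the cameraundistort element
 * named "undist1" in the pipeline. */
static gboolean
apply_saved_settings (GstElement * pipeline, const gchar * path)
{
  gchar *settings_xml = NULL;
  GError *err = NULL;
  GstElement *undistort;

  if (!g_file_get_contents (path, &settings_xml, NULL, &err)) {
    g_printerr ("Could not read %s: %s\n", path, err->message);
    g_error_free (err);
    return FALSE;
  }

  undistort = gst_bin_get_by_name (GST_BIN (pipeline), "undist1");
  g_object_set (undistort, "settings", settings_xml, NULL);
  g_object_unref (undistort);
  g_free (settings_xml);
  return TRUE;
}
In the run program this would take the place of the "// Feed in our settings" block, e.g. apply_saved_settings (pipeline, "calibration.xml");.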
That concludes this article on computing distortion coefficients for use with GStreamer's cameraundistort plugin; hopefully the recommended answer above is of some help.