Visual Servoing Platform version 3.5.0
servoPioneerPanSegment3D.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * IBVS on Pioneer P3DX mobile platform
33  *
34  * Authors:
35  * Fabien Spindler
36  *
37  *****************************************************************************/
38 #include <iostream>
39 
40 #include <visp3/core/vpConfig.h>
41 
42 #include <visp3/robot/vpRobotPioneer.h> // Include first to avoid build issues with Status, None, isfinite
43 #include <visp3/blob/vpDot2.h>
44 #include <visp3/core/vpCameraParameters.h>
45 #include <visp3/core/vpHomogeneousMatrix.h>
46 #include <visp3/core/vpImage.h>
47 #include <visp3/core/vpVelocityTwistMatrix.h>
48 #include <visp3/gui/vpDisplayGDI.h>
49 #include <visp3/gui/vpPlot.h>
50 #include <visp3/robot/vpPioneerPan.h>
51 #include <visp3/robot/vpRobotBiclops.h>
52 #include <visp3/sensor/vp1394CMUGrabber.h>
53 #include <visp3/sensor/vp1394TwoGrabber.h>
54 #include <visp3/sensor/vpV4l2Grabber.h>
55 #include <visp3/visual_features/vpFeatureBuilder.h>
56 #include <visp3/visual_features/vpFeatureSegment.h>
57 #include <visp3/vs/vpServo.h>
58 #include <visp3/gui/vpDisplayX.h> // Should be included after vpRobotPioneer.h
59 
60 #define USE_REAL_ROBOT
61 #define USE_PLOTTER
62 #undef VISP_HAVE_V4L2 // To use a firewire camera
63 
83 #if defined(VISP_HAVE_PIONEER) && defined(VISP_HAVE_BICLOPS)
84 int main(int argc, char **argv)
85 {
86 #if defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_CMU1394)
87 #if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
88  try {
89  vpImage<unsigned char> I; // Create a gray level image container
90  double lambda = 0.1;
91  // Scale parameter used to estimate the depth Z of the blob from its
92  // surface
93  // double coef = 0.9/14.85; // At 0.9m, the blob has a surface of 14.85
94  // (Logitech sphere)
95  double coef = 1.2 / 13.0; // At 1m, the blob has a surface of 11.3 (AVT Pike 032C)
96  double L = 0.21; // 3D horizontal segment length
97  double Z_d = 0.8; // Desired distance along Z between camera and segment
98  bool normalized = true; // segment normalized features are used
99 
100  // Warning: To have a non-singular task of rank 3, Y_d should be different
101  // from 0 so that the optical axis doesn't intersect the horizontal
102  // segment
103  double Y_d = -.11; // Desired distance along Y between camera and segment.
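 // Note: since normalized is set to true, the segment feature used below
 // provides the three visual features (x_c/l, 1/l, alpha): the segment center
 // abscissa normalized by the apparent length, the inverse of the apparent
 // length, and the segment orientation (see the plot legends "xm/l", "1/l"
 // and "alpha" further down).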
104  vpColVector qm(2); // Measured head position
105  qm = 0;
106  double qm_pan = 0; // Measured pan position (tilt is not handled in this example)
107 
108 #ifdef USE_REAL_ROBOT
109  // Initialize the biclops head
110 
111  vpRobotBiclops biclops("/usr/share/BiclopsDefault.cfg");
112  biclops.setDenavitHartenbergModel(vpBiclops::DH1);
113 
114  // Move to the initial position
115  vpColVector q(2);
116 
117  q = 0;
118  // q[0] = vpMath::rad(63);
119  // q[1] = vpMath::rad(12); // introduce a tilt angle to compensate camera
120  // sphere tilt so that the camera is parallel to the plane
121 
122  biclops.setRobotState(vpRobot::STATE_POSITION_CONTROL);
123  biclops.setPosition(vpRobot::ARTICULAR_FRAME, q);
124  // biclops.setPositioningVelocity(50);
125  biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
126  qm_pan = qm[0];
127 
128  // Now the head will be controlled in velocity
129  biclops.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
130 
131  // Initialize the pioneer robot
132  vpRobotPioneer pioneer;
133  ArArgumentParser parser(&argc, argv);
134  parser.loadDefaultArguments();
135 
136  // ArRobotConnector connects to the robot, get some initial data from it
137  // such as type and name, and then loads parameter files for this robot.
138  ArRobotConnector robotConnector(&parser, &pioneer);
139  if (!robotConnector.connectRobot()) {
140  ArLog::log(ArLog::Terse, "Could not connect to the pioneer robot.");
141  if (parser.checkHelpAndWarnUnparsed()) {
142  Aria::logOptions();
143  Aria::exit(1);
144  }
145  }
146  if (!Aria::parseArgs()) {
147  Aria::logOptions();
148  Aria::shutdown();
149  return EXIT_FAILURE;
150  }
151 
152  pioneer.useSonar(false); // disable the sonar device usage
153 
154  // Wait 3 sec to be sure that the low level Aria thread used to control
155  // the robot is started. Without this delay we experienced a delay
156  // (around 2.2 sec) between the velocity sent to the robot and the
157  // velocity that is really applied to the wheels.
158  sleep(3);
159 
160  std::cout << "Pioneer robot connected" << std::endl;
161 #endif
162 
163  vpPioneerPan robot_pan; // Generic robot that computes the velocities for
164  // the pioneer and the biclops head
165 
166  // Camera parameters. In this experiment we don't need a precise
167  // calibration of the camera
168  vpCameraParameters cam;
169 
170 // Create the camera framegrabber
171 #if defined(VISP_HAVE_V4L2)
172  // Create a grabber based on v4l2 third party lib (for usb cameras under
173  // Linux)
174  vpV4l2Grabber g;
175  g.setScale(1);
176  g.setInput(0);
177  g.setDevice("/dev/video1");
178  g.open(I);
179  // Logitech sphere parameters
180  cam.initPersProjWithoutDistortion(558, 555, 312, 210);
181 #elif defined(VISP_HAVE_DC1394)
182  // Create a grabber based on libdc1394-2.x third party lib (for firewire
183  // cameras under Linux)
184  vp1394TwoGrabber g(false);
187  // AVT Pike 032C parameters
188  cam.initPersProjWithoutDistortion(800, 795, 320, 216);
189 #elif defined(VISP_HAVE_CMU1394)
190  // Create a grabber based on CMU 1394 third party lib (for firewire
191  // cameras under windows)
192  vp1394CMUGrabber g;
193  g.setVideoMode(0, 5); // 640x480 MONO8
194  g.setFramerate(4); // 30 Hz
195  g.open(I);
196  // AVT Pike 032C parameters
197  cam.initPersProjWithoutDistortion(800, 795, 320, 216);
198 #endif
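 // Whatever the grabber, initPersProjWithoutDistortion(px, py, u0, v0) sets
 // the intrinsics used by the feature builder: px and py are the focal
 // lengths in pixels and (u0, v0) is the principal point. Approximate values
 // are sufficient since a precise calibration is not needed here.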
199 
200  // Acquire an image from the grabber
201  g.acquire(I);
202 
203 // Create an image viewer
204 #if defined(VISP_HAVE_X11)
205  vpDisplayX d(I, 10, 10, "Current frame");
206 #elif defined(VISP_HAVE_GDI)
207  vpDisplayGDI d(I, 10, 10, "Current frame");
208 #endif
209  vpDisplay::display(I);
210  vpDisplay::flush(I);
211 
212  // The 3D segment consists of two horizontal dots
213  vpDot2 dot[2];
214  for (int i = 0; i < 2; i++) {
215  dot[i].setGraphics(true);
216  dot[i].setComputeMoments(true);
217  dot[i].setEllipsoidShapePrecision(0.); // to track a blob without any constraint on the shape
218  dot[i].setGrayLevelPrecision(0.9); // to set the blob gray level bounds for binarisation
219  dot[i].setEllipsoidBadPointsPercentage(0.5); // to accept up to 50% of
220  // inner and outer points
221  // with a bad gray level
222  dot[i].initTracking(I);
223  vpDisplay::flush(I);
224  }
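 // Since initTracking(I) is called without an initial position, the user has
 // to click on each of the two blobs in the viewer to initialize the trackers.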
225 
226  vpServo task;
227  task.setServo(vpServo::EYEINHAND_L_cVe_eJe);
228  task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);
229  task.setLambda(lambda);
230  vpVelocityTwistMatrix cVe; // keep to identity
231  cVe = robot_pan.get_cVe();
232  task.set_cVe(cVe);
233 
234  std::cout << "cVe: \n" << cVe << std::endl;
235 
236  vpMatrix eJe;
237 
238  // Update the robot jacobian that depends on the pan position
239  robot_pan.set_eJe(qm_pan);
240  // Get the robot jacobian
241  eJe = robot_pan.get_eJe();
242  task.set_eJe(eJe);
243  std::cout << "eJe: \n" << eJe << std::endl;
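 // For this pioneer + pan head model, eJe maps the 3-dimensional control
 // vector (vx, wz, q_pan_dot) to the end-effector (pan head) velocity. Since
 // it depends on the pan angle, it is updated again at each iteration of the
 // servo loop below.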
244 
245  // Define a 3D horizontal segment and its coordinates in the image plane
246  vpPoint P[2];
247  P[0].setWorldCoordinates(-L / 2, 0, 0);
248  P[1].setWorldCoordinates(L / 2, 0, 0);
249  // Define the desired camera position
250  vpHomogeneousMatrix cMo(0, Y_d, Z_d, 0, 0,
251  0); // Here we are in front of the segment
252  for (int i = 0; i < 2; i++) {
253  P[i].changeFrame(cMo);
254  P[i].project(); // Here the x,y parameters obtained by perspective
255  // projection are computed
256  }
257 
258  // Estimate the depth of the segment extremity points
259  double surface[2];
260  double Z[2]; // Depth of the segment points
261  for (int i = 0; i < 2; i++) {
262  // Surface of the blob estimated from the image moment m00 and converted
263  // in meters
264  surface[i] = 1. / sqrt(dot[i].m00 / (cam.get_px() * cam.get_py()));
265 
266  // Initial depth of the blob
267  Z[i] = coef * surface[i];
268  }
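 // Depth model used here: Z = coef / sqrt(m00 / (px * py)), i.e. the depth is
 // taken inversely proportional to the apparent blob size, with coef
 // calibrated above from a measurement at a known distance.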
269 
270  // Use here a feature segment builder
271  vpFeatureSegment s_segment(normalized),
272  s_segment_d(normalized); // Current and desired segment features; xc, l and alpha are selected below
273  vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
274  s_segment.setZ1(Z[0]);
275  s_segment.setZ2(Z[1]);
276  // Set the desired feature
277  vpFeatureBuilder::create(s_segment_d, P[0], P[1]);
278  s_segment_d.setZ1(P[0].get_Z()); // Desired depth
279  s_segment_d.setZ2(P[1].get_Z());
280 
281  task.addFeature(s_segment, s_segment_d,
282  vpFeatureSegment::selectXc() | vpFeatureSegment::selectL() | vpFeatureSegment::selectAlpha());
283 
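 // With this configuration, the control law computed at each iteration is
 //   v = -lambda * (L_s * cVe * eJe)^+ * (s - s*)
 // where L_s is the interaction matrix of the selected segment features, cVe
 // the camera-to-end-effector twist (kept to identity) and eJe the robot
 // Jacobian updated from the pan angle. The resulting v is the control vector
 // (vx, wz, q_pan_dot) sent to the robot below.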
284 #ifdef USE_PLOTTER
285  // Create a window (500 by 500) at position (700, 10) with two graphics
286  vpPlot graph(2, 500, 500, 700, 10, "Curves...");
287 
288  // The first graphic contains 3 curves and the second graphic contains 3
289  // curves
290  graph.initGraph(0, 3);
291  graph.initGraph(1, 3);
292  graph.setTitle(0, "Velocities");
293  graph.setTitle(1, "Error s-s*");
294  graph.setLegend(0, 0, "vx");
295  graph.setLegend(0, 1, "wz");
296  graph.setLegend(0, 2, "w_pan");
297  graph.setLegend(1, 0, "xm/l");
298  graph.setLegend(1, 1, "1/l");
299  graph.setLegend(1, 2, "alpha");
300 #endif
301 
302  vpColVector v; // Control velocities: vx, wz, q_pan_dot
303 
304  try {
305  unsigned int iter = 0;
306  while (1) {
307 #ifdef USE_REAL_ROBOT
308  // Get the new pan position
309  biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
310 #endif
311  qm_pan = qm[0];
312 
313  // Acquire a new image
314  g.acquire(I);
315  // Set the image as background of the viewer
316  vpDisplay::display(I);
317 
318  // Display the desired position of the segment
319  for (int i = 0; i < 2; i++)
320  P[i].display(I, cam, vpColor::red, 3);
321 
322  // Does the blob tracking
323  for (int i = 0; i < 2; i++)
324  dot[i].track(I);
325 
326  for (int i = 0; i < 2; i++) {
327  // Surface of the blob estimated from the image moment m00 and
328  // converted in meters
329  surface[i] = 1. / sqrt(dot[i].m00 / (cam.get_px() * cam.get_py()));
330 
331  // Initial depth of the blob
332  Z[i] = coef * surface[i];
333  }
334 
335  // Update the features
336  vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
337  // Update the depth of the point. Useful only if current interaction
338  // matrix is used when task.setInteractionMatrixType(vpServo::CURRENT,
339  // vpServo::PSEUDO_INVERSE) is set
340  s_segment.setZ1(Z[0]);
341  s_segment.setZ2(Z[1]);
342 
343  robot_pan.get_cVe(cVe);
344  task.set_cVe(cVe);
345 
346  // Update the robot jacobian that depends on the pan position
347  robot_pan.set_eJe(qm_pan);
348  // Get the robot jacobian
349  eJe = robot_pan.get_eJe();
350  // Update the jacobian that will be used to compute the control law
351  task.set_eJe(eJe);
352 
353  // Compute the control law. Velocities are computed in the mobile
354  // robot reference frame
355  v = task.computeControlLaw();
356 
357  // std::cout << "-----" << std::endl;
358  // std::cout << "v: " << v.t() << std::endl;
359  // std::cout << "error: " << task.getError().t() << std::endl;
360  // std::cout << "L:\n " << task.getInteractionMatrix() <<
361  // std::endl; std::cout << "eJe:\n " << task.get_eJe() <<
362  // std::endl; std::cout << "cVe:\n " << task.get_cVe() <<
363  // std::endl; std::cout << "L_cVe_eJe:\n" <<
364  // task.getInteractionMatrix() * task.get_cVe() * task.get_eJe()
365  // << std::endl; task.print() ;
366  if (task.getTaskRank() != 3)
367  std::cout << "Warning: task is of rank " << task.getTaskRank() << std::endl;
368 
369 #ifdef USE_PLOTTER
370  graph.plot(0, iter, v); // plot velocities applied to the robot
371  graph.plot(1, iter, task.getError()); // plot error vector
372 #endif
373 
374 #ifdef USE_REAL_ROBOT
375  // Send the velocity to the robot
376  vpColVector v_pioneer(2); // vx, wz
377  v_pioneer[0] = v[0];
378  v_pioneer[1] = v[1];
379  vpColVector v_biclops(2); // qdot pan and tilt
380  v_biclops[0] = v[2];
381  v_biclops[1] = 0;
382 
383  std::cout << "Send velocity to the pionner: " << v_pioneer[0] << " m/s " << vpMath::deg(v_pioneer[1])
384  << " deg/s" << std::endl;
385  std::cout << "Send velocity to the biclops head: " << vpMath::deg(v_biclops[0]) << " deg/s" << std::endl;
386 
387  pioneer.setVelocity(vpRobot::REFERENCE_FRAME, v_pioneer);
388  biclops.setVelocity(vpRobot::ARTICULAR_FRAME, v_biclops);
389 #endif
390 
391  // Draw a vertical line which corresponds to the desired x coordinate
392  // of the dot cog
393  vpDisplay::displayLine(I, 0, cam.get_u0(), 479, cam.get_u0(), vpColor::red);
394  vpDisplay::flush(I);
395 
396  // A click in the viewer to exit
397  if (vpDisplay::getClick(I, false))
398  break;
399 
400  iter++;
401  // break;
402  }
403  } catch (...) {
404  }
405 
406 #ifdef USE_REAL_ROBOT
407  std::cout << "Ending robot thread..." << std::endl;
408  pioneer.stopRunning();
409 
410  // wait for the thread to stop
411  pioneer.waitForRunExit();
412 #endif
413 
414  // Display the task information before exiting
415  task.print();
416  return EXIT_SUCCESS;
417  } catch (const vpException &e) {
418  std::cout << "Catch an exception: " << e << std::endl;
419  return EXIT_FAILURE;
420  }
421 #endif
422 #endif
423 }
424 #else
425 int main()
426 {
427  std::cout << "ViSP is not able to control the Pioneer robot" << std::endl;
428  return EXIT_SUCCESS;
429 }
430 #endif