servoViper850FourPoints2DArtVelocityInteractionDesired.cpp
/****************************************************************************
 *
 * $Id: servoViper850FourPoints2DArtVelocityInteractionDesired.cpp 4065 2013-01-11 13:32:47Z fspindle $
 *
 * This file is part of the ViSP software.
 * Copyright (C) 2005 - 2013 by INRIA. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * ("GPL") version 2 as published by the Free Software Foundation.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact INRIA about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://www.irisa.fr/lagadic/visp/visp.html for more information.
 *
 * This software was developed at:
 * INRIA Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 * http://www.irisa.fr/lagadic
 *
 * If you have questions regarding the use of this file, please contact
 * INRIA at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 *
 * Description:
 * tests the control law
 * eye-in-hand control
 * velocity computed in the articular frame
 *
 * Authors:
 * Fabien Spindler
 *
 *****************************************************************************/
#include <visp/vpConfig.h>
#include <visp/vpDebug.h> // Debug trace

#include <stdio.h>
#include <iostream>
#include <fstream>
#include <sstream>
#include <stdlib.h>

#if (defined (VISP_HAVE_VIPER850) && defined (VISP_HAVE_DC1394_2))

#include <visp/vp1394TwoGrabber.h>
#include <visp/vpDisplay.h>
#include <visp/vpDisplayGTK.h>
#include <visp/vpDisplayX.h>
#include <visp/vpDisplayOpenCV.h>
#include <visp/vpDot.h>  // vpDot is used below to track the four blobs
#include <visp/vpDot2.h>
#include <visp/vpFeatureBuilder.h>
#include <visp/vpFeaturePoint.h>
#include <visp/vpHomogeneousMatrix.h>
#include <visp/vpImage.h>
#include <visp/vpIoTools.h>
#include <visp/vpMath.h>
#include <visp/vpPoint.h>
#include <visp/vpPose.h>
#include <visp/vpRobotViper850.h>
#include <visp/vpServo.h>
#include <visp/vpServoDisplay.h>
int main()
{
  // Log file creation in /tmp/$USERNAME/log.dat
  // This file contains by line:
  // - the 6 computed joint velocities (m/s, rad/s) to achieve the task
  // - the 6 measured joint velocities (m/s, rad/s)
  // - the 6 measured joint positions (m, rad)
  // - the 8 values of s - s*
  std::string username;
  // Get the user login name
  vpIoTools::getUserName(username);

  // Create a log filename to save velocities...
  std::string logdirname;
  logdirname = "/tmp/" + username;

  // Test if the output path exists. If not, try to create it
  if (vpIoTools::checkDirectory(logdirname) == false) {
    try {
      // Create the dirname
      vpIoTools::makeDirectory(logdirname);
    }
    catch (...) {
      std::cerr << std::endl << "ERROR:" << std::endl;
      std::cerr << " Cannot create " << logdirname << std::endl;
      return(-1);
    }
  }
  std::string logfilename;
  logfilename = logdirname + "/log.dat";

  // Open the log file name
  std::ofstream flog(logfilename.c_str());

  try {
    // Define the square CAD model
    // Square dimension
    //#define L 0.075
#define L 0.05
    // Distance between the camera and the square at the desired
    // position after visual servoing convergence
#define D 0.5

    vpRobotViper850 robot;
    // Load the end-effector to camera frame transformation obtained
    // using a camera intrinsic model with distortion
    vpCameraParameters::vpCameraParametersProjType projModel =
        vpCameraParameters::perspectiveProjWithDistortion;
    robot.init(vpRobotViper850::TOOL_PTGREY_FLEA2_CAMERA, projModel);

    vpServo task;

    vpImage<unsigned char> I;
    int i;

    bool reset = false;
    vp1394TwoGrabber g(reset);
    g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
    g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_60);
    g.open(I);

    g.acquire(I);

#ifdef VISP_HAVE_X11
    vpDisplayX display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_OPENCV)
    vpDisplayOpenCV display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_GTK)
    vpDisplayGTK display(I, 100, 100, "Current image");
#endif

    vpDisplay::display(I);
    vpDisplay::flush(I);
    std::cout << std::endl;
    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << " Test program for vpServo " << std::endl;
    std::cout << " Eye-in-hand task control, velocity computed in the joint space" << std::endl;
    std::cout << " Use of the Viper850 robot " << std::endl;
    std::cout << " task : servo 4 points on a square with dimension " << L << " meters" << std::endl;
    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << std::endl;

    vpDot dot[4];
    vpImagePoint cog;

    std::cout << "Click on the 4 dots clockwise starting from upper/left dot..."
              << std::endl;

    for (i = 0; i < 4; i++) {
      dot[i].setGraphics(true);
      dot[i].initTracking(I);
      cog = dot[i].getCog();
      vpDisplay::displayCross(I, cog, 10, vpColor::blue);
      vpDisplay::flush(I);
    }

    vpCameraParameters cam;

    // Update camera parameters
    robot.getCameraParameters(cam, I);

    cam.printParameters();

    // Sets the current position of the visual feature
    vpFeaturePoint p[4];
    for (i = 0; i < 4; i++)
      vpFeatureBuilder::create(p[i], cam, dot[i]); // retrieve x,y and Z of the vpPoint structure

    // sets the desired position of the visual feature
    vpFeaturePoint pd[4];

    pd[0].buildFrom(-L, -L, D);
    pd[1].buildFrom( L, -L, D);
    pd[2].buildFrom( L,  L, D);
    pd[3].buildFrom(-L,  L, D);
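    // buildFrom(x, y, Z) expects the point coordinates x, y in the normalized
    // image plane (meters) and the depth Z; the four desired points above thus
    // form a centered square in the normalized image plane, all at the depth D,
    // which defines both s* and the depth used when the interaction matrix is
    // evaluated at the desired position.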

    // We want to see a point on a point
    std::cout << std::endl;
    for (i = 0; i < 4; i++)
      task.addFeature(p[i], pd[i]);

    // Set the proportional gain
    task.setLambda(0.4);

    // Display task information
    task.print();

    // Define the task
    // - we want an eye-in-hand control law
    // - articular velocities are computed
    task.setServo(vpServo::EYEINHAND_L_cVe_eJe);
    task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);
    task.print();

    vpVelocityTwistMatrix cVe;
    robot.get_cVe(cVe);
    task.set_cVe(cVe);
    task.print();

    // Set the Jacobian (expressed in the end-effector frame)
    vpMatrix eJe;
    robot.get_eJe(eJe);
    task.set_eJe(eJe);
    task.print();

    // Initialise the velocity control of the robot
    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

    std::cout << "\nHit CTRL-C to stop the loop...\n" << std::flush;
    for ( ; ; ) {
      // Acquire a new image from the camera
      g.acquire(I);

      // Display this image
      vpDisplay::display(I);

      try {
        // For each point...
        for (i = 0; i < 4; i++) {
          // Achieve the tracking of the dot in the image
          dot[i].track(I);
          // Display a green cross at the center of gravity position in the
          // image
          cog = dot[i].getCog();
          vpDisplay::displayCross(I, cog, 10, vpColor::green);
        }
      }
      catch (...) {
        flog.close(); // Close the log file
        vpTRACE("Error detected while tracking visual features");
        robot.stopMotion();
        exit(1);
      }

      // Update the point feature from the dot location
      for (i = 0; i < 4; i++)
        vpFeatureBuilder::create(p[i], cam, dot[i]);

      // Get the jacobian of the robot
      robot.get_eJe(eJe);
      // Update this jacobian in the task structure. It will be used to compute
      // the velocity skew (as an articular velocity)
      // qdot = -lambda * L^+ * cVe * eJe * (s-s*)
      task.set_eJe(eJe);
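      // Since the interaction matrix type is vpServo::DESIRED, L is evaluated
      // once from the desired features s*; only the articular Jacobian eJe
      // varies with the robot configuration, which is why it is refreshed at
      // every iteration before computing the control law.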

      vpColVector v;
      // Compute the visual servoing skew vector
      v = task.computeControlLaw();

      // Display the current and desired feature points in the image display
      vpServoDisplay::display(task, cam, I);

      // Apply the computed joint velocities to the robot
      robot.setVelocity(vpRobot::ARTICULAR_FRAME, v);

      // Save velocities applied to the robot in the log file
      // v[0], v[1], v[2] correspond to joint translation velocities in m/s
      // v[3], v[4], v[5] correspond to joint rotation velocities in rad/s
      flog << v[0] << " " << v[1] << " " << v[2] << " "
           << v[3] << " " << v[4] << " " << v[5] << " ";

      // Get the measured joint velocities of the robot
      vpColVector qvel;
      robot.getVelocity(vpRobot::ARTICULAR_FRAME, qvel);
      // Save measured joint velocities of the robot in the log file:
      // - qvel[0], qvel[1], qvel[2] correspond to measured joint translation
      //   velocities in m/s
      // - qvel[3], qvel[4], qvel[5] correspond to measured joint rotation
      //   velocities in rad/s
      flog << qvel[0] << " " << qvel[1] << " " << qvel[2] << " "
           << qvel[3] << " " << qvel[4] << " " << qvel[5] << " ";

      // Get the measured joint positions of the robot
      vpColVector q;
      robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
      // Save measured joint positions of the robot in the log file
      // - q[0], q[1], q[2] correspond to measured joint translation
      //   positions in m
      // - q[3], q[4], q[5] correspond to measured joint rotation
      //   positions in rad
      flog << q[0] << " " << q[1] << " " << q[2] << " "
           << q[3] << " " << q[4] << " " << q[5] << " ";

      // Save feature error (s-s*) for the 4 feature points. For each feature
      // point, we have 2 errors (along x and y axis). This error is expressed
      // in meters in the camera frame
      flog << (task.getError()).t() << std::endl;

      // Flush the display
      vpDisplay::flush(I);

      // std::cout << "|| s - s* || = " << (task.getError()).sumSquare() << std::endl;
    }

    std::cout << "Display task information: " << std::endl;
    task.print();
    task.kill();
    flog.close(); // Close the log file
    return 0;
  }
  catch (...) {
    flog.close(); // Close the log file
    vpERROR_TRACE(" Test failed");
    return 0;
  }
}

#else
int main()
{
  vpERROR_TRACE("You do not have a Viper850 robot or a firewire framegrabber connected to your computer...");
}
#endif
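The example above requires a real Viper850 robot and a firewire camera. The stand-alone program below is a minimal sketch, not part of the original example, that isolates the same vpServo configuration (EYEINHAND_L_cVe_eJe, DESIRED interaction matrix with pseudo-inverse, gain 0.4, four point features). The current feature values and the identity cVe/eJe are hypothetical placeholders standing in for the tracked dots and for what robot.get_cVe()/robot.get_eJe() would provide; it only illustrates the call sequence.

// Minimal sketch: same vpServo configuration as the example, without the
// Viper850 and the camera. Current feature values and identity cVe/eJe are
// placeholders used only to exercise one call to computeControlLaw().
#include <iostream>
#include <visp/vpColVector.h>
#include <visp/vpFeaturePoint.h>
#include <visp/vpMatrix.h>
#include <visp/vpServo.h>
#include <visp/vpVelocityTwistMatrix.h>

int main()
{
  const double L = 0.05, D = 0.5;

  // Desired features: the four square corners used by the example
  vpFeaturePoint p[4], pd[4];
  pd[0].buildFrom(-L, -L, D);
  pd[1].buildFrom( L, -L, D);
  pd[2].buildFrom( L,  L, D);
  pd[3].buildFrom(-L,  L, D);

  // Current features: hypothetical values standing in for the tracked dots
  p[0].buildFrom(-0.06, -0.04, D);
  p[1].buildFrom( 0.04, -0.06, D);
  p[2].buildFrom( 0.06,  0.04, D);
  p[3].buildFrom(-0.04,  0.06, D);

  // Same task configuration as in the example
  vpServo task;
  task.setServo(vpServo::EYEINHAND_L_cVe_eJe);
  task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);
  task.setLambda(0.4);
  for (int i = 0; i < 4; i++)
    task.addFeature(p[i], pd[i]);

  // Placeholders for what robot.get_cVe() and robot.get_eJe() provide
  vpVelocityTwistMatrix cVe;   // defaults to identity
  vpMatrix eJe(6, 6);
  for (unsigned int r = 0; r < 6; r++)
    for (unsigned int c = 0; c < 6; c++)
      eJe[r][c] = (r == c) ? 1.0 : 0.0;  // identity Jacobian, illustration only
  task.set_cVe(cVe);
  task.set_eJe(eJe);

  // One control iteration; in the real example this is repeated in the loop
  // with the refreshed eJe and the features measured from the tracked dots
  vpColVector qdot = task.computeControlLaw();
  std::cout << "qdot: " << qdot.t() << std::endl;

  task.kill();
  return 0;
}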