1 /* -*- c -*-
2 ----------------------------------------------------------------
3
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
9
10 ----------------------------------------------------------------
11
12 This file is part of Valgrind, a dynamic binary instrumentation
13 framework.
14
15 Copyright (C) 2000-2010 Julian Seward. All rights reserved.
16
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
19 are met:
20
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
23
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
28
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
31
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
34 permission.
35
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
47
48 ----------------------------------------------------------------
49
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
54
55 ----------------------------------------------------------------
56 */
57
58
59 /* This file is for inclusion into client (your!) code.
60
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
63
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66    unchanged.  When not running on Valgrind, each client request
67    consumes very few (e.g. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
72
73 #ifndef __VALGRIND_H
74 #define __VALGRIND_H
75
76
77 /* ------------------------------------------------------------------ */
78 /* VERSION NUMBER OF VALGRIND */
79 /* ------------------------------------------------------------------ */
80
81 /* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
85    X.Y or later" is (e.g.)
86
87 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
90 */
91 #define __VALGRIND_MAJOR__ 3
92 #define __VALGRIND_MINOR__ 6
93
94
95 #include <stdarg.h>
96 #include <stdint.h>
97
98 /* Nb: this file might be included in a file compiled with -ansi. So
99 we can't use C++ style "//" comments nor the "asm" keyword (instead
100 use "__asm__"). */
101
102 /* Derive some tags indicating what the target platform is. Note
103 that in this file we're using the compiler's CPP symbols for
104 identifying architectures, which are different to the ones we use
105 within the rest of Valgrind. Note, __powerpc__ is active for both
106 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
107 latter (on Linux, that is).
108
109 Misc note: how to find out what's predefined in gcc by default:
110 gcc -Wp,-dM somefile.c
111 */
112 #undef PLAT_x86_darwin
113 #undef PLAT_amd64_darwin
114 #undef PLAT_x86_win32
115 #undef PLAT_x86_linux
116 #undef PLAT_amd64_linux
117 #undef PLAT_ppc32_linux
118 #undef PLAT_ppc64_linux
119 #undef PLAT_arm_linux
120 #undef PLAT_s390x_linux
121
122
123 #if defined(__APPLE__) && defined(__i386__)
124 # define PLAT_x86_darwin 1
125 #elif defined(__APPLE__) && defined(__x86_64__)
126 # define PLAT_amd64_darwin 1
127 #elif defined(__MINGW32__) || defined(__CYGWIN32__) \
128 || (defined(_WIN32) && defined(_M_IX86))
129 # define PLAT_x86_win32 1
130 #elif defined(__linux__) && defined(__i386__)
131 # define PLAT_x86_linux 1
132 #elif defined(__linux__) && defined(__x86_64__)
133 # define PLAT_amd64_linux 1
134 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
135 # define PLAT_ppc32_linux 1
136 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
137 # define PLAT_ppc64_linux 1
138 #elif defined(__linux__) && defined(__arm__)
139 # define PLAT_arm_linux 1
140 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
141 # define PLAT_s390x_linux 1
142 #else
143 /* If we're not compiling for our target platform, don't generate
144 any inline asms. */
145 # if !defined(NVALGRIND)
146 # define NVALGRIND 1
147 # endif
148 #endif
149
150
151 /* ------------------------------------------------------------------ */
152 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
153 /* in here of use to end-users -- skip to the next section. */
154 /* ------------------------------------------------------------------ */
155
156 /*
157 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
158 * request. Accepts both pointers and integers as arguments.
159 *
160 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
161 * client request and whose value equals the client request result. Accepts
162 * both pointers and integers as arguments.
163 */
164
165 #define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, \
166 _zzq_request, _zzq_arg1, _zzq_arg2, \
167 _zzq_arg3, _zzq_arg4, _zzq_arg5) \
168 { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), \
169 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
170 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); }
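
/* Illustrative sketch only, not part of the interface: a request code
   such as the hypothetical MY_REQUEST_CODE below would normally come
   from a tool-specific header layered on top of this file, and "addr"
   and "len" stand for whatever arguments that request takes.

      unsigned res = VALGRIND_DO_CLIENT_REQUEST_EXPR(
                        0, MY_REQUEST_CODE, addr, len, 0, 0, 0);

   The first argument (0 here) is the default: when the program runs
   natively the expression simply evaluates to it, and under Valgrind it
   evaluates to whatever the tool returns for that request. */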
171
172 #if defined(NVALGRIND)
173
174 /* Define NVALGRIND to completely remove the Valgrind magic sequence
175 from the compiled code (analogous to NDEBUG's effects on
176 assert()) */
177 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
178 _zzq_default, _zzq_request, \
179 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
180 (_zzq_default)
181
182 #else /* ! NVALGRIND */
183
184 /* The following defines the magic code sequences which the JITter
185 spots and handles magically. Don't look too closely at them as
186 they will rot your brain.
187
188    The assembly code sequences for all architectures are in this one
189    file.  This is because this file must be stand-alone, and we don't
190    want to have multiple files.
191
192 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
193 value gets put in the return slot, so that everything works when
194 this is executed not under Valgrind. Args are passed in a memory
195 block, and so there's no intrinsic limit to the number that could
196 be passed, but it's currently five.
197
198 The macro args are:
199 _zzq_rlval result lvalue
200 _zzq_default default value (result returned when running on real CPU)
201 _zzq_request request code
202 _zzq_arg1..5 request params
203
204 The other two macros are used to support function wrapping, and are
205 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
206 guest's NRADDR pseudo-register and whatever other information is
207    needed to safely call the original from the wrapper: on
208 ppc64-linux, the R2 value at the divert point is also needed. This
209 information is abstracted into a user-visible type, OrigFn.
210
211 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
212 guest, but guarantees that the branch instruction will not be
213 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
214    branch-and-link-to-r11.  VALGRIND_CALL_NOREDIR_* is just text, not a
215 complete inline asm, since it needs to be combined with more magic
216 inline asm stuff to be useful.
217 */
218
219 /* ------------------------- x86-{linux,darwin} ---------------- */
220
221 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
222 || (defined(PLAT_x86_win32) && defined(__GNUC__))
223
224 typedef
225 struct {
226 unsigned int nraddr; /* where's the code? */
227 }
228 OrigFn;
229
230 #define __SPECIAL_INSTRUCTION_PREAMBLE \
231 "roll $3, %%edi ; roll $13, %%edi\n\t" \
232 "roll $29, %%edi ; roll $19, %%edi\n\t"
233
234 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
235 _zzq_default, _zzq_request, \
236 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
237 __extension__ \
238 ({volatile unsigned int _zzq_args[6]; \
239 volatile unsigned int _zzq_result; \
240 _zzq_args[0] = (unsigned int)(_zzq_request); \
241 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
242 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
243 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
244 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
245 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
246 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
247 /* %EDX = client_request ( %EAX ) */ \
248 "xchgl %%ebx,%%ebx" \
249 : "=d" (_zzq_result) \
250 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
251 : "cc", "memory" \
252 ); \
253 _zzq_result; \
254 })
255
256 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
257 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
258 volatile unsigned int __addr; \
259 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
260 /* %EAX = guest_NRADDR */ \
261 "xchgl %%ecx,%%ecx" \
262 : "=a" (__addr) \
263 : \
264 : "cc", "memory" \
265 ); \
266 _zzq_orig->nraddr = __addr; \
267 }
268
269 #define VALGRIND_CALL_NOREDIR_EAX \
270 __SPECIAL_INSTRUCTION_PREAMBLE \
271 /* call-noredir *%EAX */ \
272 "xchgl %%edx,%%edx\n\t"
273 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
274
275 /* ------------------------- x86-Win32 ------------------------- */
276
277 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
278
279 typedef
280 struct {
281 unsigned int nraddr; /* where's the code? */
282 }
283 OrigFn;
284
285 #if defined(_MSC_VER)
286
287 #define __SPECIAL_INSTRUCTION_PREAMBLE \
288 __asm rol edi, 3 __asm rol edi, 13 \
289 __asm rol edi, 29 __asm rol edi, 19
290
291 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
292 _zzq_default, _zzq_request, \
293 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
294 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
295 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
296 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
297 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
298
299 static __inline uintptr_t
300 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
301 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
302 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
303 uintptr_t _zzq_arg5)
304 {
305 volatile uintptr_t _zzq_args[6];
306 volatile unsigned int _zzq_result;
307 _zzq_args[0] = (uintptr_t)(_zzq_request);
308 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
309 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
310 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
311 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
312 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
313 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
314 __SPECIAL_INSTRUCTION_PREAMBLE
315 /* %EDX = client_request ( %EAX ) */
316 __asm xchg ebx,ebx
317 __asm mov _zzq_result, edx
318 }
319 return _zzq_result;
320 }
321
322 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
323 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
324 volatile unsigned int __addr; \
325 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
326 /* %EAX = guest_NRADDR */ \
327 __asm xchg ecx,ecx \
328 __asm mov __addr, eax \
329 } \
330 _zzq_orig->nraddr = __addr; \
331 }
332
333 #define VALGRIND_CALL_NOREDIR_EAX ERROR
334
335 #else
336 #error Unsupported compiler.
337 #endif
338
339 #endif /* PLAT_x86_win32 */
340
341 /* ------------------------ amd64-{linux,darwin} --------------- */
342
343 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
344
345 typedef
346 struct {
347 uint64_t nraddr; /* where's the code? */
348 }
349 OrigFn;
350
351 #define __SPECIAL_INSTRUCTION_PREAMBLE \
352 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
353 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
354
355 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
356 _zzq_default, _zzq_request, \
357 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
358 __extension__ \
359 ({ volatile uint64_t _zzq_args[6]; \
360 volatile uint64_t _zzq_result; \
361 _zzq_args[0] = (uint64_t)(_zzq_request); \
362 _zzq_args[1] = (uint64_t)(_zzq_arg1); \
363 _zzq_args[2] = (uint64_t)(_zzq_arg2); \
364 _zzq_args[3] = (uint64_t)(_zzq_arg3); \
365 _zzq_args[4] = (uint64_t)(_zzq_arg4); \
366 _zzq_args[5] = (uint64_t)(_zzq_arg5); \
367 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
368 /* %RDX = client_request ( %RAX ) */ \
369 "xchgq %%rbx,%%rbx" \
370 : "=d" (_zzq_result) \
371 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
372 : "cc", "memory" \
373 ); \
374 _zzq_result; \
375 })
376
377 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
378 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
379 volatile uint64_t __addr; \
380 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
381 /* %RAX = guest_NRADDR */ \
382 "xchgq %%rcx,%%rcx" \
383 : "=a" (__addr) \
384 : \
385 : "cc", "memory" \
386 ); \
387 _zzq_orig->nraddr = __addr; \
388 }
389
390 #define VALGRIND_CALL_NOREDIR_RAX \
391 __SPECIAL_INSTRUCTION_PREAMBLE \
392 /* call-noredir *%RAX */ \
393 "xchgq %%rdx,%%rdx\n\t"
394 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
395
396 /* ------------------------ ppc32-linux ------------------------ */
397
398 #if defined(PLAT_ppc32_linux)
399
400 typedef
401 struct {
402 unsigned int nraddr; /* where's the code? */
403 }
404 OrigFn;
405
406 #define __SPECIAL_INSTRUCTION_PREAMBLE \
407 "rlwinm 0,0,3,0,0 ; rlwinm 0,0,13,0,0\n\t" \
408 "rlwinm 0,0,29,0,0 ; rlwinm 0,0,19,0,0\n\t"
409
410 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
411 _zzq_default, _zzq_request, \
412 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
413 \
414 __extension__ \
415 ({ unsigned int _zzq_args[6]; \
416 unsigned int _zzq_result; \
417 unsigned int* _zzq_ptr; \
418 _zzq_args[0] = (unsigned int)(_zzq_request); \
419 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
420 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
421 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
422 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
423 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
424 _zzq_ptr = _zzq_args; \
425 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
426 "mr 4,%2\n\t" /*ptr*/ \
427 __SPECIAL_INSTRUCTION_PREAMBLE \
428 /* %R3 = client_request ( %R4 ) */ \
429 "or 1,1,1\n\t" \
430 "mr %0,3" /*result*/ \
431 : "=b" (_zzq_result) \
432 : "b" (_zzq_default), "b" (_zzq_ptr) \
433 : "cc", "memory", "r3", "r4"); \
434 _zzq_result; \
435 })
436
437 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
438 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
439 unsigned int __addr; \
440 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
441 /* %R3 = guest_NRADDR */ \
442 "or 2,2,2\n\t" \
443 "mr %0,3" \
444 : "=b" (__addr) \
445 : \
446 : "cc", "memory", "r3" \
447 ); \
448 _zzq_orig->nraddr = __addr; \
449 }
450
451 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
452 __SPECIAL_INSTRUCTION_PREAMBLE \
453 /* branch-and-link-to-noredir *%R11 */ \
454 "or 3,3,3\n\t"
455 #endif /* PLAT_ppc32_linux */
456
457 /* ------------------------ ppc64-linux ------------------------ */
458
459 #if defined(PLAT_ppc64_linux)
460
461 typedef
462 struct {
463 uint64_t nraddr; /* where's the code? */
464 uint64_t r2; /* what tocptr do we need? */
465 }
466 OrigFn;
467
468 #define __SPECIAL_INSTRUCTION_PREAMBLE \
469 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
470 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
471
472 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
473 _zzq_default, _zzq_request, \
474 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
475 \
476 __extension__ \
477 ({ uint64_t _zzq_args[6]; \
478 register uint64_t _zzq_result __asm__("r3"); \
479 register uint64_t* _zzq_ptr __asm__("r4"); \
480 _zzq_args[0] = (uint64_t)(_zzq_request); \
481 _zzq_args[1] = (uint64_t)(_zzq_arg1); \
482 _zzq_args[2] = (uint64_t)(_zzq_arg2); \
483 _zzq_args[3] = (uint64_t)(_zzq_arg3); \
484 _zzq_args[4] = (uint64_t)(_zzq_arg4); \
485 _zzq_args[5] = (uint64_t)(_zzq_arg5); \
486 _zzq_ptr = _zzq_args; \
487 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
488 /* %R3 = client_request ( %R4 ) */ \
489 "or 1,1,1" \
490 : "=r" (_zzq_result) \
491 : "0" (_zzq_default), "r" (_zzq_ptr) \
492 : "cc", "memory"); \
493 _zzq_result; \
494 })
495
496 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
497 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
498 register uint64_t __addr __asm__("r3"); \
499 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
500 /* %R3 = guest_NRADDR */ \
501 "or 2,2,2" \
502 : "=r" (__addr) \
503 : \
504 : "cc", "memory" \
505 ); \
506 _zzq_orig->nraddr = __addr; \
507 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
508 /* %R3 = guest_NRADDR_GPR2 */ \
509 "or 4,4,4" \
510 : "=r" (__addr) \
511 : \
512 : "cc", "memory" \
513 ); \
514 _zzq_orig->r2 = __addr; \
515 }
516
517 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
518 __SPECIAL_INSTRUCTION_PREAMBLE \
519 /* branch-and-link-to-noredir *%R11 */ \
520 "or 3,3,3\n\t"
521
522 #endif /* PLAT_ppc64_linux */
523
524 /* ------------------------- arm-linux ------------------------- */
525
526 #if defined(PLAT_arm_linux)
527
528 typedef
529 struct {
530 unsigned int nraddr; /* where's the code? */
531 }
532 OrigFn;
533
534 #define __SPECIAL_INSTRUCTION_PREAMBLE \
535 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
536 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
537
538 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
539 _zzq_default, _zzq_request, \
540 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
541 \
542 __extension__ \
543 ({volatile unsigned int _zzq_args[6]; \
544 volatile unsigned int _zzq_result; \
545 _zzq_args[0] = (unsigned int)(_zzq_request); \
546 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
547 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
548 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
549 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
550 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
551 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
552 "mov r4, %2\n\t" /*ptr*/ \
553 __SPECIAL_INSTRUCTION_PREAMBLE \
554 /* R3 = client_request ( R4 ) */ \
555 "orr r10, r10, r10\n\t" \
556 "mov %0, r3" /*result*/ \
557 : "=r" (_zzq_result) \
558 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
559 : "cc","memory", "r3", "r4"); \
560 _zzq_result; \
561 })
562
563 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
564 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
565 unsigned int __addr; \
566 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
567 /* R3 = guest_NRADDR */ \
568 "orr r11, r11, r11\n\t" \
569 "mov %0, r3" \
570 : "=r" (__addr) \
571 : \
572 : "cc", "memory", "r3" \
573 ); \
574 _zzq_orig->nraddr = __addr; \
575 }
576
577 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
578 __SPECIAL_INSTRUCTION_PREAMBLE \
579 /* branch-and-link-to-noredir *%R4 */ \
580 "orr r12, r12, r12\n\t"
581
582 #endif /* PLAT_arm_linux */
583
584 /* ------------------------ s390x-linux ------------------------ */
585
586 #if defined(PLAT_s390x_linux)
587
588 typedef
589 struct {
590 uint64_t nraddr; /* where's the code? */
591 }
592 OrigFn;
593
594 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind-specific
595  * code. This detection is implemented in platform-specific toIR.c
596 * (e.g. VEX/priv/guest_s390_decoder.c).
597 */
598 #define __SPECIAL_INSTRUCTION_PREAMBLE \
599 "lr 15,15\n\t" \
600 "lr 1,1\n\t" \
601 "lr 2,2\n\t" \
602 "lr 3,3\n\t"
603
604 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
605 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
606 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
607
608 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
609 _zzq_default, _zzq_request, \
610 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
611 __extension__ \
612 ({volatile uint64_t _zzq_args[6]; \
613 volatile uint64_t _zzq_result; \
614 _zzq_args[0] = (uint64_t)(_zzq_request); \
615 _zzq_args[1] = (uint64_t)(_zzq_arg1); \
616 _zzq_args[2] = (uint64_t)(_zzq_arg2); \
617 _zzq_args[3] = (uint64_t)(_zzq_arg3); \
618 _zzq_args[4] = (uint64_t)(_zzq_arg4); \
619 _zzq_args[5] = (uint64_t)(_zzq_arg5); \
620 __asm__ volatile(/* r2 = args */ \
621 "lgr 2,%1\n\t" \
622 /* r3 = default */ \
623 "lgr 3,%2\n\t" \
624 __SPECIAL_INSTRUCTION_PREAMBLE \
625 __CLIENT_REQUEST_CODE \
626 /* results = r3 */ \
627 "lgr %0, 3\n\t" \
628 : "=d" (_zzq_result) \
629 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
630 : "cc", "2", "3", "memory" \
631 ); \
632 _zzq_result; \
633 })
634
635 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
636 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
637 volatile uint64_t __addr; \
638 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
639 __GET_NR_CONTEXT_CODE \
640 "lgr %0, 3\n\t" \
641 : "=a" (__addr) \
642 : \
643 : "cc", "3", "memory" \
644 ); \
645 _zzq_orig->nraddr = __addr; \
646 }
647
648 #define VALGRIND_CALL_NOREDIR_R1 \
649 __SPECIAL_INSTRUCTION_PREAMBLE \
650 __CALL_NO_REDIR_CODE
651
652 #endif /* PLAT_s390x_linux */
653
654 /* Insert assembly code for other platforms here... */
655
656 #endif /* NVALGRIND */
657
658
659 /* ------------------------------------------------------------------ */
660 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
661 /* ugly. It's the least-worst tradeoff I can think of. */
662 /* ------------------------------------------------------------------ */
663
664 /* This section defines magic (a.k.a. appalling-hack) macros for doing
665    guaranteed-no-redirection calls, so as to get from function
666 wrappers to the functions they are wrapping. The whole point is to
667 construct standard call sequences, but to do the call itself with a
668 special no-redirect call pseudo-instruction that the JIT
669 understands and handles specially. This section is long and
670 repetitious, and I can't see a way to make it shorter.
671
672 The naming scheme is as follows:
673
674 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
675
676 'W' stands for "word" and 'v' for "void". Hence there are
677 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
678 and for each, the possibility of returning a word-typed result, or
679 no result.
680 */
681
682 /* Use these to write the name of your wrapper. NOTE: duplicates
683 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. */
684
685 /* Use an extra level of macroisation so as to ensure the soname/fnname
686 args are fully macro-expanded before pasting them together. */
687 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
688
689 #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
690 VG_CONCAT4(_vgwZU_,soname,_,fnname)
691
692 #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
693 VG_CONCAT4(_vgwZZ_,soname,_,fnname)
694
695 /* Use this macro from within a wrapper function to collect the
696 context (address and possibly other info) of the original function.
697 Once you have that you can then use it in one of the CALL_FN_
698 macros. The type of the argument _lval is OrigFn. */
699 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
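
/* For illustration only, a minimal wrapper sketch; the soname pattern,
   function name and signature here are assumptions, not part of this
   header:

      int I_WRAP_SONAME_FNNAME_ZU(libcZdsoZa, foo) ( int x )
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);
         CALL_FN_W_W(result, fn, x);
         return result;
      }

   "libcZdsoZa" Z-decodes to the soname pattern "libc.so*" (Zd is '.',
   Za is '*'), so this intercepts foo() in any matching library and
   calls the original through the CALL_FN_ machinery defined below. */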
700
701 /* Derivatives of the main macros below, for calling functions
702 returning void. */
703
704 #define CALL_FN_v_v(fnptr) \
705 do { volatile unsigned long _junk; \
706 CALL_FN_W_v(_junk,fnptr); } while (0)
707
708 #define CALL_FN_v_W(fnptr, arg1) \
709 do { volatile unsigned long _junk; \
710 CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
711
712 #define CALL_FN_v_WW(fnptr, arg1,arg2) \
713 do { volatile unsigned long _junk; \
714 CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
715
716 #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
717 do { volatile unsigned long _junk; \
718 CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
719
720 #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
721 do { volatile unsigned long _junk; \
722 CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
723
724 #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
725 do { volatile unsigned long _junk; \
726 CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
727
728 #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
729 do { volatile unsigned long _junk; \
730 CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
731
732 #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
733 do { volatile unsigned long _junk; \
734 CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
735
736 /* ------------------------- x86-{linux,darwin} ---------------- */
737
738 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
739
740 /* These regs are trashed by the hidden call.  No need to mention eax
741    as gcc can already see that; listing it also causes gcc to bomb. */
742 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
743
744 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
745 long) == 4. */
746
747 #define CALL_FN_W_v(lval, orig) \
748 do { \
749 volatile OrigFn _orig = (orig); \
750 volatile unsigned long _argvec[1]; \
751 volatile unsigned long _res; \
752 _argvec[0] = (unsigned long)_orig.nraddr; \
753 __asm__ volatile( \
754 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
755 VALGRIND_CALL_NOREDIR_EAX \
756 : /*out*/ "=a" (_res) \
757 : /*in*/ "a" (&_argvec[0]) \
758 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
759 ); \
760 lval = (__typeof__(lval)) _res; \
761 } while (0)
762
763 #define CALL_FN_W_W(lval, orig, arg1) \
764 do { \
765 volatile OrigFn _orig = (orig); \
766 volatile unsigned long _argvec[2]; \
767 volatile unsigned long _res; \
768 _argvec[0] = (unsigned long)_orig.nraddr; \
769 _argvec[1] = (unsigned long)(arg1); \
770 __asm__ volatile( \
771 "subl $12, %%esp\n\t" \
772 "pushl 4(%%eax)\n\t" \
773 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
774 VALGRIND_CALL_NOREDIR_EAX \
775 "addl $16, %%esp\n" \
776 : /*out*/ "=a" (_res) \
777 : /*in*/ "a" (&_argvec[0]) \
778 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
779 ); \
780 lval = (__typeof__(lval)) _res; \
781 } while (0)
782
783 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
784 do { \
785 volatile OrigFn _orig = (orig); \
786 volatile unsigned long _argvec[3]; \
787 volatile unsigned long _res; \
788 _argvec[0] = (unsigned long)_orig.nraddr; \
789 _argvec[1] = (unsigned long)(arg1); \
790 _argvec[2] = (unsigned long)(arg2); \
791 __asm__ volatile( \
792 "subl $8, %%esp\n\t" \
793 "pushl 8(%%eax)\n\t" \
794 "pushl 4(%%eax)\n\t" \
795 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
796 VALGRIND_CALL_NOREDIR_EAX \
797 "addl $16, %%esp\n" \
798 : /*out*/ "=a" (_res) \
799 : /*in*/ "a" (&_argvec[0]) \
800 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
801 ); \
802 lval = (__typeof__(lval)) _res; \
803 } while (0)
804
805 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
806 do { \
807 volatile OrigFn _orig = (orig); \
808 volatile unsigned long _argvec[4]; \
809 volatile unsigned long _res; \
810 _argvec[0] = (unsigned long)_orig.nraddr; \
811 _argvec[1] = (unsigned long)(arg1); \
812 _argvec[2] = (unsigned long)(arg2); \
813 _argvec[3] = (unsigned long)(arg3); \
814 __asm__ volatile( \
815 "subl $4, %%esp\n\t" \
816 "pushl 12(%%eax)\n\t" \
817 "pushl 8(%%eax)\n\t" \
818 "pushl 4(%%eax)\n\t" \
819 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
820 VALGRIND_CALL_NOREDIR_EAX \
821 "addl $16, %%esp\n" \
822 : /*out*/ "=a" (_res) \
823 : /*in*/ "a" (&_argvec[0]) \
824 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
825 ); \
826 lval = (__typeof__(lval)) _res; \
827 } while (0)
828
829 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
830 do { \
831 volatile OrigFn _orig = (orig); \
832 volatile unsigned long _argvec[5]; \
833 volatile unsigned long _res; \
834 _argvec[0] = (unsigned long)_orig.nraddr; \
835 _argvec[1] = (unsigned long)(arg1); \
836 _argvec[2] = (unsigned long)(arg2); \
837 _argvec[3] = (unsigned long)(arg3); \
838 _argvec[4] = (unsigned long)(arg4); \
839 __asm__ volatile( \
840 "pushl 16(%%eax)\n\t" \
841 "pushl 12(%%eax)\n\t" \
842 "pushl 8(%%eax)\n\t" \
843 "pushl 4(%%eax)\n\t" \
844 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
845 VALGRIND_CALL_NOREDIR_EAX \
846 "addl $16, %%esp\n" \
847 : /*out*/ "=a" (_res) \
848 : /*in*/ "a" (&_argvec[0]) \
849 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
850 ); \
851 lval = (__typeof__(lval)) _res; \
852 } while (0)
853
854 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
855 do { \
856 volatile OrigFn _orig = (orig); \
857 volatile unsigned long _argvec[6]; \
858 volatile unsigned long _res; \
859 _argvec[0] = (unsigned long)_orig.nraddr; \
860 _argvec[1] = (unsigned long)(arg1); \
861 _argvec[2] = (unsigned long)(arg2); \
862 _argvec[3] = (unsigned long)(arg3); \
863 _argvec[4] = (unsigned long)(arg4); \
864 _argvec[5] = (unsigned long)(arg5); \
865 __asm__ volatile( \
866 "subl $12, %%esp\n\t" \
867 "pushl 20(%%eax)\n\t" \
868 "pushl 16(%%eax)\n\t" \
869 "pushl 12(%%eax)\n\t" \
870 "pushl 8(%%eax)\n\t" \
871 "pushl 4(%%eax)\n\t" \
872 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
873 VALGRIND_CALL_NOREDIR_EAX \
874 "addl $32, %%esp\n" \
875 : /*out*/ "=a" (_res) \
876 : /*in*/ "a" (&_argvec[0]) \
877 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
878 ); \
879 lval = (__typeof__(lval)) _res; \
880 } while (0)
881
882 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
883 do { \
884 volatile OrigFn _orig = (orig); \
885 volatile unsigned long _argvec[7]; \
886 volatile unsigned long _res; \
887 _argvec[0] = (unsigned long)_orig.nraddr; \
888 _argvec[1] = (unsigned long)(arg1); \
889 _argvec[2] = (unsigned long)(arg2); \
890 _argvec[3] = (unsigned long)(arg3); \
891 _argvec[4] = (unsigned long)(arg4); \
892 _argvec[5] = (unsigned long)(arg5); \
893 _argvec[6] = (unsigned long)(arg6); \
894 __asm__ volatile( \
895 "subl $8, %%esp\n\t" \
896 "pushl 24(%%eax)\n\t" \
897 "pushl 20(%%eax)\n\t" \
898 "pushl 16(%%eax)\n\t" \
899 "pushl 12(%%eax)\n\t" \
900 "pushl 8(%%eax)\n\t" \
901 "pushl 4(%%eax)\n\t" \
902 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
903 VALGRIND_CALL_NOREDIR_EAX \
904 "addl $32, %%esp\n" \
905 : /*out*/ "=a" (_res) \
906 : /*in*/ "a" (&_argvec[0]) \
907 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
908 ); \
909 lval = (__typeof__(lval)) _res; \
910 } while (0)
911
912 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
913 arg7) \
914 do { \
915 volatile OrigFn _orig = (orig); \
916 volatile unsigned long _argvec[8]; \
917 volatile unsigned long _res; \
918 _argvec[0] = (unsigned long)_orig.nraddr; \
919 _argvec[1] = (unsigned long)(arg1); \
920 _argvec[2] = (unsigned long)(arg2); \
921 _argvec[3] = (unsigned long)(arg3); \
922 _argvec[4] = (unsigned long)(arg4); \
923 _argvec[5] = (unsigned long)(arg5); \
924 _argvec[6] = (unsigned long)(arg6); \
925 _argvec[7] = (unsigned long)(arg7); \
926 __asm__ volatile( \
927 "subl $4, %%esp\n\t" \
928 "pushl 28(%%eax)\n\t" \
929 "pushl 24(%%eax)\n\t" \
930 "pushl 20(%%eax)\n\t" \
931 "pushl 16(%%eax)\n\t" \
932 "pushl 12(%%eax)\n\t" \
933 "pushl 8(%%eax)\n\t" \
934 "pushl 4(%%eax)\n\t" \
935 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
936 VALGRIND_CALL_NOREDIR_EAX \
937 "addl $32, %%esp\n" \
938 : /*out*/ "=a" (_res) \
939 : /*in*/ "a" (&_argvec[0]) \
940 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
941 ); \
942 lval = (__typeof__(lval)) _res; \
943 } while (0)
944
945 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
946 arg7,arg8) \
947 do { \
948 volatile OrigFn _orig = (orig); \
949 volatile unsigned long _argvec[9]; \
950 volatile unsigned long _res; \
951 _argvec[0] = (unsigned long)_orig.nraddr; \
952 _argvec[1] = (unsigned long)(arg1); \
953 _argvec[2] = (unsigned long)(arg2); \
954 _argvec[3] = (unsigned long)(arg3); \
955 _argvec[4] = (unsigned long)(arg4); \
956 _argvec[5] = (unsigned long)(arg5); \
957 _argvec[6] = (unsigned long)(arg6); \
958 _argvec[7] = (unsigned long)(arg7); \
959 _argvec[8] = (unsigned long)(arg8); \
960 __asm__ volatile( \
961 "pushl 32(%%eax)\n\t" \
962 "pushl 28(%%eax)\n\t" \
963 "pushl 24(%%eax)\n\t" \
964 "pushl 20(%%eax)\n\t" \
965 "pushl 16(%%eax)\n\t" \
966 "pushl 12(%%eax)\n\t" \
967 "pushl 8(%%eax)\n\t" \
968 "pushl 4(%%eax)\n\t" \
969 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
970 VALGRIND_CALL_NOREDIR_EAX \
971 "addl $32, %%esp\n" \
972 : /*out*/ "=a" (_res) \
973 : /*in*/ "a" (&_argvec[0]) \
974 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
975 ); \
976 lval = (__typeof__(lval)) _res; \
977 } while (0)
978
979 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
980 arg7,arg8,arg9) \
981 do { \
982 volatile OrigFn _orig = (orig); \
983 volatile unsigned long _argvec[10]; \
984 volatile unsigned long _res; \
985 _argvec[0] = (unsigned long)_orig.nraddr; \
986 _argvec[1] = (unsigned long)(arg1); \
987 _argvec[2] = (unsigned long)(arg2); \
988 _argvec[3] = (unsigned long)(arg3); \
989 _argvec[4] = (unsigned long)(arg4); \
990 _argvec[5] = (unsigned long)(arg5); \
991 _argvec[6] = (unsigned long)(arg6); \
992 _argvec[7] = (unsigned long)(arg7); \
993 _argvec[8] = (unsigned long)(arg8); \
994 _argvec[9] = (unsigned long)(arg9); \
995 __asm__ volatile( \
996 "subl $12, %%esp\n\t" \
997 "pushl 36(%%eax)\n\t" \
998 "pushl 32(%%eax)\n\t" \
999 "pushl 28(%%eax)\n\t" \
1000 "pushl 24(%%eax)\n\t" \
1001 "pushl 20(%%eax)\n\t" \
1002 "pushl 16(%%eax)\n\t" \
1003 "pushl 12(%%eax)\n\t" \
1004 "pushl 8(%%eax)\n\t" \
1005 "pushl 4(%%eax)\n\t" \
1006 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1007 VALGRIND_CALL_NOREDIR_EAX \
1008 "addl $48, %%esp\n" \
1009 : /*out*/ "=a" (_res) \
1010 : /*in*/ "a" (&_argvec[0]) \
1011 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1012 ); \
1013 lval = (__typeof__(lval)) _res; \
1014 } while (0)
1015
1016 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1017 arg7,arg8,arg9,arg10) \
1018 do { \
1019 volatile OrigFn _orig = (orig); \
1020 volatile unsigned long _argvec[11]; \
1021 volatile unsigned long _res; \
1022 _argvec[0] = (unsigned long)_orig.nraddr; \
1023 _argvec[1] = (unsigned long)(arg1); \
1024 _argvec[2] = (unsigned long)(arg2); \
1025 _argvec[3] = (unsigned long)(arg3); \
1026 _argvec[4] = (unsigned long)(arg4); \
1027 _argvec[5] = (unsigned long)(arg5); \
1028 _argvec[6] = (unsigned long)(arg6); \
1029 _argvec[7] = (unsigned long)(arg7); \
1030 _argvec[8] = (unsigned long)(arg8); \
1031 _argvec[9] = (unsigned long)(arg9); \
1032 _argvec[10] = (unsigned long)(arg10); \
1033 __asm__ volatile( \
1034 "subl $8, %%esp\n\t" \
1035 "pushl 40(%%eax)\n\t" \
1036 "pushl 36(%%eax)\n\t" \
1037 "pushl 32(%%eax)\n\t" \
1038 "pushl 28(%%eax)\n\t" \
1039 "pushl 24(%%eax)\n\t" \
1040 "pushl 20(%%eax)\n\t" \
1041 "pushl 16(%%eax)\n\t" \
1042 "pushl 12(%%eax)\n\t" \
1043 "pushl 8(%%eax)\n\t" \
1044 "pushl 4(%%eax)\n\t" \
1045 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1046 VALGRIND_CALL_NOREDIR_EAX \
1047 "addl $48, %%esp\n" \
1048 : /*out*/ "=a" (_res) \
1049 : /*in*/ "a" (&_argvec[0]) \
1050 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1051 ); \
1052 lval = (__typeof__(lval)) _res; \
1053 } while (0)
1054
1055 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1056 arg6,arg7,arg8,arg9,arg10, \
1057 arg11) \
1058 do { \
1059 volatile OrigFn _orig = (orig); \
1060 volatile unsigned long _argvec[12]; \
1061 volatile unsigned long _res; \
1062 _argvec[0] = (unsigned long)_orig.nraddr; \
1063 _argvec[1] = (unsigned long)(arg1); \
1064 _argvec[2] = (unsigned long)(arg2); \
1065 _argvec[3] = (unsigned long)(arg3); \
1066 _argvec[4] = (unsigned long)(arg4); \
1067 _argvec[5] = (unsigned long)(arg5); \
1068 _argvec[6] = (unsigned long)(arg6); \
1069 _argvec[7] = (unsigned long)(arg7); \
1070 _argvec[8] = (unsigned long)(arg8); \
1071 _argvec[9] = (unsigned long)(arg9); \
1072 _argvec[10] = (unsigned long)(arg10); \
1073 _argvec[11] = (unsigned long)(arg11); \
1074 __asm__ volatile( \
1075 "subl $4, %%esp\n\t" \
1076 "pushl 44(%%eax)\n\t" \
1077 "pushl 40(%%eax)\n\t" \
1078 "pushl 36(%%eax)\n\t" \
1079 "pushl 32(%%eax)\n\t" \
1080 "pushl 28(%%eax)\n\t" \
1081 "pushl 24(%%eax)\n\t" \
1082 "pushl 20(%%eax)\n\t" \
1083 "pushl 16(%%eax)\n\t" \
1084 "pushl 12(%%eax)\n\t" \
1085 "pushl 8(%%eax)\n\t" \
1086 "pushl 4(%%eax)\n\t" \
1087 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1088 VALGRIND_CALL_NOREDIR_EAX \
1089 "addl $48, %%esp\n" \
1090 : /*out*/ "=a" (_res) \
1091 : /*in*/ "a" (&_argvec[0]) \
1092 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1093 ); \
1094 lval = (__typeof__(lval)) _res; \
1095 } while (0)
1096
1097 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1098 arg6,arg7,arg8,arg9,arg10, \
1099 arg11,arg12) \
1100 do { \
1101 volatile OrigFn _orig = (orig); \
1102 volatile unsigned long _argvec[13]; \
1103 volatile unsigned long _res; \
1104 _argvec[0] = (unsigned long)_orig.nraddr; \
1105 _argvec[1] = (unsigned long)(arg1); \
1106 _argvec[2] = (unsigned long)(arg2); \
1107 _argvec[3] = (unsigned long)(arg3); \
1108 _argvec[4] = (unsigned long)(arg4); \
1109 _argvec[5] = (unsigned long)(arg5); \
1110 _argvec[6] = (unsigned long)(arg6); \
1111 _argvec[7] = (unsigned long)(arg7); \
1112 _argvec[8] = (unsigned long)(arg8); \
1113 _argvec[9] = (unsigned long)(arg9); \
1114 _argvec[10] = (unsigned long)(arg10); \
1115 _argvec[11] = (unsigned long)(arg11); \
1116 _argvec[12] = (unsigned long)(arg12); \
1117 __asm__ volatile( \
1118 "pushl 48(%%eax)\n\t" \
1119 "pushl 44(%%eax)\n\t" \
1120 "pushl 40(%%eax)\n\t" \
1121 "pushl 36(%%eax)\n\t" \
1122 "pushl 32(%%eax)\n\t" \
1123 "pushl 28(%%eax)\n\t" \
1124 "pushl 24(%%eax)\n\t" \
1125 "pushl 20(%%eax)\n\t" \
1126 "pushl 16(%%eax)\n\t" \
1127 "pushl 12(%%eax)\n\t" \
1128 "pushl 8(%%eax)\n\t" \
1129 "pushl 4(%%eax)\n\t" \
1130 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1131 VALGRIND_CALL_NOREDIR_EAX \
1132 "addl $48, %%esp\n" \
1133 : /*out*/ "=a" (_res) \
1134 : /*in*/ "a" (&_argvec[0]) \
1135 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1136 ); \
1137 lval = (__typeof__(lval)) _res; \
1138 } while (0)
1139
1140 #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1141
1142 /* ------------------------ amd64-{linux,darwin} --------------- */
1143
1144 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1145
1146 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1147
1148 /* These regs are trashed by the hidden call. */
1149 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1150 "rdi", "r8", "r9", "r10", "r11"
1151
1152 /* This is all pretty complex.  It's done this way to make stack
1153    unwinding work reliably.  See bug 243270.  The basic problem is the sub
1154    and add of 128 to %rsp in all of the following macros.  If gcc believes
1155 the CFA is in %rsp, then unwinding may fail, because what's at the
1156 CFA is not what gcc "expected" when it constructs the CFIs for the
1157 places where the macros are instantiated.
1158
1159 But we can't just add a CFI annotation to increase the CFA offset
1160 by 128, to match the sub of 128 from %rsp, because we don't know
1161 whether gcc has chosen %rsp as the CFA at that point, or whether it
1162 has chosen some other register (eg, %rbp). In the latter case,
1163 adding a CFI annotation to change the CFA offset is simply wrong.
1164
1165 So the solution is to get hold of the CFA using
1166 __builtin_dwarf_cfa(), put it in a known register, and add a
1167 CFI annotation to say what the register is. We choose %rbp for
1168 this (perhaps perversely), because:
1169
1170 (1) %rbp is already subject to unwinding. If a new register was
1171 chosen then the unwinder would have to unwind it in all stack
1172 traces, which is expensive, and
1173
1174 (2) %rbp is already subject to precise exception updates in the
1175 JIT. If a new register was chosen, we'd have to have precise
1176 exceptions for it too, which reduces performance of the
1177 generated code.
1178
1179 However .. one extra complication. We can't just whack the result
1180 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1181 list of trashed registers at the end of the inline assembly
1182 fragments; gcc won't allow %rbp to appear in that list. Hence
1183 instead we need to stash %rbp in %r15 for the duration of the asm,
1184 and say that %r15 is trashed instead. gcc seems happy to go with
1185 that.
1186
1187 Oh .. and this all needs to be conditionalised so that it is
1188 unchanged from before this commit, when compiled with older gccs
1189 that don't support __builtin_dwarf_cfa. Furthermore, since
1190 this header file is freestanding, it has to be independent of
1191 config.h, and so the following conditionalisation cannot depend on
1192 configure time checks.
1193
1194 Although it's not clear from
1195 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1196 this expression excludes Darwin.
1197 .cfi directives in Darwin assembly appear to be completely
1198 different and I haven't investigated how they work.
1199
1200 For even more entertainment value, note we have to use the
1201 completely undocumented __builtin_dwarf_cfa(), which appears to
1202 really compute the CFA, whereas __builtin_frame_address(0) claims
1203 to but actually doesn't. See
1204 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1205 */
1206 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
1207 # define __FRAME_POINTER \
1208 ,"r"(__builtin_dwarf_cfa())
1209 # define VALGRIND_CFI_PROLOGUE \
1210 "movq %%rbp, %%r15\n\t" \
1211 "movq %2, %%rbp\n\t" \
1212 ".cfi_remember_state\n\t" \
1213 ".cfi_def_cfa rbp, 0\n\t"
1214 # define VALGRIND_CFI_EPILOGUE \
1215 "movq %%r15, %%rbp\n\t" \
1216 ".cfi_restore_state\n\t"
1217 #else
1218 # define __FRAME_POINTER
1219 # define VALGRIND_CFI_PROLOGUE
1220 # define VALGRIND_CFI_EPILOGUE
1221 #endif
1222
1223
1224 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1225 long) == 8. */
1226
1227 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1228 macros. In order not to trash the stack redzone, we need to drop
1229 %rsp by 128 before the hidden call, and restore afterwards. The
1230    nastiness is that it is only by luck that the stack still appears
1231 to be unwindable during the hidden call - since then the behaviour
1232 of any routine using this macro does not match what the CFI data
1233 says. Sigh.
1234
1235 Why is this important? Imagine that a wrapper has a stack
1236 allocated local, and passes to the hidden call, a pointer to it.
1237 Because gcc does not know about the hidden call, it may allocate
1238 that local in the redzone. Unfortunately the hidden call may then
1239 trash it before it comes to use it. So we must step clear of the
1240 redzone, for the duration of the hidden call, to make it safe.
1241
1242 Probably the same problem afflicts the other redzone-style ABIs too
1243    (ppc64-linux); but for those, the stack is
1244    self-describing (none of this CFI nonsense) so at least messing
1245    with the stack pointer doesn't risk making the stack
1246    non-unwindable. */
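
/* For example (a sketch only; "somelib", "getval" and the signature are
   made up, and the original is assumed to write its result through the
   pointer argument): a wrapper such as

      int I_WRAP_SONAME_FNNAME_ZU(somelib, getval) ( int* out )
      {
         int           local;
         unsigned long result;
         OrigFn        fn;
         VALGRIND_GET_ORIG_FN(fn);
         CALL_FN_W_W(result, fn, (unsigned long)&local);
         *out = local;
         return (int)result;
      }

   may have "local" placed in the redzone, because gcc sees no call in
   the wrapper's body.  Without the %rsp adjustment in the macros below,
   the hidden call would trash it before the original got to use it. */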
1247
1248 #define CALL_FN_W_v(lval, orig) \
1249 do { \
1250 volatile OrigFn _orig = (orig); \
1251 volatile unsigned long _argvec[1]; \
1252 volatile unsigned long _res; \
1253 _argvec[0] = (unsigned long)_orig.nraddr; \
1254 __asm__ volatile( \
1255 VALGRIND_CFI_PROLOGUE \
1256 "subq $128,%%rsp\n\t" \
1257 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1258 VALGRIND_CALL_NOREDIR_RAX \
1259 "addq $128,%%rsp\n\t" \
1260 VALGRIND_CFI_EPILOGUE \
1261 : /*out*/ "=a" (_res) \
1262 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1263 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1264 ); \
1265 lval = (__typeof__(lval)) _res; \
1266 } while (0)
1267
1268 #define CALL_FN_W_W(lval, orig, arg1) \
1269 do { \
1270 volatile OrigFn _orig = (orig); \
1271 volatile unsigned long _argvec[2]; \
1272 volatile unsigned long _res; \
1273 _argvec[0] = (unsigned long)_orig.nraddr; \
1274 _argvec[1] = (unsigned long)(arg1); \
1275 __asm__ volatile( \
1276 VALGRIND_CFI_PROLOGUE \
1277 "subq $128,%%rsp\n\t" \
1278 "movq 8(%%rax), %%rdi\n\t" \
1279 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1280 VALGRIND_CALL_NOREDIR_RAX \
1281 "addq $128,%%rsp\n\t" \
1282 VALGRIND_CFI_EPILOGUE \
1283 : /*out*/ "=a" (_res) \
1284 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1285 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1286 ); \
1287 lval = (__typeof__(lval)) _res; \
1288 } while (0)
1289
1290 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1291 do { \
1292 volatile OrigFn _orig = (orig); \
1293 volatile unsigned long _argvec[3]; \
1294 volatile unsigned long _res; \
1295 _argvec[0] = (unsigned long)_orig.nraddr; \
1296 _argvec[1] = (unsigned long)(arg1); \
1297 _argvec[2] = (unsigned long)(arg2); \
1298 __asm__ volatile( \
1299 VALGRIND_CFI_PROLOGUE \
1300 "subq $128,%%rsp\n\t" \
1301 "movq 16(%%rax), %%rsi\n\t" \
1302 "movq 8(%%rax), %%rdi\n\t" \
1303 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1304 VALGRIND_CALL_NOREDIR_RAX \
1305 "addq $128,%%rsp\n\t" \
1306 VALGRIND_CFI_EPILOGUE \
1307 : /*out*/ "=a" (_res) \
1308 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1309 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1310 ); \
1311 lval = (__typeof__(lval)) _res; \
1312 } while (0)
1313
1314 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1315 do { \
1316 volatile OrigFn _orig = (orig); \
1317 volatile unsigned long _argvec[4]; \
1318 volatile unsigned long _res; \
1319 _argvec[0] = (unsigned long)_orig.nraddr; \
1320 _argvec[1] = (unsigned long)(arg1); \
1321 _argvec[2] = (unsigned long)(arg2); \
1322 _argvec[3] = (unsigned long)(arg3); \
1323 __asm__ volatile( \
1324 VALGRIND_CFI_PROLOGUE \
1325 "subq $128,%%rsp\n\t" \
1326 "movq 24(%%rax), %%rdx\n\t" \
1327 "movq 16(%%rax), %%rsi\n\t" \
1328 "movq 8(%%rax), %%rdi\n\t" \
1329 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1330 VALGRIND_CALL_NOREDIR_RAX \
1331 "addq $128,%%rsp\n\t" \
1332 VALGRIND_CFI_EPILOGUE \
1333 : /*out*/ "=a" (_res) \
1334 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1335 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1336 ); \
1337 lval = (__typeof__(lval)) _res; \
1338 } while (0)
1339
1340 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1341 do { \
1342 volatile OrigFn _orig = (orig); \
1343 volatile unsigned long _argvec[5]; \
1344 volatile unsigned long _res; \
1345 _argvec[0] = (unsigned long)_orig.nraddr; \
1346 _argvec[1] = (unsigned long)(arg1); \
1347 _argvec[2] = (unsigned long)(arg2); \
1348 _argvec[3] = (unsigned long)(arg3); \
1349 _argvec[4] = (unsigned long)(arg4); \
1350 __asm__ volatile( \
1351 VALGRIND_CFI_PROLOGUE \
1352 "subq $128,%%rsp\n\t" \
1353 "movq 32(%%rax), %%rcx\n\t" \
1354 "movq 24(%%rax), %%rdx\n\t" \
1355 "movq 16(%%rax), %%rsi\n\t" \
1356 "movq 8(%%rax), %%rdi\n\t" \
1357 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1358 VALGRIND_CALL_NOREDIR_RAX \
1359 "addq $128,%%rsp\n\t" \
1360 VALGRIND_CFI_EPILOGUE \
1361 : /*out*/ "=a" (_res) \
1362 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1363 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1364 ); \
1365 lval = (__typeof__(lval)) _res; \
1366 } while (0)
1367
1368 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1369 do { \
1370 volatile OrigFn _orig = (orig); \
1371 volatile unsigned long _argvec[6]; \
1372 volatile unsigned long _res; \
1373 _argvec[0] = (unsigned long)_orig.nraddr; \
1374 _argvec[1] = (unsigned long)(arg1); \
1375 _argvec[2] = (unsigned long)(arg2); \
1376 _argvec[3] = (unsigned long)(arg3); \
1377 _argvec[4] = (unsigned long)(arg4); \
1378 _argvec[5] = (unsigned long)(arg5); \
1379 __asm__ volatile( \
1380 VALGRIND_CFI_PROLOGUE \
1381 "subq $128,%%rsp\n\t" \
1382 "movq 40(%%rax), %%r8\n\t" \
1383 "movq 32(%%rax), %%rcx\n\t" \
1384 "movq 24(%%rax), %%rdx\n\t" \
1385 "movq 16(%%rax), %%rsi\n\t" \
1386 "movq 8(%%rax), %%rdi\n\t" \
1387 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1388 VALGRIND_CALL_NOREDIR_RAX \
1389 "addq $128,%%rsp\n\t" \
1390 VALGRIND_CFI_EPILOGUE \
1391 : /*out*/ "=a" (_res) \
1392 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1393 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1394 ); \
1395 lval = (__typeof__(lval)) _res; \
1396 } while (0)
1397
1398 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1399 do { \
1400 volatile OrigFn _orig = (orig); \
1401 volatile unsigned long _argvec[7]; \
1402 volatile unsigned long _res; \
1403 _argvec[0] = (unsigned long)_orig.nraddr; \
1404 _argvec[1] = (unsigned long)(arg1); \
1405 _argvec[2] = (unsigned long)(arg2); \
1406 _argvec[3] = (unsigned long)(arg3); \
1407 _argvec[4] = (unsigned long)(arg4); \
1408 _argvec[5] = (unsigned long)(arg5); \
1409 _argvec[6] = (unsigned long)(arg6); \
1410 __asm__ volatile( \
1411 VALGRIND_CFI_PROLOGUE \
1412 "subq $128,%%rsp\n\t" \
1413 "movq 48(%%rax), %%r9\n\t" \
1414 "movq 40(%%rax), %%r8\n\t" \
1415 "movq 32(%%rax), %%rcx\n\t" \
1416 "movq 24(%%rax), %%rdx\n\t" \
1417 "movq 16(%%rax), %%rsi\n\t" \
1418 "movq 8(%%rax), %%rdi\n\t" \
1419 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1420 VALGRIND_CALL_NOREDIR_RAX \
1421 "addq $128,%%rsp\n\t" \
1422 VALGRIND_CFI_EPILOGUE \
1423 : /*out*/ "=a" (_res) \
1424 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1425 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1426 ); \
1427 lval = (__typeof__(lval)) _res; \
1428 } while (0)
1429
1430 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1431 arg7) \
1432 do { \
1433 volatile OrigFn _orig = (orig); \
1434 volatile unsigned long _argvec[8]; \
1435 volatile unsigned long _res; \
1436 _argvec[0] = (unsigned long)_orig.nraddr; \
1437 _argvec[1] = (unsigned long)(arg1); \
1438 _argvec[2] = (unsigned long)(arg2); \
1439 _argvec[3] = (unsigned long)(arg3); \
1440 _argvec[4] = (unsigned long)(arg4); \
1441 _argvec[5] = (unsigned long)(arg5); \
1442 _argvec[6] = (unsigned long)(arg6); \
1443 _argvec[7] = (unsigned long)(arg7); \
1444 __asm__ volatile( \
1445 VALGRIND_CFI_PROLOGUE \
1446 "subq $136,%%rsp\n\t" \
1447 "pushq 56(%%rax)\n\t" \
1448 "movq 48(%%rax), %%r9\n\t" \
1449 "movq 40(%%rax), %%r8\n\t" \
1450 "movq 32(%%rax), %%rcx\n\t" \
1451 "movq 24(%%rax), %%rdx\n\t" \
1452 "movq 16(%%rax), %%rsi\n\t" \
1453 "movq 8(%%rax), %%rdi\n\t" \
1454 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1455 VALGRIND_CALL_NOREDIR_RAX \
1456 "addq $8, %%rsp\n" \
1457 "addq $136,%%rsp\n\t" \
1458 VALGRIND_CFI_EPILOGUE \
1459 : /*out*/ "=a" (_res) \
1460 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1461 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1462 ); \
1463 lval = (__typeof__(lval)) _res; \
1464 } while (0)
1465
1466 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1467 arg7,arg8) \
1468 do { \
1469 volatile OrigFn _orig = (orig); \
1470 volatile unsigned long _argvec[9]; \
1471 volatile unsigned long _res; \
1472 _argvec[0] = (unsigned long)_orig.nraddr; \
1473 _argvec[1] = (unsigned long)(arg1); \
1474 _argvec[2] = (unsigned long)(arg2); \
1475 _argvec[3] = (unsigned long)(arg3); \
1476 _argvec[4] = (unsigned long)(arg4); \
1477 _argvec[5] = (unsigned long)(arg5); \
1478 _argvec[6] = (unsigned long)(arg6); \
1479 _argvec[7] = (unsigned long)(arg7); \
1480 _argvec[8] = (unsigned long)(arg8); \
1481 __asm__ volatile( \
1482 VALGRIND_CFI_PROLOGUE \
1483 "subq $128,%%rsp\n\t" \
1484 "pushq 64(%%rax)\n\t" \
1485 "pushq 56(%%rax)\n\t" \
1486 "movq 48(%%rax), %%r9\n\t" \
1487 "movq 40(%%rax), %%r8\n\t" \
1488 "movq 32(%%rax), %%rcx\n\t" \
1489 "movq 24(%%rax), %%rdx\n\t" \
1490 "movq 16(%%rax), %%rsi\n\t" \
1491 "movq 8(%%rax), %%rdi\n\t" \
1492 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1493 VALGRIND_CALL_NOREDIR_RAX \
1494 "addq $16, %%rsp\n" \
1495 "addq $128,%%rsp\n\t" \
1496 VALGRIND_CFI_EPILOGUE \
1497 : /*out*/ "=a" (_res) \
1498 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1499 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1500 ); \
1501 lval = (__typeof__(lval)) _res; \
1502 } while (0)
1503
1504 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1505 arg7,arg8,arg9) \
1506 do { \
1507 volatile OrigFn _orig = (orig); \
1508 volatile unsigned long _argvec[10]; \
1509 volatile unsigned long _res; \
1510 _argvec[0] = (unsigned long)_orig.nraddr; \
1511 _argvec[1] = (unsigned long)(arg1); \
1512 _argvec[2] = (unsigned long)(arg2); \
1513 _argvec[3] = (unsigned long)(arg3); \
1514 _argvec[4] = (unsigned long)(arg4); \
1515 _argvec[5] = (unsigned long)(arg5); \
1516 _argvec[6] = (unsigned long)(arg6); \
1517 _argvec[7] = (unsigned long)(arg7); \
1518 _argvec[8] = (unsigned long)(arg8); \
1519 _argvec[9] = (unsigned long)(arg9); \
1520 __asm__ volatile( \
1521 VALGRIND_CFI_PROLOGUE \
1522 "subq $136,%%rsp\n\t" \
1523 "pushq 72(%%rax)\n\t" \
1524 "pushq 64(%%rax)\n\t" \
1525 "pushq 56(%%rax)\n\t" \
1526 "movq 48(%%rax), %%r9\n\t" \
1527 "movq 40(%%rax), %%r8\n\t" \
1528 "movq 32(%%rax), %%rcx\n\t" \
1529 "movq 24(%%rax), %%rdx\n\t" \
1530 "movq 16(%%rax), %%rsi\n\t" \
1531 "movq 8(%%rax), %%rdi\n\t" \
1532 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1533 VALGRIND_CALL_NOREDIR_RAX \
1534 "addq $24, %%rsp\n" \
1535 "addq $136,%%rsp\n\t" \
1536 VALGRIND_CFI_EPILOGUE \
1537 : /*out*/ "=a" (_res) \
1538 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1539 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1540 ); \
1541 lval = (__typeof__(lval)) _res; \
1542 } while (0)
1543
1544 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1545 arg7,arg8,arg9,arg10) \
1546 do { \
1547 volatile OrigFn _orig = (orig); \
1548 volatile unsigned long _argvec[11]; \
1549 volatile unsigned long _res; \
1550 _argvec[0] = (unsigned long)_orig.nraddr; \
1551 _argvec[1] = (unsigned long)(arg1); \
1552 _argvec[2] = (unsigned long)(arg2); \
1553 _argvec[3] = (unsigned long)(arg3); \
1554 _argvec[4] = (unsigned long)(arg4); \
1555 _argvec[5] = (unsigned long)(arg5); \
1556 _argvec[6] = (unsigned long)(arg6); \
1557 _argvec[7] = (unsigned long)(arg7); \
1558 _argvec[8] = (unsigned long)(arg8); \
1559 _argvec[9] = (unsigned long)(arg9); \
1560 _argvec[10] = (unsigned long)(arg10); \
1561 __asm__ volatile( \
1562 VALGRIND_CFI_PROLOGUE \
1563 "subq $128,%%rsp\n\t" \
1564 "pushq 80(%%rax)\n\t" \
1565 "pushq 72(%%rax)\n\t" \
1566 "pushq 64(%%rax)\n\t" \
1567 "pushq 56(%%rax)\n\t" \
1568 "movq 48(%%rax), %%r9\n\t" \
1569 "movq 40(%%rax), %%r8\n\t" \
1570 "movq 32(%%rax), %%rcx\n\t" \
1571 "movq 24(%%rax), %%rdx\n\t" \
1572 "movq 16(%%rax), %%rsi\n\t" \
1573 "movq 8(%%rax), %%rdi\n\t" \
1574 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1575 VALGRIND_CALL_NOREDIR_RAX \
1576 "addq $32, %%rsp\n" \
1577 "addq $128,%%rsp\n\t" \
1578 VALGRIND_CFI_EPILOGUE \
1579 : /*out*/ "=a" (_res) \
1580 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1581 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1582 ); \
1583 lval = (__typeof__(lval)) _res; \
1584 } while (0)
1585
1586 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1587 arg7,arg8,arg9,arg10,arg11) \
1588 do { \
1589 volatile OrigFn _orig = (orig); \
1590 volatile unsigned long _argvec[12]; \
1591 volatile unsigned long _res; \
1592 _argvec[0] = (unsigned long)_orig.nraddr; \
1593 _argvec[1] = (unsigned long)(arg1); \
1594 _argvec[2] = (unsigned long)(arg2); \
1595 _argvec[3] = (unsigned long)(arg3); \
1596 _argvec[4] = (unsigned long)(arg4); \
1597 _argvec[5] = (unsigned long)(arg5); \
1598 _argvec[6] = (unsigned long)(arg6); \
1599 _argvec[7] = (unsigned long)(arg7); \
1600 _argvec[8] = (unsigned long)(arg8); \
1601 _argvec[9] = (unsigned long)(arg9); \
1602 _argvec[10] = (unsigned long)(arg10); \
1603 _argvec[11] = (unsigned long)(arg11); \
1604 __asm__ volatile( \
1605 VALGRIND_CFI_PROLOGUE \
1606 "subq $136,%%rsp\n\t" \
1607 "pushq 88(%%rax)\n\t" \
1608 "pushq 80(%%rax)\n\t" \
1609 "pushq 72(%%rax)\n\t" \
1610 "pushq 64(%%rax)\n\t" \
1611 "pushq 56(%%rax)\n\t" \
1612 "movq 48(%%rax), %%r9\n\t" \
1613 "movq 40(%%rax), %%r8\n\t" \
1614 "movq 32(%%rax), %%rcx\n\t" \
1615 "movq 24(%%rax), %%rdx\n\t" \
1616 "movq 16(%%rax), %%rsi\n\t" \
1617 "movq 8(%%rax), %%rdi\n\t" \
1618 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1619 VALGRIND_CALL_NOREDIR_RAX \
1620 "addq $40, %%rsp\n" \
1621 "addq $136,%%rsp\n\t" \
1622 VALGRIND_CFI_EPILOGUE \
1623 : /*out*/ "=a" (_res) \
1624 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1625 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1626 ); \
1627 lval = (__typeof__(lval)) _res; \
1628 } while (0)
1629
1630 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1631 arg7,arg8,arg9,arg10,arg11,arg12) \
1632 do { \
1633 volatile OrigFn _orig = (orig); \
1634 volatile unsigned long _argvec[13]; \
1635 volatile unsigned long _res; \
1636 _argvec[0] = (unsigned long)_orig.nraddr; \
1637 _argvec[1] = (unsigned long)(arg1); \
1638 _argvec[2] = (unsigned long)(arg2); \
1639 _argvec[3] = (unsigned long)(arg3); \
1640 _argvec[4] = (unsigned long)(arg4); \
1641 _argvec[5] = (unsigned long)(arg5); \
1642 _argvec[6] = (unsigned long)(arg6); \
1643 _argvec[7] = (unsigned long)(arg7); \
1644 _argvec[8] = (unsigned long)(arg8); \
1645 _argvec[9] = (unsigned long)(arg9); \
1646 _argvec[10] = (unsigned long)(arg10); \
1647 _argvec[11] = (unsigned long)(arg11); \
1648 _argvec[12] = (unsigned long)(arg12); \
1649 __asm__ volatile( \
1650 VALGRIND_CFI_PROLOGUE \
1651 "subq $128,%%rsp\n\t" \
1652 "pushq 96(%%rax)\n\t" \
1653 "pushq 88(%%rax)\n\t" \
1654 "pushq 80(%%rax)\n\t" \
1655 "pushq 72(%%rax)\n\t" \
1656 "pushq 64(%%rax)\n\t" \
1657 "pushq 56(%%rax)\n\t" \
1658 "movq 48(%%rax), %%r9\n\t" \
1659 "movq 40(%%rax), %%r8\n\t" \
1660 "movq 32(%%rax), %%rcx\n\t" \
1661 "movq 24(%%rax), %%rdx\n\t" \
1662 "movq 16(%%rax), %%rsi\n\t" \
1663 "movq 8(%%rax), %%rdi\n\t" \
1664 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1665 VALGRIND_CALL_NOREDIR_RAX \
1666 "addq $48, %%rsp\n" \
1667 "addq $128,%%rsp\n\t" \
1668 VALGRIND_CFI_EPILOGUE \
1669 : /*out*/ "=a" (_res) \
1670 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1671 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1672 ); \
1673 lval = (__typeof__(lval)) _res; \
1674 } while (0)
1675
1676 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
1677
1678 /* ------------------------ ppc32-linux ------------------------ */
1679
1680 #if defined(PLAT_ppc32_linux)
1681
/* This is useful for finding out how arguments beyond the first eight
   get passed on the stack:
1683
1684 extern int f9 ( int,int,int,int,int,int,int,int,int );
1685 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
1686 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
1687 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
1688
1689 int g9 ( void ) {
1690 return f9(11,22,33,44,55,66,77,88,99);
1691 }
1692 int g10 ( void ) {
1693 return f10(11,22,33,44,55,66,77,88,99,110);
1694 }
1695 int g11 ( void ) {
1696 return f11(11,22,33,44,55,66,77,88,99,110,121);
1697 }
1698 int g12 ( void ) {
1699 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
1700 }
1701 */
1702
/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest are passed on the stack) */
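
/* Note (derived from the code below, not from separate documentation):
   in CALL_FN_W_9W .. CALL_FN_W_12W, the ninth and later arguments are
   written into the temporarily enlarged stack frame at offsets 8, 12,
   16 and 20 from r1 (see the "stw N,off(1)" instructions), the first
   eight bytes of the frame being left for the back chain and the LR
   save word, as the ppc32 SysV ABI expects. */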
1704
1705 /* These regs are trashed by the hidden call. */
1706 #define __CALLER_SAVED_REGS \
1707 "lr", "ctr", "xer", \
1708 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
1709 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
1710 "r11", "r12", "r13"
1711
1712 /* These CALL_FN_ macros assume that on ppc32-linux,
1713 sizeof(unsigned long) == 4. */
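
/* Purely illustrative sketch of how these CALL_FN_ macros are normally
   driven from a function wrapper, using the OrigFn /
   VALGRIND_GET_ORIG_FN / I_WRAP_SONAME_FNNAME_ZU machinery defined
   elsewhere in this file.  'foo' and the NONE soname tag here are
   hypothetical, chosen only for the example:

   int I_WRAP_SONAME_FNNAME_ZU(NONE, foo) ( int x, int y )
   {
      int    r;
      OrigFn fn;
      VALGRIND_GET_ORIG_FN(fn);     // collect the address of the real foo
      CALL_FN_W_WW(r, fn, x, y);    // call it without re-redirection
      return r;
   }
*/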
1714
1715 #define CALL_FN_W_v(lval, orig) \
1716 do { \
1717 volatile OrigFn _orig = (orig); \
1718 volatile unsigned long _argvec[1]; \
1719 volatile unsigned long _res; \
1720 _argvec[0] = (unsigned long)_orig.nraddr; \
1721 __asm__ volatile( \
1722 "mr 11,%1\n\t" \
1723 "lwz 11,0(11)\n\t" /* target->r11 */ \
1724 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1725 "mr %0,3" \
1726 : /*out*/ "=r" (_res) \
1727 : /*in*/ "r" (&_argvec[0]) \
1728 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1729 ); \
1730 lval = (__typeof__(lval)) _res; \
1731 } while (0)
1732
1733 #define CALL_FN_W_W(lval, orig, arg1) \
1734 do { \
1735 volatile OrigFn _orig = (orig); \
1736 volatile unsigned long _argvec[2]; \
1737 volatile unsigned long _res; \
1738 _argvec[0] = (unsigned long)_orig.nraddr; \
1739 _argvec[1] = (unsigned long)arg1; \
1740 __asm__ volatile( \
1741 "mr 11,%1\n\t" \
1742 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1743 "lwz 11,0(11)\n\t" /* target->r11 */ \
1744 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1745 "mr %0,3" \
1746 : /*out*/ "=r" (_res) \
1747 : /*in*/ "r" (&_argvec[0]) \
1748 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1749 ); \
1750 lval = (__typeof__(lval)) _res; \
1751 } while (0)
1752
1753 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1754 do { \
1755 volatile OrigFn _orig = (orig); \
1756 volatile unsigned long _argvec[3]; \
1757 volatile unsigned long _res; \
1758 _argvec[0] = (unsigned long)_orig.nraddr; \
1759 _argvec[1] = (unsigned long)arg1; \
1760 _argvec[2] = (unsigned long)arg2; \
1761 __asm__ volatile( \
1762 "mr 11,%1\n\t" \
1763 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1764 "lwz 4,8(11)\n\t" \
1765 "lwz 11,0(11)\n\t" /* target->r11 */ \
1766 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1767 "mr %0,3" \
1768 : /*out*/ "=r" (_res) \
1769 : /*in*/ "r" (&_argvec[0]) \
1770 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1771 ); \
1772 lval = (__typeof__(lval)) _res; \
1773 } while (0)
1774
1775 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1776 do { \
1777 volatile OrigFn _orig = (orig); \
1778 volatile unsigned long _argvec[4]; \
1779 volatile unsigned long _res; \
1780 _argvec[0] = (unsigned long)_orig.nraddr; \
1781 _argvec[1] = (unsigned long)arg1; \
1782 _argvec[2] = (unsigned long)arg2; \
1783 _argvec[3] = (unsigned long)arg3; \
1784 __asm__ volatile( \
1785 "mr 11,%1\n\t" \
1786 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1787 "lwz 4,8(11)\n\t" \
1788 "lwz 5,12(11)\n\t" \
1789 "lwz 11,0(11)\n\t" /* target->r11 */ \
1790 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1791 "mr %0,3" \
1792 : /*out*/ "=r" (_res) \
1793 : /*in*/ "r" (&_argvec[0]) \
1794 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1795 ); \
1796 lval = (__typeof__(lval)) _res; \
1797 } while (0)
1798
1799 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1800 do { \
1801 volatile OrigFn _orig = (orig); \
1802 volatile unsigned long _argvec[5]; \
1803 volatile unsigned long _res; \
1804 _argvec[0] = (unsigned long)_orig.nraddr; \
1805 _argvec[1] = (unsigned long)arg1; \
1806 _argvec[2] = (unsigned long)arg2; \
1807 _argvec[3] = (unsigned long)arg3; \
1808 _argvec[4] = (unsigned long)arg4; \
1809 __asm__ volatile( \
1810 "mr 11,%1\n\t" \
1811 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1812 "lwz 4,8(11)\n\t" \
1813 "lwz 5,12(11)\n\t" \
1814 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1815 "lwz 11,0(11)\n\t" /* target->r11 */ \
1816 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1817 "mr %0,3" \
1818 : /*out*/ "=r" (_res) \
1819 : /*in*/ "r" (&_argvec[0]) \
1820 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1821 ); \
1822 lval = (__typeof__(lval)) _res; \
1823 } while (0)
1824
1825 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1826 do { \
1827 volatile OrigFn _orig = (orig); \
1828 volatile unsigned long _argvec[6]; \
1829 volatile unsigned long _res; \
1830 _argvec[0] = (unsigned long)_orig.nraddr; \
1831 _argvec[1] = (unsigned long)arg1; \
1832 _argvec[2] = (unsigned long)arg2; \
1833 _argvec[3] = (unsigned long)arg3; \
1834 _argvec[4] = (unsigned long)arg4; \
1835 _argvec[5] = (unsigned long)arg5; \
1836 __asm__ volatile( \
1837 "mr 11,%1\n\t" \
1838 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1839 "lwz 4,8(11)\n\t" \
1840 "lwz 5,12(11)\n\t" \
1841 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1842 "lwz 7,20(11)\n\t" \
1843 "lwz 11,0(11)\n\t" /* target->r11 */ \
1844 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1845 "mr %0,3" \
1846 : /*out*/ "=r" (_res) \
1847 : /*in*/ "r" (&_argvec[0]) \
1848 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1849 ); \
1850 lval = (__typeof__(lval)) _res; \
1851 } while (0)
1852
1853 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1854 do { \
1855 volatile OrigFn _orig = (orig); \
1856 volatile unsigned long _argvec[7]; \
1857 volatile unsigned long _res; \
1858 _argvec[0] = (unsigned long)_orig.nraddr; \
1859 _argvec[1] = (unsigned long)arg1; \
1860 _argvec[2] = (unsigned long)arg2; \
1861 _argvec[3] = (unsigned long)arg3; \
1862 _argvec[4] = (unsigned long)arg4; \
1863 _argvec[5] = (unsigned long)arg5; \
1864 _argvec[6] = (unsigned long)arg6; \
1865 __asm__ volatile( \
1866 "mr 11,%1\n\t" \
1867 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1868 "lwz 4,8(11)\n\t" \
1869 "lwz 5,12(11)\n\t" \
1870 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1871 "lwz 7,20(11)\n\t" \
1872 "lwz 8,24(11)\n\t" \
1873 "lwz 11,0(11)\n\t" /* target->r11 */ \
1874 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1875 "mr %0,3" \
1876 : /*out*/ "=r" (_res) \
1877 : /*in*/ "r" (&_argvec[0]) \
1878 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1879 ); \
1880 lval = (__typeof__(lval)) _res; \
1881 } while (0)
1882
1883 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1884 arg7) \
1885 do { \
1886 volatile OrigFn _orig = (orig); \
1887 volatile unsigned long _argvec[8]; \
1888 volatile unsigned long _res; \
1889 _argvec[0] = (unsigned long)_orig.nraddr; \
1890 _argvec[1] = (unsigned long)arg1; \
1891 _argvec[2] = (unsigned long)arg2; \
1892 _argvec[3] = (unsigned long)arg3; \
1893 _argvec[4] = (unsigned long)arg4; \
1894 _argvec[5] = (unsigned long)arg5; \
1895 _argvec[6] = (unsigned long)arg6; \
1896 _argvec[7] = (unsigned long)arg7; \
1897 __asm__ volatile( \
1898 "mr 11,%1\n\t" \
1899 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1900 "lwz 4,8(11)\n\t" \
1901 "lwz 5,12(11)\n\t" \
1902 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1903 "lwz 7,20(11)\n\t" \
1904 "lwz 8,24(11)\n\t" \
1905 "lwz 9,28(11)\n\t" \
1906 "lwz 11,0(11)\n\t" /* target->r11 */ \
1907 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1908 "mr %0,3" \
1909 : /*out*/ "=r" (_res) \
1910 : /*in*/ "r" (&_argvec[0]) \
1911 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1912 ); \
1913 lval = (__typeof__(lval)) _res; \
1914 } while (0)
1915
1916 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1917 arg7,arg8) \
1918 do { \
1919 volatile OrigFn _orig = (orig); \
1920 volatile unsigned long _argvec[9]; \
1921 volatile unsigned long _res; \
1922 _argvec[0] = (unsigned long)_orig.nraddr; \
1923 _argvec[1] = (unsigned long)arg1; \
1924 _argvec[2] = (unsigned long)arg2; \
1925 _argvec[3] = (unsigned long)arg3; \
1926 _argvec[4] = (unsigned long)arg4; \
1927 _argvec[5] = (unsigned long)arg5; \
1928 _argvec[6] = (unsigned long)arg6; \
1929 _argvec[7] = (unsigned long)arg7; \
1930 _argvec[8] = (unsigned long)arg8; \
1931 __asm__ volatile( \
1932 "mr 11,%1\n\t" \
1933 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1934 "lwz 4,8(11)\n\t" \
1935 "lwz 5,12(11)\n\t" \
1936 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1937 "lwz 7,20(11)\n\t" \
1938 "lwz 8,24(11)\n\t" \
1939 "lwz 9,28(11)\n\t" \
1940 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
1941 "lwz 11,0(11)\n\t" /* target->r11 */ \
1942 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1943 "mr %0,3" \
1944 : /*out*/ "=r" (_res) \
1945 : /*in*/ "r" (&_argvec[0]) \
1946 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1947 ); \
1948 lval = (__typeof__(lval)) _res; \
1949 } while (0)
1950
1951 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1952 arg7,arg8,arg9) \
1953 do { \
1954 volatile OrigFn _orig = (orig); \
1955 volatile unsigned long _argvec[10]; \
1956 volatile unsigned long _res; \
1957 _argvec[0] = (unsigned long)_orig.nraddr; \
1958 _argvec[1] = (unsigned long)arg1; \
1959 _argvec[2] = (unsigned long)arg2; \
1960 _argvec[3] = (unsigned long)arg3; \
1961 _argvec[4] = (unsigned long)arg4; \
1962 _argvec[5] = (unsigned long)arg5; \
1963 _argvec[6] = (unsigned long)arg6; \
1964 _argvec[7] = (unsigned long)arg7; \
1965 _argvec[8] = (unsigned long)arg8; \
1966 _argvec[9] = (unsigned long)arg9; \
1967 __asm__ volatile( \
1968 "mr 11,%1\n\t" \
1969 "addi 1,1,-16\n\t" \
1970 /* arg9 */ \
1971 "lwz 3,36(11)\n\t" \
1972 "stw 3,8(1)\n\t" \
1973 /* args1-8 */ \
1974 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1975 "lwz 4,8(11)\n\t" \
1976 "lwz 5,12(11)\n\t" \
1977 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1978 "lwz 7,20(11)\n\t" \
1979 "lwz 8,24(11)\n\t" \
1980 "lwz 9,28(11)\n\t" \
1981 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
1982 "lwz 11,0(11)\n\t" /* target->r11 */ \
1983 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1984 "addi 1,1,16\n\t" \
1985 "mr %0,3" \
1986 : /*out*/ "=r" (_res) \
1987 : /*in*/ "r" (&_argvec[0]) \
1988 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1989 ); \
1990 lval = (__typeof__(lval)) _res; \
1991 } while (0)
1992
1993 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1994 arg7,arg8,arg9,arg10) \
1995 do { \
1996 volatile OrigFn _orig = (orig); \
1997 volatile unsigned long _argvec[11]; \
1998 volatile unsigned long _res; \
1999 _argvec[0] = (unsigned long)_orig.nraddr; \
2000 _argvec[1] = (unsigned long)arg1; \
2001 _argvec[2] = (unsigned long)arg2; \
2002 _argvec[3] = (unsigned long)arg3; \
2003 _argvec[4] = (unsigned long)arg4; \
2004 _argvec[5] = (unsigned long)arg5; \
2005 _argvec[6] = (unsigned long)arg6; \
2006 _argvec[7] = (unsigned long)arg7; \
2007 _argvec[8] = (unsigned long)arg8; \
2008 _argvec[9] = (unsigned long)arg9; \
2009 _argvec[10] = (unsigned long)arg10; \
2010 __asm__ volatile( \
2011 "mr 11,%1\n\t" \
2012 "addi 1,1,-16\n\t" \
2013 /* arg10 */ \
2014 "lwz 3,40(11)\n\t" \
2015 "stw 3,12(1)\n\t" \
2016 /* arg9 */ \
2017 "lwz 3,36(11)\n\t" \
2018 "stw 3,8(1)\n\t" \
2019 /* args1-8 */ \
2020 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2021 "lwz 4,8(11)\n\t" \
2022 "lwz 5,12(11)\n\t" \
2023 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2024 "lwz 7,20(11)\n\t" \
2025 "lwz 8,24(11)\n\t" \
2026 "lwz 9,28(11)\n\t" \
2027 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2028 "lwz 11,0(11)\n\t" /* target->r11 */ \
2029 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2030 "addi 1,1,16\n\t" \
2031 "mr %0,3" \
2032 : /*out*/ "=r" (_res) \
2033 : /*in*/ "r" (&_argvec[0]) \
2034 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2035 ); \
2036 lval = (__typeof__(lval)) _res; \
2037 } while (0)
2038
2039 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2040 arg7,arg8,arg9,arg10,arg11) \
2041 do { \
2042 volatile OrigFn _orig = (orig); \
2043 volatile unsigned long _argvec[12]; \
2044 volatile unsigned long _res; \
2045 _argvec[0] = (unsigned long)_orig.nraddr; \
2046 _argvec[1] = (unsigned long)arg1; \
2047 _argvec[2] = (unsigned long)arg2; \
2048 _argvec[3] = (unsigned long)arg3; \
2049 _argvec[4] = (unsigned long)arg4; \
2050 _argvec[5] = (unsigned long)arg5; \
2051 _argvec[6] = (unsigned long)arg6; \
2052 _argvec[7] = (unsigned long)arg7; \
2053 _argvec[8] = (unsigned long)arg8; \
2054 _argvec[9] = (unsigned long)arg9; \
2055 _argvec[10] = (unsigned long)arg10; \
2056 _argvec[11] = (unsigned long)arg11; \
2057 __asm__ volatile( \
2058 "mr 11,%1\n\t" \
2059 "addi 1,1,-32\n\t" \
2060 /* arg11 */ \
2061 "lwz 3,44(11)\n\t" \
2062 "stw 3,16(1)\n\t" \
2063 /* arg10 */ \
2064 "lwz 3,40(11)\n\t" \
2065 "stw 3,12(1)\n\t" \
2066 /* arg9 */ \
2067 "lwz 3,36(11)\n\t" \
2068 "stw 3,8(1)\n\t" \
2069 /* args1-8 */ \
2070 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2071 "lwz 4,8(11)\n\t" \
2072 "lwz 5,12(11)\n\t" \
2073 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2074 "lwz 7,20(11)\n\t" \
2075 "lwz 8,24(11)\n\t" \
2076 "lwz 9,28(11)\n\t" \
2077 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2078 "lwz 11,0(11)\n\t" /* target->r11 */ \
2079 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2080 "addi 1,1,32\n\t" \
2081 "mr %0,3" \
2082 : /*out*/ "=r" (_res) \
2083 : /*in*/ "r" (&_argvec[0]) \
2084 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2085 ); \
2086 lval = (__typeof__(lval)) _res; \
2087 } while (0)
2088
2089 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2090 arg7,arg8,arg9,arg10,arg11,arg12) \
2091 do { \
2092 volatile OrigFn _orig = (orig); \
2093 volatile unsigned long _argvec[13]; \
2094 volatile unsigned long _res; \
2095 _argvec[0] = (unsigned long)_orig.nraddr; \
2096 _argvec[1] = (unsigned long)arg1; \
2097 _argvec[2] = (unsigned long)arg2; \
2098 _argvec[3] = (unsigned long)arg3; \
2099 _argvec[4] = (unsigned long)arg4; \
2100 _argvec[5] = (unsigned long)arg5; \
2101 _argvec[6] = (unsigned long)arg6; \
2102 _argvec[7] = (unsigned long)arg7; \
2103 _argvec[8] = (unsigned long)arg8; \
2104 _argvec[9] = (unsigned long)arg9; \
2105 _argvec[10] = (unsigned long)arg10; \
2106 _argvec[11] = (unsigned long)arg11; \
2107 _argvec[12] = (unsigned long)arg12; \
2108 __asm__ volatile( \
2109 "mr 11,%1\n\t" \
2110 "addi 1,1,-32\n\t" \
2111 /* arg12 */ \
2112 "lwz 3,48(11)\n\t" \
2113 "stw 3,20(1)\n\t" \
2114 /* arg11 */ \
2115 "lwz 3,44(11)\n\t" \
2116 "stw 3,16(1)\n\t" \
2117 /* arg10 */ \
2118 "lwz 3,40(11)\n\t" \
2119 "stw 3,12(1)\n\t" \
2120 /* arg9 */ \
2121 "lwz 3,36(11)\n\t" \
2122 "stw 3,8(1)\n\t" \
2123 /* args1-8 */ \
2124 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2125 "lwz 4,8(11)\n\t" \
2126 "lwz 5,12(11)\n\t" \
2127 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2128 "lwz 7,20(11)\n\t" \
2129 "lwz 8,24(11)\n\t" \
2130 "lwz 9,28(11)\n\t" \
2131 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2132 "lwz 11,0(11)\n\t" /* target->r11 */ \
2133 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2134 "addi 1,1,32\n\t" \
2135 "mr %0,3" \
2136 : /*out*/ "=r" (_res) \
2137 : /*in*/ "r" (&_argvec[0]) \
2138 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2139 ); \
2140 lval = (__typeof__(lval)) _res; \
2141 } while (0)
2142
2143 #endif /* PLAT_ppc32_linux */
2144
2145 /* ------------------------ ppc64-linux ------------------------ */
2146
2147 #if defined(PLAT_ppc64_linux)
2148
/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest are passed on the stack) */
2150
2151 /* These regs are trashed by the hidden call. */
2152 #define __CALLER_SAVED_REGS \
2153 "lr", "ctr", "xer", \
2154 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2155 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2156 "r11", "r12", "r13"
2157
2158 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2159 long) == 8. */
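
/* A sketch of the _argvec layout these macros rely on (readable
   straight off the loads and stores below): the asm is handed
   &_argvec[2] in r11, so
      -16(11) = _argvec[0]  scratch slot; the caller's r2 is saved here
       -8(11) = _argvec[1]  the callee's TOC pointer (_orig.r2)
        0(11) = _argvec[2]  the callee's entry point (_orig.nraddr)
        8(11) = _argvec[3]  arg1, 16(11) = arg2, and so on.
   r2 is switched to the callee's TOC before the call and restored
   from -16(11) afterwards. */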
2160
2161 #define CALL_FN_W_v(lval, orig) \
2162 do { \
2163 volatile OrigFn _orig = (orig); \
2164 volatile unsigned long _argvec[3+0]; \
2165 volatile unsigned long _res; \
2166 /* _argvec[0] holds current r2 across the call */ \
2167 _argvec[1] = (unsigned long)_orig.r2; \
2168 _argvec[2] = (unsigned long)_orig.nraddr; \
2169 __asm__ volatile( \
2170 "mr 11,%1\n\t" \
2171 "std 2,-16(11)\n\t" /* save tocptr */ \
2172 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2173 "ld 11, 0(11)\n\t" /* target->r11 */ \
2174 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2175 "mr 11,%1\n\t" \
2176 "mr %0,3\n\t" \
2177 "ld 2,-16(11)" /* restore tocptr */ \
2178 : /*out*/ "=r" (_res) \
2179 : /*in*/ "r" (&_argvec[2]) \
2180 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2181 ); \
2182 lval = (__typeof__(lval)) _res; \
2183 } while (0)
2184
2185 #define CALL_FN_W_W(lval, orig, arg1) \
2186 do { \
2187 volatile OrigFn _orig = (orig); \
2188 volatile unsigned long _argvec[3+1]; \
2189 volatile unsigned long _res; \
2190 /* _argvec[0] holds current r2 across the call */ \
2191 _argvec[1] = (unsigned long)_orig.r2; \
2192 _argvec[2] = (unsigned long)_orig.nraddr; \
2193 _argvec[2+1] = (unsigned long)arg1; \
2194 __asm__ volatile( \
2195 "mr 11,%1\n\t" \
2196 "std 2,-16(11)\n\t" /* save tocptr */ \
2197 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2198 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2199 "ld 11, 0(11)\n\t" /* target->r11 */ \
2200 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2201 "mr 11,%1\n\t" \
2202 "mr %0,3\n\t" \
2203 "ld 2,-16(11)" /* restore tocptr */ \
2204 : /*out*/ "=r" (_res) \
2205 : /*in*/ "r" (&_argvec[2]) \
2206 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2207 ); \
2208 lval = (__typeof__(lval)) _res; \
2209 } while (0)
2210
2211 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2212 do { \
2213 volatile OrigFn _orig = (orig); \
2214 volatile unsigned long _argvec[3+2]; \
2215 volatile unsigned long _res; \
2216 /* _argvec[0] holds current r2 across the call */ \
2217 _argvec[1] = (unsigned long)_orig.r2; \
2218 _argvec[2] = (unsigned long)_orig.nraddr; \
2219 _argvec[2+1] = (unsigned long)arg1; \
2220 _argvec[2+2] = (unsigned long)arg2; \
2221 __asm__ volatile( \
2222 "mr 11,%1\n\t" \
2223 "std 2,-16(11)\n\t" /* save tocptr */ \
2224 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2225 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2226 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2227 "ld 11, 0(11)\n\t" /* target->r11 */ \
2228 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2229 "mr 11,%1\n\t" \
2230 "mr %0,3\n\t" \
2231 "ld 2,-16(11)" /* restore tocptr */ \
2232 : /*out*/ "=r" (_res) \
2233 : /*in*/ "r" (&_argvec[2]) \
2234 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2235 ); \
2236 lval = (__typeof__(lval)) _res; \
2237 } while (0)
2238
2239 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2240 do { \
2241 volatile OrigFn _orig = (orig); \
2242 volatile unsigned long _argvec[3+3]; \
2243 volatile unsigned long _res; \
2244 /* _argvec[0] holds current r2 across the call */ \
2245 _argvec[1] = (unsigned long)_orig.r2; \
2246 _argvec[2] = (unsigned long)_orig.nraddr; \
2247 _argvec[2+1] = (unsigned long)arg1; \
2248 _argvec[2+2] = (unsigned long)arg2; \
2249 _argvec[2+3] = (unsigned long)arg3; \
2250 __asm__ volatile( \
2251 "mr 11,%1\n\t" \
2252 "std 2,-16(11)\n\t" /* save tocptr */ \
2253 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2254 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2255 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2256 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2257 "ld 11, 0(11)\n\t" /* target->r11 */ \
2258 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2259 "mr 11,%1\n\t" \
2260 "mr %0,3\n\t" \
2261 "ld 2,-16(11)" /* restore tocptr */ \
2262 : /*out*/ "=r" (_res) \
2263 : /*in*/ "r" (&_argvec[2]) \
2264 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2265 ); \
2266 lval = (__typeof__(lval)) _res; \
2267 } while (0)
2268
2269 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2270 do { \
2271 volatile OrigFn _orig = (orig); \
2272 volatile unsigned long _argvec[3+4]; \
2273 volatile unsigned long _res; \
2274 /* _argvec[0] holds current r2 across the call */ \
2275 _argvec[1] = (unsigned long)_orig.r2; \
2276 _argvec[2] = (unsigned long)_orig.nraddr; \
2277 _argvec[2+1] = (unsigned long)arg1; \
2278 _argvec[2+2] = (unsigned long)arg2; \
2279 _argvec[2+3] = (unsigned long)arg3; \
2280 _argvec[2+4] = (unsigned long)arg4; \
2281 __asm__ volatile( \
2282 "mr 11,%1\n\t" \
2283 "std 2,-16(11)\n\t" /* save tocptr */ \
2284 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2285 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2286 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2287 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2288 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2289 "ld 11, 0(11)\n\t" /* target->r11 */ \
2290 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2291 "mr 11,%1\n\t" \
2292 "mr %0,3\n\t" \
2293 "ld 2,-16(11)" /* restore tocptr */ \
2294 : /*out*/ "=r" (_res) \
2295 : /*in*/ "r" (&_argvec[2]) \
2296 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2297 ); \
2298 lval = (__typeof__(lval)) _res; \
2299 } while (0)
2300
2301 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2302 do { \
2303 volatile OrigFn _orig = (orig); \
2304 volatile unsigned long _argvec[3+5]; \
2305 volatile unsigned long _res; \
2306 /* _argvec[0] holds current r2 across the call */ \
2307 _argvec[1] = (unsigned long)_orig.r2; \
2308 _argvec[2] = (unsigned long)_orig.nraddr; \
2309 _argvec[2+1] = (unsigned long)arg1; \
2310 _argvec[2+2] = (unsigned long)arg2; \
2311 _argvec[2+3] = (unsigned long)arg3; \
2312 _argvec[2+4] = (unsigned long)arg4; \
2313 _argvec[2+5] = (unsigned long)arg5; \
2314 __asm__ volatile( \
2315 "mr 11,%1\n\t" \
2316 "std 2,-16(11)\n\t" /* save tocptr */ \
2317 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2318 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2319 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2320 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2321 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2322 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2323 "ld 11, 0(11)\n\t" /* target->r11 */ \
2324 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2325 "mr 11,%1\n\t" \
2326 "mr %0,3\n\t" \
2327 "ld 2,-16(11)" /* restore tocptr */ \
2328 : /*out*/ "=r" (_res) \
2329 : /*in*/ "r" (&_argvec[2]) \
2330 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2331 ); \
2332 lval = (__typeof__(lval)) _res; \
2333 } while (0)
2334
2335 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2336 do { \
2337 volatile OrigFn _orig = (orig); \
2338 volatile unsigned long _argvec[3+6]; \
2339 volatile unsigned long _res; \
2340 /* _argvec[0] holds current r2 across the call */ \
2341 _argvec[1] = (unsigned long)_orig.r2; \
2342 _argvec[2] = (unsigned long)_orig.nraddr; \
2343 _argvec[2+1] = (unsigned long)arg1; \
2344 _argvec[2+2] = (unsigned long)arg2; \
2345 _argvec[2+3] = (unsigned long)arg3; \
2346 _argvec[2+4] = (unsigned long)arg4; \
2347 _argvec[2+5] = (unsigned long)arg5; \
2348 _argvec[2+6] = (unsigned long)arg6; \
2349 __asm__ volatile( \
2350 "mr 11,%1\n\t" \
2351 "std 2,-16(11)\n\t" /* save tocptr */ \
2352 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2353 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2354 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2355 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2356 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2357 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2358 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2359 "ld 11, 0(11)\n\t" /* target->r11 */ \
2360 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2361 "mr 11,%1\n\t" \
2362 "mr %0,3\n\t" \
2363 "ld 2,-16(11)" /* restore tocptr */ \
2364 : /*out*/ "=r" (_res) \
2365 : /*in*/ "r" (&_argvec[2]) \
2366 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2367 ); \
2368 lval = (__typeof__(lval)) _res; \
2369 } while (0)
2370
2371 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2372 arg7) \
2373 do { \
2374 volatile OrigFn _orig = (orig); \
2375 volatile unsigned long _argvec[3+7]; \
2376 volatile unsigned long _res; \
2377 /* _argvec[0] holds current r2 across the call */ \
2378 _argvec[1] = (unsigned long)_orig.r2; \
2379 _argvec[2] = (unsigned long)_orig.nraddr; \
2380 _argvec[2+1] = (unsigned long)arg1; \
2381 _argvec[2+2] = (unsigned long)arg2; \
2382 _argvec[2+3] = (unsigned long)arg3; \
2383 _argvec[2+4] = (unsigned long)arg4; \
2384 _argvec[2+5] = (unsigned long)arg5; \
2385 _argvec[2+6] = (unsigned long)arg6; \
2386 _argvec[2+7] = (unsigned long)arg7; \
2387 __asm__ volatile( \
2388 "mr 11,%1\n\t" \
2389 "std 2,-16(11)\n\t" /* save tocptr */ \
2390 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2391 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2392 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2393 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2394 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2395 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2396 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2397 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2398 "ld 11, 0(11)\n\t" /* target->r11 */ \
2399 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2400 "mr 11,%1\n\t" \
2401 "mr %0,3\n\t" \
2402 "ld 2,-16(11)" /* restore tocptr */ \
2403 : /*out*/ "=r" (_res) \
2404 : /*in*/ "r" (&_argvec[2]) \
2405 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2406 ); \
2407 lval = (__typeof__(lval)) _res; \
2408 } while (0)
2409
2410 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2411 arg7,arg8) \
2412 do { \
2413 volatile OrigFn _orig = (orig); \
2414 volatile unsigned long _argvec[3+8]; \
2415 volatile unsigned long _res; \
2416 /* _argvec[0] holds current r2 across the call */ \
2417 _argvec[1] = (unsigned long)_orig.r2; \
2418 _argvec[2] = (unsigned long)_orig.nraddr; \
2419 _argvec[2+1] = (unsigned long)arg1; \
2420 _argvec[2+2] = (unsigned long)arg2; \
2421 _argvec[2+3] = (unsigned long)arg3; \
2422 _argvec[2+4] = (unsigned long)arg4; \
2423 _argvec[2+5] = (unsigned long)arg5; \
2424 _argvec[2+6] = (unsigned long)arg6; \
2425 _argvec[2+7] = (unsigned long)arg7; \
2426 _argvec[2+8] = (unsigned long)arg8; \
2427 __asm__ volatile( \
2428 "mr 11,%1\n\t" \
2429 "std 2,-16(11)\n\t" /* save tocptr */ \
2430 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2431 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2432 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2433 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2434 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2435 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2436 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2437 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2438 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2439 "ld 11, 0(11)\n\t" /* target->r11 */ \
2440 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2441 "mr 11,%1\n\t" \
2442 "mr %0,3\n\t" \
2443 "ld 2,-16(11)" /* restore tocptr */ \
2444 : /*out*/ "=r" (_res) \
2445 : /*in*/ "r" (&_argvec[2]) \
2446 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2447 ); \
2448 lval = (__typeof__(lval)) _res; \
2449 } while (0)
2450
2451 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2452 arg7,arg8,arg9) \
2453 do { \
2454 volatile OrigFn _orig = (orig); \
2455 volatile unsigned long _argvec[3+9]; \
2456 volatile unsigned long _res; \
2457 /* _argvec[0] holds current r2 across the call */ \
2458 _argvec[1] = (unsigned long)_orig.r2; \
2459 _argvec[2] = (unsigned long)_orig.nraddr; \
2460 _argvec[2+1] = (unsigned long)arg1; \
2461 _argvec[2+2] = (unsigned long)arg2; \
2462 _argvec[2+3] = (unsigned long)arg3; \
2463 _argvec[2+4] = (unsigned long)arg4; \
2464 _argvec[2+5] = (unsigned long)arg5; \
2465 _argvec[2+6] = (unsigned long)arg6; \
2466 _argvec[2+7] = (unsigned long)arg7; \
2467 _argvec[2+8] = (unsigned long)arg8; \
2468 _argvec[2+9] = (unsigned long)arg9; \
2469 __asm__ volatile( \
2470 "mr 11,%1\n\t" \
2471 "std 2,-16(11)\n\t" /* save tocptr */ \
2472 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2473 "addi 1,1,-128\n\t" /* expand stack frame */ \
2474 /* arg9 */ \
2475 "ld 3,72(11)\n\t" \
2476 "std 3,112(1)\n\t" \
2477 /* args1-8 */ \
2478 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2479 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2480 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2481 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2482 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2483 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2484 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2485 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2486 "ld 11, 0(11)\n\t" /* target->r11 */ \
2487 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2488 "mr 11,%1\n\t" \
2489 "mr %0,3\n\t" \
2490 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2491 "addi 1,1,128" /* restore frame */ \
2492 : /*out*/ "=r" (_res) \
2493 : /*in*/ "r" (&_argvec[2]) \
2494 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2495 ); \
2496 lval = (__typeof__(lval)) _res; \
2497 } while (0)
2498
2499 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2500 arg7,arg8,arg9,arg10) \
2501 do { \
2502 volatile OrigFn _orig = (orig); \
2503 volatile unsigned long _argvec[3+10]; \
2504 volatile unsigned long _res; \
2505 /* _argvec[0] holds current r2 across the call */ \
2506 _argvec[1] = (unsigned long)_orig.r2; \
2507 _argvec[2] = (unsigned long)_orig.nraddr; \
2508 _argvec[2+1] = (unsigned long)arg1; \
2509 _argvec[2+2] = (unsigned long)arg2; \
2510 _argvec[2+3] = (unsigned long)arg3; \
2511 _argvec[2+4] = (unsigned long)arg4; \
2512 _argvec[2+5] = (unsigned long)arg5; \
2513 _argvec[2+6] = (unsigned long)arg6; \
2514 _argvec[2+7] = (unsigned long)arg7; \
2515 _argvec[2+8] = (unsigned long)arg8; \
2516 _argvec[2+9] = (unsigned long)arg9; \
2517 _argvec[2+10] = (unsigned long)arg10; \
2518 __asm__ volatile( \
2519 "mr 11,%1\n\t" \
2520 "std 2,-16(11)\n\t" /* save tocptr */ \
2521 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2522 "addi 1,1,-128\n\t" /* expand stack frame */ \
2523 /* arg10 */ \
2524 "ld 3,80(11)\n\t" \
2525 "std 3,120(1)\n\t" \
2526 /* arg9 */ \
2527 "ld 3,72(11)\n\t" \
2528 "std 3,112(1)\n\t" \
2529 /* args1-8 */ \
2530 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2531 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2532 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2533 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2534 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2535 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2536 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2537 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2538 "ld 11, 0(11)\n\t" /* target->r11 */ \
2539 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2540 "mr 11,%1\n\t" \
2541 "mr %0,3\n\t" \
2542 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2543 "addi 1,1,128" /* restore frame */ \
2544 : /*out*/ "=r" (_res) \
2545 : /*in*/ "r" (&_argvec[2]) \
2546 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2547 ); \
2548 lval = (__typeof__(lval)) _res; \
2549 } while (0)
2550
2551 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2552 arg7,arg8,arg9,arg10,arg11) \
2553 do { \
2554 volatile OrigFn _orig = (orig); \
2555 volatile unsigned long _argvec[3+11]; \
2556 volatile unsigned long _res; \
2557 /* _argvec[0] holds current r2 across the call */ \
2558 _argvec[1] = (unsigned long)_orig.r2; \
2559 _argvec[2] = (unsigned long)_orig.nraddr; \
2560 _argvec[2+1] = (unsigned long)arg1; \
2561 _argvec[2+2] = (unsigned long)arg2; \
2562 _argvec[2+3] = (unsigned long)arg3; \
2563 _argvec[2+4] = (unsigned long)arg4; \
2564 _argvec[2+5] = (unsigned long)arg5; \
2565 _argvec[2+6] = (unsigned long)arg6; \
2566 _argvec[2+7] = (unsigned long)arg7; \
2567 _argvec[2+8] = (unsigned long)arg8; \
2568 _argvec[2+9] = (unsigned long)arg9; \
2569 _argvec[2+10] = (unsigned long)arg10; \
2570 _argvec[2+11] = (unsigned long)arg11; \
2571 __asm__ volatile( \
2572 "mr 11,%1\n\t" \
2573 "std 2,-16(11)\n\t" /* save tocptr */ \
2574 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2575 "addi 1,1,-144\n\t" /* expand stack frame */ \
2576 /* arg11 */ \
2577 "ld 3,88(11)\n\t" \
2578 "std 3,128(1)\n\t" \
2579 /* arg10 */ \
2580 "ld 3,80(11)\n\t" \
2581 "std 3,120(1)\n\t" \
2582 /* arg9 */ \
2583 "ld 3,72(11)\n\t" \
2584 "std 3,112(1)\n\t" \
2585 /* args1-8 */ \
2586 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2587 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2588 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2589 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2590 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2591 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2592 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2593 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2594 "ld 11, 0(11)\n\t" /* target->r11 */ \
2595 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2596 "mr 11,%1\n\t" \
2597 "mr %0,3\n\t" \
2598 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2599 "addi 1,1,144" /* restore frame */ \
2600 : /*out*/ "=r" (_res) \
2601 : /*in*/ "r" (&_argvec[2]) \
2602 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2603 ); \
2604 lval = (__typeof__(lval)) _res; \
2605 } while (0)
2606
2607 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2608 arg7,arg8,arg9,arg10,arg11,arg12) \
2609 do { \
2610 volatile OrigFn _orig = (orig); \
2611 volatile unsigned long _argvec[3+12]; \
2612 volatile unsigned long _res; \
2613 /* _argvec[0] holds current r2 across the call */ \
2614 _argvec[1] = (unsigned long)_orig.r2; \
2615 _argvec[2] = (unsigned long)_orig.nraddr; \
2616 _argvec[2+1] = (unsigned long)arg1; \
2617 _argvec[2+2] = (unsigned long)arg2; \
2618 _argvec[2+3] = (unsigned long)arg3; \
2619 _argvec[2+4] = (unsigned long)arg4; \
2620 _argvec[2+5] = (unsigned long)arg5; \
2621 _argvec[2+6] = (unsigned long)arg6; \
2622 _argvec[2+7] = (unsigned long)arg7; \
2623 _argvec[2+8] = (unsigned long)arg8; \
2624 _argvec[2+9] = (unsigned long)arg9; \
2625 _argvec[2+10] = (unsigned long)arg10; \
2626 _argvec[2+11] = (unsigned long)arg11; \
2627 _argvec[2+12] = (unsigned long)arg12; \
2628 __asm__ volatile( \
2629 "mr 11,%1\n\t" \
2630 "std 2,-16(11)\n\t" /* save tocptr */ \
2631 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2632 "addi 1,1,-144\n\t" /* expand stack frame */ \
2633 /* arg12 */ \
2634 "ld 3,96(11)\n\t" \
2635 "std 3,136(1)\n\t" \
2636 /* arg11 */ \
2637 "ld 3,88(11)\n\t" \
2638 "std 3,128(1)\n\t" \
2639 /* arg10 */ \
2640 "ld 3,80(11)\n\t" \
2641 "std 3,120(1)\n\t" \
2642 /* arg9 */ \
2643 "ld 3,72(11)\n\t" \
2644 "std 3,112(1)\n\t" \
2645 /* args1-8 */ \
2646 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2647 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2648 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2649 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2650 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2651 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2652 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2653 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2654 "ld 11, 0(11)\n\t" /* target->r11 */ \
2655 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2656 "mr 11,%1\n\t" \
2657 "mr %0,3\n\t" \
2658 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2659 "addi 1,1,144" /* restore frame */ \
2660 : /*out*/ "=r" (_res) \
2661 : /*in*/ "r" (&_argvec[2]) \
2662 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2663 ); \
2664 lval = (__typeof__(lval)) _res; \
2665 } while (0)
2666
2667 #endif /* PLAT_ppc64_linux */
2668
2669 /* ------------------------- arm-linux ------------------------- */
2670
2671 #if defined(PLAT_arm_linux)
2672
2673 /* These regs are trashed by the hidden call. */
2674 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"
2675
2676 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
2677 long) == 4. */
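
/* A sketch of the layout assumed below (readable off the "ldr"
   instructions): %1 points at _argvec[0], so [%1] holds the call
   target (-> r4), [%1, #4] holds arg1 (-> r0), [%1, #8] arg2 (-> r1),
   and so on.  The fifth and later arguments are pushed onto the stack
   before the call, with arg5 ending up at the lowest address. */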
2678
2679 #define CALL_FN_W_v(lval, orig) \
2680 do { \
2681 volatile OrigFn _orig = (orig); \
2682 volatile unsigned long _argvec[1]; \
2683 volatile unsigned long _res; \
2684 _argvec[0] = (unsigned long)_orig.nraddr; \
2685 __asm__ volatile( \
2686 "ldr r4, [%1] \n\t" /* target->r4 */ \
2687 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2688 "mov %0, r0\n" \
2689 : /*out*/ "=r" (_res) \
2690 : /*in*/ "0" (&_argvec[0]) \
2691 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2692 ); \
2693 lval = (__typeof__(lval)) _res; \
2694 } while (0)
2695
2696 #define CALL_FN_W_W(lval, orig, arg1) \
2697 do { \
2698 volatile OrigFn _orig = (orig); \
2699 volatile unsigned long _argvec[2]; \
2700 volatile unsigned long _res; \
2701 _argvec[0] = (unsigned long)_orig.nraddr; \
2702 _argvec[1] = (unsigned long)(arg1); \
2703 __asm__ volatile( \
2704 "ldr r0, [%1, #4] \n\t" \
2705 "ldr r4, [%1] \n\t" /* target->r4 */ \
2706 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2707 "mov %0, r0\n" \
2708 : /*out*/ "=r" (_res) \
2709 : /*in*/ "0" (&_argvec[0]) \
2710 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2711 ); \
2712 lval = (__typeof__(lval)) _res; \
2713 } while (0)
2714
2715 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2716 do { \
2717 volatile OrigFn _orig = (orig); \
2718 volatile unsigned long _argvec[3]; \
2719 volatile unsigned long _res; \
2720 _argvec[0] = (unsigned long)_orig.nraddr; \
2721 _argvec[1] = (unsigned long)(arg1); \
2722 _argvec[2] = (unsigned long)(arg2); \
2723 __asm__ volatile( \
2724 "ldr r0, [%1, #4] \n\t" \
2725 "ldr r1, [%1, #8] \n\t" \
2726 "ldr r4, [%1] \n\t" /* target->r4 */ \
2727 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2728 "mov %0, r0\n" \
2729 : /*out*/ "=r" (_res) \
2730 : /*in*/ "0" (&_argvec[0]) \
2731 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2732 ); \
2733 lval = (__typeof__(lval)) _res; \
2734 } while (0)
2735
2736 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2737 do { \
2738 volatile OrigFn _orig = (orig); \
2739 volatile unsigned long _argvec[4]; \
2740 volatile unsigned long _res; \
2741 _argvec[0] = (unsigned long)_orig.nraddr; \
2742 _argvec[1] = (unsigned long)(arg1); \
2743 _argvec[2] = (unsigned long)(arg2); \
2744 _argvec[3] = (unsigned long)(arg3); \
2745 __asm__ volatile( \
2746 "ldr r0, [%1, #4] \n\t" \
2747 "ldr r1, [%1, #8] \n\t" \
2748 "ldr r2, [%1, #12] \n\t" \
2749 "ldr r4, [%1] \n\t" /* target->r4 */ \
2750 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2751 "mov %0, r0\n" \
2752 : /*out*/ "=r" (_res) \
2753 : /*in*/ "0" (&_argvec[0]) \
2754 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2755 ); \
2756 lval = (__typeof__(lval)) _res; \
2757 } while (0)
2758
2759 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2760 do { \
2761 volatile OrigFn _orig = (orig); \
2762 volatile unsigned long _argvec[5]; \
2763 volatile unsigned long _res; \
2764 _argvec[0] = (unsigned long)_orig.nraddr; \
2765 _argvec[1] = (unsigned long)(arg1); \
2766 _argvec[2] = (unsigned long)(arg2); \
2767 _argvec[3] = (unsigned long)(arg3); \
2768 _argvec[4] = (unsigned long)(arg4); \
2769 __asm__ volatile( \
2770 "ldr r0, [%1, #4] \n\t" \
2771 "ldr r1, [%1, #8] \n\t" \
2772 "ldr r2, [%1, #12] \n\t" \
2773 "ldr r3, [%1, #16] \n\t" \
2774 "ldr r4, [%1] \n\t" /* target->r4 */ \
2775 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2776 "mov %0, r0" \
2777 : /*out*/ "=r" (_res) \
2778 : /*in*/ "0" (&_argvec[0]) \
2779 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2780 ); \
2781 lval = (__typeof__(lval)) _res; \
2782 } while (0)
2783
2784 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2785 do { \
2786 volatile OrigFn _orig = (orig); \
2787 volatile unsigned long _argvec[6]; \
2788 volatile unsigned long _res; \
2789 _argvec[0] = (unsigned long)_orig.nraddr; \
2790 _argvec[1] = (unsigned long)(arg1); \
2791 _argvec[2] = (unsigned long)(arg2); \
2792 _argvec[3] = (unsigned long)(arg3); \
2793 _argvec[4] = (unsigned long)(arg4); \
2794 _argvec[5] = (unsigned long)(arg5); \
2795 __asm__ volatile( \
2796 "ldr r0, [%1, #20] \n\t" \
2797 "push {r0} \n\t" \
2798 "ldr r0, [%1, #4] \n\t" \
2799 "ldr r1, [%1, #8] \n\t" \
2800 "ldr r2, [%1, #12] \n\t" \
2801 "ldr r3, [%1, #16] \n\t" \
2802 "ldr r4, [%1] \n\t" /* target->r4 */ \
2803 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2804 "add sp, sp, #4 \n\t" \
2805 "mov %0, r0" \
2806 : /*out*/ "=r" (_res) \
2807 : /*in*/ "0" (&_argvec[0]) \
2808 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2809 ); \
2810 lval = (__typeof__(lval)) _res; \
2811 } while (0)
2812
2813 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2814 do { \
2815 volatile OrigFn _orig = (orig); \
2816 volatile unsigned long _argvec[7]; \
2817 volatile unsigned long _res; \
2818 _argvec[0] = (unsigned long)_orig.nraddr; \
2819 _argvec[1] = (unsigned long)(arg1); \
2820 _argvec[2] = (unsigned long)(arg2); \
2821 _argvec[3] = (unsigned long)(arg3); \
2822 _argvec[4] = (unsigned long)(arg4); \
2823 _argvec[5] = (unsigned long)(arg5); \
2824 _argvec[6] = (unsigned long)(arg6); \
2825 __asm__ volatile( \
2826 "ldr r0, [%1, #20] \n\t" \
2827 "ldr r1, [%1, #24] \n\t" \
2828 "push {r0, r1} \n\t" \
2829 "ldr r0, [%1, #4] \n\t" \
2830 "ldr r1, [%1, #8] \n\t" \
2831 "ldr r2, [%1, #12] \n\t" \
2832 "ldr r3, [%1, #16] \n\t" \
2833 "ldr r4, [%1] \n\t" /* target->r4 */ \
2834 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2835 "add sp, sp, #8 \n\t" \
2836 "mov %0, r0" \
2837 : /*out*/ "=r" (_res) \
2838 : /*in*/ "0" (&_argvec[0]) \
2839 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2840 ); \
2841 lval = (__typeof__(lval)) _res; \
2842 } while (0)
2843
2844 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2845 arg7) \
2846 do { \
2847 volatile OrigFn _orig = (orig); \
2848 volatile unsigned long _argvec[8]; \
2849 volatile unsigned long _res; \
2850 _argvec[0] = (unsigned long)_orig.nraddr; \
2851 _argvec[1] = (unsigned long)(arg1); \
2852 _argvec[2] = (unsigned long)(arg2); \
2853 _argvec[3] = (unsigned long)(arg3); \
2854 _argvec[4] = (unsigned long)(arg4); \
2855 _argvec[5] = (unsigned long)(arg5); \
2856 _argvec[6] = (unsigned long)(arg6); \
2857 _argvec[7] = (unsigned long)(arg7); \
2858 __asm__ volatile( \
2859 "ldr r0, [%1, #20] \n\t" \
2860 "ldr r1, [%1, #24] \n\t" \
2861 "ldr r2, [%1, #28] \n\t" \
2862 "push {r0, r1, r2} \n\t" \
2863 "ldr r0, [%1, #4] \n\t" \
2864 "ldr r1, [%1, #8] \n\t" \
2865 "ldr r2, [%1, #12] \n\t" \
2866 "ldr r3, [%1, #16] \n\t" \
2867 "ldr r4, [%1] \n\t" /* target->r4 */ \
2868 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2869 "add sp, sp, #12 \n\t" \
2870 "mov %0, r0" \
2871 : /*out*/ "=r" (_res) \
2872 : /*in*/ "0" (&_argvec[0]) \
2873 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2874 ); \
2875 lval = (__typeof__(lval)) _res; \
2876 } while (0)
2877
2878 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2879 arg7,arg8) \
2880 do { \
2881 volatile OrigFn _orig = (orig); \
2882 volatile unsigned long _argvec[9]; \
2883 volatile unsigned long _res; \
2884 _argvec[0] = (unsigned long)_orig.nraddr; \
2885 _argvec[1] = (unsigned long)(arg1); \
2886 _argvec[2] = (unsigned long)(arg2); \
2887 _argvec[3] = (unsigned long)(arg3); \
2888 _argvec[4] = (unsigned long)(arg4); \
2889 _argvec[5] = (unsigned long)(arg5); \
2890 _argvec[6] = (unsigned long)(arg6); \
2891 _argvec[7] = (unsigned long)(arg7); \
2892 _argvec[8] = (unsigned long)(arg8); \
2893 __asm__ volatile( \
2894 "ldr r0, [%1, #20] \n\t" \
2895 "ldr r1, [%1, #24] \n\t" \
2896 "ldr r2, [%1, #28] \n\t" \
2897 "ldr r3, [%1, #32] \n\t" \
2898 "push {r0, r1, r2, r3} \n\t" \
2899 "ldr r0, [%1, #4] \n\t" \
2900 "ldr r1, [%1, #8] \n\t" \
2901 "ldr r2, [%1, #12] \n\t" \
2902 "ldr r3, [%1, #16] \n\t" \
2903 "ldr r4, [%1] \n\t" /* target->r4 */ \
2904 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2905 "add sp, sp, #16 \n\t" \
2906 "mov %0, r0" \
2907 : /*out*/ "=r" (_res) \
2908 : /*in*/ "0" (&_argvec[0]) \
2909 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2910 ); \
2911 lval = (__typeof__(lval)) _res; \
2912 } while (0)
2913
2914 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2915 arg7,arg8,arg9) \
2916 do { \
2917 volatile OrigFn _orig = (orig); \
2918 volatile unsigned long _argvec[10]; \
2919 volatile unsigned long _res; \
2920 _argvec[0] = (unsigned long)_orig.nraddr; \
2921 _argvec[1] = (unsigned long)(arg1); \
2922 _argvec[2] = (unsigned long)(arg2); \
2923 _argvec[3] = (unsigned long)(arg3); \
2924 _argvec[4] = (unsigned long)(arg4); \
2925 _argvec[5] = (unsigned long)(arg5); \
2926 _argvec[6] = (unsigned long)(arg6); \
2927 _argvec[7] = (unsigned long)(arg7); \
2928 _argvec[8] = (unsigned long)(arg8); \
2929 _argvec[9] = (unsigned long)(arg9); \
2930 __asm__ volatile( \
2931 "ldr r0, [%1, #20] \n\t" \
2932 "ldr r1, [%1, #24] \n\t" \
2933 "ldr r2, [%1, #28] \n\t" \
2934 "ldr r3, [%1, #32] \n\t" \
2935 "ldr r4, [%1, #36] \n\t" \
2936 "push {r0, r1, r2, r3, r4} \n\t" \
2937 "ldr r0, [%1, #4] \n\t" \
2938 "ldr r1, [%1, #8] \n\t" \
2939 "ldr r2, [%1, #12] \n\t" \
2940 "ldr r3, [%1, #16] \n\t" \
2941 "ldr r4, [%1] \n\t" /* target->r4 */ \
2942 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2943 "add sp, sp, #20 \n\t" \
2944 "mov %0, r0" \
2945 : /*out*/ "=r" (_res) \
2946 : /*in*/ "0" (&_argvec[0]) \
2947 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2948 ); \
2949 lval = (__typeof__(lval)) _res; \
2950 } while (0)
2951
2952 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2953 arg7,arg8,arg9,arg10) \
2954 do { \
2955 volatile OrigFn _orig = (orig); \
2956 volatile unsigned long _argvec[11]; \
2957 volatile unsigned long _res; \
2958 _argvec[0] = (unsigned long)_orig.nraddr; \
2959 _argvec[1] = (unsigned long)(arg1); \
2960 _argvec[2] = (unsigned long)(arg2); \
2961 _argvec[3] = (unsigned long)(arg3); \
2962 _argvec[4] = (unsigned long)(arg4); \
2963 _argvec[5] = (unsigned long)(arg5); \
2964 _argvec[6] = (unsigned long)(arg6); \
2965 _argvec[7] = (unsigned long)(arg7); \
2966 _argvec[8] = (unsigned long)(arg8); \
2967 _argvec[9] = (unsigned long)(arg9); \
2968 _argvec[10] = (unsigned long)(arg10); \
2969 __asm__ volatile( \
2970 "ldr r0, [%1, #40] \n\t" \
2971 "push {r0} \n\t" \
2972 "ldr r0, [%1, #20] \n\t" \
2973 "ldr r1, [%1, #24] \n\t" \
2974 "ldr r2, [%1, #28] \n\t" \
2975 "ldr r3, [%1, #32] \n\t" \
2976 "ldr r4, [%1, #36] \n\t" \
2977 "push {r0, r1, r2, r3, r4} \n\t" \
2978 "ldr r0, [%1, #4] \n\t" \
2979 "ldr r1, [%1, #8] \n\t" \
2980 "ldr r2, [%1, #12] \n\t" \
2981 "ldr r3, [%1, #16] \n\t" \
2982 "ldr r4, [%1] \n\t" /* target->r4 */ \
2983 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2984 "add sp, sp, #24 \n\t" \
2985 "mov %0, r0" \
2986 : /*out*/ "=r" (_res) \
2987 : /*in*/ "0" (&_argvec[0]) \
2988 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2989 ); \
2990 lval = (__typeof__(lval)) _res; \
2991 } while (0)
2992
2993 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
2994 arg6,arg7,arg8,arg9,arg10, \
2995 arg11) \
2996 do { \
2997 volatile OrigFn _orig = (orig); \
2998 volatile unsigned long _argvec[12]; \
2999 volatile unsigned long _res; \
3000 _argvec[0] = (unsigned long)_orig.nraddr; \
3001 _argvec[1] = (unsigned long)(arg1); \
3002 _argvec[2] = (unsigned long)(arg2); \
3003 _argvec[3] = (unsigned long)(arg3); \
3004 _argvec[4] = (unsigned long)(arg4); \
3005 _argvec[5] = (unsigned long)(arg5); \
3006 _argvec[6] = (unsigned long)(arg6); \
3007 _argvec[7] = (unsigned long)(arg7); \
3008 _argvec[8] = (unsigned long)(arg8); \
3009 _argvec[9] = (unsigned long)(arg9); \
3010 _argvec[10] = (unsigned long)(arg10); \
3011 _argvec[11] = (unsigned long)(arg11); \
3012 __asm__ volatile( \
3013 "ldr r0, [%1, #40] \n\t" \
3014 "ldr r1, [%1, #44] \n\t" \
3015 "push {r0, r1} \n\t" \
3016 "ldr r0, [%1, #20] \n\t" \
3017 "ldr r1, [%1, #24] \n\t" \
3018 "ldr r2, [%1, #28] \n\t" \
3019 "ldr r3, [%1, #32] \n\t" \
3020 "ldr r4, [%1, #36] \n\t" \
3021 "push {r0, r1, r2, r3, r4} \n\t" \
3022 "ldr r0, [%1, #4] \n\t" \
3023 "ldr r1, [%1, #8] \n\t" \
3024 "ldr r2, [%1, #12] \n\t" \
3025 "ldr r3, [%1, #16] \n\t" \
3026 "ldr r4, [%1] \n\t" /* target->r4 */ \
3027 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3028 "add sp, sp, #28 \n\t" \
3029 "mov %0, r0" \
3030 : /*out*/ "=r" (_res) \
3031 : /*in*/ "0" (&_argvec[0]) \
3032 : /*trash*/ "cc", "memory",__CALLER_SAVED_REGS \
3033 ); \
3034 lval = (__typeof__(lval)) _res; \
3035 } while (0)
3036
3037 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3038 arg6,arg7,arg8,arg9,arg10, \
3039 arg11,arg12) \
3040 do { \
3041 volatile OrigFn _orig = (orig); \
3042 volatile unsigned long _argvec[13]; \
3043 volatile unsigned long _res; \
3044 _argvec[0] = (unsigned long)_orig.nraddr; \
3045 _argvec[1] = (unsigned long)(arg1); \
3046 _argvec[2] = (unsigned long)(arg2); \
3047 _argvec[3] = (unsigned long)(arg3); \
3048 _argvec[4] = (unsigned long)(arg4); \
3049 _argvec[5] = (unsigned long)(arg5); \
3050 _argvec[6] = (unsigned long)(arg6); \
3051 _argvec[7] = (unsigned long)(arg7); \
3052 _argvec[8] = (unsigned long)(arg8); \
3053 _argvec[9] = (unsigned long)(arg9); \
3054 _argvec[10] = (unsigned long)(arg10); \
3055 _argvec[11] = (unsigned long)(arg11); \
3056 _argvec[12] = (unsigned long)(arg12); \
3057 __asm__ volatile( \
3058 "ldr r0, [%1, #40] \n\t" \
3059 "ldr r1, [%1, #44] \n\t" \
3060 "ldr r2, [%1, #48] \n\t" \
3061 "push {r0, r1, r2} \n\t" \
3062 "ldr r0, [%1, #20] \n\t" \
3063 "ldr r1, [%1, #24] \n\t" \
3064 "ldr r2, [%1, #28] \n\t" \
3065 "ldr r3, [%1, #32] \n\t" \
3066 "ldr r4, [%1, #36] \n\t" \
3067 "push {r0, r1, r2, r3, r4} \n\t" \
3068 "ldr r0, [%1, #4] \n\t" \
3069 "ldr r1, [%1, #8] \n\t" \
3070 "ldr r2, [%1, #12] \n\t" \
3071 "ldr r3, [%1, #16] \n\t" \
3072 "ldr r4, [%1] \n\t" /* target->r4 */ \
3073 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3074 "add sp, sp, #32 \n\t" \
3075 "mov %0, r0" \
3076 : /*out*/ "=r" (_res) \
3077 : /*in*/ "0" (&_argvec[0]) \
3078 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
3079 ); \
3080 lval = (__typeof__(lval)) _res; \
3081 } while (0)
3082
3083 #endif /* PLAT_arm_linux */
3084
3085 /* ------------------------- s390x-linux ------------------------- */
3086
3087 #if defined(PLAT_s390x_linux)
3088
/* Similar workaround to the one for amd64 (see above), but here we
   use r11 as the frame pointer and save the old r11 in r7.  r11
   might be used for argvec, so we copy the argvec pointer into r1,
   since r1 is clobbered after the call anyway. */
3093 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
3094 # define __FRAME_POINTER \
3095 ,"d"(__builtin_dwarf_cfa())
3096 # define VALGRIND_CFI_PROLOGUE \
3097 ".cfi_remember_state\n\t" \
3098 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
3099 "lgr 7,11\n\t" \
3100 "lgr 11,%2\n\t" \
3101 ".cfi_def_cfa r11, 0\n\t"
3102 # define VALGRIND_CFI_EPILOGUE \
3103 "lgr 11, 7\n\t" \
3104 ".cfi_restore_state\n\t"
3105 #else
3106 # define __FRAME_POINTER
3107 # define VALGRIND_CFI_PROLOGUE \
3108 "lgr 1,%1\n\t"
3109 # define VALGRIND_CFI_EPILOGUE
3110 #endif
3111
3112
3113
3114
3115 /* These regs are trashed by the hidden call. Note that we overwrite
3116 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
3117 function a proper return address. All others are ABI defined call
3118 clobbers. */
3119 #define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
3120 "f0","f1","f2","f3","f4","f5","f6","f7"
3121
3122
3123 #define CALL_FN_W_v(lval, orig) \
3124 do { \
3125 volatile OrigFn _orig = (orig); \
3126 volatile unsigned long _argvec[1]; \
3127 volatile unsigned long _res; \
3128 _argvec[0] = (unsigned long)_orig.nraddr; \
3129 __asm__ volatile( \
3130 VALGRIND_CFI_PROLOGUE \
3131 "aghi 15,-160\n\t" \
3132 "lg 1, 0(1)\n\t" /* target->r1 */ \
3133 VALGRIND_CALL_NOREDIR_R1 \
3134 "lgr %0, 2\n\t" \
3135 "aghi 15,160\n\t" \
3136 VALGRIND_CFI_EPILOGUE \
3137 : /*out*/ "=d" (_res) \
3138 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
3139 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3140 ); \
3141 lval = (__typeof__(lval)) _res; \
3142 } while (0)
3143
/* The call ABI passes the first five arguments in r2-r6; the rest go on the stack */
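/* Concretely (a sketch, readable off the code below): r1 points at
   _argvec[0], so 8(1)..40(1) are arg1..arg5 and are loaded into
   r2..r6; any further arguments are copied with "mvc" into the
   parameter area of the enlarged frame at 160(15), 168(15), ...;
   the result comes back in r2. */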
3145 #define CALL_FN_W_W(lval, orig, arg1) \
3146 do { \
3147 volatile OrigFn _orig = (orig); \
3148 volatile unsigned long _argvec[2]; \
3149 volatile unsigned long _res; \
3150 _argvec[0] = (unsigned long)_orig.nraddr; \
3151 _argvec[1] = (unsigned long)arg1; \
3152 __asm__ volatile( \
3153 VALGRIND_CFI_PROLOGUE \
3154 "aghi 15,-160\n\t" \
3155 "lg 2, 8(1)\n\t" \
3156 "lg 1, 0(1)\n\t" \
3157 VALGRIND_CALL_NOREDIR_R1 \
3158 "lgr %0, 2\n\t" \
3159 "aghi 15,160\n\t" \
3160 VALGRIND_CFI_EPILOGUE \
3161 : /*out*/ "=d" (_res) \
3162 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3163 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3164 ); \
3165 lval = (__typeof__(lval)) _res; \
3166 } while (0)
3167
3168 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
3169 do { \
3170 volatile OrigFn _orig = (orig); \
3171 volatile unsigned long _argvec[3]; \
3172 volatile unsigned long _res; \
3173 _argvec[0] = (unsigned long)_orig.nraddr; \
3174 _argvec[1] = (unsigned long)arg1; \
3175 _argvec[2] = (unsigned long)arg2; \
3176 __asm__ volatile( \
3177 VALGRIND_CFI_PROLOGUE \
3178 "aghi 15,-160\n\t" \
3179 "lg 2, 8(1)\n\t" \
3180 "lg 3,16(1)\n\t" \
3181 "lg 1, 0(1)\n\t" \
3182 VALGRIND_CALL_NOREDIR_R1 \
3183 "lgr %0, 2\n\t" \
3184 "aghi 15,160\n\t" \
3185 VALGRIND_CFI_EPILOGUE \
3186 : /*out*/ "=d" (_res) \
3187 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3188 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3189 ); \
3190 lval = (__typeof__(lval)) _res; \
3191 } while (0)
3192
3193 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
3194 do { \
3195 volatile OrigFn _orig = (orig); \
3196 volatile unsigned long _argvec[4]; \
3197 volatile unsigned long _res; \
3198 _argvec[0] = (unsigned long)_orig.nraddr; \
3199 _argvec[1] = (unsigned long)arg1; \
3200 _argvec[2] = (unsigned long)arg2; \
3201 _argvec[3] = (unsigned long)arg3; \
3202 __asm__ volatile( \
3203 VALGRIND_CFI_PROLOGUE \
3204 "aghi 15,-160\n\t" \
3205 "lg 2, 8(1)\n\t" \
3206 "lg 3,16(1)\n\t" \
3207 "lg 4,24(1)\n\t" \
3208 "lg 1, 0(1)\n\t" \
3209 VALGRIND_CALL_NOREDIR_R1 \
3210 "lgr %0, 2\n\t" \
3211 "aghi 15,160\n\t" \
3212 VALGRIND_CFI_EPILOGUE \
3213 : /*out*/ "=d" (_res) \
3214 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3215 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3216 ); \
3217 lval = (__typeof__(lval)) _res; \
3218 } while (0)
3219
3220 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
3221 do { \
3222 volatile OrigFn _orig = (orig); \
3223 volatile unsigned long _argvec[5]; \
3224 volatile unsigned long _res; \
3225 _argvec[0] = (unsigned long)_orig.nraddr; \
3226 _argvec[1] = (unsigned long)arg1; \
3227 _argvec[2] = (unsigned long)arg2; \
3228 _argvec[3] = (unsigned long)arg3; \
3229 _argvec[4] = (unsigned long)arg4; \
3230 __asm__ volatile( \
3231 VALGRIND_CFI_PROLOGUE \
3232 "aghi 15,-160\n\t" \
3233 "lg 2, 8(1)\n\t" \
3234 "lg 3,16(1)\n\t" \
3235 "lg 4,24(1)\n\t" \
3236 "lg 5,32(1)\n\t" \
3237 "lg 1, 0(1)\n\t" \
3238 VALGRIND_CALL_NOREDIR_R1 \
3239 "lgr %0, 2\n\t" \
3240 "aghi 15,160\n\t" \
3241 VALGRIND_CFI_EPILOGUE \
3242 : /*out*/ "=d" (_res) \
3243 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3244 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3245 ); \
3246 lval = (__typeof__(lval)) _res; \
3247 } while (0)
3248
3249 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
3250 do { \
3251 volatile OrigFn _orig = (orig); \
3252 volatile unsigned long _argvec[6]; \
3253 volatile unsigned long _res; \
3254 _argvec[0] = (unsigned long)_orig.nraddr; \
3255 _argvec[1] = (unsigned long)arg1; \
3256 _argvec[2] = (unsigned long)arg2; \
3257 _argvec[3] = (unsigned long)arg3; \
3258 _argvec[4] = (unsigned long)arg4; \
3259 _argvec[5] = (unsigned long)arg5; \
3260 __asm__ volatile( \
3261 VALGRIND_CFI_PROLOGUE \
3262 "aghi 15,-160\n\t" \
3263 "lg 2, 8(1)\n\t" \
3264 "lg 3,16(1)\n\t" \
3265 "lg 4,24(1)\n\t" \
3266 "lg 5,32(1)\n\t" \
3267 "lg 6,40(1)\n\t" \
3268 "lg 1, 0(1)\n\t" \
3269 VALGRIND_CALL_NOREDIR_R1 \
3270 "lgr %0, 2\n\t" \
3271 "aghi 15,160\n\t" \
3272 VALGRIND_CFI_EPILOGUE \
3273 : /*out*/ "=d" (_res) \
3274 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3275 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3276 ); \
3277 lval = (__typeof__(lval)) _res; \
3278 } while (0)
3279
3280 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3281 arg6) \
3282 do { \
3283 volatile OrigFn _orig = (orig); \
3284 volatile unsigned long _argvec[7]; \
3285 volatile unsigned long _res; \
3286 _argvec[0] = (unsigned long)_orig.nraddr; \
3287 _argvec[1] = (unsigned long)arg1; \
3288 _argvec[2] = (unsigned long)arg2; \
3289 _argvec[3] = (unsigned long)arg3; \
3290 _argvec[4] = (unsigned long)arg4; \
3291 _argvec[5] = (unsigned long)arg5; \
3292 _argvec[6] = (unsigned long)arg6; \
3293 __asm__ volatile( \
3294 VALGRIND_CFI_PROLOGUE \
3295 "aghi 15,-168\n\t" \
3296 "lg 2, 8(1)\n\t" \
3297 "lg 3,16(1)\n\t" \
3298 "lg 4,24(1)\n\t" \
3299 "lg 5,32(1)\n\t" \
3300 "lg 6,40(1)\n\t" \
3301 "mvc 160(8,15), 48(1)\n\t" \
3302 "lg 1, 0(1)\n\t" \
3303 VALGRIND_CALL_NOREDIR_R1 \
3304 "lgr %0, 2\n\t" \
3305 "aghi 15,168\n\t" \
3306 VALGRIND_CFI_EPILOGUE \
3307 : /*out*/ "=d" (_res) \
3308 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3309 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3310 ); \
3311 lval = (__typeof__(lval)) _res; \
3312 } while (0)
3313
3314 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3315 arg6, arg7) \
3316 do { \
3317 volatile OrigFn _orig = (orig); \
3318 volatile unsigned long _argvec[8]; \
3319 volatile unsigned long _res; \
3320 _argvec[0] = (unsigned long)_orig.nraddr; \
3321 _argvec[1] = (unsigned long)arg1; \
3322 _argvec[2] = (unsigned long)arg2; \
3323 _argvec[3] = (unsigned long)arg3; \
3324 _argvec[4] = (unsigned long)arg4; \
3325 _argvec[5] = (unsigned long)arg5; \
3326 _argvec[6] = (unsigned long)arg6; \
3327 _argvec[7] = (unsigned long)arg7; \
3328 __asm__ volatile( \
3329 VALGRIND_CFI_PROLOGUE \
3330 "aghi 15,-176\n\t" \
3331 "lg 2, 8(1)\n\t" \
3332 "lg 3,16(1)\n\t" \
3333 "lg 4,24(1)\n\t" \
3334 "lg 5,32(1)\n\t" \
3335 "lg 6,40(1)\n\t" \
3336 "mvc 160(8,15), 48(1)\n\t" \
3337 "mvc 168(8,15), 56(1)\n\t" \
3338 "lg 1, 0(1)\n\t" \
3339 VALGRIND_CALL_NOREDIR_R1 \
3340 "lgr %0, 2\n\t" \
3341 "aghi 15,176\n\t" \
3342 VALGRIND_CFI_EPILOGUE \
3343 : /*out*/ "=d" (_res) \
3344 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3345 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3346 ); \
3347 lval = (__typeof__(lval)) _res; \
3348 } while (0)
3349
3350 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3351 arg6, arg7 ,arg8) \
3352 do { \
3353 volatile OrigFn _orig = (orig); \
3354 volatile unsigned long _argvec[9]; \
3355 volatile unsigned long _res; \
3356 _argvec[0] = (unsigned long)_orig.nraddr; \
3357 _argvec[1] = (unsigned long)arg1; \
3358 _argvec[2] = (unsigned long)arg2; \
3359 _argvec[3] = (unsigned long)arg3; \
3360 _argvec[4] = (unsigned long)arg4; \
3361 _argvec[5] = (unsigned long)arg5; \
3362 _argvec[6] = (unsigned long)arg6; \
3363 _argvec[7] = (unsigned long)arg7; \
3364 _argvec[8] = (unsigned long)arg8; \
3365 __asm__ volatile( \
3366 VALGRIND_CFI_PROLOGUE \
3367 "aghi 15,-184\n\t" \
3368 "lg 2, 8(1)\n\t" \
3369 "lg 3,16(1)\n\t" \
3370 "lg 4,24(1)\n\t" \
3371 "lg 5,32(1)\n\t" \
3372 "lg 6,40(1)\n\t" \
3373 "mvc 160(8,15), 48(1)\n\t" \
3374 "mvc 168(8,15), 56(1)\n\t" \
3375 "mvc 176(8,15), 64(1)\n\t" \
3376 "lg 1, 0(1)\n\t" \
3377 VALGRIND_CALL_NOREDIR_R1 \
3378 "lgr %0, 2\n\t" \
3379 "aghi 15,184\n\t" \
3380 VALGRIND_CFI_EPILOGUE \
3381 : /*out*/ "=d" (_res) \
3382 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3383 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3384 ); \
3385 lval = (__typeof__(lval)) _res; \
3386 } while (0)
3387
3388 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3389 arg6, arg7 ,arg8, arg9) \
3390 do { \
3391 volatile OrigFn _orig = (orig); \
3392 volatile unsigned long _argvec[10]; \
3393 volatile unsigned long _res; \
3394 _argvec[0] = (unsigned long)_orig.nraddr; \
3395 _argvec[1] = (unsigned long)arg1; \
3396 _argvec[2] = (unsigned long)arg2; \
3397 _argvec[3] = (unsigned long)arg3; \
3398 _argvec[4] = (unsigned long)arg4; \
3399 _argvec[5] = (unsigned long)arg5; \
3400 _argvec[6] = (unsigned long)arg6; \
3401 _argvec[7] = (unsigned long)arg7; \
3402 _argvec[8] = (unsigned long)arg8; \
3403 _argvec[9] = (unsigned long)arg9; \
3404 __asm__ volatile( \
3405 VALGRIND_CFI_PROLOGUE \
3406 "aghi 15,-192\n\t" \
3407 "lg 2, 8(1)\n\t" \
3408 "lg 3,16(1)\n\t" \
3409 "lg 4,24(1)\n\t" \
3410 "lg 5,32(1)\n\t" \
3411 "lg 6,40(1)\n\t" \
3412 "mvc 160(8,15), 48(1)\n\t" \
3413 "mvc 168(8,15), 56(1)\n\t" \
3414 "mvc 176(8,15), 64(1)\n\t" \
3415 "mvc 184(8,15), 72(1)\n\t" \
3416 "lg 1, 0(1)\n\t" \
3417 VALGRIND_CALL_NOREDIR_R1 \
3418 "lgr %0, 2\n\t" \
3419 "aghi 15,192\n\t" \
3420 VALGRIND_CFI_EPILOGUE \
3421 : /*out*/ "=d" (_res) \
3422 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3423 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3424 ); \
3425 lval = (__typeof__(lval)) _res; \
3426 } while (0)
3427
3428 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3429 arg6, arg7 ,arg8, arg9, arg10) \
3430 do { \
3431 volatile OrigFn _orig = (orig); \
3432 volatile unsigned long _argvec[11]; \
3433 volatile unsigned long _res; \
3434 _argvec[0] = (unsigned long)_orig.nraddr; \
3435 _argvec[1] = (unsigned long)arg1; \
3436 _argvec[2] = (unsigned long)arg2; \
3437 _argvec[3] = (unsigned long)arg3; \
3438 _argvec[4] = (unsigned long)arg4; \
3439 _argvec[5] = (unsigned long)arg5; \
3440 _argvec[6] = (unsigned long)arg6; \
3441 _argvec[7] = (unsigned long)arg7; \
3442 _argvec[8] = (unsigned long)arg8; \
3443 _argvec[9] = (unsigned long)arg9; \
3444 _argvec[10] = (unsigned long)arg10; \
3445 __asm__ volatile( \
3446 VALGRIND_CFI_PROLOGUE \
3447 "aghi 15,-200\n\t" \
3448 "lg 2, 8(1)\n\t" \
3449 "lg 3,16(1)\n\t" \
3450 "lg 4,24(1)\n\t" \
3451 "lg 5,32(1)\n\t" \
3452 "lg 6,40(1)\n\t" \
3453 "mvc 160(8,15), 48(1)\n\t" \
3454 "mvc 168(8,15), 56(1)\n\t" \
3455 "mvc 176(8,15), 64(1)\n\t" \
3456 "mvc 184(8,15), 72(1)\n\t" \
3457 "mvc 192(8,15), 80(1)\n\t" \
3458 "lg 1, 0(1)\n\t" \
3459 VALGRIND_CALL_NOREDIR_R1 \
3460 "lgr %0, 2\n\t" \
3461 "aghi 15,200\n\t" \
3462 VALGRIND_CFI_EPILOGUE \
3463 : /*out*/ "=d" (_res) \
3464 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3465 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3466 ); \
3467 lval = (__typeof__(lval)) _res; \
3468 } while (0)
3469
3470 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3471 arg6, arg7 ,arg8, arg9, arg10, arg11) \
3472 do { \
3473 volatile OrigFn _orig = (orig); \
3474 volatile unsigned long _argvec[12]; \
3475 volatile unsigned long _res; \
3476 _argvec[0] = (unsigned long)_orig.nraddr; \
3477 _argvec[1] = (unsigned long)arg1; \
3478 _argvec[2] = (unsigned long)arg2; \
3479 _argvec[3] = (unsigned long)arg3; \
3480 _argvec[4] = (unsigned long)arg4; \
3481 _argvec[5] = (unsigned long)arg5; \
3482 _argvec[6] = (unsigned long)arg6; \
3483 _argvec[7] = (unsigned long)arg7; \
3484 _argvec[8] = (unsigned long)arg8; \
3485 _argvec[9] = (unsigned long)arg9; \
3486 _argvec[10] = (unsigned long)arg10; \
3487 _argvec[11] = (unsigned long)arg11; \
3488 __asm__ volatile( \
3489 VALGRIND_CFI_PROLOGUE \
3490 "aghi 15,-208\n\t" \
3491 "lg 2, 8(1)\n\t" \
3492 "lg 3,16(1)\n\t" \
3493 "lg 4,24(1)\n\t" \
3494 "lg 5,32(1)\n\t" \
3495 "lg 6,40(1)\n\t" \
3496 "mvc 160(8,15), 48(1)\n\t" \
3497 "mvc 168(8,15), 56(1)\n\t" \
3498 "mvc 176(8,15), 64(1)\n\t" \
3499 "mvc 184(8,15), 72(1)\n\t" \
3500 "mvc 192(8,15), 80(1)\n\t" \
3501 "mvc 200(8,15), 88(1)\n\t" \
3502 "lg 1, 0(1)\n\t" \
3503 VALGRIND_CALL_NOREDIR_R1 \
3504 "lgr %0, 2\n\t" \
3505 "aghi 15,208\n\t" \
3506 VALGRIND_CFI_EPILOGUE \
3507 : /*out*/ "=d" (_res) \
3508 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3509 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3510 ); \
3511 lval = (__typeof__(lval)) _res; \
3512 } while (0)
3513
3514 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3515 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
3516 do { \
3517 volatile OrigFn _orig = (orig); \
3518 volatile unsigned long _argvec[13]; \
3519 volatile unsigned long _res; \
3520 _argvec[0] = (unsigned long)_orig.nraddr; \
3521 _argvec[1] = (unsigned long)arg1; \
3522 _argvec[2] = (unsigned long)arg2; \
3523 _argvec[3] = (unsigned long)arg3; \
3524 _argvec[4] = (unsigned long)arg4; \
3525 _argvec[5] = (unsigned long)arg5; \
3526 _argvec[6] = (unsigned long)arg6; \
3527 _argvec[7] = (unsigned long)arg7; \
3528 _argvec[8] = (unsigned long)arg8; \
3529 _argvec[9] = (unsigned long)arg9; \
3530 _argvec[10] = (unsigned long)arg10; \
3531 _argvec[11] = (unsigned long)arg11; \
3532 _argvec[12] = (unsigned long)arg12; \
3533 __asm__ volatile( \
3534 VALGRIND_CFI_PROLOGUE \
3535 "aghi 15,-216\n\t" \
3536 "lg 2, 8(1)\n\t" \
3537 "lg 3,16(1)\n\t" \
3538 "lg 4,24(1)\n\t" \
3539 "lg 5,32(1)\n\t" \
3540 "lg 6,40(1)\n\t" \
3541 "mvc 160(8,15), 48(1)\n\t" \
3542 "mvc 168(8,15), 56(1)\n\t" \
3543 "mvc 176(8,15), 64(1)\n\t" \
3544 "mvc 184(8,15), 72(1)\n\t" \
3545 "mvc 192(8,15), 80(1)\n\t" \
3546 "mvc 200(8,15), 88(1)\n\t" \
3547 "mvc 208(8,15), 96(1)\n\t" \
3548 "lg 1, 0(1)\n\t" \
3549 VALGRIND_CALL_NOREDIR_R1 \
3550 "lgr %0, 2\n\t" \
3551 "aghi 15,216\n\t" \
3552 VALGRIND_CFI_EPILOGUE \
3553 : /*out*/ "=d" (_res) \
3554 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3555 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3556 ); \
3557 lval = (__typeof__(lval)) _res; \
3558 } while (0)
3559
3560
3561 #endif /* PLAT_s390x_linux */
3562
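/* Illustrative sketch (not part of the header proper): the CALL_FN_W_*
   macros above are normally reached through a function wrapper built
   with I_WRAP_SONAME_FN_ZU and VALGRIND_GET_ORIG_FN, both defined
   earlier in this file.  The wrapped function my_fn and the use of the
   NONE soname are hypothetical.

      #include "valgrind.h"

      int I_WRAP_SONAME_FN_ZU(NONE, my_fn)(int x)
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);
         CALL_FN_W_W(result, fn, x);
         return result;
      }
*/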
3563
3564 /* ------------------------------------------------------------------ */
3565 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
3566 /* */
3567 /* ------------------------------------------------------------------ */
3568
3569 /* Some request codes. There are many more of these, but most are not
3570 exposed to end-user view. These are the public ones, all of the
3571 form 0x1000 + small_number.
3572
3573 Core ones are in the range 0x00000000--0x0000ffff. The non-public
3574 ones start at 0x2000.
3575 */
3576
3577 /* These macros are used by tools -- they must be public, but don't
3578 embed them into other programs. */
3579 #define VG_USERREQ_TOOL_BASE(a,b) \
3580 ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
3581 #define VG_IS_TOOL_USERREQ(a, b, v) \
3582 (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
3583
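/* Illustrative sketch: a tool header would typically build its request
   codes on top of VG_USERREQ_TOOL_BASE so that VG_IS_TOOL_USERREQ can
   recognise them later.  The 'M','T' code and the request names below
   are hypothetical; each real tool uses its own two-character prefix
   (Memcheck, for instance, uses 'M','C').

      enum {
         MY_TOOL_USERREQ__DO_SOMETHING = VG_USERREQ_TOOL_BASE('M','T'),
         MY_TOOL_USERREQ__DO_SOMETHING_ELSE
      };
*/
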
3584 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
3585 This enum comprises an ABI exported by Valgrind to programs
3586 which use client requests. DO NOT CHANGE THE ORDER OF THESE
3587 ENTRIES, NOR DELETE ANY -- add new ones at the end. */
3588 typedef
3589 enum { VG_USERREQ__RUNNING_ON_VALGRIND = 0x1001,
3590 VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,
3591
3592 /* These allow any function to be called from the simulated
3593 CPU but run on the real CPU. Nb: the first arg passed to
3594 the function is always the ThreadId of the running
3595 thread! So CLIENT_CALL0 actually requires a 1 arg
3596 function, etc. */
3597 VG_USERREQ__CLIENT_CALL0 = 0x1101,
3598 VG_USERREQ__CLIENT_CALL1 = 0x1102,
3599 VG_USERREQ__CLIENT_CALL2 = 0x1103,
3600 VG_USERREQ__CLIENT_CALL3 = 0x1104,
3601
3602 /* Can be useful in regression testing suites -- eg. can
3603 send Valgrind's output to /dev/null and still count
3604 errors. */
3605 VG_USERREQ__COUNT_ERRORS = 0x1201,
3606
3607           /* Allows a string (gdb monitor command) to be passed to the tool.
3608              Used for interaction with vgdb/gdb. */
3609 VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,
3610
3611 /* These are useful and can be interpreted by any tool that
3612 tracks malloc() et al, by using vg_replace_malloc.c. */
3613 VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
3614 VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
3615 VG_USERREQ__FREELIKE_BLOCK = 0x1302,
3616 /* Memory pool support. */
3617 VG_USERREQ__CREATE_MEMPOOL = 0x1303,
3618 VG_USERREQ__DESTROY_MEMPOOL = 0x1304,
3619 VG_USERREQ__MEMPOOL_ALLOC = 0x1305,
3620 VG_USERREQ__MEMPOOL_FREE = 0x1306,
3621 VG_USERREQ__MEMPOOL_TRIM = 0x1307,
3622 VG_USERREQ__MOVE_MEMPOOL = 0x1308,
3623 VG_USERREQ__MEMPOOL_CHANGE = 0x1309,
3624 VG_USERREQ__MEMPOOL_EXISTS = 0x130a,
3625
3626 /* Allow printfs to valgrind log. */
3627           /* The first two pass the va_list argument by value, which
3628              assumes it is the same size as or smaller than a UWord,
3629              which generally isn't the case.  They are hence deprecated.
3630              The latter two pass the vargs by reference and so are
3631              immune to this problem. */
3632 /* both :: char* fmt, va_list vargs (DEPRECATED) */
3633 VG_USERREQ__PRINTF = 0x1401,
3634 VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
3635 /* both :: char* fmt, va_list* vargs */
3636 VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
3637 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,
3638
3639 /* Stack support. */
3640 VG_USERREQ__STACK_REGISTER = 0x1501,
3641 VG_USERREQ__STACK_DEREGISTER = 0x1502,
3642 VG_USERREQ__STACK_CHANGE = 0x1503,
3643
3644 /* Wine support */
3645 VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,
3646
3647 /* Querying of debug info. */
3648 VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701
3649 } Vg_ClientRequest;
3650
3651 #if !defined(__GNUC__)
3652 # define __extension__ /* */
3653 #endif
3654
3655
3656 /* Returns the number of Valgrinds this code is running under. That
3657 is, 0 if running natively, 1 if running under Valgrind, 2 if
3658 running under Valgrind which is running under another Valgrind,
3659 etc. */
3660 #define RUNNING_ON_VALGRIND \
3661 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
3662 VG_USERREQ__RUNNING_ON_VALGRIND, \
3663                                         0, 0, 0, 0, 0)
3664
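/* Illustrative sketch: RUNNING_ON_VALGRIND is typically used to enable
   extra checking, or to avoid tricks that confuse Valgrind, at run
   time.  The variable and function below are hypothetical.

      static int under_valgrind = 0;

      static void detect_valgrind(void)
      {
         if (RUNNING_ON_VALGRIND > 0)
            under_valgrind = 1;
      }
*/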
3665
3666 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
3667 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
3668 since it provides a way to make sure valgrind will retranslate the
3669 invalidated area. Returns no value. */
3670 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
3671 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3672 VG_USERREQ__DISCARD_TRANSLATIONS, \
3673 _qzz_addr, _qzz_len, 0, 0, 0)
3674
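/* Illustrative sketch: a JIT that rewrites code in place would discard
   the stale translations before re-entering the rewritten region.
   regenerate_code() is a hypothetical stand-in for the JIT itself.

      static void patch_and_rerun(unsigned char* code_buf,
                                  unsigned long  code_len)
      {
         regenerate_code(code_buf, code_len);
         VALGRIND_DISCARD_TRANSLATIONS(code_buf, code_len);
         ((void(*)(void))code_buf)();
      }
*/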
3675
3676 /* These requests are for getting Valgrind itself to print something.
3677 Possibly with a backtrace. This is a really ugly hack. The return value
3678 is the number of characters printed, excluding the "**<pid>** " part at the
3679 start and the backtrace (if present). */
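
/* Illustrative sketch: both requests take printf-style arguments, e.g.

      VALGRIND_PRINTF("nodes allocated so far: %d\n", node_count);
      VALGRIND_PRINTF_BACKTRACE("unexpected state %d\n", state);

   where node_count and state are hypothetical variables in the client
   program. */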
3680
3681 #if defined(__GNUC__) || defined(__INTEL_COMPILER)
3682 /* Modern GCC will optimize the static routine out if unused,
3683    and the `unused' attribute suppresses warnings about it. */
3684 static int VALGRIND_PRINTF(const char *format, ...)
3685 __attribute__((format(__printf__, 1, 2), __unused__));
3686 #endif
3687 static int
3688 #if defined(_MSC_VER)
3689 __inline
3690 #endif
3691 VALGRIND_PRINTF(const char *format, ...)
3692 {
3693 #if defined(NVALGRIND)
3694 return 0;
3695 #else /* NVALGRIND */
3696 #if defined(_MSC_VER)
3697 uintptr_t _qzz_res;
3698 #else
3699 unsigned long _qzz_res;
3700 #endif
3701 va_list vargs;
3702 va_start(vargs, format);
3703 #if defined(_MSC_VER)
3704 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
3705 VG_USERREQ__PRINTF_VALIST_BY_REF,
3706 (uintptr_t)format,
3707 (uintptr_t)&vargs,
3708 0, 0, 0);
3709 #else
3710 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
3711 VG_USERREQ__PRINTF_VALIST_BY_REF,
3712 (unsigned long)format,
3713 (unsigned long)&vargs,
3714 0, 0, 0);
3715 #endif
3716 va_end(vargs);
3717 return (int)_qzz_res;
3718 #endif /* NVALGRIND */
3719 }
3720
3721 #if defined(__GNUC__) || defined(__INTEL_COMPILER)
3722 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
3723 __attribute__((format(__printf__, 1, 2), __unused__));
3724 #endif
3725 static int
3726 #if defined(_MSC_VER)
3727 __inline
3728 #endif
3729 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
3730 {
3731 #if defined(NVALGRIND)
3732 return 0;
3733 #else /* NVALGRIND */
3734 #if defined(_MSC_VER)
3735 uintptr_t _qzz_res;
3736 #else
3737 unsigned long _qzz_res;
3738 #endif
3739 va_list vargs;
3740 va_start(vargs, format);
3741 #if defined(_MSC_VER)
3742 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
3743 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
3744 (uintptr_t)format,
3745 (uintptr_t)&vargs,
3746 0, 0, 0);
3747 #else
3748 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
3749 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
3750 (unsigned long)format,
3751 (unsigned long)&vargs,
3752 0, 0, 0);
3753 #endif
3754 va_end(vargs);
3755 return (int)_qzz_res;
3756 #endif /* NVALGRIND */
3757 }
3758
3759
3760 /* These requests allow control to move from the simulated CPU to the
3761    real CPU, calling an arbitrary function.
3762
3763 Note that the current ThreadId is inserted as the first argument.
3764 So this call:
3765
3766 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
3767
3768 requires f to have this signature:
3769
3770 Word f(Word tid, Word arg1, Word arg2)
3771
3772 where "Word" is a word-sized type.
3773
3774 Note that these client requests are not entirely reliable. For example,
3775 if you call a function with them that subsequently calls printf(),
3776    there's a high chance Valgrind will crash.  Generally, these calls are
3777    more likely to work if the called function does not refer to any global
3778    variables, and does not call into libc or other library functions
3779 (printf et al). Any kind of entanglement with libc or dynamic linking is
3780 likely to have a bad outcome, for tricky reasons which we've grappled
3781 with a lot in the past.
3782 */
3783 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
3784 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
3785 VG_USERREQ__CLIENT_CALL0, \
3786 _qyy_fn, \
3787 0, 0, 0, 0)
3788
3789 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
3790 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
3791 VG_USERREQ__CLIENT_CALL1, \
3792 _qyy_fn, \
3793 _qyy_arg1, 0, 0, 0)
3794
3795 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
3796 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
3797 VG_USERREQ__CLIENT_CALL2, \
3798 _qyy_fn, \
3799 _qyy_arg1, _qyy_arg2, 0, 0)
3800
3801 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
3802 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
3803 VG_USERREQ__CLIENT_CALL3, \
3804 _qyy_fn, \
3805 _qyy_arg1, _qyy_arg2, \
3806 _qyy_arg3, 0)
3807
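/* Illustrative sketch of the ThreadId rule described above: a function
   meant for VALGRIND_NON_SIMD_CALL2 takes three word-sized arguments,
   the first being the ThreadId.  The names below are hypothetical and
   'long' stands in for a word-sized type.

      static long sum_on_real_cpu(long tid, long a, long b)
      {
         return a + b;
      }

   and then, from the client program:

      long r = (long) VALGRIND_NON_SIMD_CALL2(sum_on_real_cpu, 7, 35);
*/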
3808
3809 /* Counts the number of errors that have been recorded by a tool. Nb:
3810 the tool must record the errors with VG_(maybe_record_error)() or
3811 VG_(unique_error)() for them to be counted. */
3812 #define VALGRIND_COUNT_ERRORS \
3813 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
3814 0 /* default return */, \
3815 VG_USERREQ__COUNT_ERRORS, \
3816 0, 0, 0, 0, 0)
3817
3818 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
3819 when heap blocks are allocated in order to give accurate results. This
3820 happens automatically for the standard allocator functions such as
3821 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
3822 delete[], etc.
3823
3824 But if your program uses a custom allocator, this doesn't automatically
3825 happen, and Valgrind will not do as well. For example, if you allocate
3826    superblocks with mmap() and then allocate chunks of the superblocks, all
3827 Valgrind's observations will be at the mmap() level and it won't know that
3828 the chunks should be considered separate entities. In Memcheck's case,
3829 that means you probably won't get heap block overrun detection (because
3830 there won't be redzones marked as unaddressable) and you definitely won't
3831 get any leak detection.
3832
3833 The following client requests allow a custom allocator to be annotated so
3834 that it can be handled accurately by Valgrind.
3835
3836 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
3837 by a malloc()-like function. For Memcheck (an illustrative case), this
3838 does two things:
3839
3840 - It records that the block has been allocated. This means any addresses
3841 within the block mentioned in error messages will be
3842 identified as belonging to the block. It also means that if the block
3843 isn't freed it will be detected by the leak checker.
3844
3845 - It marks the block as being addressable and undefined (if 'is_zeroed' is
3846 not set), or addressable and defined (if 'is_zeroed' is set). This
3847 controls how accesses to the block by the program are handled.
3848
3849 'addr' is the start of the usable block (ie. after any
3850 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
3851 can apply redzones -- these are blocks of padding at the start and end of
3852 each block. Adding redzones is recommended as it makes it much more likely
3853 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
3854 zeroed (or filled with another predictable value), as is the case for
3855 calloc().
3856
3857 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
3858 heap block -- that will be used by the client program -- is allocated.
3859 It's best to put it at the outermost level of the allocator if possible;
3860 for example, if you have a function my_alloc() which calls
3861 internal_alloc(), and the client request is put inside internal_alloc(),
3862 stack traces relating to the heap block will contain entries for both
3863 my_alloc() and internal_alloc(), which is probably not what you want.
3864
3865 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
3866 custom blocks from within a heap block, B, that has been allocated with
3867 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
3868 -- the custom blocks will take precedence.
3869
3870 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
3871 Memcheck, it does two things:
3872
3873 - It records that the block has been deallocated. This assumes that the
3874 block was annotated as having been allocated via
3875 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
3876
3877 - It marks the block as being unaddressable.
3878
3879 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
3880 heap block is deallocated.
3881
3882 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
3883 Memcheck, it does four things:
3884
3885 - It records that the size of a block has been changed. This assumes that
3886 the block was annotated as having been allocated via
3887 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
3888
3889 - If the block shrunk, it marks the freed memory as being unaddressable.
3890
3891 - If the block grew, it marks the new area as undefined and defines a red
3892 zone past the end of the new block.
3893
3894 - The V-bits of the overlap between the old and the new block are preserved.
3895
3896 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
3897 and before deallocation of the old block.
3898
3899 In many cases, these three client requests will not be enough to get your
3900 allocator working well with Memcheck. More specifically, if your allocator
3901 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
3902 will be necessary to mark the memory as addressable just before the zeroing
3903 occurs, otherwise you'll get a lot of invalid write errors. For example,
3904 you'll need to do this if your allocator recycles freed blocks, but it
3905 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
3906 Alternatively, if your allocator reuses freed blocks for allocator-internal
3907 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
3908
3909 Really, what's happening is a blurring of the lines between the client
3910 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
3911 memory should be considered unaddressable to the client program, but the
3912 allocator knows more than the rest of the client program and so may be able
3913 to safely access it. Extra client requests are necessary for Valgrind to
3914 understand the distinction between the allocator and the rest of the
3915 program.
3916
3917 Ignored if addr == 0.
3918 */
3919 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
3920 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3921 VG_USERREQ__MALLOCLIKE_BLOCK, \
3922 addr, sizeB, rzB, is_zeroed, 0)
3923
3924 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
3925 Ignored if addr == 0.
3926 */
3927 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
3928 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3929 VG_USERREQ__RESIZEINPLACE_BLOCK, \
3930 addr, oldSizeB, newSizeB, rzB, 0)
3931
3932 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
3933 Ignored if addr == 0.
3934 */
3935 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
3936 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3937 VG_USERREQ__FREELIKE_BLOCK, \
3938 addr, rzB, 0, 0, 0)
3939
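/* Illustrative sketch of annotating a custom allocator with the requests
   above.  internal_alloc()/internal_free() stand in for a hypothetical
   backing allocator (returning char*), and the 16-byte redzone is an
   arbitrary choice.

      #define MY_RZ 16

      static void* my_alloc(unsigned long n)
      {
         char* raw    = internal_alloc(n + 2*MY_RZ);
         char* usable = raw + MY_RZ;
         VALGRIND_MALLOCLIKE_BLOCK(usable, n, MY_RZ, 0);
         return usable;
      }

      static void my_free(void* usable)
      {
         VALGRIND_FREELIKE_BLOCK(usable, MY_RZ);
         internal_free((char*)usable - MY_RZ);
      }
*/
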
3940 /* Create a memory pool. */
3941 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
3942 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3943 VG_USERREQ__CREATE_MEMPOOL, \
3944 pool, rzB, is_zeroed, 0, 0)
3945
3946 /* Destroy a memory pool. */
3947 #define VALGRIND_DESTROY_MEMPOOL(pool) \
3948 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3949 VG_USERREQ__DESTROY_MEMPOOL, \
3950 pool, 0, 0, 0, 0)
3951
3952 /* Associate a piece of memory with a memory pool. */
3953 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
3954 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3955 VG_USERREQ__MEMPOOL_ALLOC, \
3956 pool, addr, size, 0, 0)
3957
3958 /* Disassociate a piece of memory from a memory pool. */
3959 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
3960 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3961 VG_USERREQ__MEMPOOL_FREE, \
3962 pool, addr, 0, 0, 0)
3963
3964 /* Disassociate any pieces outside a particular range. */
3965 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
3966 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3967 VG_USERREQ__MEMPOOL_TRIM, \
3968 pool, addr, size, 0, 0)
3969
3970 /* Tell Valgrind that the pool anchored at poolA has moved to poolB. */
3971 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
3972 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3973 VG_USERREQ__MOVE_MEMPOOL, \
3974 poolA, poolB, 0, 0, 0)
3975
3976 /* Resize and/or move a piece associated with a memory pool. */
3977 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
3978 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3979 VG_USERREQ__MEMPOOL_CHANGE, \
3980 pool, addrA, addrB, size, 0)
3981
3982 /* Return 1 if a mempool exists, else 0. */
3983 #define VALGRIND_MEMPOOL_EXISTS(pool) \
3984 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3985 VG_USERREQ__MEMPOOL_EXISTS, \
3986 pool, 0, 0, 0, 0)
3987
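/* Illustrative sketch of a pool lifetime using the requests above.
   internal_get_superblock() and pool_carve_chunk() are hypothetical
   pieces of the client's own pool implementation.

      char* pool = internal_get_superblock(65536);
      VALGRIND_CREATE_MEMPOOL(pool, 0, 0);

      char* obj = pool_carve_chunk(pool, 100);
      VALGRIND_MEMPOOL_ALLOC(pool, obj, 100);

      VALGRIND_MEMPOOL_FREE(pool, obj);
      VALGRIND_DESTROY_MEMPOOL(pool);
*/
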
3988 /* Mark a piece of memory as being a stack. Returns a stack id. */
3989 #define VALGRIND_STACK_REGISTER(start, end) \
3990 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3991 VG_USERREQ__STACK_REGISTER, \
3992 start, end, 0, 0, 0)
3993
3994 /* Unmark the piece of memory associated with a stack id as being a
3995 stack. */
3996 #define VALGRIND_STACK_DEREGISTER(id) \
3997 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3998 VG_USERREQ__STACK_DEREGISTER, \
3999 id, 0, 0, 0, 0)
4000
4001 /* Change the start and end address of the stack id. */
4002 #define VALGRIND_STACK_CHANGE(id, start, end) \
4003 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
4004 VG_USERREQ__STACK_CHANGE, \
4005 id, start, end, 0, 0)
4006
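/* Illustrative sketch: code that runs on its own stacks (coroutines,
   user-level threads, signal stacks) can tell Valgrind about them.
   Assumes <stdlib.h>; the stack size is an arbitrary choice.

      #define MY_STACK_SIZE (64*1024)

      char*    stk      = malloc(MY_STACK_SIZE);
      unsigned stack_id = VALGRIND_STACK_REGISTER(stk, stk + MY_STACK_SIZE);

      VALGRIND_STACK_DEREGISTER(stack_id);
      free(stk);
*/
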
4007 /* Load PDB debug info for Wine PE image_map. */
4008 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
4009 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
4010 VG_USERREQ__LOAD_PDB_DEBUGINFO, \
4011 fd, ptr, total_size, delta, 0)
4012
4013 /* Map a code address to a source file name and line number. buf64
4014 must point to a 64-byte buffer in the caller's address space. The
4015 result will be dumped in there and is guaranteed to be zero
4016 terminated. If no info is found, the first byte is set to zero. */
4017 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
4018 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
4019 VG_USERREQ__MAP_IP_TO_SRCLOC, \
4020 addr, buf64, 0, 0, 0)
4021
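/* Illustrative sketch, assuming <stdio.h> and a hypothetical code
   address some_code_address:

      char buf64[64];
      VALGRIND_MAP_IP_TO_SRCLOC(some_code_address, buf64);
      if (buf64[0] != 0)
         printf("%s\n", buf64);
*/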
4022
4023 #undef PLAT_x86_darwin
4024 #undef PLAT_amd64_darwin
4025 #undef PLAT_x86_win32
4026 #undef PLAT_x86_linux
4027 #undef PLAT_amd64_linux
4028 #undef PLAT_ppc32_linux
4029 #undef PLAT_ppc64_linux
4030 #undef PLAT_arm_linux
4031 #undef PLAT_s390x_linux
4032
4033 #endif /* __VALGRIND_H */
4034