1 /* -*- c -*-
2 ----------------------------------------------------------------
3
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
9
10 ----------------------------------------------------------------
11
12 This file is part of Valgrind, a dynamic binary instrumentation
13 framework.
14
15 Copyright (C) 2000-2015 Julian Seward. All rights reserved.
16
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
19 are met:
20
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
23
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
28
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
31
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
34 permission.
35
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
47
48 ----------------------------------------------------------------
49
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
54
55 ----------------------------------------------------------------
56 */
57
58
59 /* This file is for inclusion into client (your!) code.
60
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
63
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (eg. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
72
73 #ifndef __VALGRIND_H
74 #define __VALGRIND_H
75
76
77 /* ------------------------------------------------------------------ */
78 /* VERSION NUMBER OF VALGRIND */
79 /* ------------------------------------------------------------------ */
80
81 /* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
85 X.Y or later" is (eg)
86
87 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
90 */
/* Version of Valgrind this header was distributed with; see the
   example above for the recommended version-check idiom. */
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    12
93
94
95 #include <stdarg.h>
96
97 /* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
99 use "__asm__"). */
100
101 /* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
107
108 Misc note: how to find out what's predefined in gcc by default:
109 gcc -Wp,-dM somefile.c
110 */
/* Start from a clean slate: exactly one PLAT_* tag is defined by the
   cascade below; if none matches, NVALGRIND is forced on instead so
   no inline asm is emitted at all. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_tilegx_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris


#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
      || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif defined(__MINGW64__) \
      || (defined(_WIN64) && defined(_M_X64))
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
/* Big Endian uses ELF version 1 */
#  define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
/* Little Endian uses ELF version 2 */
#  define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips!=64)
#  define PLAT_mips32_linux 1
#elif defined(__linux__) && defined(__tilegx__)
#  define PLAT_tilegx_linux 1
#elif defined(__sun) && defined(__i386__)
#  define PLAT_x86_solaris 1
#elif defined(__sun) && defined(__x86_64__)
#  define PLAT_amd64_solaris 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms. */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif
176
177
178 /* ------------------------------------------------------------------ */
179 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
180 /* in here of use to end-users -- skip to the next section. */
181 /* ------------------------------------------------------------------ */
182
183 /*
184 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
185 * request. Accepts both pointers and integers as arguments.
186 *
187 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
188 * client request that does not return a value.
189
190 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
191 * client request and whose value equals the client request result. Accepts
192 * both pointers and integers as arguments. Note that such calls are not
193 * necessarily pure functions -- they may have side effects.
194 */
195
/* Invoke a client request and assign its result to _zzq_rlval.
   Wrapped in do/while(0) so it behaves as a single statement. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
        _zzq_request, _zzq_arg1, _zzq_arg2,                             \
        _zzq_arg3, _zzq_arg4, _zzq_arg5)                                \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Invoke a client request whose result is not needed; 0 is passed as
   the default and the value is explicitly discarded via (void). */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
        _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5)                    \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                    (_zzq_request), (_zzq_arg1), (_zzq_arg2),           \
                    (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
208
209 #if defined(NVALGRIND)
210
211 /* Define NVALGRIND to completely remove the Valgrind magic sequence
212 from the compiled code (analogous to NDEBUG's effects on
213 assert()) */
/* NB: with NVALGRIND the request code and arguments are not evaluated
   at all; only the default value survives into the compiled code. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)
218
219 #else /* ! NVALGRIND */
220
221 /* The following defines the magic code sequences which the JITter
222 spots and handles magically. Don't look too closely at them as
223 they will rot your brain.
224
225 The assembly code sequences for all architectures is in this one
226 file. This is because this file must be stand-alone, and we don't
227 want to have multiple files.
228
229 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
230 value gets put in the return slot, so that everything works when
231 this is executed not under Valgrind. Args are passed in a memory
232 block, and so there's no intrinsic limit to the number that could
233 be passed, but it's currently five.
234
235 The macro args are:
236 _zzq_rlval result lvalue
237 _zzq_default default value (result returned when running on real CPU)
238 _zzq_request request code
239 _zzq_arg1..5 request params
240
241 The other two macros are used to support function wrapping, and are
242 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
243 guest's NRADDR pseudo-register and whatever other information is
244 needed to safely run the call original from the wrapper: on
245 ppc64-linux, the R2 value at the divert point is also needed. This
246 information is abstracted into a user-visible type, OrigFn.
247
248 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
249 guest, but guarantees that the branch instruction will not be
250 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
251 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
252 complete inline asm, since it needs to be combined with more magic
253 inline asm stuff to be useful.
254 */
255
256 /* ----------------- x86-{linux,darwin,solaris} ---------------- */
257
#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__))     \
    ||  defined(PLAT_x86_solaris)

/* Context captured by VALGRIND_GET_NR_CONTEXT: on x86 only the
   address of the original (pre-redirection) function is needed. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of %edi whose amounts sum to 64, i.e. 0 mod 32: a
   no-op on a real CPU, but recognised as magic by Valgrind's JIT. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"       \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

/* Marshal the request code and five args into a 6-word block whose
   address is passed in %eax; %edx carries the default value in and
   the result out (constraint "0" ties input to output operand 0). */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment only (not a complete statement): caller's inline
   asm must first put the target address in %eax. */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                   "xchgl %%edi,%%edi\n\t"                       \
                   : : : "cc", "memory"                          \
                  );                                             \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
          || PLAT_x86_solaris */
322
323 /* ------------------------- x86-Win32 ------------------------- */
324
#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* MSVC inline-assembler spelling of the magic rotate-edi sequence. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
   __asm rol edi, 3  __asm rol edi, 13                            \
   __asm rol edi, 29 __asm rol edi, 19

/* MSVC's __asm cannot appear inside an expression, so the request is
   routed through a real inline helper function instead of a
   statement expression. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/* Helper: args block address goes in EAX, default in EDX; the magic
   xchg ebx,ebx is the client-request marker; result comes back in EDX. */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned int __addr;                               \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE                      \
              /* %EAX = guest_NRADDR */                           \
              __asm xchg ecx,ecx                                  \
              __asm mov __addr, eax                               \
      }                                                           \
      _zzq_orig->nraddr = __addr;                                 \
    }

/* Not implementable with the MSVC inline assembler; expands to an
   undefined identifier so any use is a compile-time error. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm { __SPECIAL_INSTRUCTION_PREAMBLE                         \
          __asm xchg edi,edi                                     \
  }                                                              \
 } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */
395
396 /* ----------------- amd64-{linux,darwin,solaris} --------------- */
397
#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin) \
    ||  defined(PLAT_amd64_solaris)                           \
    ||  (defined(PLAT_amd64_win64) && defined(__GNUC__))

/* Context captured by VALGRIND_GET_NR_CONTEXT: on amd64 only the
   address of the original (pre-redirection) function is needed. */
typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotations of %rdi by 3+13+61+51 = 128 = 0 mod 64: a no-op on real
   hardware, recognised as magic by Valgrind's JIT. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"       \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

/* Args block address in %rax; default/result in %rdx ("0" ties the
   input to output operand 0). */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long int _zzq_args[6];                   \
       volatile unsigned long int _zzq_result;                    \
       _zzq_args[0] = (unsigned long int)(_zzq_request);          \
       _zzq_args[1] = (unsigned long int)(_zzq_arg1);             \
       _zzq_args[2] = (unsigned long int)(_zzq_arg2);             \
       _zzq_args[3] = (unsigned long int)(_zzq_arg3);             \
       _zzq_args[4] = (unsigned long int)(_zzq_arg4);             \
       _zzq_args[5] = (unsigned long int)(_zzq_arg5);             \
       __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE            \
                        /* %RDX = client_request ( %RAX ) */      \
                        "xchgq %%rbx,%%rbx"                       \
                        : "=d" (_zzq_result)                      \
                        : "a" (&_zzq_args[0]), "0" (_zzq_default) \
                        : "cc", "memory"                          \
                       );                                         \
       _zzq_result;                                               \
    })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned long int __addr;                          \
      __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE             \
                       /* %RAX = guest_NRADDR */                  \
                       "xchgq %%rcx,%%rcx"                        \
                       : "=a" (__addr)                            \
                       :                                          \
                       : "cc", "memory"                           \
                      );                                          \
      _zzq_orig->nraddr = __addr;                                 \
    }

/* Asm text fragment only: caller's inline asm must first put the
   target address in %rax. */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                   "xchgq %%rdi,%%rdi\n\t"                       \
                   : : : "cc", "memory"                          \
                  );                                             \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris
          || (PLAT_amd64_win64 && __GNUC__) */

/* ------------------------- amd64-Win64 ------------------------- */

#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

#error Unsupported compiler.

#endif /* PLAT_amd64_win64 */
469
470 /* ------------------------ ppc32-linux ------------------------ */
471
#if defined(PLAT_ppc32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotate-left-immediates of r0 by 3+13+29+19 = 64 = 0 mod 32: a no-op
   on real hardware, recognised as magic by Valgrind's JIT. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
                     "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

/* Default goes in r3 (which also returns the result), args-block
   pointer in r4; "or 1,1,1" is the client-request marker. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({ unsigned int _zzq_args[6];                                   \
     unsigned int _zzq_result;                                    \
     unsigned int* _zzq_ptr;                                      \
     _zzq_args[0] = (unsigned int)(_zzq_request);                 \
     _zzq_args[1] = (unsigned int)(_zzq_arg1);                    \
     _zzq_args[2] = (unsigned int)(_zzq_arg2);                    \
     _zzq_args[3] = (unsigned int)(_zzq_arg3);                    \
     _zzq_args[4] = (unsigned int)(_zzq_arg4);                    \
     _zzq_args[5] = (unsigned int)(_zzq_arg5);                    \
     _zzq_ptr = _zzq_args;                                        \
     __asm__ volatile("mr 3,%1\n\t" /*default*/                   \
                      "mr 4,%2\n\t" /*ptr*/                       \
                      __SPECIAL_INSTRUCTION_PREAMBLE              \
                      /* %R3 = client_request ( %R4 ) */          \
                      "or 1,1,1\n\t"                              \
                      "mr %0,3"     /*result*/                    \
                      : "=b" (_zzq_result)                        \
                      : "b" (_zzq_default), "b" (_zzq_ptr)        \
                      : "cc", "memory", "r3", "r4");              \
     _zzq_result;                                                 \
     })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment only: caller's inline asm must first put the
   target address in r11. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                   "or 5,5,5\n\t"                                \
                  );                                             \
 } while (0)

#endif /* PLAT_ppc32_linux */
538
539 /* ------------------------ ppc64-linux ------------------------ */
540
#if defined(PLAT_ppc64be_linux)

/* ppc64 also needs the TOC pointer (r2) at the divert point so the
   wrapper can safely call the original function. */
typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;  /* what tocptr do we need? */
   }
   OrigFn;

/* Rotations of r0 by 3+13+61+51 = 128 = 0 mod 64: a no-op on real
   hardware, recognised as magic by Valgrind's JIT. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* Default goes in r3 (which also returns the result), args-block
   pointer in r4; "or 1,1,1" is the client-request marker. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({ unsigned long int _zzq_args[6];                              \
     unsigned long int _zzq_result;                               \
     unsigned long int* _zzq_ptr;                                 \
     _zzq_args[0] = (unsigned long int)(_zzq_request);            \
     _zzq_args[1] = (unsigned long int)(_zzq_arg1);               \
     _zzq_args[2] = (unsigned long int)(_zzq_arg2);               \
     _zzq_args[3] = (unsigned long int)(_zzq_arg3);               \
     _zzq_args[4] = (unsigned long int)(_zzq_arg4);               \
     _zzq_args[5] = (unsigned long int)(_zzq_arg5);               \
     _zzq_ptr = _zzq_args;                                        \
     __asm__ volatile("mr 3,%1\n\t" /*default*/                   \
                      "mr 4,%2\n\t" /*ptr*/                       \
                      __SPECIAL_INSTRUCTION_PREAMBLE              \
                      /* %R3 = client_request ( %R4 ) */          \
                      "or 1,1,1\n\t"                              \
                      "mr %0,3"     /*result*/                    \
                      : "=b" (_zzq_result)                        \
                      : "b" (_zzq_default), "b" (_zzq_ptr)        \
                      : "cc", "memory", "r3", "r4");              \
     _zzq_result;                                                 \
     })

/* Fetch both guest_NRADDR and guest_NRADDR_GPR2 (TOC pointer) via
   two back-to-back magic sequences. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* Asm text fragment only: caller's inline asm must first put the
   target address in r11. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                   "or 5,5,5\n\t"                                \
                  );                                             \
 } while (0)

#endif /* PLAT_ppc64be_linux */
617
#if defined(PLAT_ppc64le_linux)

/* Same as ppc64be except the noredir branch target register is R12
   (per the ELFv2 ABI's use of r12 for the function entry address). */
typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* Rotations of r0 by 3+13+61+51 = 128 = 0 mod 64: a no-op on real
   hardware, recognised as magic by Valgrind's JIT. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* Default goes in r3 (which also returns the result), args-block
   pointer in r4; "or 1,1,1" is the client-request marker. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({ unsigned long int _zzq_args[6];                              \
     unsigned long int _zzq_result;                               \
     unsigned long int* _zzq_ptr;                                 \
     _zzq_args[0] = (unsigned long int)(_zzq_request);            \
     _zzq_args[1] = (unsigned long int)(_zzq_arg1);               \
     _zzq_args[2] = (unsigned long int)(_zzq_arg2);               \
     _zzq_args[3] = (unsigned long int)(_zzq_arg3);               \
     _zzq_args[4] = (unsigned long int)(_zzq_arg4);               \
     _zzq_args[5] = (unsigned long int)(_zzq_arg5);               \
     _zzq_ptr = _zzq_args;                                        \
     __asm__ volatile("mr 3,%1\n\t" /*default*/                   \
                      "mr 4,%2\n\t" /*ptr*/                       \
                      __SPECIAL_INSTRUCTION_PREAMBLE              \
                      /* %R3 = client_request ( %R4 ) */          \
                      "or 1,1,1\n\t"                              \
                      "mr %0,3"     /*result*/                    \
                      : "=b" (_zzq_result)                        \
                      : "b" (_zzq_default), "b" (_zzq_ptr)        \
                      : "cc", "memory", "r3", "r4");              \
     _zzq_result;                                                 \
     })

/* Fetch both guest_NRADDR and guest_NRADDR_GPR2 (TOC pointer) via
   two back-to-back magic sequences. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* Asm text fragment only: caller's inline asm must first put the
   target address in r12. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                   "or 5,5,5\n\t"                                \
                  );                                             \
 } while (0)

#endif /* PLAT_ppc64le_linux */
694
695 /* ------------------------- arm-linux ------------------------- */
696
#if defined(PLAT_arm_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotations of r12 by 3+13+29+19 = 64 = 0 mod 32: a no-op on real
   hardware, recognised as magic by Valgrind's JIT. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3  ; mov r12, r12, ror #13 \n\t"  \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

/* Default goes in r3 (which also returns the result), args-block
   pointer in r4; "orr r10,r10,r10" is the client-request marker. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int  _zzq_args[6];                          \
    volatile unsigned int  _zzq_result;                           \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment only: caller's inline asm must first put the
   target address in r4. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                   "orr r9, r9, r9\n\t"                          \
                   : : : "cc", "memory"                          \
                  );                                             \
 } while (0)

#endif /* PLAT_arm_linux */
762
763 /* ------------------------ arm64-linux ------------------------- */
764
#if defined(PLAT_arm64_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotations of x12 by 3+13+51+61 = 128 = 0 mod 64: a no-op on real
   hardware, recognised as magic by Valgrind's JIT. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "ror x12, x12, #3  ;  ror x12, x12, #13 \n\t"         \
            "ror x12, x12, #51 ;  ror x12, x12, #61 \n\t"

/* Default goes in x3 (which also returns the result), args-block
   pointer in x4; "orr x10,x10,x10" is the client-request marker. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long int  _zzq_args[6];                     \
    volatile unsigned long int  _zzq_result;                      \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" ((unsigned long int)(_zzq_default)),   \
                       "r" (&_zzq_args[0])                        \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
  })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = guest_NRADDR */                      \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment only: caller's inline asm must first put the
   target address in x8. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                   "orr x9, x9, x9\n\t"                          \
                   : : : "cc", "memory"                          \
                  );                                             \
 } while (0)

#endif /* PLAT_arm64_linux */
831
832 /* ------------------------ s390x-linux ------------------------ */
833
#if defined(PLAT_s390x_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code. This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 * The lr reg,reg self-moves are architectural no-ops. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                           \
                     "lr 15,15\n\t"                              \
                     "lr 1,1\n\t"                                \
                     "lr 2,2\n\t"                                \
                     "lr 3,3\n\t"

/* One trailing no-op per request kind selects what the JIT does. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

/* Args-block address goes in r2, default in r3; r3 carries the
   result back ("0" ties the default to output operand 0). */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                         \
       _zzq_default, _zzq_request,                               \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                  \
 ({volatile unsigned long int _zzq_args[6];                      \
   volatile unsigned long int _zzq_result;                       \
   _zzq_args[0] = (unsigned long int)(_zzq_request);             \
   _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
   _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
   _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
   _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
   _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
   __asm__ volatile(/* r2 = args */                              \
                    "lgr 2,%1\n\t"                               \
                    /* r3 = default */                           \
                    "lgr 3,%2\n\t"                               \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CLIENT_REQUEST_CODE                        \
                    /* results = r3 */                           \
                    "lgr %0, 3\n\t"                              \
                    : "=d" (_zzq_result)                         \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                    : "cc", "2", "3", "memory"                   \
                   );                                            \
   _zzq_result;                                                  \
 })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                      \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
   volatile unsigned long int __addr;                            \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                    __GET_NR_CONTEXT_CODE                        \
                    "lgr %0, 3\n\t"                              \
                    : "=a" (__addr)                              \
                    :                                            \
                    : "cc", "3", "memory"                        \
                   );                                            \
   _zzq_orig->nraddr = __addr;                                   \
 }

/* Asm text fragment only: caller's inline asm must first put the
   target address in r1. */
#define VALGRIND_CALL_NOREDIR_R1                                 \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     __VEX_INJECT_IR_CODE);                      \
 } while (0)

#endif /* PLAT_s390x_linux */
908
909 /* ------------------------- mips32-linux ---------------- */
910
911 #if defined(PLAT_mips32_linux)
912
913 typedef
914 struct {
915 unsigned int nraddr; /* where's the code? */
916 }
917 OrigFn;
918
919 /* .word 0x342
920 * .word 0x742
921 * .word 0xC2
922 * .word 0x4C2*/
923 #define __SPECIAL_INSTRUCTION_PREAMBLE \
924 "srl $0, $0, 13\n\t" \
925 "srl $0, $0, 29\n\t" \
926 "srl $0, $0, 3\n\t" \
927 "srl $0, $0, 19\n\t"
928
929 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
930 _zzq_default, _zzq_request, \
931 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
932 __extension__ \
933 ({ volatile unsigned int _zzq_args[6]; \
934 volatile unsigned int _zzq_result; \
935 _zzq_args[0] = (unsigned int)(_zzq_request); \
936 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
937 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
938 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
939 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
940 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
941 __asm__ volatile("move $11, %1\n\t" /*default*/ \
942 "move $12, %2\n\t" /*ptr*/ \
943 __SPECIAL_INSTRUCTION_PREAMBLE \
944 /* T3 = client_request ( T4 ) */ \
945 "or $13, $13, $13\n\t" \
946 "move %0, $11\n\t" /*result*/ \
947 : "=r" (_zzq_result) \
948 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
949 : "$11", "$12"); \
950 _zzq_result; \
951 })
952
/* Fetch the non-redirected address of the function being wrapped
   into _zzq_rlval.nraddr.  Under Valgrind the "or $14,$14,$14"
   marker makes the JIT place guest_NRADDR in $11, which we then
   copy out; natively it is a no-op and __addr is whatever was in
   $11 (callers only use this inside wrappers run under Valgrind). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $11 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                     );                                           \
    _zzq_orig->nraddr = __addr;                                   \
  }
966
/* mips32: call through $t9 without letting Valgrind redirect it;
   "or $15,$15,$15" is the JIT's call-noredir marker. */
#define VALGRIND_CALL_NOREDIR_T9                                 \
                  __SPECIAL_INSTRUCTION_PREAMBLE                 \
                  /* call-noredir *%t9 */                        \
                  "or $15, $15, $15\n\t"
971
/* mips32: request VEX IR injection ("or $11,$11,$11" marker);
   no-op when not under Valgrind. */
#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "or $11, $11, $11\n\t"                      \
                    );                                           \
 } while (0)
978
979
980 #endif /* PLAT_mips32_linux */
981
982 /* ------------------------- mips64-linux ---------------- */
983
984 #if defined(PLAT_mips64_linux)
985
/* Captured context of an original (wrapped) function; filled in by
   VALGRIND_GET_NR_CONTEXT below.  nraddr is 64-bit on mips64. */
typedef
   struct {
      unsigned long nraddr; /* where's the code? */
   }
   OrigFn;
991
/* dsll $0,$0, 3
 * dsll $0,$0, 13
 * dsll $0,$0, 29
 * dsll $0,$0, 19*/
/* Four doubleword shifts of the hardwired-zero register $0 — no-ops
   on real hardware, recognised by Valgrind's JIT as the marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
        "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"                       \
        "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
999
/* Issue a client request and yield its result (or _zzq_default when
   not running under Valgrind).  Args go in a 6-doubleword block
   whose address is passed in $12; the reply comes back in $11.
   Fix vs. previous revision: add "memory" to the clobber list — the
   tool reads the args block and some requests read/write client
   memory, so the compiler must assume memory is touched across the
   asm (matches the s390x/tilegx/x86/amd64 variants in this file). */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
   __extension__                                                  \
   ({ volatile unsigned long int _zzq_args[6];                    \
      volatile unsigned long int _zzq_result;                     \
      _zzq_args[0] = (unsigned long int)(_zzq_request);           \
      _zzq_args[1] = (unsigned long int)(_zzq_arg1);              \
      _zzq_args[2] = (unsigned long int)(_zzq_arg2);              \
      _zzq_args[3] = (unsigned long int)(_zzq_arg3);              \
      _zzq_args[4] = (unsigned long int)(_zzq_arg4);              \
      _zzq_args[5] = (unsigned long int)(_zzq_arg5);              \
      __asm__ volatile("move $11, %1\n\t" /*default*/             \
                       "move $12, %2\n\t" /*ptr*/                 \
                       __SPECIAL_INSTRUCTION_PREAMBLE             \
                       /* $11 = client_request ( $12 ) */         \
                       "or $13, $13, $13\n\t"                     \
                       "move %0, $11\n\t" /*result*/              \
                       : "=r" (_zzq_result)                       \
                       : "r" (_zzq_default), "r" (&_zzq_args[0])  \
                       : "$11", "$12", "memory");                 \
      _zzq_result;                                                \
   })
1023
/* Fetch the non-redirected address of the wrapped function into
   _zzq_rlval.nraddr; the "or $14,$14,$14" marker makes the JIT put
   guest_NRADDR in $11. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
   { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                  \
     volatile unsigned long int __addr;                           \
     __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                      /* $11 = guest_NRADDR */                    \
                      "or $14, $14, $14\n\t"                      \
                      "move %0, $11"     /*result*/               \
                      : "=r" (__addr)                             \
                      :                                           \
                      : "$11");                                   \
     _zzq_orig->nraddr = __addr;                                  \
   }
1036
/* mips64: call through $25 ($t9) without Valgrind redirection;
   "or $15,$15,$15" is the JIT's call-noredir marker. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                  __SPECIAL_INSTRUCTION_PREAMBLE                  \
                  /* call-noredir $25 */                          \
                  "or $15, $15, $15\n\t"
1041
/* mips64: request VEX IR injection; no-op when not under Valgrind. */
#define VALGRIND_VEX_INJECT_IR()                                  \
   do {                                                           \
      __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE             \
                       "or $11, $11, $11\n\t"                     \
                      );                                          \
   } while (0)
1048
1049 #endif /* PLAT_mips64_linux */
1050
1051 /* ------------------------ tilegx-linux --------------- */
1052 #if defined(PLAT_tilegx_linux)
1053
/* Captured context of an original (wrapped) function; nraddr is a
   64-bit code address on tilegx. */
typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;
1059 /*** special instruction sequence.
1060 0:02b3c7ff91234fff { moveli zero, 4660 ; moveli zero, 22136 }
1061 8:0091a7ff95678fff { moveli zero, 22136 ; moveli zero, 4660 }
1062 ****/
1063
/* Two bundles of moveli-into-zero (see encoding comment above) —
   no-ops natively, recognised by Valgrind's JIT as the marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                           \
   ".quad  0x02b3c7ff91234fff\n"                                 \
   ".quad  0x0091a7ff95678fff\n"
1067
/* Issue a client request and yield its result (or _zzq_default when
   not under Valgrind).  Args are marshalled into a 6-word block
   whose address is passed in r12; the reply comes back in r11. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                         \
  _zzq_default, _zzq_request,                                    \
  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)         \
  ({ volatile unsigned long long int _zzq_args[6];               \
    volatile unsigned long long int _zzq_result;                 \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);       \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);          \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);          \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);          \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);          \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);          \
    __asm__ volatile("move r11, %1\n\t" /*default*/              \
                     "move r12, %2\n\t" /*ptr*/                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE              \
                     /* r11 = client_request */                  \
                     "or r13, r13, r13\n\t"                      \
                     "move %0, r11\n\t" /*result*/               \
                     : "=r" (_zzq_result)                        \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])   \
                     : "memory", "r11", "r12");                  \
    _zzq_result;                                                 \
  })
1090
/* Fetch the non-redirected address of the wrapped function into
   _zzq_rlval.nraddr; the "or r14,r14,r14" marker makes the JIT put
   guest_NRADDR in r11. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                      \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                  \
    volatile unsigned long long int __addr;                      \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     /* r11 = guest_NRADDR */                    \
                     "or r14, r14, r14\n"                        \
                     "move %0, r11\n"                            \
                     : "=r" (__addr)                             \
                     :                                           \
                     : "memory", "r11"                           \
                     );                                          \
    _zzq_orig->nraddr = __addr;                                  \
  }
1104
/* tilegx: call through r12 without Valgrind redirection;
   "or r15,r15,r15" is the JIT's call-noredir marker. */
#define VALGRIND_CALL_NOREDIR_R12                                \
   __SPECIAL_INSTRUCTION_PREAMBLE                                \
   "or r15, r15, r15\n\t"
1108
/* tilegx: request VEX IR injection; no-op when not under Valgrind. */
#define VALGRIND_VEX_INJECT_IR()                                 \
   do {                                                          \
      __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE            \
                       "or r11, r11, r11\n\t"                    \
                      );                                         \
   } while (0)
1115
1116 #endif /* PLAT_tilegx_linux */
1117
1118 /* Insert assembly code for other platforms here... */
1119
1120 #endif /* NVALGRIND */
1121
1122
1123 /* ------------------------------------------------------------------ */
1124 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1125 /* ugly. It's the least-worst tradeoff I can think of. */
1126 /* ------------------------------------------------------------------ */
1127
1128 /* This section defines magic (a.k.a appalling-hack) macros for doing
1129 guaranteed-no-redirection macros, so as to get from function
1130 wrappers to the functions they are wrapping. The whole point is to
1131 construct standard call sequences, but to do the call itself with a
1132 special no-redirect call pseudo-instruction that the JIT
1133 understands and handles specially. This section is long and
1134 repetitious, and I can't see a way to make it shorter.
1135
1136 The naming scheme is as follows:
1137
1138 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1139
1140 'W' stands for "word" and 'v' for "void". Hence there are
1141 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1142 and for each, the possibility of returning a word-typed result, or
1143 no result.
1144 */
1145
1146 /* Use these to write the name of your wrapper. NOTE: duplicates
1147 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
   the default behaviour equivalence class tag "00000" into the name.
1149 See pub_tool_redir.h for details -- normally you don't need to
1150 think about this, though. */
1151
/* Use an extra level of macroisation so as to ensure the soname/fnname
   args are fully macro-expanded before pasting them together. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
1155
/* Build the z-encoded wrapper name _vgw00000<enc>_<soname>_<fnname>.
   ZU: soname is Z-encoded, fnname unencoded; ZZ: both Z-encoded. */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1161
/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros. The type of the argument _lval is OrigFn. */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)
1167
1168 /* Also provide end-user facilities for function replacement, rather
1169 than wrapping. A replacement function differs from a wrapper in
1170 that it has no way to get hold of the original function being
1171 called, and hence no way to call onwards to it. In a replacement
1172 function, VALGRIND_GET_ORIG_FN always returns zero. */
1173
/* Build the z-encoded replacement name _vgr00000<enc>_<soname>_<fnname>
   ("r" = replace, vs "w" = wrap above). */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1179
1180 /* Derivatives of the main macros below, for calling functions
1181 returning void. */
1182
/* Each void-returning variant just calls the corresponding
   word-returning CALL_FN_W_* and discards the result in a dummy
   volatile local. */
#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1214
1215 /* ----------------- x86-{linux,darwin,solaris} ---------------- */
1216
1217 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1218 || defined(PLAT_x86_solaris)
1219
/* These regs are trashed by the hidden call. No need to mention eax
   as gcc can already see that, plus causes gcc to bomb. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1223
1224 /* Macros to save and align the stack before making a function
1225 call and restore it afterwards as gcc may not keep the stack
1226 pointer aligned if it doesn't realise calls are being made
1227 to other functions. */
1228
/* Save %esp in %edi (declared as trashed by every CALL_FN_ below)
   and round %esp down to a 16-byte boundary; RESTORE undoes it. */
#define VALGRIND_ALIGN_STACK                                      \
      "movl %%esp,%%edi\n\t"                                      \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movl %%edi,%%esp\n\t"
1234
1235 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1236 long) == 4. */
1237
/* x86 hidden-call wrappers, arity 0..3.  Common pattern: 16-align
   the stack (old %esp kept in %edi), pad with "subl" so that after
   pushing the args %esp is again 16-aligned at the call, push args
   right-to-left from _argvec[1..], load the target from _argvec[0]
   into %eax, and call via the no-redirect pseudo-op.  The result is
   returned in %eax. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1324
/* x86 hidden-call wrappers, arity 4..7; same pattern as above.
   The "subl" pad is (16 - 4*nargs mod 16), so %esp stays 16-aligned
   at the call. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1444
/* x86 hidden-call wrappers, arity 8..10; same pattern as above. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1557
/* x86 hidden-call wrappers, arity 11..12; same pattern as above. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1644
1645 #endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris */
1646
1647 /* ---------------- amd64-{linux,darwin,solaris} --------------- */
1648
1649 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1650 || defined(PLAT_amd64_solaris)
1651
1652 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1653
/* These regs are trashed by the hidden call. (%rax is omitted: it is
   already the asm output operand below.) */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"
1657
1658 /* This is all pretty complex. It's so as to make stack unwinding
1659 work reliably. See bug 243270. The basic problem is the sub and
1660 add of 128 of %rsp in all of the following macros. If gcc believes
1661 the CFA is in %rsp, then unwinding may fail, because what's at the
1662 CFA is not what gcc "expected" when it constructs the CFIs for the
1663 places where the macros are instantiated.
1664
1665 But we can't just add a CFI annotation to increase the CFA offset
1666 by 128, to match the sub of 128 from %rsp, because we don't know
1667 whether gcc has chosen %rsp as the CFA at that point, or whether it
1668 has chosen some other register (eg, %rbp). In the latter case,
1669 adding a CFI annotation to change the CFA offset is simply wrong.
1670
1671 So the solution is to get hold of the CFA using
1672 __builtin_dwarf_cfa(), put it in a known register, and add a
1673 CFI annotation to say what the register is. We choose %rbp for
1674 this (perhaps perversely), because:
1675
1676 (1) %rbp is already subject to unwinding. If a new register was
1677 chosen then the unwinder would have to unwind it in all stack
1678 traces, which is expensive, and
1679
1680 (2) %rbp is already subject to precise exception updates in the
1681 JIT. If a new register was chosen, we'd have to have precise
1682 exceptions for it too, which reduces performance of the
1683 generated code.
1684
1685 However .. one extra complication. We can't just whack the result
1686 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1687 list of trashed registers at the end of the inline assembly
1688 fragments; gcc won't allow %rbp to appear in that list. Hence
1689 instead we need to stash %rbp in %r15 for the duration of the asm,
1690 and say that %r15 is trashed instead. gcc seems happy to go with
1691 that.
1692
1693 Oh .. and this all needs to be conditionalised so that it is
1694 unchanged from before this commit, when compiled with older gccs
1695 that don't support __builtin_dwarf_cfa. Furthermore, since
1696 this header file is freestanding, it has to be independent of
1697 config.h, and so the following conditionalisation cannot depend on
1698 configure time checks.
1699
1700 Although it's not clear from
1701 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1702 this expression excludes Darwin.
1703 .cfi directives in Darwin assembly appear to be completely
1704 different and I haven't investigated how they work.
1705
1706 For even more entertainment value, note we have to use the
1707 completely undocumented __builtin_dwarf_cfa(), which appears to
1708 really compute the CFA, whereas __builtin_frame_address(0) claims
1709 to but actually doesn't. See
1710 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1711 */
/* See the long comment above: stash %rbp in %r15, load the real CFA
   (from __builtin_dwarf_cfa(), passed in as %2) into %rbp, and tell
   the unwinder via .cfi that %rbp is now the CFA.  The epilogue
   restores both.  Compiled out when CFI asm is unavailable. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
1728
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */
/* Old %rsp is kept in %r14 (declared trashed by every CALL_FN_). */
#define VALGRIND_ALIGN_STACK                                      \
      "movq %%rsp,%%r14\n\t"                                      \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movq %%r14,%%rsp\n\t"
1739
1740 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1741 long) == 8. */
1742
1743 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1744 macros. In order not to trash the stack redzone, we need to drop
1745 %rsp by 128 before the hidden call, and restore afterwards. The
1746 nastyness is that it is only by luck that the stack still appears
1747 to be unwindable during the hidden call - since then the behaviour
1748 of any routine using this macro does not match what the CFI data
1749 says. Sigh.
1750
1751 Why is this important? Imagine that a wrapper has a stack
1752 allocated local, and passes to the hidden call, a pointer to it.
1753 Because gcc does not know about the hidden call, it may allocate
1754 that local in the redzone. Unfortunately the hidden call may then
1755 trash it before it comes to use it. So we must step clear of the
1756 redzone, for the duration of the hidden call, to make it safe.
1757
1758 Probably the same problem afflicts the other redzone-style ABIs too
1759 (ppc64-linux); but for those, the stack is
1760 self describing (none of this CFI nonsense) so at least messing
1761 with the stack pointer doesn't give a danger of non-unwindable
1762 stack. */
1763
/* amd64 hidden call, arity 0.  "subq $128" steps over the red zone
   (see the NB comment above); args would go in rdi/rsi/rdx/rcx/r8/r9;
   target comes from _argvec[0]; result returns in %rax. */
#define CALL_FN_W_v(lval, orig)                                        \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[1];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1784
/* amd64 hidden call, arity 1: arg1 -> %rdi. */
#define CALL_FN_W_W(lval, orig, arg1)                                  \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[2];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1807
/* amd64 hidden call, arity 2: args -> %rdi, %rsi. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                            \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[3];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1832
/* amd64 hidden call, arity 3: args -> %rdi, %rsi, %rdx. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                      \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[4];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1859
/* Call an orig function with 4 word args.  Args go in %rdi, %rsi,
   %rdx, %rcx per the SysV AMD64 ABI; "subq $128" skips the red zone. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
1888
/* Call an orig function with 5 word args.  Args go in %rdi, %rsi,
   %rdx, %rcx, %r8 per the SysV AMD64 ABI; all five fit in registers,
   so nothing is pushed and "subq $128" (red-zone skip) keeps the
   16-byte stack alignment established by VALGRIND_ALIGN_STACK. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
1919
/* Call an orig function with 6 word args — the last case where every
   argument fits in a register (%rdi, %rsi, %rdx, %rcx, %r8, %r9 per
   the SysV AMD64 ABI).  "subq $128" skips the red zone. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
1952
/* Call an orig function with 7 word args.  The first six go in
   registers; arg7 is pushed onto the stack.  Note "subq $136", not
   $128: 136 + one 8-byte push = 144, a multiple of 16, so the stack
   stays 16-byte aligned at the call after skipping the red zone. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $136,%%rsp\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
1988
/* Call an orig function with 8 word args.  Six in registers, arg8 and
   arg7 pushed (reverse order so arg7 ends up lower).  128 + two 8-byte
   pushes = 144, a multiple of 16, so alignment is preserved. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "pushq 64(%%rax)\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2026
/* Call an orig function with 9 word args.  Six in registers, three
   pushed (args 9..7, highest first).  136 + three 8-byte pushes = 160,
   a multiple of 16, so alignment is preserved. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $136,%%rsp\n\t" \
         "pushq 72(%%rax)\n\t" \
         "pushq 64(%%rax)\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2066
/* Call an orig function with 10 word args.  Six in registers, four
   pushed (args 10..7).  128 + four 8-byte pushes = 160, a multiple of
   16, so alignment is preserved. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "pushq 80(%%rax)\n\t" \
         "pushq 72(%%rax)\n\t" \
         "pushq 64(%%rax)\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2108
/* Call an orig function with 11 word args.  Six in registers, five
   pushed (args 11..7).  136 + five 8-byte pushes = 176, a multiple of
   16, so alignment is preserved. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $136,%%rsp\n\t" \
         "pushq 88(%%rax)\n\t" \
         "pushq 80(%%rax)\n\t" \
         "pushq 72(%%rax)\n\t" \
         "pushq 64(%%rax)\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2152
/* Call an orig function with 12 word args.  Six in registers, six
   pushed (args 12..7).  128 + six 8-byte pushes = 176, a multiple of
   16, so alignment is preserved. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "pushq 96(%%rax)\n\t" \
         "pushq 88(%%rax)\n\t" \
         "pushq 80(%%rax)\n\t" \
         "pushq 72(%%rax)\n\t" \
         "pushq 64(%%rax)\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2198
2199 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
2200
2201 /* ------------------------ ppc32-linux ------------------------ */
2202
2203 #if defined(PLAT_ppc32_linux)
2204
2205 /* This is useful for finding out about the on-stack stuff:
2206
2207 extern int f9 ( int,int,int,int,int,int,int,int,int );
2208 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2209 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2210 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2211
2212 int g9 ( void ) {
2213 return f9(11,22,33,44,55,66,77,88,99);
2214 }
2215 int g10 ( void ) {
2216 return f10(11,22,33,44,55,66,77,88,99,110);
2217 }
2218 int g11 ( void ) {
2219 return f11(11,22,33,44,55,66,77,88,99,110,121);
2220 }
2221 int g12 ( void ) {
2222 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2223 }
2224 */
2225
2226 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2227
/* These regs are trashed by the hidden call.  Listed in the asm
   clobber lists of every ppc32 CALL_FN_* macro below so gcc neither
   caches values in them across the call nor allocates %1 there. */
#define __CALLER_SAVED_REGS \
   "lr", "ctr", "xer", \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
   "r11", "r12", "r13"
2234
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions.
   "mr 28,1" saves the current stack pointer in r28 (which is why the
   CALL_FN_* macros below clobber "r28"); "rlwinm 1,1,0,0,27" clears
   the low 4 bits of r1, forcing 16-byte alignment downward. */

#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rlwinm 1,1,0,0,27\n\t"
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
2245
2246 /* These CALL_FN_ macros assume that on ppc32-linux,
2247 sizeof(unsigned long) == 4. */
2248
/* Call an orig function with no arguments (ppc32).  &_argvec[0] comes
   in via %1; r11 is loaded with the target address and the branch
   helper jumps through it.  The result is returned in r3 per the
   ppc32 ABI and copied out via "mr %0,3". */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2268
/* Call an orig function with 1 word arg (ppc32).  arg1 is loaded into
   r3 from _argvec[1] (offset 4, since sizeof(unsigned long)==4 here);
   the target address at offset 0 is loaded into r11 last. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2290
/* Call an orig function with 2 word args (ppc32).  Args go in r3, r4
   from _argvec offsets 4 and 8. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2314
/* Call an orig function with 3 word args (ppc32).  Args go in r3..r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2340
/* Call an orig function with 4 word args (ppc32).  Args go in r3..r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2368
/* Call an orig function with 5 word args (ppc32).  Args go in r3..r7. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2398
/* Call an orig function with 6 word args (ppc32).  Args go in r3..r8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2430
/* Call an orig function with 7 word args (ppc32).  Args go in r3..r9;
   still all in registers (r3-r10 hold the first eight). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2465
/* Call an orig function with 8 word args (ppc32).  Args go in
   r3..r10 — the full set of ppc32 integer argument registers; from 9
   args onward the remainder spill to the stack (see CALL_FN_W_9W). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2502
/* Call an orig function with 9 word args (ppc32).  First eight go in
   r3..r10; arg9 goes on the stack.  "addi 1,1,-16" opens a 16-byte
   frame (keeping 16-byte alignment) and arg9 is stored at 8(1) — the
   first parameter-save slot past the 8-byte frame header; see the f9
   example in the comment above this family.  r3 is used as a scratch
   register for the store, so the stack args are placed BEFORE the
   register args are loaded. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-16\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2545
/* Call an orig function with 10 word args (ppc32).  First eight in
   r3..r10; arg9/arg10 stored to 8(1)/12(1) of a fresh 16-byte frame,
   using r3 as scratch before the register args are loaded. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-16\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2592
/* Call an orig function with 11 word args (ppc32).  First eight in
   r3..r10; args 9-11 stored to 8(1)/12(1)/16(1).  The frame grows to
   32 bytes here (three stack slots no longer fit a 16-byte frame,
   and 32 preserves 16-byte alignment). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-32\n\t" \
         /* arg11 */ \
         "lwz 3,44(11)\n\t" \
         "stw 3,16(1)\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2643
/* Call an orig function with 12 word args (ppc32).  First eight in
   r3..r10; args 9-12 stored to 8(1)..20(1) of a 32-byte frame. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      _argvec[12] = (unsigned long)arg12; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-32\n\t" \
         /* arg12 */ \
         "lwz 3,48(11)\n\t" \
         "stw 3,20(1)\n\t" \
         /* arg11 */ \
         "lwz 3,44(11)\n\t" \
         "stw 3,16(1)\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2698
2699 #endif /* PLAT_ppc32_linux */
2700
2701 /* ------------------------ ppc64-linux ------------------------ */
2702
2703 #if defined(PLAT_ppc64be_linux)
2704
2705 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2706
/* These regs are trashed by the hidden call.  Same list as for ppc32;
   used in the clobber lists of all ppc64be CALL_FN_* macros below. */
#define __CALLER_SAVED_REGS \
   "lr", "ctr", "xer", \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
   "r11", "r12", "r13"
2713
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions.
   "mr 28,1" saves r1 in r28 (hence the "r28" clobber below);
   "rldicr 1,1,0,59" is the 64-bit equivalent of the ppc32 rlwinm:
   it clears the low 4 bits of r1, aligning it down to 16 bytes. */

#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
2724
2725 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2726 long) == 8. */
2727
/* Call an orig function with no arguments (ppc64be).  The asm is
   handed &_argvec[2] in %1, so negative offsets reach the bookkeeping
   slots: -8(11) is _argvec[1] (the target's TOC pointer from
   _orig.r2) and -16(11) is _argvec[0], where the caller's current r2
   is stashed across the call and restored afterwards.  The target
   address sits at 0(11); the result comes back in r3. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+0]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2753
/* Call an orig function with 1 word arg (ppc64be).  Same TOC-pointer
   save/switch/restore dance as CALL_FN_W_v; arg1 sits at +8 from
   &_argvec[2] (sizeof(unsigned long)==8 here) and is loaded into r3. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+1]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */ \
         "ld 11, 0(11)\n\t"   /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2781
/* Call the non-redirected code at _orig.nraddr with 2 word-sized
   args (r3-r4); word result -> lval.  0(11)=target, -8(11)=its
   TOC, -16(11) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+2];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 11,%1\n\t"  \
         "std 2,-16(11)\n\t"  /* save tocptr */  \
         "ld 2,-8(11)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(11)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */  \
         "ld 11, 0(11)\n\t"  /* target->r11 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11  \
         "mr 11,%1\n\t"  /* re-point r11 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(11)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
2811
/* Call the non-redirected code at _orig.nraddr with 3 word-sized
   args (r3-r5); word result -> lval.  0(11)=target, -8(11)=its
   TOC, -16(11) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+3];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 11,%1\n\t"  \
         "std 2,-16(11)\n\t"  /* save tocptr */  \
         "ld 2,-8(11)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(11)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */  \
         "ld 11, 0(11)\n\t"  /* target->r11 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11  \
         "mr 11,%1\n\t"  /* re-point r11 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(11)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
2843
/* Call the non-redirected code at _orig.nraddr with 4 word-sized
   args (r3-r6); word result -> lval.  0(11)=target, -8(11)=its
   TOC, -16(11) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+4];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 11,%1\n\t"  \
         "std 2,-16(11)\n\t"  /* save tocptr */  \
         "ld 2,-8(11)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(11)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */  \
         "ld 11, 0(11)\n\t"  /* target->r11 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11  \
         "mr 11,%1\n\t"  /* re-point r11 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(11)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
2877
/* Call the non-redirected code at _orig.nraddr with 5 word-sized
   args (r3-r7); word result -> lval.  0(11)=target, -8(11)=its
   TOC, -16(11) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+5];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      _argvec[2+5] = (unsigned long)arg5;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 11,%1\n\t"  \
         "std 2,-16(11)\n\t"  /* save tocptr */  \
         "ld 2,-8(11)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(11)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */  \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */  \
         "ld 11, 0(11)\n\t"  /* target->r11 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11  \
         "mr 11,%1\n\t"  /* re-point r11 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(11)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
2913
/* Call the non-redirected code at _orig.nraddr with 6 word-sized
   args (r3-r8); word result -> lval.  0(11)=target, -8(11)=its
   TOC, -16(11) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+6];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      _argvec[2+5] = (unsigned long)arg5;  \
      _argvec[2+6] = (unsigned long)arg6;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 11,%1\n\t"  \
         "std 2,-16(11)\n\t"  /* save tocptr */  \
         "ld 2,-8(11)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(11)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */  \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */  \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */  \
         "ld 11, 0(11)\n\t"  /* target->r11 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11  \
         "mr 11,%1\n\t"  /* re-point r11 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(11)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
2951
/* Call the non-redirected code at _orig.nraddr with 7 word-sized
   args (r3-r9); word result -> lval.  0(11)=target, -8(11)=its
   TOC, -16(11) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+7];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      _argvec[2+5] = (unsigned long)arg5;  \
      _argvec[2+6] = (unsigned long)arg6;  \
      _argvec[2+7] = (unsigned long)arg7;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 11,%1\n\t"  \
         "std 2,-16(11)\n\t"  /* save tocptr */  \
         "ld 2,-8(11)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(11)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */  \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */  \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */  \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */  \
         "ld 11, 0(11)\n\t"  /* target->r11 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11  \
         "mr 11,%1\n\t"  /* re-point r11 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(11)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
2992
/* Call the non-redirected code at _orig.nraddr with 8 word-sized
   args (r3-r10); word result -> lval.  0(11)=target, -8(11)=its
   TOC, -16(11) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+8];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      _argvec[2+5] = (unsigned long)arg5;  \
      _argvec[2+6] = (unsigned long)arg6;  \
      _argvec[2+7] = (unsigned long)arg7;  \
      _argvec[2+8] = (unsigned long)arg8;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 11,%1\n\t"  \
         "std 2,-16(11)\n\t"  /* save tocptr */  \
         "ld 2,-8(11)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(11)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */  \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */  \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */  \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */  \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */  \
         "ld 11, 0(11)\n\t"  /* target->r11 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11  \
         "mr 11,%1\n\t"  /* re-point r11 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(11)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3035
/* Call the non-redirected code at _orig.nraddr with 9 word-sized
   args: r3-r10 plus arg9 spilled into an extended stack frame
   (offset 112(1); looks like the ELFv1 parameter save area --
   NOTE(review): confirm against the BE ABI doc).  r3 is used as
   a scratch register for the spill before being loaded with
   arg1. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+9];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      _argvec[2+5] = (unsigned long)arg5;  \
      _argvec[2+6] = (unsigned long)arg6;  \
      _argvec[2+7] = (unsigned long)arg7;  \
      _argvec[2+8] = (unsigned long)arg8;  \
      _argvec[2+9] = (unsigned long)arg9;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 11,%1\n\t"  \
         "std 2,-16(11)\n\t"  /* save tocptr */  \
         "ld 2,-8(11)\n\t"  /* use nraddr's tocptr */  \
         "addi 1,1,-128\n\t"  /* expand stack frame */  \
         /* arg9 (via r3 as scratch) */  \
         "ld 3,72(11)\n\t"  \
         "std 3,112(1)\n\t"  \
         /* args1-8 */  \
         "ld 3, 8(11)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */  \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */  \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */  \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */  \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */  \
         "ld 11, 0(11)\n\t"  /* target->r11 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11  \
         "mr 11,%1\n\t"  /* re-point r11 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(11)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3084
/* Call the non-redirected code at _orig.nraddr with 10 word-sized
   args: r3-r10 plus arg9/arg10 spilled into an extended stack
   frame at 112(1)/120(1).  r3 is used as a scratch register for
   the spills before being loaded with arg1. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+10];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      _argvec[2+5] = (unsigned long)arg5;  \
      _argvec[2+6] = (unsigned long)arg6;  \
      _argvec[2+7] = (unsigned long)arg7;  \
      _argvec[2+8] = (unsigned long)arg8;  \
      _argvec[2+9] = (unsigned long)arg9;  \
      _argvec[2+10] = (unsigned long)arg10;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 11,%1\n\t"  \
         "std 2,-16(11)\n\t"  /* save tocptr */  \
         "ld 2,-8(11)\n\t"  /* use nraddr's tocptr */  \
         "addi 1,1,-128\n\t"  /* expand stack frame */  \
         /* arg10 (via r3 as scratch) */  \
         "ld 3,80(11)\n\t"  \
         "std 3,120(1)\n\t"  \
         /* arg9 */  \
         "ld 3,72(11)\n\t"  \
         "std 3,112(1)\n\t"  \
         /* args1-8 */  \
         "ld 3, 8(11)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */  \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */  \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */  \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */  \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */  \
         "ld 11, 0(11)\n\t"  /* target->r11 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11  \
         "mr 11,%1\n\t"  /* re-point r11 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(11)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3137
/* Call the non-redirected code at _orig.nraddr with 11 word-sized
   args: r3-r10 plus arg9..arg11 spilled into an extended stack
   frame at 112(1)..128(1); the frame grows to 144 bytes here.
   r3 is used as a scratch register for the spills before being
   loaded with arg1. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+11];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      _argvec[2+5] = (unsigned long)arg5;  \
      _argvec[2+6] = (unsigned long)arg6;  \
      _argvec[2+7] = (unsigned long)arg7;  \
      _argvec[2+8] = (unsigned long)arg8;  \
      _argvec[2+9] = (unsigned long)arg9;  \
      _argvec[2+10] = (unsigned long)arg10;  \
      _argvec[2+11] = (unsigned long)arg11;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 11,%1\n\t"  \
         "std 2,-16(11)\n\t"  /* save tocptr */  \
         "ld 2,-8(11)\n\t"  /* use nraddr's tocptr */  \
         "addi 1,1,-144\n\t"  /* expand stack frame */  \
         /* arg11 (via r3 as scratch) */  \
         "ld 3,88(11)\n\t"  \
         "std 3,128(1)\n\t"  \
         /* arg10 */  \
         "ld 3,80(11)\n\t"  \
         "std 3,120(1)\n\t"  \
         /* arg9 */  \
         "ld 3,72(11)\n\t"  \
         "std 3,112(1)\n\t"  \
         /* args1-8 */  \
         "ld 3, 8(11)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */  \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */  \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */  \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */  \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */  \
         "ld 11, 0(11)\n\t"  /* target->r11 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11  \
         "mr 11,%1\n\t"  /* re-point r11 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(11)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3194
/* Call the non-redirected code at _orig.nraddr with 12 word-sized
   args: r3-r10 plus arg9..arg12 spilled into an extended stack
   frame at 112(1)..136(1); the frame grows to 144 bytes here.
   r3 is used as a scratch register for the spills before being
   loaded with arg1. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+12];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      _argvec[2+5] = (unsigned long)arg5;  \
      _argvec[2+6] = (unsigned long)arg6;  \
      _argvec[2+7] = (unsigned long)arg7;  \
      _argvec[2+8] = (unsigned long)arg8;  \
      _argvec[2+9] = (unsigned long)arg9;  \
      _argvec[2+10] = (unsigned long)arg10;  \
      _argvec[2+11] = (unsigned long)arg11;  \
      _argvec[2+12] = (unsigned long)arg12;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 11,%1\n\t"  \
         "std 2,-16(11)\n\t"  /* save tocptr */  \
         "ld 2,-8(11)\n\t"  /* use nraddr's tocptr */  \
         "addi 1,1,-144\n\t"  /* expand stack frame */  \
         /* arg12 (via r3 as scratch) */  \
         "ld 3,96(11)\n\t"  \
         "std 3,136(1)\n\t"  \
         /* arg11 */  \
         "ld 3,88(11)\n\t"  \
         "std 3,128(1)\n\t"  \
         /* arg10 */  \
         "ld 3,80(11)\n\t"  \
         "std 3,120(1)\n\t"  \
         /* arg9 */  \
         "ld 3,72(11)\n\t"  \
         "std 3,112(1)\n\t"  \
         /* args1-8 */  \
         "ld 3, 8(11)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */  \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */  \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */  \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */  \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */  \
         "ld 11, 0(11)\n\t"  /* target->r11 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11  \
         "mr 11,%1\n\t"  /* re-point r11 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(11)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3255
3256 #endif /* PLAT_ppc64be_linux */
3257
3258 /* ------------------------- ppc64le-linux ----------------------- */
3259 #if defined(PLAT_ppc64le_linux)
3260
3261 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3262
/* These regs are trashed by the hidden call.  Listing them as
   clobbers stops gcc caching values in, or allocating, any of
   them across the CALL_FN_* asm blocks. */
#define __CALLER_SAVED_REGS  \
   "lr", "ctr", "xer",  \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",  \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",  \
   "r11", "r12", "r13"
3269
3270 /* Macros to save and align the stack before making a function
3271 call and restore it afterwards as gcc may not keep the stack
3272 pointer aligned if it doesn't realise calls are being made
3273 to other functions. */
3274
#define VALGRIND_ALIGN_STACK               \
   "mr 28,1\n\t"         /* save sp in r28 (r28 is declared clobbered by every user) */ \
   "rldicr 1,1,0,59\n\t" /* clear low 4 bits of sp: round down to 16-byte alignment */
#define VALGRIND_RESTORE_STACK             \
   "mr 1,28\n\t"         /* restore the original sp saved by VALGRIND_ALIGN_STACK */
3280
3281 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3282 long) == 8. */
3283
/* Call the non-redirected code at _orig.nraddr with no args;
   word-sized result -> lval.  r12 (%1) = &_argvec[2]: 0(12) is
   the target address, -8(12) its TOC (_orig.r2), and -16(12)
   (_argvec[0]) parks the caller's r2 across the call.  (LE uses
   r12 rather than BE's r11 as the branch register.) */
#define CALL_FN_W_v(lval, orig)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+0];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 12,%1\n\t"  \
         "std 2,-16(12)\n\t"  /* save tocptr */  \
         "ld 2,-8(12)\n\t"  /* use nraddr's tocptr */  \
         "ld 12, 0(12)\n\t"  /* target->r12 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12  \
         "mr 12,%1\n\t"  /* re-point r12 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(12)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3309
/* Call the non-redirected code at _orig.nraddr with 1 word-sized
   arg (r3); word result -> lval.  0(12)=target, -8(12)=its TOC,
   -16(12) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_W(lval, orig, arg1)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+1];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 12,%1\n\t"  \
         "std 2,-16(12)\n\t"  /* save tocptr */  \
         "ld 2,-8(12)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(12)\n\t"  /* arg1->r3 */  \
         "ld 12, 0(12)\n\t"  /* target->r12 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12  \
         "mr 12,%1\n\t"  /* re-point r12 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(12)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3337
/* Call the non-redirected code at _orig.nraddr with 2 word-sized
   args (r3-r4); word result -> lval.  0(12)=target, -8(12)=its
   TOC, -16(12) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+2];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 12,%1\n\t"  \
         "std 2,-16(12)\n\t"  /* save tocptr */  \
         "ld 2,-8(12)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(12)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */  \
         "ld 12, 0(12)\n\t"  /* target->r12 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12  \
         "mr 12,%1\n\t"  /* re-point r12 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(12)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3367
/* Call the non-redirected code at _orig.nraddr with 3 word-sized
   args (r3-r5); word result -> lval.  0(12)=target, -8(12)=its
   TOC, -16(12) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+3];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 12,%1\n\t"  \
         "std 2,-16(12)\n\t"  /* save tocptr */  \
         "ld 2,-8(12)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(12)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */  \
         "ld 12, 0(12)\n\t"  /* target->r12 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12  \
         "mr 12,%1\n\t"  /* re-point r12 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(12)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3399
/* Call the non-redirected code at _orig.nraddr with 4 word-sized
   args (r3-r6); word result -> lval.  0(12)=target, -8(12)=its
   TOC, -16(12) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+4];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 12,%1\n\t"  \
         "std 2,-16(12)\n\t"  /* save tocptr */  \
         "ld 2,-8(12)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(12)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */  \
         "ld 12, 0(12)\n\t"  /* target->r12 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12  \
         "mr 12,%1\n\t"  /* re-point r12 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(12)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3433
/* Call the non-redirected code at _orig.nraddr with 5 word-sized
   args (r3-r7); word result -> lval.  0(12)=target, -8(12)=its
   TOC, -16(12) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+5];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      _argvec[2+5] = (unsigned long)arg5;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 12,%1\n\t"  \
         "std 2,-16(12)\n\t"  /* save tocptr */  \
         "ld 2,-8(12)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(12)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */  \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */  \
         "ld 12, 0(12)\n\t"  /* target->r12 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12  \
         "mr 12,%1\n\t"  /* re-point r12 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(12)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3469
/* Call the non-redirected code at _orig.nraddr with 6 word-sized
   args (r3-r8); word result -> lval.  0(12)=target, -8(12)=its
   TOC, -16(12) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+6];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      _argvec[2+5] = (unsigned long)arg5;  \
      _argvec[2+6] = (unsigned long)arg6;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 12,%1\n\t"  \
         "std 2,-16(12)\n\t"  /* save tocptr */  \
         "ld 2,-8(12)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(12)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */  \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */  \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */  \
         "ld 12, 0(12)\n\t"  /* target->r12 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12  \
         "mr 12,%1\n\t"  /* re-point r12 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(12)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3507
/* Call the non-redirected code at _orig.nraddr with 7 word-sized
   args (r3-r9); word result -> lval.  0(12)=target, -8(12)=its
   TOC, -16(12) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+7];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      _argvec[2+5] = (unsigned long)arg5;  \
      _argvec[2+6] = (unsigned long)arg6;  \
      _argvec[2+7] = (unsigned long)arg7;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 12,%1\n\t"  \
         "std 2,-16(12)\n\t"  /* save tocptr */  \
         "ld 2,-8(12)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(12)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */  \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */  \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */  \
         "ld 9, 56(12)\n\t"  /* arg7->r9 */  \
         "ld 12, 0(12)\n\t"  /* target->r12 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12  \
         "mr 12,%1\n\t"  /* re-point r12 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(12)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3548
/* Call the non-redirected code at _orig.nraddr with 8 word-sized
   args (r3-r10); word result -> lval.  0(12)=target, -8(12)=its
   TOC, -16(12) parks the caller's TOC (r2) across the call. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+8];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      _argvec[2+5] = (unsigned long)arg5;  \
      _argvec[2+6] = (unsigned long)arg6;  \
      _argvec[2+7] = (unsigned long)arg7;  \
      _argvec[2+8] = (unsigned long)arg8;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 12,%1\n\t"  \
         "std 2,-16(12)\n\t"  /* save tocptr */  \
         "ld 2,-8(12)\n\t"  /* use nraddr's tocptr */  \
         "ld 3, 8(12)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */  \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */  \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */  \
         "ld 9, 56(12)\n\t"  /* arg7->r9 */  \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */  \
         "ld 12, 0(12)\n\t"  /* target->r12 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12  \
         "mr 12,%1\n\t"  /* re-point r12 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(12)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3591
/* Call the non-redirected code at _orig.nraddr with 9 word-sized
   args: r3-r10 plus arg9 spilled into an extended stack frame
   (offset 96(1); looks like the ELFv2 parameter save area, which
   sits lower than BE's 112 -- NOTE(review): confirm against the
   LE ABI doc).  r3 is used as a scratch register for the spill
   before being loaded with arg1. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+9];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      _argvec[2+5] = (unsigned long)arg5;  \
      _argvec[2+6] = (unsigned long)arg6;  \
      _argvec[2+7] = (unsigned long)arg7;  \
      _argvec[2+8] = (unsigned long)arg8;  \
      _argvec[2+9] = (unsigned long)arg9;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 12,%1\n\t"  \
         "std 2,-16(12)\n\t"  /* save tocptr */  \
         "ld 2,-8(12)\n\t"  /* use nraddr's tocptr */  \
         "addi 1,1,-128\n\t"  /* expand stack frame */  \
         /* arg9 (via r3 as scratch) */  \
         "ld 3,72(12)\n\t"  \
         "std 3,96(1)\n\t"  \
         /* args1-8 */  \
         "ld 3, 8(12)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */  \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */  \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */  \
         "ld 9, 56(12)\n\t"  /* arg7->r9 */  \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */  \
         "ld 12, 0(12)\n\t"  /* target->r12 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12  \
         "mr 12,%1\n\t"  /* re-point r12 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(12)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3640
/* Call the non-redirected code at _orig.nraddr with 10 word-sized
   args: r3-r10 plus arg9/arg10 spilled into an extended stack
   frame at 96(1)/104(1).  r3 is used as a scratch register for
   the spills before being loaded with arg1. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)  \
   do {  \
      volatile OrigFn        _orig = (orig);  \
      volatile unsigned long _argvec[3+10];  \
      volatile unsigned long _res;  \
      /* _argvec[0] holds current r2 across the call */  \
      _argvec[1] = (unsigned long)_orig.r2;  \
      _argvec[2] = (unsigned long)_orig.nraddr;  \
      _argvec[2+1] = (unsigned long)arg1;  \
      _argvec[2+2] = (unsigned long)arg2;  \
      _argvec[2+3] = (unsigned long)arg3;  \
      _argvec[2+4] = (unsigned long)arg4;  \
      _argvec[2+5] = (unsigned long)arg5;  \
      _argvec[2+6] = (unsigned long)arg6;  \
      _argvec[2+7] = (unsigned long)arg7;  \
      _argvec[2+8] = (unsigned long)arg8;  \
      _argvec[2+9] = (unsigned long)arg9;  \
      _argvec[2+10] = (unsigned long)arg10;  \
      __asm__ volatile(  \
         VALGRIND_ALIGN_STACK  \
         "mr 12,%1\n\t"  \
         "std 2,-16(12)\n\t"  /* save tocptr */  \
         "ld 2,-8(12)\n\t"  /* use nraddr's tocptr */  \
         "addi 1,1,-128\n\t"  /* expand stack frame */  \
         /* arg10 (via r3 as scratch) */  \
         "ld 3,80(12)\n\t"  \
         "std 3,104(1)\n\t"  \
         /* arg9 */  \
         "ld 3,72(12)\n\t"  \
         "std 3,96(1)\n\t"  \
         /* args1-8 */  \
         "ld 3, 8(12)\n\t"  /* arg1->r3 */  \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */  \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */  \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */  \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */  \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */  \
         "ld 9, 56(12)\n\t"  /* arg7->r9 */  \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */  \
         "ld 12, 0(12)\n\t"  /* target->r12 */  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12  \
         "mr 12,%1\n\t"  /* re-point r12 at _argvec[2] */  \
         "mr %0,3\n\t"  /* result from r3 */  \
         "ld 2,-16(12)\n\t"  /* restore tocptr */  \
         VALGRIND_RESTORE_STACK  \
         : /*out*/   "=r" (_res)  \
         : /*in*/    "r" (&_argvec[2])  \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );  \
      lval = (__typeof__(lval)) _res;  \
   } while (0)
3693
3694 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3695 arg7,arg8,arg9,arg10,arg11) \
3696 do { \
3697 volatile OrigFn _orig = (orig); \
3698 volatile unsigned long _argvec[3+11]; \
3699 volatile unsigned long _res; \
3700 /* _argvec[0] holds current r2 across the call */ \
3701 _argvec[1] = (unsigned long)_orig.r2; \
3702 _argvec[2] = (unsigned long)_orig.nraddr; \
3703 _argvec[2+1] = (unsigned long)arg1; \
3704 _argvec[2+2] = (unsigned long)arg2; \
3705 _argvec[2+3] = (unsigned long)arg3; \
3706 _argvec[2+4] = (unsigned long)arg4; \
3707 _argvec[2+5] = (unsigned long)arg5; \
3708 _argvec[2+6] = (unsigned long)arg6; \
3709 _argvec[2+7] = (unsigned long)arg7; \
3710 _argvec[2+8] = (unsigned long)arg8; \
3711 _argvec[2+9] = (unsigned long)arg9; \
3712 _argvec[2+10] = (unsigned long)arg10; \
3713 _argvec[2+11] = (unsigned long)arg11; \
3714 __asm__ volatile( \
3715 VALGRIND_ALIGN_STACK \
3716 "mr 12,%1\n\t" \
3717 "std 2,-16(12)\n\t" /* save tocptr */ \
3718 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3719 "addi 1,1,-144\n\t" /* expand stack frame */ \
3720 /* arg11 */ \
3721 "ld 3,88(12)\n\t" \
3722 "std 3,112(1)\n\t" \
3723 /* arg10 */ \
3724 "ld 3,80(12)\n\t" \
3725 "std 3,104(1)\n\t" \
3726 /* arg9 */ \
3727 "ld 3,72(12)\n\t" \
3728 "std 3,96(1)\n\t" \
3729 /* args1-8 */ \
3730 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3731 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3732 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3733 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3734 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3735 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3736 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3737 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3738 "ld 12, 0(12)\n\t" /* target->r12 */ \
3739 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3740 "mr 12,%1\n\t" \
3741 "mr %0,3\n\t" \
3742 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3743 VALGRIND_RESTORE_STACK \
3744 : /*out*/ "=r" (_res) \
3745 : /*in*/ "r" (&_argvec[2]) \
3746 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3747 ); \
3748 lval = (__typeof__(lval)) _res; \
3749 } while (0)
3750
3751 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3752 arg7,arg8,arg9,arg10,arg11,arg12) \
3753 do { \
3754 volatile OrigFn _orig = (orig); \
3755 volatile unsigned long _argvec[3+12]; \
3756 volatile unsigned long _res; \
3757 /* _argvec[0] holds current r2 across the call */ \
3758 _argvec[1] = (unsigned long)_orig.r2; \
3759 _argvec[2] = (unsigned long)_orig.nraddr; \
3760 _argvec[2+1] = (unsigned long)arg1; \
3761 _argvec[2+2] = (unsigned long)arg2; \
3762 _argvec[2+3] = (unsigned long)arg3; \
3763 _argvec[2+4] = (unsigned long)arg4; \
3764 _argvec[2+5] = (unsigned long)arg5; \
3765 _argvec[2+6] = (unsigned long)arg6; \
3766 _argvec[2+7] = (unsigned long)arg7; \
3767 _argvec[2+8] = (unsigned long)arg8; \
3768 _argvec[2+9] = (unsigned long)arg9; \
3769 _argvec[2+10] = (unsigned long)arg10; \
3770 _argvec[2+11] = (unsigned long)arg11; \
3771 _argvec[2+12] = (unsigned long)arg12; \
3772 __asm__ volatile( \
3773 VALGRIND_ALIGN_STACK \
3774 "mr 12,%1\n\t" \
3775 "std 2,-16(12)\n\t" /* save tocptr */ \
3776 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3777 "addi 1,1,-144\n\t" /* expand stack frame */ \
3778 /* arg12 */ \
3779 "ld 3,96(12)\n\t" \
3780 "std 3,120(1)\n\t" \
3781 /* arg11 */ \
3782 "ld 3,88(12)\n\t" \
3783 "std 3,112(1)\n\t" \
3784 /* arg10 */ \
3785 "ld 3,80(12)\n\t" \
3786 "std 3,104(1)\n\t" \
3787 /* arg9 */ \
3788 "ld 3,72(12)\n\t" \
3789 "std 3,96(1)\n\t" \
3790 /* args1-8 */ \
3791 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3792 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3793 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3794 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3795 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3796 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3797 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3798 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3799 "ld 12, 0(12)\n\t" /* target->r12 */ \
3800 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3801 "mr 12,%1\n\t" \
3802 "mr %0,3\n\t" \
3803 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3804 VALGRIND_RESTORE_STACK \
3805 : /*out*/ "=r" (_res) \
3806 : /*in*/ "r" (&_argvec[2]) \
3807 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3808 ); \
3809 lval = (__typeof__(lval)) _res; \
3810 } while (0)
3811
3812 #endif /* PLAT_ppc64le_linux */
3813
3814 /* ------------------------- arm-linux ------------------------- */
3815
3816 #if defined(PLAT_arm_linux)
3817
3818 /* These regs are trashed by the hidden call. */
3819 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3820
3821 /* Macros to save and align the stack before making a function
3822 call and restore it afterwards as gcc may not keep the stack
3823 pointer aligned if it doesn't realise calls are being made
3824 to other functions. */
3825
3826 /* This is a bit tricky. We store the original stack pointer in r10
3827 as it is callee-saves. gcc doesn't allow the use of r11 for some
3828 reason. Also, we can't directly "bic" the stack pointer in thumb
3829 mode since r13 isn't an allowed register number in that context.
3830 So use r4 as a temporary, since that is about to get trashed
3831 anyway, just after each use of this macro. Side effect is we need
3832 to be very careful about any future changes, since
3833 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
3834 #define VALGRIND_ALIGN_STACK \
3835 "mov r10, sp\n\t" \
3836 "mov r4, sp\n\t" \
3837 "bic r4, r4, #7\n\t" \
3838 "mov sp, r4\n\t"
3839 #define VALGRIND_RESTORE_STACK \
3840 "mov sp, r10\n\t"
3841
3842 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3843 long) == 4. */
3844
/* 0 args: target address is loaded from _argvec[0] into r4 and the
   call is made via the NOREDIR-r4 gadget; the result comes back in r0.
   r10 is clobbered by VALGRIND_ALIGN_STACK, hence its spot in the
   trash list. */
3845 #define CALL_FN_W_v(lval, orig) \
3846 do { \
3847 volatile OrigFn _orig = (orig); \
3848 volatile unsigned long _argvec[1]; \
3849 volatile unsigned long _res; \
3850 _argvec[0] = (unsigned long)_orig.nraddr; \
3851 __asm__ volatile( \
3852 VALGRIND_ALIGN_STACK \
3853 "ldr r4, [%1] \n\t" /* target->r4 */ \
3854 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3855 VALGRIND_RESTORE_STACK \
3856 "mov %0, r0\n" \
3857 : /*out*/ "=r" (_res) \
3858 : /*in*/ "0" (&_argvec[0]) \
3859 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3860 ); \
3861 lval = (__typeof__(lval)) _res; \
3862 } while (0)
3863
/* 1 arg: arg1 -> r0. */
3864 #define CALL_FN_W_W(lval, orig, arg1) \
3865 do { \
3866 volatile OrigFn _orig = (orig); \
3867 volatile unsigned long _argvec[2]; \
3868 volatile unsigned long _res; \
3869 _argvec[0] = (unsigned long)_orig.nraddr; \
3870 _argvec[1] = (unsigned long)(arg1); \
3871 __asm__ volatile( \
3872 VALGRIND_ALIGN_STACK \
3873 "ldr r0, [%1, #4] \n\t" \
3874 "ldr r4, [%1] \n\t" /* target->r4 */ \
3875 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3876 VALGRIND_RESTORE_STACK \
3877 "mov %0, r0\n" \
3878 : /*out*/ "=r" (_res) \
3879 : /*in*/ "0" (&_argvec[0]) \
3880 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3881 ); \
3882 lval = (__typeof__(lval)) _res; \
3883 } while (0)
3884
/* 2 args: arg1/arg2 -> r0/r1. */
3885 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3886 do { \
3887 volatile OrigFn _orig = (orig); \
3888 volatile unsigned long _argvec[3]; \
3889 volatile unsigned long _res; \
3890 _argvec[0] = (unsigned long)_orig.nraddr; \
3891 _argvec[1] = (unsigned long)(arg1); \
3892 _argvec[2] = (unsigned long)(arg2); \
3893 __asm__ volatile( \
3894 VALGRIND_ALIGN_STACK \
3895 "ldr r0, [%1, #4] \n\t" \
3896 "ldr r1, [%1, #8] \n\t" \
3897 "ldr r4, [%1] \n\t" /* target->r4 */ \
3898 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3899 VALGRIND_RESTORE_STACK \
3900 "mov %0, r0\n" \
3901 : /*out*/ "=r" (_res) \
3902 : /*in*/ "0" (&_argvec[0]) \
3903 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3904 ); \
3905 lval = (__typeof__(lval)) _res; \
3906 } while (0)
3907
/* 3 args: r0-r2. */
3908 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3909 do { \
3910 volatile OrigFn _orig = (orig); \
3911 volatile unsigned long _argvec[4]; \
3912 volatile unsigned long _res; \
3913 _argvec[0] = (unsigned long)_orig.nraddr; \
3914 _argvec[1] = (unsigned long)(arg1); \
3915 _argvec[2] = (unsigned long)(arg2); \
3916 _argvec[3] = (unsigned long)(arg3); \
3917 __asm__ volatile( \
3918 VALGRIND_ALIGN_STACK \
3919 "ldr r0, [%1, #4] \n\t" \
3920 "ldr r1, [%1, #8] \n\t" \
3921 "ldr r2, [%1, #12] \n\t" \
3922 "ldr r4, [%1] \n\t" /* target->r4 */ \
3923 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3924 VALGRIND_RESTORE_STACK \
3925 "mov %0, r0\n" \
3926 : /*out*/ "=r" (_res) \
3927 : /*in*/ "0" (&_argvec[0]) \
3928 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3929 ); \
3930 lval = (__typeof__(lval)) _res; \
3931 } while (0)
3932
/* 4 args: r0-r3; no stack arguments needed. */
3933 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3934 do { \
3935 volatile OrigFn _orig = (orig); \
3936 volatile unsigned long _argvec[5]; \
3937 volatile unsigned long _res; \
3938 _argvec[0] = (unsigned long)_orig.nraddr; \
3939 _argvec[1] = (unsigned long)(arg1); \
3940 _argvec[2] = (unsigned long)(arg2); \
3941 _argvec[3] = (unsigned long)(arg3); \
3942 _argvec[4] = (unsigned long)(arg4); \
3943 __asm__ volatile( \
3944 VALGRIND_ALIGN_STACK \
3945 "ldr r0, [%1, #4] \n\t" \
3946 "ldr r1, [%1, #8] \n\t" \
3947 "ldr r2, [%1, #12] \n\t" \
3948 "ldr r3, [%1, #16] \n\t" \
3949 "ldr r4, [%1] \n\t" /* target->r4 */ \
3950 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3951 VALGRIND_RESTORE_STACK \
3952 "mov %0, r0" \
3953 : /*out*/ "=r" (_res) \
3954 : /*in*/ "0" (&_argvec[0]) \
3955 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3956 ); \
3957 lval = (__typeof__(lval)) _res; \
3958 } while (0)
3959
/* 5 args: r0-r3 plus arg5 pushed on the stack.  The extra
   "sub sp,#4" pads the single-word push to 8 bytes so sp stays
   8-aligned across the call. */
3960 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3961 do { \
3962 volatile OrigFn _orig = (orig); \
3963 volatile unsigned long _argvec[6]; \
3964 volatile unsigned long _res; \
3965 _argvec[0] = (unsigned long)_orig.nraddr; \
3966 _argvec[1] = (unsigned long)(arg1); \
3967 _argvec[2] = (unsigned long)(arg2); \
3968 _argvec[3] = (unsigned long)(arg3); \
3969 _argvec[4] = (unsigned long)(arg4); \
3970 _argvec[5] = (unsigned long)(arg5); \
3971 __asm__ volatile( \
3972 VALGRIND_ALIGN_STACK \
3973 "sub sp, sp, #4 \n\t" \
3974 "ldr r0, [%1, #20] \n\t" \
3975 "push {r0} \n\t" \
3976 "ldr r0, [%1, #4] \n\t" \
3977 "ldr r1, [%1, #8] \n\t" \
3978 "ldr r2, [%1, #12] \n\t" \
3979 "ldr r3, [%1, #16] \n\t" \
3980 "ldr r4, [%1] \n\t" /* target->r4 */ \
3981 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3982 VALGRIND_RESTORE_STACK \
3983 "mov %0, r0" \
3984 : /*out*/ "=r" (_res) \
3985 : /*in*/ "0" (&_argvec[0]) \
3986 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3987 ); \
3988 lval = (__typeof__(lval)) _res; \
3989 } while (0)
3990
/* 6 args: arg5/arg6 pushed as a pair (8 bytes, so no alignment pad
   is needed), then r0-r3 loaded. */
3991 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3992 do { \
3993 volatile OrigFn _orig = (orig); \
3994 volatile unsigned long _argvec[7]; \
3995 volatile unsigned long _res; \
3996 _argvec[0] = (unsigned long)_orig.nraddr; \
3997 _argvec[1] = (unsigned long)(arg1); \
3998 _argvec[2] = (unsigned long)(arg2); \
3999 _argvec[3] = (unsigned long)(arg3); \
4000 _argvec[4] = (unsigned long)(arg4); \
4001 _argvec[5] = (unsigned long)(arg5); \
4002 _argvec[6] = (unsigned long)(arg6); \
4003 __asm__ volatile( \
4004 VALGRIND_ALIGN_STACK \
4005 "ldr r0, [%1, #20] \n\t" \
4006 "ldr r1, [%1, #24] \n\t" \
4007 "push {r0, r1} \n\t" \
4008 "ldr r0, [%1, #4] \n\t" \
4009 "ldr r1, [%1, #8] \n\t" \
4010 "ldr r2, [%1, #12] \n\t" \
4011 "ldr r3, [%1, #16] \n\t" \
4012 "ldr r4, [%1] \n\t" /* target->r4 */ \
4013 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4014 VALGRIND_RESTORE_STACK \
4015 "mov %0, r0" \
4016 : /*out*/ "=r" (_res) \
4017 : /*in*/ "0" (&_argvec[0]) \
4018 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4019 ); \
4020 lval = (__typeof__(lval)) _res; \
4021 } while (0)
4022
/* 7 args: 4-byte pad plus args5-7 pushed (16 bytes total). */
4023 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4024 arg7) \
4025 do { \
4026 volatile OrigFn _orig = (orig); \
4027 volatile unsigned long _argvec[8]; \
4028 volatile unsigned long _res; \
4029 _argvec[0] = (unsigned long)_orig.nraddr; \
4030 _argvec[1] = (unsigned long)(arg1); \
4031 _argvec[2] = (unsigned long)(arg2); \
4032 _argvec[3] = (unsigned long)(arg3); \
4033 _argvec[4] = (unsigned long)(arg4); \
4034 _argvec[5] = (unsigned long)(arg5); \
4035 _argvec[6] = (unsigned long)(arg6); \
4036 _argvec[7] = (unsigned long)(arg7); \
4037 __asm__ volatile( \
4038 VALGRIND_ALIGN_STACK \
4039 "sub sp, sp, #4 \n\t" \
4040 "ldr r0, [%1, #20] \n\t" \
4041 "ldr r1, [%1, #24] \n\t" \
4042 "ldr r2, [%1, #28] \n\t" \
4043 "push {r0, r1, r2} \n\t" \
4044 "ldr r0, [%1, #4] \n\t" \
4045 "ldr r1, [%1, #8] \n\t" \
4046 "ldr r2, [%1, #12] \n\t" \
4047 "ldr r3, [%1, #16] \n\t" \
4048 "ldr r4, [%1] \n\t" /* target->r4 */ \
4049 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4050 VALGRIND_RESTORE_STACK \
4051 "mov %0, r0" \
4052 : /*out*/ "=r" (_res) \
4053 : /*in*/ "0" (&_argvec[0]) \
4054 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4055 ); \
4056 lval = (__typeof__(lval)) _res; \
4057 } while (0)
4058
/* 8 args: args5-8 pushed in one go (16 bytes, already 8-aligned). */
4059 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4060 arg7,arg8) \
4061 do { \
4062 volatile OrigFn _orig = (orig); \
4063 volatile unsigned long _argvec[9]; \
4064 volatile unsigned long _res; \
4065 _argvec[0] = (unsigned long)_orig.nraddr; \
4066 _argvec[1] = (unsigned long)(arg1); \
4067 _argvec[2] = (unsigned long)(arg2); \
4068 _argvec[3] = (unsigned long)(arg3); \
4069 _argvec[4] = (unsigned long)(arg4); \
4070 _argvec[5] = (unsigned long)(arg5); \
4071 _argvec[6] = (unsigned long)(arg6); \
4072 _argvec[7] = (unsigned long)(arg7); \
4073 _argvec[8] = (unsigned long)(arg8); \
4074 __asm__ volatile( \
4075 VALGRIND_ALIGN_STACK \
4076 "ldr r0, [%1, #20] \n\t" \
4077 "ldr r1, [%1, #24] \n\t" \
4078 "ldr r2, [%1, #28] \n\t" \
4079 "ldr r3, [%1, #32] \n\t" \
4080 "push {r0, r1, r2, r3} \n\t" \
4081 "ldr r0, [%1, #4] \n\t" \
4082 "ldr r1, [%1, #8] \n\t" \
4083 "ldr r2, [%1, #12] \n\t" \
4084 "ldr r3, [%1, #16] \n\t" \
4085 "ldr r4, [%1] \n\t" /* target->r4 */ \
4086 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4087 VALGRIND_RESTORE_STACK \
4088 "mov %0, r0" \
4089 : /*out*/ "=r" (_res) \
4090 : /*in*/ "0" (&_argvec[0]) \
4091 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4092 ); \
4093 lval = (__typeof__(lval)) _res; \
4094 } while (0)
4095
/* 9 args: 4-byte pad plus args5-9 pushed (24 bytes total).  r4 is
   used as a scratch for arg9 before being reloaded with the target. */
4096 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4097 arg7,arg8,arg9) \
4098 do { \
4099 volatile OrigFn _orig = (orig); \
4100 volatile unsigned long _argvec[10]; \
4101 volatile unsigned long _res; \
4102 _argvec[0] = (unsigned long)_orig.nraddr; \
4103 _argvec[1] = (unsigned long)(arg1); \
4104 _argvec[2] = (unsigned long)(arg2); \
4105 _argvec[3] = (unsigned long)(arg3); \
4106 _argvec[4] = (unsigned long)(arg4); \
4107 _argvec[5] = (unsigned long)(arg5); \
4108 _argvec[6] = (unsigned long)(arg6); \
4109 _argvec[7] = (unsigned long)(arg7); \
4110 _argvec[8] = (unsigned long)(arg8); \
4111 _argvec[9] = (unsigned long)(arg9); \
4112 __asm__ volatile( \
4113 VALGRIND_ALIGN_STACK \
4114 "sub sp, sp, #4 \n\t" \
4115 "ldr r0, [%1, #20] \n\t" \
4116 "ldr r1, [%1, #24] \n\t" \
4117 "ldr r2, [%1, #28] \n\t" \
4118 "ldr r3, [%1, #32] \n\t" \
4119 "ldr r4, [%1, #36] \n\t" \
4120 "push {r0, r1, r2, r3, r4} \n\t" \
4121 "ldr r0, [%1, #4] \n\t" \
4122 "ldr r1, [%1, #8] \n\t" \
4123 "ldr r2, [%1, #12] \n\t" \
4124 "ldr r3, [%1, #16] \n\t" \
4125 "ldr r4, [%1] \n\t" /* target->r4 */ \
4126 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4127 VALGRIND_RESTORE_STACK \
4128 "mov %0, r0" \
4129 : /*out*/ "=r" (_res) \
4130 : /*in*/ "0" (&_argvec[0]) \
4131 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4132 ); \
4133 lval = (__typeof__(lval)) _res; \
4134 } while (0)
4135
/* 10 args: arg10 pushed first, then args5-9 below it (24 bytes,
   8-aligned without padding). */
4136 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4137 arg7,arg8,arg9,arg10) \
4138 do { \
4139 volatile OrigFn _orig = (orig); \
4140 volatile unsigned long _argvec[11]; \
4141 volatile unsigned long _res; \
4142 _argvec[0] = (unsigned long)_orig.nraddr; \
4143 _argvec[1] = (unsigned long)(arg1); \
4144 _argvec[2] = (unsigned long)(arg2); \
4145 _argvec[3] = (unsigned long)(arg3); \
4146 _argvec[4] = (unsigned long)(arg4); \
4147 _argvec[5] = (unsigned long)(arg5); \
4148 _argvec[6] = (unsigned long)(arg6); \
4149 _argvec[7] = (unsigned long)(arg7); \
4150 _argvec[8] = (unsigned long)(arg8); \
4151 _argvec[9] = (unsigned long)(arg9); \
4152 _argvec[10] = (unsigned long)(arg10); \
4153 __asm__ volatile( \
4154 VALGRIND_ALIGN_STACK \
4155 "ldr r0, [%1, #40] \n\t" \
4156 "push {r0} \n\t" \
4157 "ldr r0, [%1, #20] \n\t" \
4158 "ldr r1, [%1, #24] \n\t" \
4159 "ldr r2, [%1, #28] \n\t" \
4160 "ldr r3, [%1, #32] \n\t" \
4161 "ldr r4, [%1, #36] \n\t" \
4162 "push {r0, r1, r2, r3, r4} \n\t" \
4163 "ldr r0, [%1, #4] \n\t" \
4164 "ldr r1, [%1, #8] \n\t" \
4165 "ldr r2, [%1, #12] \n\t" \
4166 "ldr r3, [%1, #16] \n\t" \
4167 "ldr r4, [%1] \n\t" /* target->r4 */ \
4168 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4169 VALGRIND_RESTORE_STACK \
4170 "mov %0, r0" \
4171 : /*out*/ "=r" (_res) \
4172 : /*in*/ "0" (&_argvec[0]) \
4173 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4174 ); \
4175 lval = (__typeof__(lval)) _res; \
4176 } while (0)
4177
/* 11 args: pad + arg10/arg11 pushed, then args5-9 (32 bytes total). */
4178 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4179 arg6,arg7,arg8,arg9,arg10, \
4180 arg11) \
4181 do { \
4182 volatile OrigFn _orig = (orig); \
4183 volatile unsigned long _argvec[12]; \
4184 volatile unsigned long _res; \
4185 _argvec[0] = (unsigned long)_orig.nraddr; \
4186 _argvec[1] = (unsigned long)(arg1); \
4187 _argvec[2] = (unsigned long)(arg2); \
4188 _argvec[3] = (unsigned long)(arg3); \
4189 _argvec[4] = (unsigned long)(arg4); \
4190 _argvec[5] = (unsigned long)(arg5); \
4191 _argvec[6] = (unsigned long)(arg6); \
4192 _argvec[7] = (unsigned long)(arg7); \
4193 _argvec[8] = (unsigned long)(arg8); \
4194 _argvec[9] = (unsigned long)(arg9); \
4195 _argvec[10] = (unsigned long)(arg10); \
4196 _argvec[11] = (unsigned long)(arg11); \
4197 __asm__ volatile( \
4198 VALGRIND_ALIGN_STACK \
4199 "sub sp, sp, #4 \n\t" \
4200 "ldr r0, [%1, #40] \n\t" \
4201 "ldr r1, [%1, #44] \n\t" \
4202 "push {r0, r1} \n\t" \
4203 "ldr r0, [%1, #20] \n\t" \
4204 "ldr r1, [%1, #24] \n\t" \
4205 "ldr r2, [%1, #28] \n\t" \
4206 "ldr r3, [%1, #32] \n\t" \
4207 "ldr r4, [%1, #36] \n\t" \
4208 "push {r0, r1, r2, r3, r4} \n\t" \
4209 "ldr r0, [%1, #4] \n\t" \
4210 "ldr r1, [%1, #8] \n\t" \
4211 "ldr r2, [%1, #12] \n\t" \
4212 "ldr r3, [%1, #16] \n\t" \
4213 "ldr r4, [%1] \n\t" /* target->r4 */ \
4214 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4215 VALGRIND_RESTORE_STACK \
4216 "mov %0, r0" \
4217 : /*out*/ "=r" (_res) \
4218 : /*in*/ "0" (&_argvec[0]) \
4219 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4220 ); \
4221 lval = (__typeof__(lval)) _res; \
4222 } while (0)
4223
/* 12 args: args10-12 pushed, then args5-9 (32 bytes, 8-aligned). */
4224 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4225 arg6,arg7,arg8,arg9,arg10, \
4226 arg11,arg12) \
4227 do { \
4228 volatile OrigFn _orig = (orig); \
4229 volatile unsigned long _argvec[13]; \
4230 volatile unsigned long _res; \
4231 _argvec[0] = (unsigned long)_orig.nraddr; \
4232 _argvec[1] = (unsigned long)(arg1); \
4233 _argvec[2] = (unsigned long)(arg2); \
4234 _argvec[3] = (unsigned long)(arg3); \
4235 _argvec[4] = (unsigned long)(arg4); \
4236 _argvec[5] = (unsigned long)(arg5); \
4237 _argvec[6] = (unsigned long)(arg6); \
4238 _argvec[7] = (unsigned long)(arg7); \
4239 _argvec[8] = (unsigned long)(arg8); \
4240 _argvec[9] = (unsigned long)(arg9); \
4241 _argvec[10] = (unsigned long)(arg10); \
4242 _argvec[11] = (unsigned long)(arg11); \
4243 _argvec[12] = (unsigned long)(arg12); \
4244 __asm__ volatile( \
4245 VALGRIND_ALIGN_STACK \
4246 "ldr r0, [%1, #40] \n\t" \
4247 "ldr r1, [%1, #44] \n\t" \
4248 "ldr r2, [%1, #48] \n\t" \
4249 "push {r0, r1, r2} \n\t" \
4250 "ldr r0, [%1, #20] \n\t" \
4251 "ldr r1, [%1, #24] \n\t" \
4252 "ldr r2, [%1, #28] \n\t" \
4253 "ldr r3, [%1, #32] \n\t" \
4254 "ldr r4, [%1, #36] \n\t" \
4255 "push {r0, r1, r2, r3, r4} \n\t" \
4256 "ldr r0, [%1, #4] \n\t" \
4257 "ldr r1, [%1, #8] \n\t" \
4258 "ldr r2, [%1, #12] \n\t" \
4259 "ldr r3, [%1, #16] \n\t" \
4260 "ldr r4, [%1] \n\t" /* target->r4 */ \
4261 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4262 VALGRIND_RESTORE_STACK \
4263 "mov %0, r0" \
4264 : /*out*/ "=r" (_res) \
4265 : /*in*/ "0" (&_argvec[0]) \
4266 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4267 ); \
4268 lval = (__typeof__(lval)) _res; \
4269 } while (0)
4270
4271 #endif /* PLAT_arm_linux */
4272
4273 /* ------------------------ arm64-linux ------------------------ */
4274
4275 #if defined(PLAT_arm64_linux)
4276
4277 /* These regs are trashed by the hidden call. */
4278 #define __CALLER_SAVED_REGS \
4279 "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
4280 "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
4281 "x18", "x19", "x20", "x30", \
4282 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
4283 "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
4284 "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
4285 "v26", "v27", "v28", "v29", "v30", "v31"
4286
4287 /* x21 is callee-saved, so we can use it to save and restore SP around
4288 the hidden call. */
4289 #define VALGRIND_ALIGN_STACK \
4290 "mov x21, sp\n\t" \
4291 "bic sp, x21, #15\n\t"
4292 #define VALGRIND_RESTORE_STACK \
4293 "mov sp, x21\n\t"
4294
4295 /* These CALL_FN_ macros assume that on arm64-linux,
4296 sizeof(unsigned long) == 8. */
4297
/* 0 args: target address -> x8, call via the NOREDIR-x8 gadget;
   result comes back in x0.  x21 is in the trash list because
   VALGRIND_ALIGN_STACK uses it to hold the saved sp. */
4298 #define CALL_FN_W_v(lval, orig) \
4299 do { \
4300 volatile OrigFn _orig = (orig); \
4301 volatile unsigned long _argvec[1]; \
4302 volatile unsigned long _res; \
4303 _argvec[0] = (unsigned long)_orig.nraddr; \
4304 __asm__ volatile( \
4305 VALGRIND_ALIGN_STACK \
4306 "ldr x8, [%1] \n\t" /* target->x8 */ \
4307 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4308 VALGRIND_RESTORE_STACK \
4309 "mov %0, x0\n" \
4310 : /*out*/ "=r" (_res) \
4311 : /*in*/ "0" (&_argvec[0]) \
4312 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4313 ); \
4314 lval = (__typeof__(lval)) _res; \
4315 } while (0)
4316
/* 1 arg: arg1 -> x0. */
4317 #define CALL_FN_W_W(lval, orig, arg1) \
4318 do { \
4319 volatile OrigFn _orig = (orig); \
4320 volatile unsigned long _argvec[2]; \
4321 volatile unsigned long _res; \
4322 _argvec[0] = (unsigned long)_orig.nraddr; \
4323 _argvec[1] = (unsigned long)(arg1); \
4324 __asm__ volatile( \
4325 VALGRIND_ALIGN_STACK \
4326 "ldr x0, [%1, #8] \n\t" \
4327 "ldr x8, [%1] \n\t" /* target->x8 */ \
4328 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4329 VALGRIND_RESTORE_STACK \
4330 "mov %0, x0\n" \
4331 : /*out*/ "=r" (_res) \
4332 : /*in*/ "0" (&_argvec[0]) \
4333 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4334 ); \
4335 lval = (__typeof__(lval)) _res; \
4336 } while (0)
4337
/* 2 args: x0/x1. */
4338 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4339 do { \
4340 volatile OrigFn _orig = (orig); \
4341 volatile unsigned long _argvec[3]; \
4342 volatile unsigned long _res; \
4343 _argvec[0] = (unsigned long)_orig.nraddr; \
4344 _argvec[1] = (unsigned long)(arg1); \
4345 _argvec[2] = (unsigned long)(arg2); \
4346 __asm__ volatile( \
4347 VALGRIND_ALIGN_STACK \
4348 "ldr x0, [%1, #8] \n\t" \
4349 "ldr x1, [%1, #16] \n\t" \
4350 "ldr x8, [%1] \n\t" /* target->x8 */ \
4351 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4352 VALGRIND_RESTORE_STACK \
4353 "mov %0, x0\n" \
4354 : /*out*/ "=r" (_res) \
4355 : /*in*/ "0" (&_argvec[0]) \
4356 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4357 ); \
4358 lval = (__typeof__(lval)) _res; \
4359 } while (0)
4360
/* 3 args: x0-x2. */
4361 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4362 do { \
4363 volatile OrigFn _orig = (orig); \
4364 volatile unsigned long _argvec[4]; \
4365 volatile unsigned long _res; \
4366 _argvec[0] = (unsigned long)_orig.nraddr; \
4367 _argvec[1] = (unsigned long)(arg1); \
4368 _argvec[2] = (unsigned long)(arg2); \
4369 _argvec[3] = (unsigned long)(arg3); \
4370 __asm__ volatile( \
4371 VALGRIND_ALIGN_STACK \
4372 "ldr x0, [%1, #8] \n\t" \
4373 "ldr x1, [%1, #16] \n\t" \
4374 "ldr x2, [%1, #24] \n\t" \
4375 "ldr x8, [%1] \n\t" /* target->x8 */ \
4376 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4377 VALGRIND_RESTORE_STACK \
4378 "mov %0, x0\n" \
4379 : /*out*/ "=r" (_res) \
4380 : /*in*/ "0" (&_argvec[0]) \
4381 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4382 ); \
4383 lval = (__typeof__(lval)) _res; \
4384 } while (0)
4385
/* 4 args: x0-x3. */
4386 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4387 do { \
4388 volatile OrigFn _orig = (orig); \
4389 volatile unsigned long _argvec[5]; \
4390 volatile unsigned long _res; \
4391 _argvec[0] = (unsigned long)_orig.nraddr; \
4392 _argvec[1] = (unsigned long)(arg1); \
4393 _argvec[2] = (unsigned long)(arg2); \
4394 _argvec[3] = (unsigned long)(arg3); \
4395 _argvec[4] = (unsigned long)(arg4); \
4396 __asm__ volatile( \
4397 VALGRIND_ALIGN_STACK \
4398 "ldr x0, [%1, #8] \n\t" \
4399 "ldr x1, [%1, #16] \n\t" \
4400 "ldr x2, [%1, #24] \n\t" \
4401 "ldr x3, [%1, #32] \n\t" \
4402 "ldr x8, [%1] \n\t" /* target->x8 */ \
4403 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4404 VALGRIND_RESTORE_STACK \
4405 "mov %0, x0" \
4406 : /*out*/ "=r" (_res) \
4407 : /*in*/ "0" (&_argvec[0]) \
4408 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4409 ); \
4410 lval = (__typeof__(lval)) _res; \
4411 } while (0)
4412
/* 5 args: x0-x4. */
4413 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4414 do { \
4415 volatile OrigFn _orig = (orig); \
4416 volatile unsigned long _argvec[6]; \
4417 volatile unsigned long _res; \
4418 _argvec[0] = (unsigned long)_orig.nraddr; \
4419 _argvec[1] = (unsigned long)(arg1); \
4420 _argvec[2] = (unsigned long)(arg2); \
4421 _argvec[3] = (unsigned long)(arg3); \
4422 _argvec[4] = (unsigned long)(arg4); \
4423 _argvec[5] = (unsigned long)(arg5); \
4424 __asm__ volatile( \
4425 VALGRIND_ALIGN_STACK \
4426 "ldr x0, [%1, #8] \n\t" \
4427 "ldr x1, [%1, #16] \n\t" \
4428 "ldr x2, [%1, #24] \n\t" \
4429 "ldr x3, [%1, #32] \n\t" \
4430 "ldr x4, [%1, #40] \n\t" \
4431 "ldr x8, [%1] \n\t" /* target->x8 */ \
4432 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4433 VALGRIND_RESTORE_STACK \
4434 "mov %0, x0" \
4435 : /*out*/ "=r" (_res) \
4436 : /*in*/ "0" (&_argvec[0]) \
4437 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4438 ); \
4439 lval = (__typeof__(lval)) _res; \
4440 } while (0)
4441
/* 6 args: x0-x5. */
4442 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4443 do { \
4444 volatile OrigFn _orig = (orig); \
4445 volatile unsigned long _argvec[7]; \
4446 volatile unsigned long _res; \
4447 _argvec[0] = (unsigned long)_orig.nraddr; \
4448 _argvec[1] = (unsigned long)(arg1); \
4449 _argvec[2] = (unsigned long)(arg2); \
4450 _argvec[3] = (unsigned long)(arg3); \
4451 _argvec[4] = (unsigned long)(arg4); \
4452 _argvec[5] = (unsigned long)(arg5); \
4453 _argvec[6] = (unsigned long)(arg6); \
4454 __asm__ volatile( \
4455 VALGRIND_ALIGN_STACK \
4456 "ldr x0, [%1, #8] \n\t" \
4457 "ldr x1, [%1, #16] \n\t" \
4458 "ldr x2, [%1, #24] \n\t" \
4459 "ldr x3, [%1, #32] \n\t" \
4460 "ldr x4, [%1, #40] \n\t" \
4461 "ldr x5, [%1, #48] \n\t" \
4462 "ldr x8, [%1] \n\t" /* target->x8 */ \
4463 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4464 VALGRIND_RESTORE_STACK \
4465 "mov %0, x0" \
4466 : /*out*/ "=r" (_res) \
4467 : /*in*/ "0" (&_argvec[0]) \
4468 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4469 ); \
4470 lval = (__typeof__(lval)) _res; \
4471 } while (0)
4472
/* 7 args: x0-x6. */
4473 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4474 arg7) \
4475 do { \
4476 volatile OrigFn _orig = (orig); \
4477 volatile unsigned long _argvec[8]; \
4478 volatile unsigned long _res; \
4479 _argvec[0] = (unsigned long)_orig.nraddr; \
4480 _argvec[1] = (unsigned long)(arg1); \
4481 _argvec[2] = (unsigned long)(arg2); \
4482 _argvec[3] = (unsigned long)(arg3); \
4483 _argvec[4] = (unsigned long)(arg4); \
4484 _argvec[5] = (unsigned long)(arg5); \
4485 _argvec[6] = (unsigned long)(arg6); \
4486 _argvec[7] = (unsigned long)(arg7); \
4487 __asm__ volatile( \
4488 VALGRIND_ALIGN_STACK \
4489 "ldr x0, [%1, #8] \n\t" \
4490 "ldr x1, [%1, #16] \n\t" \
4491 "ldr x2, [%1, #24] \n\t" \
4492 "ldr x3, [%1, #32] \n\t" \
4493 "ldr x4, [%1, #40] \n\t" \
4494 "ldr x5, [%1, #48] \n\t" \
4495 "ldr x6, [%1, #56] \n\t" \
4496 "ldr x8, [%1] \n\t" /* target->x8 */ \
4497 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4498 VALGRIND_RESTORE_STACK \
4499 "mov %0, x0" \
4500 : /*out*/ "=r" (_res) \
4501 : /*in*/ "0" (&_argvec[0]) \
4502 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4503 ); \
4504 lval = (__typeof__(lval)) _res; \
4505 } while (0)
4506
/* 8 args: x0-x7; no stack arguments needed. */
4507 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4508 arg7,arg8) \
4509 do { \
4510 volatile OrigFn _orig = (orig); \
4511 volatile unsigned long _argvec[9]; \
4512 volatile unsigned long _res; \
4513 _argvec[0] = (unsigned long)_orig.nraddr; \
4514 _argvec[1] = (unsigned long)(arg1); \
4515 _argvec[2] = (unsigned long)(arg2); \
4516 _argvec[3] = (unsigned long)(arg3); \
4517 _argvec[4] = (unsigned long)(arg4); \
4518 _argvec[5] = (unsigned long)(arg5); \
4519 _argvec[6] = (unsigned long)(arg6); \
4520 _argvec[7] = (unsigned long)(arg7); \
4521 _argvec[8] = (unsigned long)(arg8); \
4522 __asm__ volatile( \
4523 VALGRIND_ALIGN_STACK \
4524 "ldr x0, [%1, #8] \n\t" \
4525 "ldr x1, [%1, #16] \n\t" \
4526 "ldr x2, [%1, #24] \n\t" \
4527 "ldr x3, [%1, #32] \n\t" \
4528 "ldr x4, [%1, #40] \n\t" \
4529 "ldr x5, [%1, #48] \n\t" \
4530 "ldr x6, [%1, #56] \n\t" \
4531 "ldr x7, [%1, #64] \n\t" \
4532 "ldr x8, [%1] \n\t" /* target->x8 */ \
4533 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4534 VALGRIND_RESTORE_STACK \
4535 "mov %0, x0" \
4536 : /*out*/ "=r" (_res) \
4537 : /*in*/ "0" (&_argvec[0]) \
4538 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4539 ); \
4540 lval = (__typeof__(lval)) _res; \
4541 } while (0)
4542
/* 9 args: x0-x7 plus arg9 stored at [sp].  0x20 bytes are reserved
   (more than the 8 needed) which keeps sp 16-aligned; x8 doubles as a
   scratch for the stack arg before being loaded with the target. */
4543 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4544 arg7,arg8,arg9) \
4545 do { \
4546 volatile OrigFn _orig = (orig); \
4547 volatile unsigned long _argvec[10]; \
4548 volatile unsigned long _res; \
4549 _argvec[0] = (unsigned long)_orig.nraddr; \
4550 _argvec[1] = (unsigned long)(arg1); \
4551 _argvec[2] = (unsigned long)(arg2); \
4552 _argvec[3] = (unsigned long)(arg3); \
4553 _argvec[4] = (unsigned long)(arg4); \
4554 _argvec[5] = (unsigned long)(arg5); \
4555 _argvec[6] = (unsigned long)(arg6); \
4556 _argvec[7] = (unsigned long)(arg7); \
4557 _argvec[8] = (unsigned long)(arg8); \
4558 _argvec[9] = (unsigned long)(arg9); \
4559 __asm__ volatile( \
4560 VALGRIND_ALIGN_STACK \
4561 "sub sp, sp, #0x20 \n\t" \
4562 "ldr x0, [%1, #8] \n\t" \
4563 "ldr x1, [%1, #16] \n\t" \
4564 "ldr x2, [%1, #24] \n\t" \
4565 "ldr x3, [%1, #32] \n\t" \
4566 "ldr x4, [%1, #40] \n\t" \
4567 "ldr x5, [%1, #48] \n\t" \
4568 "ldr x6, [%1, #56] \n\t" \
4569 "ldr x7, [%1, #64] \n\t" \
4570 "ldr x8, [%1, #72] \n\t" \
4571 "str x8, [sp, #0] \n\t" \
4572 "ldr x8, [%1] \n\t" /* target->x8 */ \
4573 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4574 VALGRIND_RESTORE_STACK \
4575 "mov %0, x0" \
4576 : /*out*/ "=r" (_res) \
4577 : /*in*/ "0" (&_argvec[0]) \
4578 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4579 ); \
4580 lval = (__typeof__(lval)) _res; \
4581 } while (0)
4582
/* As CALL_FN_W_9W, plus a second stack argument: arg9 at [sp,#0],
   arg10 at [sp,#8].  The 0x20-byte reservation exactly covers both
   while keeping SP 16-byte aligned.  x8 is reused as a scratch register
   for the stores before finally receiving the target address. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4625
/* As above, with three stack arguments (arg9..arg11 at [sp,#0/#8/#16]).
   0x30 bytes reserved: 24 used, rounded up to preserve 16-byte SP
   alignment. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1, #88] \n\t"                                 \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4671
/* As above, with four stack arguments (arg9..arg12 at
   [sp,#0/#8/#16/#24]).  0x30 bytes reserved, all 32 needed plus
   16-byte alignment padding already satisfied. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,                 \
                      arg12)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1, #88] \n\t"                                 \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1, #96] \n\t"                                 \
         "str x8, [sp, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4721
4722 #endif /* PLAT_arm64_linux */
4723
4724 /* ------------------------- s390x-linux ------------------------- */
4725
4726 #if defined(PLAT_s390x_linux)
4727
4728 /* Similar workaround as amd64 (see above), but we use r11 as frame
4729 pointer and save the old r11 in r7. r11 might be used for
4730 argvec, therefore we copy argvec in r1 since r1 is clobbered
4731 after the call anyway. */
/* When the compiler emits DWARF CFI for inline asm, maintain correct
   unwind info across the hidden call: r1 <- argvec pointer, r7 saves the
   old r11, r11 <- caller's CFA (passed via __FRAME_POINTER as %2).
   Otherwise only the r1 copy is done and no CFI directives are issued. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
# define __FRAME_POINTER                                         \
      ,"d"(__builtin_dwarf_cfa())
# define VALGRIND_CFI_PROLOGUE                                   \
      ".cfi_remember_state\n\t"                                  \
      "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */         \
      "lgr 7,11\n\t"                                             \
      "lgr 11,%2\n\t"                                            \
      ".cfi_def_cfa r11, 0\n\t"
# define VALGRIND_CFI_EPILOGUE                                   \
      "lgr 11, 7\n\t"                                            \
      ".cfi_restore_state\n\t"
#else
# define __FRAME_POINTER
# define VALGRIND_CFI_PROLOGUE                                   \
      "lgr 1,%1\n\t"
# define VALGRIND_CFI_EPILOGUE
#endif
4750
4751 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4752 according to the s390 GCC maintainer. (The ABI specification is not
4753 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4754 VALGRIND_RESTORE_STACK are not defined here. */
4755
4756 /* These regs are trashed by the hidden call. Note that we overwrite
4757 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4758 function a proper return address. All others are ABI defined call
4759 clobbers. */
/* GPRs r0-r5 plus the (overwritten) return register r14, and FPRs f0-f7;
   see the explanatory comment immediately above. */
#define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
                           "f0","f1","f2","f3","f4","f5","f6","f7"
4762
4763 /* Nb: Although r11 is modified in the asm snippets below (inside
4764 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4765 two reasons:
4766 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4767 modified
4768 (2) GCC will complain that r11 cannot appear inside a clobber section,
4769 when compiled with -O -fno-omit-frame-pointer
4770 */
4771
/* Zero-arg hidden call on s390x.  r1 holds the argvec pointer (set up by
   VALGRIND_CFI_PROLOGUE), then is overwritten with the target address.
   160 bytes are reserved below SP — presumably the ABI-mandated register
   save area for the callee (confirm against the s390x ELF ABI).  The
   result is returned in r2.  r7 is clobbered because the prologue saves
   the old r11 there. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4792
4793 /* The call abi has the arguments in r2-r6 and stack */
/* One-arg call: arg1 -> r2, then target -> r1 (loaded last, since the
   loads index off r1 itself).  Result in r2. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4816
/* Two-arg call: arg1 -> r2, arg2 -> r3; target loaded into r1 last. */
#define CALL_FN_W_WW(lval, orig, arg1, arg2)                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4841
/* Three-arg call: args -> r2..r4. */
#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3)              \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4868
/* Four-arg call: args -> r2..r5. */
#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4)       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4897
/* Five-arg call: args -> r2..r6.  From here on r6 is also used for an
   argument, hence the extra "6" in the clobber list (r6 is not in
   __CALLER_SAVED_REGS). */
#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5)   \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4928
/* Six-arg call: args 1..5 -> r2..r6; arg6 is copied to the stack slot at
   160(r15) with mvc, so the frame grows to 168 bytes (160-byte save area
   plus one 8-byte overflow slot). */
#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6)                                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-168\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,168\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4962
/* Seven-arg call: two stack overflow slots (160 and 168 off r15),
   frame 176 bytes. */
#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7)                                 \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-176\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,176\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4998
/* Eight-arg call: three stack overflow slots, frame 184 bytes. */
#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7 ,arg8)                           \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-184\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,184\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5036
/* Nine-arg call: four stack overflow slots, frame 192 bytes. */
#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7 ,arg8, arg9)                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-192\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,192\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5076
/* Ten-arg call: five stack overflow slots, frame 200 bytes. */
#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10)             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[11];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-200\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,200\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5118
/* Eleven-arg call: six stack overflow slots, frame 208 bytes. */
#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10, arg11)      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      _argvec[11] = (unsigned long)arg11;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-208\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "mvc 200(8,15), 88(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,208\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5162
/* Twelve-arg call: seven stack overflow slots, frame 216 bytes. */
#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[13];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      _argvec[11] = (unsigned long)arg11;                        \
      _argvec[12] = (unsigned long)arg12;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-216\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "mvc 200(8,15), 88(1)\n\t"                              \
         "mvc 208(8,15), 96(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,216\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5208
5209
5210 #endif /* PLAT_s390x_linux */
5211
5212 /* ------------------------- mips32-linux ----------------------- */
5213
5214 #if defined(PLAT_mips32_linux)
5215
5216 /* These regs are trashed by the hidden call. */
/* v0-v1 ($2-$3), a0-a3 ($4-$7), t0-t9 ($8-$15, $24-$25) and ra ($31). */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",       \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
5220
5221 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5222 long) == 4. */
5223
/* Zero-arg hidden call on mips32 (o32).  Saves gp ($28) and ra ($31) in
   an 8-byte frame, reserves the minimum 16-byte outgoing-argument area,
   loads the target into $25 (t9, as o32 PIC calls require), then restores
   everything.  The result is taken from $2 (v0). */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16\n\t"                                 \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5248
/* One-arg call: arg1 -> $4 (a0); target -> $25 (t9); result in $2. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $4, 4(%1) \n\t"   /* arg1*/                         \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5275
/* Two-arg call: args -> $4, $5 (a0, a1). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5304
/* Three-arg call: args -> $4..$6 (a0..a2). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5335
/* Four-arg call: args -> $4..$7 (a0..a3), the full o32 register-argument
   set; no stack arguments yet. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)          \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5368
/* Five-arg call: args 1..4 -> $4..$7; arg5 goes on the stack at
   16($sp) (o32 reserves the first 16 bytes as home space for the
   register args).  $4 is used as a scratch to copy arg5 before being
   loaded with arg1. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "lw $4, 20(%1) \n\t"                                    \
         "subu $29, $29, 24\n\t"                                 \
         "sw $4, 16($29) \n\t"                                   \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 24 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Six-arg call: args 5..6 on the stack at 16/20($sp), 32-byte outgoing
   area.  NOTE(review): the lone "nop" below does not appear in the
   sibling 7W/8W/9W macros — presumably a leftover load-delay-slot
   precaution; harmless either way. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "lw $4, 20(%1) \n\t"                                    \
         "subu $29, $29, 32\n\t"                                 \
         "sw $4, 16($29) \n\t"                                   \
         "lw $4, 24(%1) \n\t"                                    \
         "nop\n\t"                                               \
         "sw $4, 20($29) \n\t"                                   \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 32 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5443
/* Seven-arg call: args 5..7 on the stack at 16/20/24($sp), 32-byte
   outgoing area. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "lw $4, 20(%1) \n\t"                                    \
         "subu $29, $29, 32\n\t"                                 \
         "sw $4, 16($29) \n\t"                                   \
         "lw $4, 24(%1) \n\t"                                    \
         "sw $4, 20($29) \n\t"                                   \
         "lw $4, 28(%1) \n\t"                                    \
         "sw $4, 24($29) \n\t"                                   \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 32 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5486
/* Eight-arg call: args 5..8 on the stack at 16..28($sp), 40-byte
   outgoing area (36 needed, kept 8-aligned). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "lw $4, 20(%1) \n\t"                                    \
         "subu $29, $29, 40\n\t"                                 \
         "sw $4, 16($29) \n\t"                                   \
         "lw $4, 24(%1) \n\t"                                    \
         "sw $4, 20($29) \n\t"                                   \
         "lw $4, 28(%1) \n\t"                                    \
         "sw $4, 24($29) \n\t"                                   \
         "lw $4, 32(%1) \n\t"                                    \
         "sw $4, 28($29) \n\t"                                   \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 40 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5532
/* As CALL_FN_W_8W but for 9 args: args 5-9 go on the stack (40 bytes
   of outgoing-arg space are reserved below $29). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 40\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 40 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5581
/* As CALL_FN_W_8W but for 10 args: args 5-10 go on the stack (48
   bytes of outgoing-arg space are reserved below $29). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 48\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 48 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5633
/* As CALL_FN_W_8W but for 11 args: args 5-11 go on the stack (48
   bytes of outgoing-arg space are reserved below $29). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 48\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 44(%1) \n\t"                                     \
         "sw $4, 40($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 48 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5689
/* As CALL_FN_W_8W but for 12 args: args 5-12 go on the stack (56
   bytes of outgoing-arg space are reserved below $29).
   NOTE(review): this macro passes &_argvec with the plain "r" input
   constraint, whereas the sibling mips32 macros use the "0" matching
   constraint.  Both appear functionally equivalent here since %1 is
   only read before %0 is written -- confirm this divergence is
   intentional. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 56\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 44(%1) \n\t"                                     \
         "sw $4, 40($29) \n\t"                                    \
         "lw $4, 48(%1) \n\t"                                     \
         "sw $4, 44($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 56 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5748
5749 #endif /* PLAT_mips32_linux */
5750
5751 /* ------------------------- mips64-linux ------------------------- */
5752
5753 #if defined(PLAT_mips64_linux)
5754
/* These regs are trashed by the hidden call.  They are declared as
   clobbers in every CALL_FN_* asm below so the compiler keeps live
   values out of them across the call. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",         \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
5759
/* These CALL_FN_ macros assume that on mips64-linux,
   sizeof(unsigned long) == 8. */
5762
/* Zero-arg unredirected call (mips64 n64 ABI): load the target
   address into $25 (t9), call it via VALGRIND_CALL_NOREDIR_T9, and
   take the result from $2 (v0).
   NOTE(review): this macro uses the "0" matching constraint (tying
   %1 to the output register %0), while the other mips64 macros use
   "r"; it works because %1 is fully consumed before %0 is written --
   confirm the divergence is intentional. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5779
/* One-arg unredirected call (mips64 n64 ABI): arg1 in $4 (a0),
   target in $25 (t9), result from $2 (v0). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"   /* arg1*/                           \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5798
/* Two-arg unredirected call (mips64 n64 ABI): args in $4-$5. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5819
/* Three-arg unredirected call (mips64 n64 ABI): args in $4-$6. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5842
/* Four-arg unredirected call (mips64 n64 ABI): args in $4-$7. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5867
/* Five-arg unredirected call (mips64 n64 ABI): args in $4-$8. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5894
/* Six-arg unredirected call (mips64 n64 ABI): args in $4-$9. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5923
/* Seven-arg unredirected call (mips64 n64 ABI): args in $4-$10. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5955
/* Eight-arg unredirected call (mips64 n64 ABI): args in $4-$11,
   which is the full register-argument set; no stack args needed. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5989
/* Nine-arg unredirected call (mips64 n64 ABI): args 1-8 in $4-$11,
   arg9 spilled to 8 bytes of stack carved out below $29. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 8\n\t"                                  \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 8\n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6028
/* Ten-arg unredirected call (mips64 n64 ABI): args 1-8 in $4-$11,
   args 9-10 spilled to 16 bytes of stack below $29. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 16\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 16\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6070
/* Eleven-arg unredirected call (mips64 n64 ABI): args 1-8 in $4-$11,
   args 9-11 spilled to 24 bytes of stack below $29. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 24\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 88(%1)\n\t"                                      \
         "sd $4, 16($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 24\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6116
/* Twelve-arg unredirected call (mips64 n64 ABI): args 1-8 in $4-$11,
   args 9-12 spilled to 32 bytes of stack below $29. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 32\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 88(%1)\n\t"                                      \
         "sd $4, 16($29)\n\t"                                     \
         "ld $4, 96(%1)\n\t"                                      \
         "sd $4, 24($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 32\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6165
6166 #endif /* PLAT_mips64_linux */
6167
6168 /* ------------------------ tilegx-linux ------------------------- */
6169
6170 #if defined(PLAT_tilegx_linux)
6171
/* These regs are trashed by the hidden call.  Declared as clobbers
   in every tilegx CALL_FN_* asm below. */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3", "r4", "r5",   \
   "r6", "r7", "r8", "r9", "r10", "r11", "r12", "r13", "r14",     \
   "r15", "r16", "r17", "r18", "r19", "r20", "r21", "r22",        \
   "r23", "r24", "r25", "r26", "r27", "r28", "r29", "lr"
6177
6178 /* These CALL_FN_ macros assume that on tilegx-linux, sizeof(unsigned
6179 long) == 8. */
6180
/* Zero-arg unredirected call (tilegx): save lr on the stack, load
   the target into r12, call via VALGRIND_CALL_NOREDIR_R12, restore
   lr, and take the result from r0. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         "addi sp, sp, -8 \n\t"                                   \
         "st_add sp, lr, -8 \n\t"                                 \
         "ld r12, %1 \n\t"  /* target->r12 */                     \
         VALGRIND_CALL_NOREDIR_R12                                \
         "addi sp, sp, 8\n\t"                                     \
         "ld_add lr, sp, 8 \n\t"                                  \
         "move %0, r0 \n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS);              \
                                                                  \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6201
/* One-arg unredirected call (tilegx): walk _argvec via r29 with
   post-incrementing ld_add -- target -> r12, arg1 -> r0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         "addi sp, sp, -8 \n\t"                                   \
         "st_add sp, lr, -8 \n\t"                                 \
         "move r29, %1 \n\t"                                      \
         "ld_add r12, r29, 8 \n\t"  /* target->r12 */             \
         "ld_add r0, r29, 8 \n\t"   /*arg1 -> r0 */               \
         VALGRIND_CALL_NOREDIR_R12                                \
         "addi sp, sp, 8\n\t"                                     \
         "ld_add lr, sp, 8 \n\t"                                  \
         "move %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS);              \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6224
/* Two-arg unredirected call (tilegx): args land in r0-r1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         "addi sp, sp, -8 \n\t"                                   \
         "st_add sp, lr, -8 \n\t"                                 \
         "move r29, %1 \n\t"                                      \
         "ld_add r12, r29, 8 \n\t"  /* target->r12 */             \
         "ld_add r0, r29, 8 \n\t"   /*arg1 -> r0 */               \
         "ld_add r1, r29, 8 \n\t"   /*arg2 -> r1 */               \
         VALGRIND_CALL_NOREDIR_R12                                \
         "addi sp, sp, 8\n\t"                                     \
         "ld_add lr, sp, 8 \n\t"                                  \
         "move %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS);              \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6249
/* Three-arg unredirected call (tilegx): args land in r0-r2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         "addi sp, sp, -8 \n\t"                                   \
         "st_add sp, lr, -8 \n\t"                                 \
         "move r29, %1 \n\t"                                      \
         "ld_add r12, r29, 8 \n\t"  /* target->r12 */             \
         "ld_add r0, r29, 8 \n\t"   /*arg1 -> r0 */               \
         "ld_add r1, r29, 8 \n\t"   /*arg2 -> r1 */               \
         "ld_add r2, r29, 8 \n\t"   /*arg3 -> r2 */               \
         VALGRIND_CALL_NOREDIR_R12                                \
         "addi sp, sp, 8 \n\t"                                    \
         "ld_add lr, sp, 8 \n\t"                                  \
         "move %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS);              \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6276
/* Four-arg unredirected call (tilegx): args land in r0-r3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         "addi sp, sp, -8 \n\t"                                   \
         "st_add sp, lr, -8 \n\t"                                 \
         "move r29, %1 \n\t"                                      \
         "ld_add r12, r29, 8 \n\t"  /* target->r12 */             \
         "ld_add r0, r29, 8 \n\t"   /*arg1 -> r0 */               \
         "ld_add r1, r29, 8 \n\t"   /*arg2 -> r1 */               \
         "ld_add r2, r29, 8 \n\t"   /*arg3 -> r2 */               \
         "ld_add r3, r29, 8 \n\t"   /*arg4 -> r3 */               \
         VALGRIND_CALL_NOREDIR_R12                                \
         "addi sp, sp, 8\n\t"                                     \
         "ld_add lr, sp, 8 \n\t"                                  \
         "move %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS);              \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6305
/* Five-arg unredirected call (tilegx): args land in r0-r4. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         "addi sp, sp, -8 \n\t"                                   \
         "st_add sp, lr, -8 \n\t"                                 \
         "move r29, %1 \n\t"                                      \
         "ld_add r12, r29, 8 \n\t"  /* target->r12 */             \
         "ld_add r0, r29, 8 \n\t"   /*arg1 -> r0 */               \
         "ld_add r1, r29, 8 \n\t"   /*arg2 -> r1 */               \
         "ld_add r2, r29, 8 \n\t"   /*arg3 -> r2 */               \
         "ld_add r3, r29, 8 \n\t"   /*arg4 -> r3 */               \
         "ld_add r4, r29, 8 \n\t"   /*arg5 -> r4 */               \
         VALGRIND_CALL_NOREDIR_R12                                \
         "addi sp, sp, 8\n\t"                                     \
         "ld_add lr, sp, 8 \n\t"                                  \
         "move %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS);              \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Six-arg unredirected call (tilegx): args land in r0-r5. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         "addi sp, sp, -8 \n\t"                                   \
         "st_add sp, lr, -8 \n\t"                                 \
         "move r29, %1 \n\t"                                      \
         "ld_add r12, r29, 8 \n\t"  /* target->r12 */             \
         "ld_add r0, r29, 8 \n\t"   /*arg1 -> r0 */               \
         "ld_add r1, r29, 8 \n\t"   /*arg2 -> r1 */               \
         "ld_add r2, r29, 8 \n\t"   /*arg3 -> r2 */               \
         "ld_add r3, r29, 8 \n\t"   /*arg4 -> r3 */               \
         "ld_add r4, r29, 8 \n\t"   /*arg5 -> r4 */               \
         "ld_add r5, r29, 8 \n\t"   /*arg6 -> r5 */               \
         VALGRIND_CALL_NOREDIR_R12                                \
         "addi sp, sp, 8\n\t"                                     \
         "ld_add lr, sp, 8 \n\t"                                  \
         "move %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS);              \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6368
/* Seven-arg unredirected call (tilegx): args land in r0-r6. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         "addi sp, sp, -8 \n\t"                                   \
         "st_add sp, lr, -8 \n\t"                                 \
         "move r29, %1 \n\t"                                      \
         "ld_add r12, r29, 8 \n\t"  /* target->r12 */             \
         "ld_add r0, r29, 8 \n\t"   /*arg1 -> r0 */               \
         "ld_add r1, r29, 8 \n\t"   /*arg2 -> r1 */               \
         "ld_add r2, r29, 8 \n\t"   /*arg3 -> r2 */               \
         "ld_add r3, r29, 8 \n\t"   /*arg4 -> r3 */               \
         "ld_add r4, r29, 8 \n\t"   /*arg5 -> r4 */               \
         "ld_add r5, r29, 8 \n\t"   /*arg6 -> r5 */               \
         "ld_add r6, r29, 8 \n\t"   /*arg7 -> r6 */               \
         VALGRIND_CALL_NOREDIR_R12                                \
         "addi sp, sp, 8\n\t"                                     \
         "ld_add lr, sp, 8 \n\t"                                  \
         "move %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS);              \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6404
/* Eight-arg unredirected call (tilegx): args land in r0-r7. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "addi sp, sp, -8 \n\t"                                   \
         "st_add sp, lr, -8 \n\t"                                 \
         "move r29, %1 \n\t"                                      \
         "ld_add r12, r29, 8 \n\t"  /* target->r12 */             \
         "ld_add r0, r29, 8 \n\t"   /*arg1 -> r0 */               \
         "ld_add r1, r29, 8 \n\t"   /*arg2 -> r1 */               \
         "ld_add r2, r29, 8 \n\t"   /*arg3 -> r2 */               \
         "ld_add r3, r29, 8 \n\t"   /*arg4 -> r3 */               \
         "ld_add r4, r29, 8 \n\t"   /*arg5 -> r4 */               \
         "ld_add r5, r29, 8 \n\t"   /*arg6 -> r5 */               \
         "ld_add r6, r29, 8 \n\t"   /*arg7 -> r6 */               \
         "ld_add r7, r29, 8 \n\t"   /*arg8 -> r7 */               \
         VALGRIND_CALL_NOREDIR_R12                                \
         "addi sp, sp, 8\n\t"                                     \
         "ld_add lr, sp, 8 \n\t"                                  \
         "move %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS);              \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6442
/* Nine-arg unredirected call (tilegx): args land in r0-r8. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "addi sp, sp, -8 \n\t"                                   \
         "st_add sp, lr, -8 \n\t"                                 \
         "move r29, %1 \n\t"                                      \
         "ld_add r12, r29, 8 \n\t"  /* target->r12 */             \
         "ld_add r0, r29, 8 \n\t"   /*arg1 -> r0 */               \
         "ld_add r1, r29, 8 \n\t"   /*arg2 -> r1 */               \
         "ld_add r2, r29, 8 \n\t"   /*arg3 -> r2 */               \
         "ld_add r3, r29, 8 \n\t"   /*arg4 -> r3 */               \
         "ld_add r4, r29, 8 \n\t"   /*arg5 -> r4 */               \
         "ld_add r5, r29, 8 \n\t"   /*arg6 -> r5 */               \
         "ld_add r6, r29, 8 \n\t"   /*arg7 -> r6 */               \
         "ld_add r7, r29, 8 \n\t"   /*arg8 -> r7 */               \
         "ld_add r8, r29, 8 \n\t"   /*arg9 -> r8 */               \
         VALGRIND_CALL_NOREDIR_R12                                \
         "addi sp, sp, 8\n\t"                                     \
         "ld_add lr, sp, 8 \n\t"                                  \
         "move %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS);              \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6482
/* tilegx: call a 10-argument word function with no redirection.
   Same scheme as CALL_FN_W_9W; args 1..10 go in r0..r9. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      /* _argvec[0] = target address, [1..10] = the arguments. */ \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         /* save lr on the stack */                               \
         "addi sp, sp, -8 \n\t"                                   \
         "st_add sp, lr, -8 \n\t"                                 \
         "move r29, %1 \n\t"                                      \
         "ld_add r12, r29, 8 \n\t"  /* target -> r12 */           \
         "ld_add r0, r29, 8 \n\t"   /* arg1 -> r0 */              \
         "ld_add r1, r29, 8 \n\t"   /* arg2 -> r1 */              \
         "ld_add r2, r29, 8 \n\t"   /* arg3 -> r2 */              \
         "ld_add r3, r29, 8 \n\t"   /* arg4 -> r3 */              \
         "ld_add r4, r29, 8 \n\t"   /* arg5 -> r4 */              \
         "ld_add r5, r29, 8 \n\t"   /* arg6 -> r5 */              \
         "ld_add r6, r29, 8 \n\t"   /* arg7 -> r6 */              \
         "ld_add r7, r29, 8 \n\t"   /* arg8 -> r7 */              \
         "ld_add r8, r29, 8 \n\t"   /* arg9 -> r8 */              \
         "ld_add r9, r29, 8 \n\t"   /* arg10 -> r9 */             \
         VALGRIND_CALL_NOREDIR_R12                                \
         /* restore sp and lr */                                  \
         "addi sp, sp, 8\n\t"                                     \
         "ld_add lr, sp, 8 \n\t"                                  \
         "move %0, r0\n"                                          \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS);              \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6524
/* tilegx: call an 11-argument word function with no redirection.
   Args 1..10 go in r0..r9; arg11 is stored on the stack before the
   call (see the st_add below), then sp is re-adjusted afterwards. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      /* _argvec[0] = target address, [1..11] = the arguments. */ \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         /* save lr on the stack */                               \
         "addi sp, sp, -8 \n\t"                                   \
         "st_add sp, lr, -8 \n\t"                                 \
         "move r29, %1 \n\t"                                      \
         "ld_add r12, r29, 8 \n\t"  /* target -> r12 */           \
         "ld_add r0, r29, 8 \n\t"   /* arg1 -> r0 */              \
         "ld_add r1, r29, 8 \n\t"   /* arg2 -> r1 */              \
         "ld_add r2, r29, 8 \n\t"   /* arg3 -> r2 */              \
         "ld_add r3, r29, 8 \n\t"   /* arg4 -> r3 */              \
         "ld_add r4, r29, 8 \n\t"   /* arg5 -> r4 */              \
         "ld_add r5, r29, 8 \n\t"   /* arg6 -> r5 */              \
         "ld_add r6, r29, 8 \n\t"   /* arg7 -> r6 */              \
         "ld_add r7, r29, 8 \n\t"   /* arg8 -> r7 */              \
         "ld_add r8, r29, 8 \n\t"   /* arg9 -> r8 */              \
         "ld_add r9, r29, 8 \n\t"   /* arg10 -> r9 */             \
         "ld r10, r29 \n\t"         /* arg11 -> r10 */            \
         "st_add sp, r10, -16 \n\t" /* push arg11 onto stack */   \
         VALGRIND_CALL_NOREDIR_R12                                \
         /* pop stack arg area, restore sp and lr */              \
         "addi sp, sp, 24 \n\t"                                   \
         "ld_add lr, sp, 8 \n\t"                                  \
         "move %0, r0\n"                                          \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS);              \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6570
/* tilegx: call a 12-argument word function with no redirection.
   Args 1..10 go in r0..r9; args 11 and 12 are staged to the stack
   through r28 before the call. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      /* _argvec[0] = target address, [1..12] = the arguments. */ \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         /* save lr on the stack */                               \
         "addi sp, sp, -8 \n\t"                                   \
         "st_add sp, lr, -8 \n\t"                                 \
         "move r29, %1 \n\t"                                      \
         "ld_add r12, r29, 8 \n\t"  /* target -> r12 */           \
         "ld_add r0, r29, 8 \n\t"   /* arg1 -> r0 */              \
         "ld_add r1, r29, 8 \n\t"   /* arg2 -> r1 */              \
         "ld_add r2, r29, 8 \n\t"   /* arg3 -> r2 */              \
         "ld_add r3, r29, 8 \n\t"   /* arg4 -> r3 */              \
         "ld_add r4, r29, 8 \n\t"   /* arg5 -> r4 */              \
         "ld_add r5, r29, 8 \n\t"   /* arg6 -> r5 */              \
         "ld_add r6, r29, 8 \n\t"   /* arg7 -> r6 */              \
         "ld_add r7, r29, 8 \n\t"   /* arg8 -> r7 */              \
         "ld_add r8, r29, 8 \n\t"   /* arg9 -> r8 */              \
         "ld_add r9, r29, 8 \n\t"   /* arg10 -> r9 */             \
         /* stage args 11 and 12 onto the stack via r28 */        \
         "addi r28, sp, -8 \n\t"                                  \
         "addi sp, sp, -24 \n\t"                                  \
         "ld_add r10, r29, 8 \n\t"  /* arg11 -> r10 */            \
         "ld r11, r29 \n\t"         /* arg12 -> r11 */            \
         "st_add r28, r10, 8 \n\t"  /* store arg11 */             \
         "st r28, r11 \n\t"         /* store arg12 */             \
         VALGRIND_CALL_NOREDIR_R12                                \
         /* pop stack arg area, restore sp and lr */              \
         "addi sp, sp, 32 \n\t"                                   \
         "ld_add lr, sp, 8 \n\t"                                  \
         "move %0, r0\n"                                          \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS);              \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6621 #endif /* PLAT_tilegx_linux */
6622
6623 /* ------------------------------------------------------------------ */
6624 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6625 /* */
6626 /* ------------------------------------------------------------------ */
6627
6628 /* Some request codes. There are many more of these, but most are not
6629 exposed to end-user view. These are the public ones, all of the
6630 form 0x1000 + small_number.
6631
6632 Core ones are in the range 0x00000000--0x0000ffff. The non-public
6633 ones start at 0x2000.
6634 */
6635
/* These macros are used by tools -- they must be public, but don't
   embed them into other programs. */
/* Build a tool-specific client-request base code: the two characters
   'a' and 'b' (the tool's two-letter code) occupy the top 16 bits. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
/* True iff request code 'v' belongs to the tool identified by (a,b);
   only the top 16 bits of 'v' are compared. */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6642
/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
   This enum comprises an ABI exported by Valgrind to programs
   which use client requests.  DO NOT CHANGE THE ORDER OF THESE
   ENTRIES, NOR DELETE ANY -- add new ones at the end. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          /* 0x130b is out of numeric sequence; it is grouped here with
             the other heap-block requests. */
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK   = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901
   } Vg_ClientRequest;
6719
6720 #if !defined(__GNUC__)
6721 # define __extension__ /* */
6722 #endif
6723
6724
/* Returns the number of Valgrinds this code is running under.  That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc. */
/* Fix: the original definition ended with a stray trailing '\',
   silently continuing the macro onto the following (blank) line --
   a latent hazard if code were ever added there. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)
6733
6734
/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1].  Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area.  Returns no value (expands to a statement, not an
   expression). */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,  \
                                    _qzz_addr, _qzz_len, 0, 0, 0)
6742
6743
6744 /* These requests are for getting Valgrind itself to print something.
6745 Possibly with a backtrace. This is a really ugly hack. The return value
6746 is the number of characters printed, excluding the "**<pid>** " part at the
6747 start and the backtrace (if present). */
6748
6749 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6750 /* Modern GCC will optimize the static routine out if unused,
6751 and unused attribute will shut down warnings about it. */
6752 static int VALGRIND_PRINTF(const char *format, ...)
6753 __attribute__((format(__printf__, 1, 2), __unused__));
6754 #endif
6755 static int
6756 #if defined(_MSC_VER)
6757 __inline
6758 #endif
VALGRIND_PRINTF(const char * format,...)6759 VALGRIND_PRINTF(const char *format, ...)
6760 {
6761 #if defined(NVALGRIND)
6762 return 0;
6763 #else /* NVALGRIND */
6764 #if defined(_MSC_VER) || defined(__MINGW64__)
6765 uintptr_t _qzz_res;
6766 #else
6767 unsigned long _qzz_res;
6768 #endif
6769 va_list vargs;
6770 va_start(vargs, format);
6771 #if defined(_MSC_VER) || defined(__MINGW64__)
6772 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6773 VG_USERREQ__PRINTF_VALIST_BY_REF,
6774 (uintptr_t)format,
6775 (uintptr_t)&vargs,
6776 0, 0, 0);
6777 #else
6778 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6779 VG_USERREQ__PRINTF_VALIST_BY_REF,
6780 (unsigned long)format,
6781 (unsigned long)&vargs,
6782 0, 0, 0);
6783 #endif
6784 va_end(vargs);
6785 return (int)_qzz_res;
6786 #endif /* NVALGRIND */
6787 }
6788
6789 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6790 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6791 __attribute__((format(__printf__, 1, 2), __unused__));
6792 #endif
6793 static int
6794 #if defined(_MSC_VER)
6795 __inline
6796 #endif
VALGRIND_PRINTF_BACKTRACE(const char * format,...)6797 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6798 {
6799 #if defined(NVALGRIND)
6800 return 0;
6801 #else /* NVALGRIND */
6802 #if defined(_MSC_VER) || defined(__MINGW64__)
6803 uintptr_t _qzz_res;
6804 #else
6805 unsigned long _qzz_res;
6806 #endif
6807 va_list vargs;
6808 va_start(vargs, format);
6809 #if defined(_MSC_VER) || defined(__MINGW64__)
6810 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6811 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6812 (uintptr_t)format,
6813 (uintptr_t)&vargs,
6814 0, 0, 0);
6815 #else
6816 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6817 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6818 (unsigned long)format,
6819 (unsigned long)&vargs,
6820 0, 0, 0);
6821 #endif
6822 va_end(vargs);
6823 return (int)_qzz_res;
6824 #endif /* NVALGRIND */
6825 }
6826
6827
6828 /* These requests allow control to move from the simulated CPU to the
   real CPU, calling an arbitrary function.
6830
6831 Note that the current ThreadId is inserted as the first argument.
6832 So this call:
6833
6834 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
6835
6836 requires f to have this signature:
6837
6838 Word f(Word tid, Word arg1, Word arg2)
6839
6840 where "Word" is a word-sized type.
6841
6842 Note that these client requests are not entirely reliable. For example,
6843 if you call a function with them that subsequently calls printf(),
6844 there's a high chance Valgrind will crash. Generally, your prospects of
6845 these working are made higher if the called function does not refer to
6846 any global variables, and does not refer to any libc or other functions
6847 (printf et al). Any kind of entanglement with libc or dynamic linking is
6848 likely to have a bad outcome, for tricky reasons which we've grappled
6849 with a lot in the past.
6850 */
/* Run _qyy_fn on the real CPU; it receives only the ThreadId. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

/* Run _qyy_fn on the real CPU with (ThreadId, _qyy_arg1). */
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

/* Run _qyy_fn on the real CPU with (ThreadId, arg1, arg2). */
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

/* Run _qyy_fn on the real CPU with (ThreadId, arg1, arg2, arg3). */
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL3,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2,         \
                                    _qyy_arg3, 0)
6875
6876
/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted.  Evaluates to 0 when
   not running under Valgrind. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
6885
6886 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6887 when heap blocks are allocated in order to give accurate results. This
6888 happens automatically for the standard allocator functions such as
6889 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
6890 delete[], etc.
6891
6892 But if your program uses a custom allocator, this doesn't automatically
6893 happen, and Valgrind will not do as well. For example, if you allocate
   superblocks with mmap() and then allocate chunks of the superblocks, all
6895 Valgrind's observations will be at the mmap() level and it won't know that
6896 the chunks should be considered separate entities. In Memcheck's case,
6897 that means you probably won't get heap block overrun detection (because
6898 there won't be redzones marked as unaddressable) and you definitely won't
6899 get any leak detection.
6900
6901 The following client requests allow a custom allocator to be annotated so
6902 that it can be handled accurately by Valgrind.
6903
6904 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6905 by a malloc()-like function. For Memcheck (an illustrative case), this
6906 does two things:
6907
6908 - It records that the block has been allocated. This means any addresses
6909 within the block mentioned in error messages will be
6910 identified as belonging to the block. It also means that if the block
6911 isn't freed it will be detected by the leak checker.
6912
6913 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6914 not set), or addressable and defined (if 'is_zeroed' is set). This
6915 controls how accesses to the block by the program are handled.
6916
6917 'addr' is the start of the usable block (ie. after any
6918 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6919 can apply redzones -- these are blocks of padding at the start and end of
6920 each block. Adding redzones is recommended as it makes it much more likely
6921 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6922 zeroed (or filled with another predictable value), as is the case for
6923 calloc().
6924
6925 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6926 heap block -- that will be used by the client program -- is allocated.
6927 It's best to put it at the outermost level of the allocator if possible;
6928 for example, if you have a function my_alloc() which calls
6929 internal_alloc(), and the client request is put inside internal_alloc(),
6930 stack traces relating to the heap block will contain entries for both
6931 my_alloc() and internal_alloc(), which is probably not what you want.
6932
6933 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6934 custom blocks from within a heap block, B, that has been allocated with
6935 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6936 -- the custom blocks will take precedence.
6937
6938 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6939 Memcheck, it does two things:
6940
6941 - It records that the block has been deallocated. This assumes that the
6942 block was annotated as having been allocated via
6943 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6944
6945 - It marks the block as being unaddressable.
6946
6947 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6948 heap block is deallocated.
6949
6950 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6951 Memcheck, it does four things:
6952
6953 - It records that the size of a block has been changed. This assumes that
6954 the block was annotated as having been allocated via
6955 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6956
6957 - If the block shrunk, it marks the freed memory as being unaddressable.
6958
6959 - If the block grew, it marks the new area as undefined and defines a red
6960 zone past the end of the new block.
6961
6962 - The V-bits of the overlap between the old and the new block are preserved.
6963
6964 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6965 and before deallocation of the old block.
6966
6967 In many cases, these three client requests will not be enough to get your
6968 allocator working well with Memcheck. More specifically, if your allocator
6969 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6970 will be necessary to mark the memory as addressable just before the zeroing
6971 occurs, otherwise you'll get a lot of invalid write errors. For example,
6972 you'll need to do this if your allocator recycles freed blocks, but it
6973 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6974 Alternatively, if your allocator reuses freed blocks for allocator-internal
6975 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6976
6977 Really, what's happening is a blurring of the lines between the client
6978 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6979 memory should be considered unaddressable to the client program, but the
6980 allocator knows more than the rest of the client program and so may be able
6981 to safely access it. Extra client requests are necessary for Valgrind to
6982 understand the distinction between the allocator and the rest of the
6983 program.
6984
6985 Ignored if addr == 0.
6986 */
/* Mark [addr, addr+sizeB) as a heap block produced by a custom
   allocator; rzB is the redzone size, is_zeroed says whether the
   memory starts out defined.  Full contract in the large comment
   above.  Ignored if addr == 0. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)          \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,       \
                                    addr, sizeB, rzB, is_zeroed, 0)

/* Inform tools that the block at addr changed size in place from
   oldSizeB to newSizeB.  See the comment for VALGRIND_MALLOCLIKE_BLOCK
   for details.  Ignored if addr == 0. */
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)

/* Mark the custom-allocated block at addr as freed.  See the comment
   for VALGRIND_MALLOCLIKE_BLOCK for details.  Ignored if addr == 0. */
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,         \
                                    addr, rzB, 0, 0, 0)
7004
/* Create a memory pool anchored at 'pool', with redzone size rzB;
   is_zeroed says whether chunks start out defined. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, 0, 0)

/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,  \
                                    pool, 0, 0, 0, 0)

/* Associate a piece of memory with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,    \
                                    pool, addr, size, 0, 0)

/* Disassociate a piece of memory from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,     \
                                    pool, addr, 0, 0, 0)

/* Disassociate any pieces outside a particular range. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,     \
                                    pool, addr, size, 0, 0)

/* Tell Valgrind that the pool previously anchored at address poolA
   has moved to anchor address poolB.  (The original comment here was
   a copy-paste of VALGRIND_MEMPOOL_CHANGE's and was wrong.) */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,     \
                                    poolA, poolB, 0, 0, 0)

/* Resize and/or move a piece associated with a memory pool: the
   chunk previously at addrA is now at addrB with the given size. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,   \
                                    pool, addrA, addrB, size, 0)

/* Return 1 if a mempool exists, else 0. */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
7045
/* Mark a piece of memory as being a stack.  Returns a stack id.
   start is the lowest addressable stack byte, end is the highest
   addressable stack byte. */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)

/* Unmark the piece of memory associated with a stack id as being a
   stack. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)

/* Change the start and end address of the stack id.
   start is the new lowest addressable stack byte, end is the new highest
   addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)

/* Load PDB debug info for Wine PE image_map. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)
7071
/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)

/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled.
   (Implemented as a +1 delta to this thread's disablement counter.) */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)

/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING.  (A -1 delta to the counter.) */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)

/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise. */
#define VALGRIND_MONITOR_COMMAND(command)                               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                    command, 0, 0, 0, 0)
7107
7108
7109 #undef PLAT_x86_darwin
7110 #undef PLAT_amd64_darwin
7111 #undef PLAT_x86_win32
7112 #undef PLAT_amd64_win64
7113 #undef PLAT_x86_linux
7114 #undef PLAT_amd64_linux
7115 #undef PLAT_ppc32_linux
7116 #undef PLAT_ppc64be_linux
7117 #undef PLAT_ppc64le_linux
7118 #undef PLAT_arm_linux
7119 #undef PLAT_s390x_linux
7120 #undef PLAT_mips32_linux
7121 #undef PLAT_mips64_linux
7122 #undef PLAT_tilegx_linux
7123 #undef PLAT_x86_solaris
7124 #undef PLAT_amd64_solaris
7125
7126 #endif /* __VALGRIND_H */
7127