valgrind.h revision c913c8e1a75c78cd653fec8076c07ee4bfb56824
1/* -*- c -*- 2 ---------------------------------------------------------------- 3 4 Notice that the following BSD-style license applies to this one 5 file (valgrind.h) only. The rest of Valgrind is licensed under the 6 terms of the GNU General Public License, version 2, unless 7 otherwise indicated. See the COPYING file in the source 8 distribution for details. 9 10 ---------------------------------------------------------------- 11 12 This file is part of Valgrind, a dynamic binary instrumentation 13 framework. 14 15 Copyright (C) 2000-2013 Julian Seward. All rights reserved. 16 17 Redistribution and use in source and binary forms, with or without 18 modification, are permitted provided that the following conditions 19 are met: 20 21 1. Redistributions of source code must retain the above copyright 22 notice, this list of conditions and the following disclaimer. 23 24 2. The origin of this software must not be misrepresented; you must 25 not claim that you wrote the original software. If you use this 26 software in a product, an acknowledgment in the product 27 documentation would be appreciated but is not required. 28 29 3. Altered source versions must be plainly marked as such, and must 30 not be misrepresented as being the original software. 31 32 4. The name of the author may not be used to endorse or promote 33 products derived from this software without specific prior written 34 permission. 35 36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS 37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 39 ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY 40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE 42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 47 48 ---------------------------------------------------------------- 49 50 Notice that the above BSD-style license applies to this one file 51 (valgrind.h) only. The entire rest of Valgrind is licensed under 52 the terms of the GNU General Public License, version 2. See the 53 COPYING file in the source distribution for details. 54 55 ---------------------------------------------------------------- 56*/ 57 58 59/* This file is for inclusion into client (your!) code. 60 61 You can use these macros to manipulate and query Valgrind's 62 execution inside your own programs. 63 64 The resulting executables will still run without Valgrind, just a 65 little bit more slowly than they otherwise would, but otherwise 66 unchanged. When not running on valgrind, each client request 67 consumes very few (eg. 7) instructions, so the resulting performance 68 loss is negligible unless you plan to execute client requests 69 millions of times per second. Nevertheless, if that is still a 70 problem, you can compile with the NVALGRIND symbol defined (gcc 71 -DNVALGRIND) so that client requests are not even compiled in. */ 72 73#ifndef __VALGRIND_H 74#define __VALGRIND_H 75 76 77/* ------------------------------------------------------------------ */ 78/* VERSION NUMBER OF VALGRIND */ 79/* ------------------------------------------------------------------ */ 80 81/* Specify Valgrind's version number, so that user code can 82 conditionally compile based on our version number. 
Note that these were introduced at version 3.6 and so do not exist in
   version 3.5 or earlier.  The recommended way to use them to check for
   "version X.Y or later" is (eg)

#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__)   \
    && (__VALGRIND_MAJOR__ > 3                                   \
        || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    10


#include <stdarg.h>

/* Nb: this file might be included in a file compiled with -ansi.  So
   we can't use C++ style "//" comments nor the "asm" keyword (instead
   use "__asm__"). */

/* Derive some tags indicating what the target platform is.  Note
   that in this file we're using the compiler's CPP symbols for
   identifying architectures, which are different to the ones we use
   within the rest of Valgrind.  Note, __powerpc__ is active for both
   32 and 64-bit PPC, whereas __powerpc64__ is only active for the
   latter (on Linux, that is).

   Misc note: how to find out what's predefined in gcc by default:
   gcc -Wp,-dM somefile.c
*/
/* Start from a clean slate; at most one PLAT_* tag is defined by the
   #if chain below. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux


#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
      || defined(__CYGWIN32__)                        \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif defined(__MINGW64__) \
      || (defined(_WIN64) && defined(_M_X64))
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
#  define PLAT_ppc64_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips!=64)
#  define PLAT_mips32_linux 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms. */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif


/* ------------------------------------------------------------------ */
/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS.  There is nothing */
/* in here of use to end-users -- skip to the next section.           */
/* ------------------------------------------------------------------ */

/*
 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
 * request. Accepts both pointers and integers as arguments.
 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.

 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
 * client request and whose value equals the client request result.  Accepts
 * both pointers and integers as arguments.  Note that such calls are not
 * necessarily pure functions -- they may have side effects.
 */

/* Statement form: store the request's result into _zzq_rlval
   (_zzq_default when not running under Valgrind). */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Statement form with the result discarded. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                        _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                  (_zzq_request), (_zzq_arg1), (_zzq_arg2),             \
                  (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

#if defined(NVALGRIND)

/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()) */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)

#else  /* ! NVALGRIND */

/* The following defines the magic code sequences which the JITter
   spots and handles magically.  Don't look too closely at them as
   they will rot your brain.

   The assembly code sequences for all architectures is in this one
   file.  This is because this file must be stand-alone, and we don't
   want to have multiple files.

   For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
   value gets put in the return slot, so that everything works when
   this is executed not under Valgrind.  Args are passed in a memory
   block, and so there's no intrinsic limit to the number that could
   be passed, but it's currently five.

   The macro args are:
      _zzq_rlval    result lvalue
      _zzq_default  default value (result returned when running on real CPU)
      _zzq_request  request code
      _zzq_arg1..5  request params

   The other two macros are used to support function wrapping, and are
   a lot simpler.
VALGRIND_GET_NR_CONTEXT returns the value of the
   guest's NRADDR pseudo-register and whatever other information is
   needed to safely run the call original from the wrapper: on
   ppc64-linux, the R2 value at the divert point is also needed.  This
   information is abstracted into a user-visible type, OrigFn.

   VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
   guest, but guarantees that the branch instruction will not be
   redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
   branch-and-link-to-r11.  VALGRIND_CALL_NOREDIR is just text, not a
   complete inline asm, since it needs to be combined with more magic
   inline asm stuff to be useful.
*/

/* ------------------------- x86-{linux,darwin} ---------------- */

#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__))

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of %edi totalling 64 bits (== 0 mod 32): a no-op on a
   real CPU, but recognised by Valgrind as the special-instruction
   marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"       \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                   "xchgl %%edi,%%edi\n\t"                       \
                   : : : "cc", "memory"                          \
                  );                                             \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */

/* ------------------------- x86-Win32 ------------------------- */

#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* Same rotate-by-64-bits no-op marker as above, in MSVC inline-asm
   syntax. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     __asm rol edi, 3  __asm rol edi, 13          \
                     __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned int __addr;                               \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE                      \
              /* %EAX = guest_NRADDR */                           \
              __asm xchg ecx,ecx                                  \
              __asm mov __addr, eax                               \
      }                                                           \
      _zzq_orig->nraddr = __addr;                                 \
    }

/* Deliberately not implemented for MSVC: expanding this macro yields
   the undefined identifier ERROR, giving a compile-time failure. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm { __SPECIAL_INSTRUCTION_PREAMBLE                         \
          __asm xchg edi,edi                                     \
  }                                                              \
 } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */

/* ------------------------ amd64-{linux,darwin} --------------- */

#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin) \
    ||  (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of %rdi totalling 128 bits (== 0 mod 64): a no-op on a
   real CPU, but recognised by Valgrind as the special-instruction
   marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"       \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long long int _zzq_args[6];              \
       volatile unsigned long long int _zzq_result;               \
       _zzq_args[0] = (unsigned long long int)(_zzq_request);     \
       _zzq_args[1] = (unsigned long long int)(_zzq_arg1);        \
       _zzq_args[2] = (unsigned long long int)(_zzq_arg2);        \
       _zzq_args[3] = (unsigned long long int)(_zzq_arg3);        \
       _zzq_args[4] = (unsigned long long int)(_zzq_arg4);        \
       _zzq_args[5] = (unsigned long long int)(_zzq_arg5);        \
       __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE            \
                        /* %RDX = client_request ( %RAX ) */      \
                        "xchgq %%rbx,%%rbx"                       \
                        : "=d" (_zzq_result)                      \
                        : "a" (&_zzq_args[0]), "0" (_zzq_default) \
                        : "cc", "memory"                          \
                       );                                         \
       _zzq_result;                                               \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned long long int __addr;                     \
      __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE             \
                       /* %RAX = guest_NRADDR */                  \
                       "xchgq %%rcx,%%rcx"                        \
                       : "=a" (__addr)                            \
                       :                                          \
                       : "cc", "memory"                           \
                      );                                          \
      _zzq_orig->nraddr = __addr;                                 \
    }

#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                   "xchgq %%rdi,%%rdi\n\t"                       \
                   : : : "cc", "memory"                          \
                  );                                             \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin */

/* ------------------------- amd64-Win64 ------------------------- */

#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

#error Unsupported compiler.
#endif /* PLAT_amd64_win64 */

/* ------------------------ ppc32-linux ------------------------ */

#if defined(PLAT_ppc32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of r0 totalling 64 bits (== 0 mod 32): a no-op on a
   real CPU, but recognised by Valgrind as the special-instruction
   marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                    "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t"  \
                    "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned int  _zzq_args[6];                          \
             unsigned int  _zzq_result;                           \
             unsigned int* _zzq_ptr;                              \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                   "or 5,5,5\n\t"                                \
                  );                                             \
 } while (0)
#endif /* PLAT_ppc32_linux */

/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
      unsigned long long int r2;  /* what tocptr do we need? */
   }
   OrigFn;

/* Four rotates of r0 totalling 128 bits (== 0 mod 64): a no-op on a
   real CPU, but recognised by Valgrind as the special-instruction
   marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long long int  _zzq_args[6];                \
             unsigned long long int  _zzq_result;                 \
             unsigned long long int* _zzq_ptr;                    \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

/* ppc64 needs both the NRADDR and the R2 (TOC pointer) value at the
   divert point -- see the OrigFn comment above. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long long int __addr;                                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory",                            \
"r3" \ 583 ); \ 584 _zzq_orig->r2 = __addr; \ 585 } 586 587#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 588 __SPECIAL_INSTRUCTION_PREAMBLE \ 589 /* branch-and-link-to-noredir *%R11 */ \ 590 "or 3,3,3\n\t" 591 592#define VALGRIND_VEX_INJECT_IR() \ 593 do { \ 594 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \ 595 "or 5,5,5\n\t" \ 596 ); \ 597 } while (0) 598 599#endif /* PLAT_ppc64_linux */ 600 601/* ------------------------- arm-linux ------------------------- */ 602 603#if defined(PLAT_arm_linux) 604 605typedef 606 struct { 607 unsigned int nraddr; /* where's the code? */ 608 } 609 OrigFn; 610 611#define __SPECIAL_INSTRUCTION_PREAMBLE \ 612 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \ 613 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t" 614 615#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \ 616 _zzq_default, _zzq_request, \ 617 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \ 618 \ 619 __extension__ \ 620 ({volatile unsigned int _zzq_args[6]; \ 621 volatile unsigned int _zzq_result; \ 622 _zzq_args[0] = (unsigned int)(_zzq_request); \ 623 _zzq_args[1] = (unsigned int)(_zzq_arg1); \ 624 _zzq_args[2] = (unsigned int)(_zzq_arg2); \ 625 _zzq_args[3] = (unsigned int)(_zzq_arg3); \ 626 _zzq_args[4] = (unsigned int)(_zzq_arg4); \ 627 _zzq_args[5] = (unsigned int)(_zzq_arg5); \ 628 __asm__ volatile("mov r3, %1\n\t" /*default*/ \ 629 "mov r4, %2\n\t" /*ptr*/ \ 630 __SPECIAL_INSTRUCTION_PREAMBLE \ 631 /* R3 = client_request ( R4 ) */ \ 632 "orr r10, r10, r10\n\t" \ 633 "mov %0, r3" /*result*/ \ 634 : "=r" (_zzq_result) \ 635 : "r" (_zzq_default), "r" (&_zzq_args[0]) \ 636 : "cc","memory", "r3", "r4"); \ 637 _zzq_result; \ 638 }) 639 640#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \ 641 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \ 642 unsigned int __addr; \ 643 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \ 644 /* R3 = guest_NRADDR */ \ 645 "orr r11, r11, r11\n\t" \ 646 "mov %0, r3" \ 647 : "=r" (__addr) \ 648 : \ 649 : "cc", "memory", "r3" \ 650 
); \ 651 _zzq_orig->nraddr = __addr; \ 652 } 653 654#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \ 655 __SPECIAL_INSTRUCTION_PREAMBLE \ 656 /* branch-and-link-to-noredir *%R4 */ \ 657 "orr r12, r12, r12\n\t" 658 659#define VALGRIND_VEX_INJECT_IR() \ 660 do { \ 661 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \ 662 "orr r9, r9, r9\n\t" \ 663 : : : "cc", "memory" \ 664 ); \ 665 } while (0) 666 667#endif /* PLAT_arm_linux */ 668 669/* ------------------------ arm64-linux ------------------------- */ 670 671#if defined(PLAT_arm64_linux) 672 673typedef 674 struct { 675 unsigned long long int nraddr; /* where's the code? */ 676 } 677 OrigFn; 678 679#define __SPECIAL_INSTRUCTION_PREAMBLE \ 680 "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \ 681 "ror x12, x12, #51 ; ror x12, x12, #61 \n\t" 682 683#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \ 684 _zzq_default, _zzq_request, \ 685 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \ 686 \ 687 __extension__ \ 688 ({volatile unsigned long long int _zzq_args[6]; \ 689 volatile unsigned long long int _zzq_result; \ 690 _zzq_args[0] = (unsigned long long int)(_zzq_request); \ 691 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \ 692 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \ 693 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \ 694 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \ 695 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \ 696 __asm__ volatile("mov x3, %1\n\t" /*default*/ \ 697 "mov x4, %2\n\t" /*ptr*/ \ 698 __SPECIAL_INSTRUCTION_PREAMBLE \ 699 /* X3 = client_request ( X4 ) */ \ 700 "orr x10, x10, x10\n\t" \ 701 "mov %0, x3" /*result*/ \ 702 : "=r" (_zzq_result) \ 703 : "r" (_zzq_default), "r" (&_zzq_args[0]) \ 704 : "cc","memory", "x3", "x4"); \ 705 _zzq_result; \ 706 }) 707 708#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \ 709 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \ 710 unsigned long long int __addr; \ 711 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \ 712 /* X3 = guest_NRADDR 
*/                                                                \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                   "orr x9, x9, x9\n\t"                          \
                   : : : "cc", "memory"                          \
                  );                                             \
 } while (0)

#endif /* PLAT_arm64_linux */

/* ------------------------ s390x-linux ------------------------ */

#if defined(PLAT_s390x_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code. This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 */
/* Each "lr N,N" loads a register into itself: a no-op on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                           \
                     "lr 15,15\n\t"                              \
                     "lr 1,1\n\t"                                \
                     "lr 2,2\n\t"                                \
                     "lr 3,3\n\t"

#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                         \
        _zzq_default, _zzq_request,                              \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)   \
  __extension__                                                  \
 ({volatile unsigned long long int _zzq_args[6];                 \
   volatile unsigned long long int _zzq_result;                  \
   _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
   _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
   _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
   _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
   _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
   _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
   __asm__ volatile(/* r2 = args */                              \
                    "lgr 2,%1\n\t"                               \
                    /* r3 = default */                           \
                    "lgr 3,%2\n\t"                               \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CLIENT_REQUEST_CODE                        \
                    /* results = r3 */                           \
                    "lgr %0, 3\n\t"                              \
                    : "=d" (_zzq_result)                         \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                    : "cc", "2", "3", "memory"                   \
                   );                                            \
   _zzq_result;                                                  \
 })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                      \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
   volatile unsigned long long int __addr;                       \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                    __GET_NR_CONTEXT_CODE                        \
                    "lgr %0, 3\n\t"                              \
                    : "=a" (__addr)                              \
                    :                                            \
                    : "cc", "3", "memory"                        \
                   );                                            \
   _zzq_orig->nraddr = __addr;                                   \
 }

#define VALGRIND_CALL_NOREDIR_R1                                 \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                   __VEX_INJECT_IR_CODE);                        \
 } while (0)

#endif /* PLAT_s390x_linux */

/* ------------------------- mips32-linux ---------------- */

#if defined(PLAT_mips32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code?
*/
   }
   OrigFn;

/* .word 0x342
 * .word 0x742
 * .word 0xC2
 * .word 0x4C2*/
/* Shifts of $0 (the hardwired-zero register) are no-ops on a real
   CPU; the encodings above are what Valgrind recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE          \
                     "srl $0, $0, 13\n\t"       \
                     "srl $0, $0, 29\n\t"       \
                     "srl $0, $0, 3\n\t"        \
                     "srl $0, $0, 19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
     volatile unsigned int _zzq_result;                           \
     _zzq_args[0] = (unsigned int)(_zzq_request);                 \
     _zzq_args[1] = (unsigned int)(_zzq_arg1);                    \
     _zzq_args[2] = (unsigned int)(_zzq_arg2);                    \
     _zzq_args[3] = (unsigned int)(_zzq_arg3);                    \
     _zzq_args[4] = (unsigned int)(_zzq_arg4);                    \
     _zzq_args[5] = (unsigned int)(_zzq_arg5);                    \
     __asm__ volatile("move $11, %1\n\t" /*default*/              \
                      "move $12, %2\n\t" /*ptr*/                  \
                      __SPECIAL_INSTRUCTION_PREAMBLE              \
                      /* T3 = client_request ( T4 ) */            \
                      "or $13, $13, $13\n\t"                      \
                      "move %0, $11\n\t" /*result*/               \
                      : "=r" (_zzq_result)                        \
                      : "r" (_zzq_default), "r" (&_zzq_args[0])   \
                      : "$11", "$12");                            \
     _zzq_result;                                                 \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %t9 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_T9                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE              \
                     /* call-noredir *%t9 */                     \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                   "or $11, $11, $11\n\t"                        \
                  );                                             \
 } while (0)


#endif /* PLAT_mips32_linux */

/* ------------------------- mips64-linux ---------------- */

#if defined(PLAT_mips64_linux)

typedef
   struct {
unsigned long long nraddr; /* where's the code? */ 894 } 895 OrigFn; 896 897/* dsll $0,$0, 3 898 * dsll $0,$0, 13 899 * dsll $0,$0, 29 900 * dsll $0,$0, 19*/ 901#define __SPECIAL_INSTRUCTION_PREAMBLE \ 902 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \ 903 "dsll $0,$0,29 ; dsll $0,$0,19\n\t" 904 905#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \ 906 _zzq_default, _zzq_request, \ 907 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \ 908 __extension__ \ 909 ({ volatile unsigned long long int _zzq_args[6]; \ 910 volatile unsigned long long int _zzq_result; \ 911 _zzq_args[0] = (unsigned long long int)(_zzq_request); \ 912 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \ 913 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \ 914 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \ 915 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \ 916 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \ 917 __asm__ volatile("move $11, %1\n\t" /*default*/ \ 918 "move $12, %2\n\t" /*ptr*/ \ 919 __SPECIAL_INSTRUCTION_PREAMBLE \ 920 /* $11 = client_request ( $12 ) */ \ 921 "or $13, $13, $13\n\t" \ 922 "move %0, $11\n\t" /*result*/ \ 923 : "=r" (_zzq_result) \ 924 : "r" (_zzq_default), "r" (&_zzq_args[0]) \ 925 : "$11", "$12"); \ 926 _zzq_result; \ 927 }) 928 929#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \ 930 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \ 931 volatile unsigned long long int __addr; \ 932 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \ 933 /* $11 = guest_NRADDR */ \ 934 "or $14, $14, $14\n\t" \ 935 "move %0, $11" /*result*/ \ 936 : "=r" (__addr) \ 937 : \ 938 : "$11"); \ 939 _zzq_orig->nraddr = __addr; \ 940 } 941 942#define VALGRIND_CALL_NOREDIR_T9 \ 943 __SPECIAL_INSTRUCTION_PREAMBLE \ 944 /* call-noredir $25 */ \ 945 "or $15, $15, $15\n\t" 946 947#define VALGRIND_VEX_INJECT_IR() \ 948 do { \ 949 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \ 950 "or $11, $11, $11\n\t" \ 951 ); \ 952 } while (0) 953 954#endif /* PLAT_mips64_linux */ 955 956/* Insert 
assembly code for other platforms here... */ 957 958#endif /* NVALGRIND */ 959 960 961/* ------------------------------------------------------------------ */ 962/* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */ 963/* ugly. It's the least-worst tradeoff I can think of. */ 964/* ------------------------------------------------------------------ */ 965 966/* This section defines magic (a.k.a appalling-hack) macros for doing 967 guaranteed-no-redirection macros, so as to get from function 968 wrappers to the functions they are wrapping. The whole point is to 969 construct standard call sequences, but to do the call itself with a 970 special no-redirect call pseudo-instruction that the JIT 971 understands and handles specially. This section is long and 972 repetitious, and I can't see a way to make it shorter. 973 974 The naming scheme is as follows: 975 976 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc} 977 978 'W' stands for "word" and 'v' for "void". Hence there are 979 different macros for calling arity 0, 1, 2, 3, 4, etc, functions, 980 and for each, the possibility of returning a word-typed result, or 981 no result. 982*/ 983 984/* Use these to write the name of your wrapper. NOTE: duplicates 985 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts 986 the default behaviour equivalance class tag "0000" into the name. 987 See pub_tool_redir.h for details -- normally you don't need to 988 think about this, though. */ 989 990/* Use an extra level of macroisation so as to ensure the soname/fnname 991 args are fully macro-expanded before pasting them together. 
*/ 992#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd 993 994#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \ 995 VG_CONCAT4(_vgw00000ZU_,soname,_,fnname) 996 997#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \ 998 VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname) 999 1000/* Use this macro from within a wrapper function to collect the 1001 context (address and possibly other info) of the original function. 1002 Once you have that you can then use it in one of the CALL_FN_ 1003 macros. The type of the argument _lval is OrigFn. */ 1004#define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval) 1005 1006/* Also provide end-user facilities for function replacement, rather 1007 than wrapping. A replacement function differs from a wrapper in 1008 that it has no way to get hold of the original function being 1009 called, and hence no way to call onwards to it. In a replacement 1010 function, VALGRIND_GET_ORIG_FN always returns zero. */ 1011 1012#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \ 1013 VG_CONCAT4(_vgr00000ZU_,soname,_,fnname) 1014 1015#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \ 1016 VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname) 1017 1018/* Derivatives of the main macros below, for calling functions 1019 returning void. 
*/ 1020 1021#define CALL_FN_v_v(fnptr) \ 1022 do { volatile unsigned long _junk; \ 1023 CALL_FN_W_v(_junk,fnptr); } while (0) 1024 1025#define CALL_FN_v_W(fnptr, arg1) \ 1026 do { volatile unsigned long _junk; \ 1027 CALL_FN_W_W(_junk,fnptr,arg1); } while (0) 1028 1029#define CALL_FN_v_WW(fnptr, arg1,arg2) \ 1030 do { volatile unsigned long _junk; \ 1031 CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0) 1032 1033#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \ 1034 do { volatile unsigned long _junk; \ 1035 CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0) 1036 1037#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \ 1038 do { volatile unsigned long _junk; \ 1039 CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0) 1040 1041#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \ 1042 do { volatile unsigned long _junk; \ 1043 CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0) 1044 1045#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \ 1046 do { volatile unsigned long _junk; \ 1047 CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0) 1048 1049#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \ 1050 do { volatile unsigned long _junk; \ 1051 CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0) 1052 1053/* ------------------------- x86-{linux,darwin} ---------------- */ 1054 1055#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) 1056 1057/* These regs are trashed by the hidden call. No need to mention eax 1058 as gcc can already see that, plus causes gcc to bomb. */ 1059#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx" 1060 1061/* Macros to save and align the stack before making a function 1062 call and restore it afterwards as gcc may not keep the stack 1063 pointer aligned if it doesn't realise calls are being made 1064 to other functions. 
*/ 1065 1066#define VALGRIND_ALIGN_STACK \ 1067 "movl %%esp,%%edi\n\t" \ 1068 "andl $0xfffffff0,%%esp\n\t" 1069#define VALGRIND_RESTORE_STACK \ 1070 "movl %%edi,%%esp\n\t" 1071 1072/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned 1073 long) == 4. */ 1074 1075#define CALL_FN_W_v(lval, orig) \ 1076 do { \ 1077 volatile OrigFn _orig = (orig); \ 1078 volatile unsigned long _argvec[1]; \ 1079 volatile unsigned long _res; \ 1080 _argvec[0] = (unsigned long)_orig.nraddr; \ 1081 __asm__ volatile( \ 1082 VALGRIND_ALIGN_STACK \ 1083 "movl (%%eax), %%eax\n\t" /* target->%eax */ \ 1084 VALGRIND_CALL_NOREDIR_EAX \ 1085 VALGRIND_RESTORE_STACK \ 1086 : /*out*/ "=a" (_res) \ 1087 : /*in*/ "a" (&_argvec[0]) \ 1088 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \ 1089 ); \ 1090 lval = (__typeof__(lval)) _res; \ 1091 } while (0) 1092 1093#define CALL_FN_W_W(lval, orig, arg1) \ 1094 do { \ 1095 volatile OrigFn _orig = (orig); \ 1096 volatile unsigned long _argvec[2]; \ 1097 volatile unsigned long _res; \ 1098 _argvec[0] = (unsigned long)_orig.nraddr; \ 1099 _argvec[1] = (unsigned long)(arg1); \ 1100 __asm__ volatile( \ 1101 VALGRIND_ALIGN_STACK \ 1102 "subl $12, %%esp\n\t" \ 1103 "pushl 4(%%eax)\n\t" \ 1104 "movl (%%eax), %%eax\n\t" /* target->%eax */ \ 1105 VALGRIND_CALL_NOREDIR_EAX \ 1106 VALGRIND_RESTORE_STACK \ 1107 : /*out*/ "=a" (_res) \ 1108 : /*in*/ "a" (&_argvec[0]) \ 1109 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \ 1110 ); \ 1111 lval = (__typeof__(lval)) _res; \ 1112 } while (0) 1113 1114#define CALL_FN_W_WW(lval, orig, arg1,arg2) \ 1115 do { \ 1116 volatile OrigFn _orig = (orig); \ 1117 volatile unsigned long _argvec[3]; \ 1118 volatile unsigned long _res; \ 1119 _argvec[0] = (unsigned long)_orig.nraddr; \ 1120 _argvec[1] = (unsigned long)(arg1); \ 1121 _argvec[2] = (unsigned long)(arg2); \ 1122 __asm__ volatile( \ 1123 VALGRIND_ALIGN_STACK \ 1124 "subl $8, %%esp\n\t" \ 1125 "pushl 8(%%eax)\n\t" \ 1126 "pushl 4(%%eax)\n\t" \ 1127 
"movl (%%eax), %%eax\n\t" /* target->%eax */ \ 1128 VALGRIND_CALL_NOREDIR_EAX \ 1129 VALGRIND_RESTORE_STACK \ 1130 : /*out*/ "=a" (_res) \ 1131 : /*in*/ "a" (&_argvec[0]) \ 1132 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \ 1133 ); \ 1134 lval = (__typeof__(lval)) _res; \ 1135 } while (0) 1136 1137#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \ 1138 do { \ 1139 volatile OrigFn _orig = (orig); \ 1140 volatile unsigned long _argvec[4]; \ 1141 volatile unsigned long _res; \ 1142 _argvec[0] = (unsigned long)_orig.nraddr; \ 1143 _argvec[1] = (unsigned long)(arg1); \ 1144 _argvec[2] = (unsigned long)(arg2); \ 1145 _argvec[3] = (unsigned long)(arg3); \ 1146 __asm__ volatile( \ 1147 VALGRIND_ALIGN_STACK \ 1148 "subl $4, %%esp\n\t" \ 1149 "pushl 12(%%eax)\n\t" \ 1150 "pushl 8(%%eax)\n\t" \ 1151 "pushl 4(%%eax)\n\t" \ 1152 "movl (%%eax), %%eax\n\t" /* target->%eax */ \ 1153 VALGRIND_CALL_NOREDIR_EAX \ 1154 VALGRIND_RESTORE_STACK \ 1155 : /*out*/ "=a" (_res) \ 1156 : /*in*/ "a" (&_argvec[0]) \ 1157 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \ 1158 ); \ 1159 lval = (__typeof__(lval)) _res; \ 1160 } while (0) 1161 1162#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \ 1163 do { \ 1164 volatile OrigFn _orig = (orig); \ 1165 volatile unsigned long _argvec[5]; \ 1166 volatile unsigned long _res; \ 1167 _argvec[0] = (unsigned long)_orig.nraddr; \ 1168 _argvec[1] = (unsigned long)(arg1); \ 1169 _argvec[2] = (unsigned long)(arg2); \ 1170 _argvec[3] = (unsigned long)(arg3); \ 1171 _argvec[4] = (unsigned long)(arg4); \ 1172 __asm__ volatile( \ 1173 VALGRIND_ALIGN_STACK \ 1174 "pushl 16(%%eax)\n\t" \ 1175 "pushl 12(%%eax)\n\t" \ 1176 "pushl 8(%%eax)\n\t" \ 1177 "pushl 4(%%eax)\n\t" \ 1178 "movl (%%eax), %%eax\n\t" /* target->%eax */ \ 1179 VALGRIND_CALL_NOREDIR_EAX \ 1180 VALGRIND_RESTORE_STACK \ 1181 : /*out*/ "=a" (_res) \ 1182 : /*in*/ "a" (&_argvec[0]) \ 1183 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \ 1184 ); \ 1185 lval = 
(__typeof__(lval)) _res; \ 1186 } while (0) 1187 1188#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \ 1189 do { \ 1190 volatile OrigFn _orig = (orig); \ 1191 volatile unsigned long _argvec[6]; \ 1192 volatile unsigned long _res; \ 1193 _argvec[0] = (unsigned long)_orig.nraddr; \ 1194 _argvec[1] = (unsigned long)(arg1); \ 1195 _argvec[2] = (unsigned long)(arg2); \ 1196 _argvec[3] = (unsigned long)(arg3); \ 1197 _argvec[4] = (unsigned long)(arg4); \ 1198 _argvec[5] = (unsigned long)(arg5); \ 1199 __asm__ volatile( \ 1200 VALGRIND_ALIGN_STACK \ 1201 "subl $12, %%esp\n\t" \ 1202 "pushl 20(%%eax)\n\t" \ 1203 "pushl 16(%%eax)\n\t" \ 1204 "pushl 12(%%eax)\n\t" \ 1205 "pushl 8(%%eax)\n\t" \ 1206 "pushl 4(%%eax)\n\t" \ 1207 "movl (%%eax), %%eax\n\t" /* target->%eax */ \ 1208 VALGRIND_CALL_NOREDIR_EAX \ 1209 VALGRIND_RESTORE_STACK \ 1210 : /*out*/ "=a" (_res) \ 1211 : /*in*/ "a" (&_argvec[0]) \ 1212 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \ 1213 ); \ 1214 lval = (__typeof__(lval)) _res; \ 1215 } while (0) 1216 1217#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \ 1218 do { \ 1219 volatile OrigFn _orig = (orig); \ 1220 volatile unsigned long _argvec[7]; \ 1221 volatile unsigned long _res; \ 1222 _argvec[0] = (unsigned long)_orig.nraddr; \ 1223 _argvec[1] = (unsigned long)(arg1); \ 1224 _argvec[2] = (unsigned long)(arg2); \ 1225 _argvec[3] = (unsigned long)(arg3); \ 1226 _argvec[4] = (unsigned long)(arg4); \ 1227 _argvec[5] = (unsigned long)(arg5); \ 1228 _argvec[6] = (unsigned long)(arg6); \ 1229 __asm__ volatile( \ 1230 VALGRIND_ALIGN_STACK \ 1231 "subl $8, %%esp\n\t" \ 1232 "pushl 24(%%eax)\n\t" \ 1233 "pushl 20(%%eax)\n\t" \ 1234 "pushl 16(%%eax)\n\t" \ 1235 "pushl 12(%%eax)\n\t" \ 1236 "pushl 8(%%eax)\n\t" \ 1237 "pushl 4(%%eax)\n\t" \ 1238 "movl (%%eax), %%eax\n\t" /* target->%eax */ \ 1239 VALGRIND_CALL_NOREDIR_EAX \ 1240 VALGRIND_RESTORE_STACK \ 1241 : /*out*/ "=a" (_res) \ 1242 : /*in*/ "a" (&_argvec[0]) \ 1243 : /*trash*/ 
"cc", "memory", __CALLER_SAVED_REGS, "edi" \ 1244 ); \ 1245 lval = (__typeof__(lval)) _res; \ 1246 } while (0) 1247 1248#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 1249 arg7) \ 1250 do { \ 1251 volatile OrigFn _orig = (orig); \ 1252 volatile unsigned long _argvec[8]; \ 1253 volatile unsigned long _res; \ 1254 _argvec[0] = (unsigned long)_orig.nraddr; \ 1255 _argvec[1] = (unsigned long)(arg1); \ 1256 _argvec[2] = (unsigned long)(arg2); \ 1257 _argvec[3] = (unsigned long)(arg3); \ 1258 _argvec[4] = (unsigned long)(arg4); \ 1259 _argvec[5] = (unsigned long)(arg5); \ 1260 _argvec[6] = (unsigned long)(arg6); \ 1261 _argvec[7] = (unsigned long)(arg7); \ 1262 __asm__ volatile( \ 1263 VALGRIND_ALIGN_STACK \ 1264 "subl $4, %%esp\n\t" \ 1265 "pushl 28(%%eax)\n\t" \ 1266 "pushl 24(%%eax)\n\t" \ 1267 "pushl 20(%%eax)\n\t" \ 1268 "pushl 16(%%eax)\n\t" \ 1269 "pushl 12(%%eax)\n\t" \ 1270 "pushl 8(%%eax)\n\t" \ 1271 "pushl 4(%%eax)\n\t" \ 1272 "movl (%%eax), %%eax\n\t" /* target->%eax */ \ 1273 VALGRIND_CALL_NOREDIR_EAX \ 1274 VALGRIND_RESTORE_STACK \ 1275 : /*out*/ "=a" (_res) \ 1276 : /*in*/ "a" (&_argvec[0]) \ 1277 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \ 1278 ); \ 1279 lval = (__typeof__(lval)) _res; \ 1280 } while (0) 1281 1282#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 1283 arg7,arg8) \ 1284 do { \ 1285 volatile OrigFn _orig = (orig); \ 1286 volatile unsigned long _argvec[9]; \ 1287 volatile unsigned long _res; \ 1288 _argvec[0] = (unsigned long)_orig.nraddr; \ 1289 _argvec[1] = (unsigned long)(arg1); \ 1290 _argvec[2] = (unsigned long)(arg2); \ 1291 _argvec[3] = (unsigned long)(arg3); \ 1292 _argvec[4] = (unsigned long)(arg4); \ 1293 _argvec[5] = (unsigned long)(arg5); \ 1294 _argvec[6] = (unsigned long)(arg6); \ 1295 _argvec[7] = (unsigned long)(arg7); \ 1296 _argvec[8] = (unsigned long)(arg8); \ 1297 __asm__ volatile( \ 1298 VALGRIND_ALIGN_STACK \ 1299 "pushl 32(%%eax)\n\t" \ 1300 "pushl 28(%%eax)\n\t" \ 1301 
"pushl 24(%%eax)\n\t" \ 1302 "pushl 20(%%eax)\n\t" \ 1303 "pushl 16(%%eax)\n\t" \ 1304 "pushl 12(%%eax)\n\t" \ 1305 "pushl 8(%%eax)\n\t" \ 1306 "pushl 4(%%eax)\n\t" \ 1307 "movl (%%eax), %%eax\n\t" /* target->%eax */ \ 1308 VALGRIND_CALL_NOREDIR_EAX \ 1309 VALGRIND_RESTORE_STACK \ 1310 : /*out*/ "=a" (_res) \ 1311 : /*in*/ "a" (&_argvec[0]) \ 1312 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \ 1313 ); \ 1314 lval = (__typeof__(lval)) _res; \ 1315 } while (0) 1316 1317#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 1318 arg7,arg8,arg9) \ 1319 do { \ 1320 volatile OrigFn _orig = (orig); \ 1321 volatile unsigned long _argvec[10]; \ 1322 volatile unsigned long _res; \ 1323 _argvec[0] = (unsigned long)_orig.nraddr; \ 1324 _argvec[1] = (unsigned long)(arg1); \ 1325 _argvec[2] = (unsigned long)(arg2); \ 1326 _argvec[3] = (unsigned long)(arg3); \ 1327 _argvec[4] = (unsigned long)(arg4); \ 1328 _argvec[5] = (unsigned long)(arg5); \ 1329 _argvec[6] = (unsigned long)(arg6); \ 1330 _argvec[7] = (unsigned long)(arg7); \ 1331 _argvec[8] = (unsigned long)(arg8); \ 1332 _argvec[9] = (unsigned long)(arg9); \ 1333 __asm__ volatile( \ 1334 VALGRIND_ALIGN_STACK \ 1335 "subl $12, %%esp\n\t" \ 1336 "pushl 36(%%eax)\n\t" \ 1337 "pushl 32(%%eax)\n\t" \ 1338 "pushl 28(%%eax)\n\t" \ 1339 "pushl 24(%%eax)\n\t" \ 1340 "pushl 20(%%eax)\n\t" \ 1341 "pushl 16(%%eax)\n\t" \ 1342 "pushl 12(%%eax)\n\t" \ 1343 "pushl 8(%%eax)\n\t" \ 1344 "pushl 4(%%eax)\n\t" \ 1345 "movl (%%eax), %%eax\n\t" /* target->%eax */ \ 1346 VALGRIND_CALL_NOREDIR_EAX \ 1347 VALGRIND_RESTORE_STACK \ 1348 : /*out*/ "=a" (_res) \ 1349 : /*in*/ "a" (&_argvec[0]) \ 1350 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \ 1351 ); \ 1352 lval = (__typeof__(lval)) _res; \ 1353 } while (0) 1354 1355#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 1356 arg7,arg8,arg9,arg10) \ 1357 do { \ 1358 volatile OrigFn _orig = (orig); \ 1359 volatile unsigned long _argvec[11]; \ 1360 volatile 
unsigned long _res; \ 1361 _argvec[0] = (unsigned long)_orig.nraddr; \ 1362 _argvec[1] = (unsigned long)(arg1); \ 1363 _argvec[2] = (unsigned long)(arg2); \ 1364 _argvec[3] = (unsigned long)(arg3); \ 1365 _argvec[4] = (unsigned long)(arg4); \ 1366 _argvec[5] = (unsigned long)(arg5); \ 1367 _argvec[6] = (unsigned long)(arg6); \ 1368 _argvec[7] = (unsigned long)(arg7); \ 1369 _argvec[8] = (unsigned long)(arg8); \ 1370 _argvec[9] = (unsigned long)(arg9); \ 1371 _argvec[10] = (unsigned long)(arg10); \ 1372 __asm__ volatile( \ 1373 VALGRIND_ALIGN_STACK \ 1374 "subl $8, %%esp\n\t" \ 1375 "pushl 40(%%eax)\n\t" \ 1376 "pushl 36(%%eax)\n\t" \ 1377 "pushl 32(%%eax)\n\t" \ 1378 "pushl 28(%%eax)\n\t" \ 1379 "pushl 24(%%eax)\n\t" \ 1380 "pushl 20(%%eax)\n\t" \ 1381 "pushl 16(%%eax)\n\t" \ 1382 "pushl 12(%%eax)\n\t" \ 1383 "pushl 8(%%eax)\n\t" \ 1384 "pushl 4(%%eax)\n\t" \ 1385 "movl (%%eax), %%eax\n\t" /* target->%eax */ \ 1386 VALGRIND_CALL_NOREDIR_EAX \ 1387 VALGRIND_RESTORE_STACK \ 1388 : /*out*/ "=a" (_res) \ 1389 : /*in*/ "a" (&_argvec[0]) \ 1390 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \ 1391 ); \ 1392 lval = (__typeof__(lval)) _res; \ 1393 } while (0) 1394 1395#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \ 1396 arg6,arg7,arg8,arg9,arg10, \ 1397 arg11) \ 1398 do { \ 1399 volatile OrigFn _orig = (orig); \ 1400 volatile unsigned long _argvec[12]; \ 1401 volatile unsigned long _res; \ 1402 _argvec[0] = (unsigned long)_orig.nraddr; \ 1403 _argvec[1] = (unsigned long)(arg1); \ 1404 _argvec[2] = (unsigned long)(arg2); \ 1405 _argvec[3] = (unsigned long)(arg3); \ 1406 _argvec[4] = (unsigned long)(arg4); \ 1407 _argvec[5] = (unsigned long)(arg5); \ 1408 _argvec[6] = (unsigned long)(arg6); \ 1409 _argvec[7] = (unsigned long)(arg7); \ 1410 _argvec[8] = (unsigned long)(arg8); \ 1411 _argvec[9] = (unsigned long)(arg9); \ 1412 _argvec[10] = (unsigned long)(arg10); \ 1413 _argvec[11] = (unsigned long)(arg11); \ 1414 __asm__ volatile( \ 1415 
VALGRIND_ALIGN_STACK \ 1416 "subl $4, %%esp\n\t" \ 1417 "pushl 44(%%eax)\n\t" \ 1418 "pushl 40(%%eax)\n\t" \ 1419 "pushl 36(%%eax)\n\t" \ 1420 "pushl 32(%%eax)\n\t" \ 1421 "pushl 28(%%eax)\n\t" \ 1422 "pushl 24(%%eax)\n\t" \ 1423 "pushl 20(%%eax)\n\t" \ 1424 "pushl 16(%%eax)\n\t" \ 1425 "pushl 12(%%eax)\n\t" \ 1426 "pushl 8(%%eax)\n\t" \ 1427 "pushl 4(%%eax)\n\t" \ 1428 "movl (%%eax), %%eax\n\t" /* target->%eax */ \ 1429 VALGRIND_CALL_NOREDIR_EAX \ 1430 VALGRIND_RESTORE_STACK \ 1431 : /*out*/ "=a" (_res) \ 1432 : /*in*/ "a" (&_argvec[0]) \ 1433 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \ 1434 ); \ 1435 lval = (__typeof__(lval)) _res; \ 1436 } while (0) 1437 1438#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \ 1439 arg6,arg7,arg8,arg9,arg10, \ 1440 arg11,arg12) \ 1441 do { \ 1442 volatile OrigFn _orig = (orig); \ 1443 volatile unsigned long _argvec[13]; \ 1444 volatile unsigned long _res; \ 1445 _argvec[0] = (unsigned long)_orig.nraddr; \ 1446 _argvec[1] = (unsigned long)(arg1); \ 1447 _argvec[2] = (unsigned long)(arg2); \ 1448 _argvec[3] = (unsigned long)(arg3); \ 1449 _argvec[4] = (unsigned long)(arg4); \ 1450 _argvec[5] = (unsigned long)(arg5); \ 1451 _argvec[6] = (unsigned long)(arg6); \ 1452 _argvec[7] = (unsigned long)(arg7); \ 1453 _argvec[8] = (unsigned long)(arg8); \ 1454 _argvec[9] = (unsigned long)(arg9); \ 1455 _argvec[10] = (unsigned long)(arg10); \ 1456 _argvec[11] = (unsigned long)(arg11); \ 1457 _argvec[12] = (unsigned long)(arg12); \ 1458 __asm__ volatile( \ 1459 VALGRIND_ALIGN_STACK \ 1460 "pushl 48(%%eax)\n\t" \ 1461 "pushl 44(%%eax)\n\t" \ 1462 "pushl 40(%%eax)\n\t" \ 1463 "pushl 36(%%eax)\n\t" \ 1464 "pushl 32(%%eax)\n\t" \ 1465 "pushl 28(%%eax)\n\t" \ 1466 "pushl 24(%%eax)\n\t" \ 1467 "pushl 20(%%eax)\n\t" \ 1468 "pushl 16(%%eax)\n\t" \ 1469 "pushl 12(%%eax)\n\t" \ 1470 "pushl 8(%%eax)\n\t" \ 1471 "pushl 4(%%eax)\n\t" \ 1472 "movl (%%eax), %%eax\n\t" /* target->%eax */ \ 1473 VALGRIND_CALL_NOREDIR_EAX \ 1474 
VALGRIND_RESTORE_STACK \ 1475 : /*out*/ "=a" (_res) \ 1476 : /*in*/ "a" (&_argvec[0]) \ 1477 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \ 1478 ); \ 1479 lval = (__typeof__(lval)) _res; \ 1480 } while (0) 1481 1482#endif /* PLAT_x86_linux || PLAT_x86_darwin */ 1483 1484/* ------------------------ amd64-{linux,darwin} --------------- */ 1485 1486#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) 1487 1488/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */ 1489 1490/* These regs are trashed by the hidden call. */ 1491#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \ 1492 "rdi", "r8", "r9", "r10", "r11" 1493 1494/* This is all pretty complex. It's so as to make stack unwinding 1495 work reliably. See bug 243270. The basic problem is the sub and 1496 add of 128 of %rsp in all of the following macros. If gcc believes 1497 the CFA is in %rsp, then unwinding may fail, because what's at the 1498 CFA is not what gcc "expected" when it constructs the CFIs for the 1499 places where the macros are instantiated. 1500 1501 But we can't just add a CFI annotation to increase the CFA offset 1502 by 128, to match the sub of 128 from %rsp, because we don't know 1503 whether gcc has chosen %rsp as the CFA at that point, or whether it 1504 has chosen some other register (eg, %rbp). In the latter case, 1505 adding a CFI annotation to change the CFA offset is simply wrong. 1506 1507 So the solution is to get hold of the CFA using 1508 __builtin_dwarf_cfa(), put it in a known register, and add a 1509 CFI annotation to say what the register is. We choose %rbp for 1510 this (perhaps perversely), because: 1511 1512 (1) %rbp is already subject to unwinding. If a new register was 1513 chosen then the unwinder would have to unwind it in all stack 1514 traces, which is expensive, and 1515 1516 (2) %rbp is already subject to precise exception updates in the 1517 JIT. 
If a new register was chosen, we'd have to have precise 1518 exceptions for it too, which reduces performance of the 1519 generated code. 1520 1521 However .. one extra complication. We can't just whack the result 1522 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the 1523 list of trashed registers at the end of the inline assembly 1524 fragments; gcc won't allow %rbp to appear in that list. Hence 1525 instead we need to stash %rbp in %r15 for the duration of the asm, 1526 and say that %r15 is trashed instead. gcc seems happy to go with 1527 that. 1528 1529 Oh .. and this all needs to be conditionalised so that it is 1530 unchanged from before this commit, when compiled with older gccs 1531 that don't support __builtin_dwarf_cfa. Furthermore, since 1532 this header file is freestanding, it has to be independent of 1533 config.h, and so the following conditionalisation cannot depend on 1534 configure time checks. 1535 1536 Although it's not clear from 1537 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)', 1538 this expression excludes Darwin. 1539 .cfi directives in Darwin assembly appear to be completely 1540 different and I haven't investigated how they work. 1541 1542 For even more entertainment value, note we have to use the 1543 completely undocumented __builtin_dwarf_cfa(), which appears to 1544 really compute the CFA, whereas __builtin_frame_address(0) claims 1545 to but actually doesn't. 
See 1546 https://bugs.kde.org/show_bug.cgi?id=243270#c47 1547*/ 1548#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM) 1549# define __FRAME_POINTER \ 1550 ,"r"(__builtin_dwarf_cfa()) 1551# define VALGRIND_CFI_PROLOGUE \ 1552 "movq %%rbp, %%r15\n\t" \ 1553 "movq %2, %%rbp\n\t" \ 1554 ".cfi_remember_state\n\t" \ 1555 ".cfi_def_cfa rbp, 0\n\t" 1556# define VALGRIND_CFI_EPILOGUE \ 1557 "movq %%r15, %%rbp\n\t" \ 1558 ".cfi_restore_state\n\t" 1559#else 1560# define __FRAME_POINTER 1561# define VALGRIND_CFI_PROLOGUE 1562# define VALGRIND_CFI_EPILOGUE 1563#endif 1564 1565/* Macros to save and align the stack before making a function 1566 call and restore it afterwards as gcc may not keep the stack 1567 pointer aligned if it doesn't realise calls are being made 1568 to other functions. */ 1569 1570#define VALGRIND_ALIGN_STACK \ 1571 "movq %%rsp,%%r14\n\t" \ 1572 "andq $0xfffffffffffffff0,%%rsp\n\t" 1573#define VALGRIND_RESTORE_STACK \ 1574 "movq %%r14,%%rsp\n\t" 1575 1576/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned 1577 long) == 8. */ 1578 1579/* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_ 1580 macros. In order not to trash the stack redzone, we need to drop 1581 %rsp by 128 before the hidden call, and restore afterwards. The 1582 nastyness is that it is only by luck that the stack still appears 1583 to be unwindable during the hidden call - since then the behaviour 1584 of any routine using this macro does not match what the CFI data 1585 says. Sigh. 1586 1587 Why is this important? Imagine that a wrapper has a stack 1588 allocated local, and passes to the hidden call, a pointer to it. 1589 Because gcc does not know about the hidden call, it may allocate 1590 that local in the redzone. Unfortunately the hidden call may then 1591 trash it before it comes to use it. So we must step clear of the 1592 redzone, for the duration of the hidden call, to make it safe. 
1593 1594 Probably the same problem afflicts the other redzone-style ABIs too 1595 (ppc64-linux); but for those, the stack is 1596 self describing (none of this CFI nonsense) so at least messing 1597 with the stack pointer doesn't give a danger of non-unwindable 1598 stack. */ 1599 1600#define CALL_FN_W_v(lval, orig) \ 1601 do { \ 1602 volatile OrigFn _orig = (orig); \ 1603 volatile unsigned long _argvec[1]; \ 1604 volatile unsigned long _res; \ 1605 _argvec[0] = (unsigned long)_orig.nraddr; \ 1606 __asm__ volatile( \ 1607 VALGRIND_CFI_PROLOGUE \ 1608 VALGRIND_ALIGN_STACK \ 1609 "subq $128,%%rsp\n\t" \ 1610 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1611 VALGRIND_CALL_NOREDIR_RAX \ 1612 VALGRIND_RESTORE_STACK \ 1613 VALGRIND_CFI_EPILOGUE \ 1614 : /*out*/ "=a" (_res) \ 1615 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1616 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1617 ); \ 1618 lval = (__typeof__(lval)) _res; \ 1619 } while (0) 1620 1621#define CALL_FN_W_W(lval, orig, arg1) \ 1622 do { \ 1623 volatile OrigFn _orig = (orig); \ 1624 volatile unsigned long _argvec[2]; \ 1625 volatile unsigned long _res; \ 1626 _argvec[0] = (unsigned long)_orig.nraddr; \ 1627 _argvec[1] = (unsigned long)(arg1); \ 1628 __asm__ volatile( \ 1629 VALGRIND_CFI_PROLOGUE \ 1630 VALGRIND_ALIGN_STACK \ 1631 "subq $128,%%rsp\n\t" \ 1632 "movq 8(%%rax), %%rdi\n\t" \ 1633 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1634 VALGRIND_CALL_NOREDIR_RAX \ 1635 VALGRIND_RESTORE_STACK \ 1636 VALGRIND_CFI_EPILOGUE \ 1637 : /*out*/ "=a" (_res) \ 1638 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1639 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1640 ); \ 1641 lval = (__typeof__(lval)) _res; \ 1642 } while (0) 1643 1644#define CALL_FN_W_WW(lval, orig, arg1,arg2) \ 1645 do { \ 1646 volatile OrigFn _orig = (orig); \ 1647 volatile unsigned long _argvec[3]; \ 1648 volatile unsigned long _res; \ 1649 _argvec[0] = (unsigned long)_orig.nraddr; \ 1650 _argvec[1] = 
(unsigned long)(arg1); \ 1651 _argvec[2] = (unsigned long)(arg2); \ 1652 __asm__ volatile( \ 1653 VALGRIND_CFI_PROLOGUE \ 1654 VALGRIND_ALIGN_STACK \ 1655 "subq $128,%%rsp\n\t" \ 1656 "movq 16(%%rax), %%rsi\n\t" \ 1657 "movq 8(%%rax), %%rdi\n\t" \ 1658 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1659 VALGRIND_CALL_NOREDIR_RAX \ 1660 VALGRIND_RESTORE_STACK \ 1661 VALGRIND_CFI_EPILOGUE \ 1662 : /*out*/ "=a" (_res) \ 1663 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1664 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1665 ); \ 1666 lval = (__typeof__(lval)) _res; \ 1667 } while (0) 1668 1669#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \ 1670 do { \ 1671 volatile OrigFn _orig = (orig); \ 1672 volatile unsigned long _argvec[4]; \ 1673 volatile unsigned long _res; \ 1674 _argvec[0] = (unsigned long)_orig.nraddr; \ 1675 _argvec[1] = (unsigned long)(arg1); \ 1676 _argvec[2] = (unsigned long)(arg2); \ 1677 _argvec[3] = (unsigned long)(arg3); \ 1678 __asm__ volatile( \ 1679 VALGRIND_CFI_PROLOGUE \ 1680 VALGRIND_ALIGN_STACK \ 1681 "subq $128,%%rsp\n\t" \ 1682 "movq 24(%%rax), %%rdx\n\t" \ 1683 "movq 16(%%rax), %%rsi\n\t" \ 1684 "movq 8(%%rax), %%rdi\n\t" \ 1685 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1686 VALGRIND_CALL_NOREDIR_RAX \ 1687 VALGRIND_RESTORE_STACK \ 1688 VALGRIND_CFI_EPILOGUE \ 1689 : /*out*/ "=a" (_res) \ 1690 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1691 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1692 ); \ 1693 lval = (__typeof__(lval)) _res; \ 1694 } while (0) 1695 1696#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \ 1697 do { \ 1698 volatile OrigFn _orig = (orig); \ 1699 volatile unsigned long _argvec[5]; \ 1700 volatile unsigned long _res; \ 1701 _argvec[0] = (unsigned long)_orig.nraddr; \ 1702 _argvec[1] = (unsigned long)(arg1); \ 1703 _argvec[2] = (unsigned long)(arg2); \ 1704 _argvec[3] = (unsigned long)(arg3); \ 1705 _argvec[4] = (unsigned long)(arg4); \ 1706 __asm__ volatile( \ 1707 
VALGRIND_CFI_PROLOGUE \ 1708 VALGRIND_ALIGN_STACK \ 1709 "subq $128,%%rsp\n\t" \ 1710 "movq 32(%%rax), %%rcx\n\t" \ 1711 "movq 24(%%rax), %%rdx\n\t" \ 1712 "movq 16(%%rax), %%rsi\n\t" \ 1713 "movq 8(%%rax), %%rdi\n\t" \ 1714 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1715 VALGRIND_CALL_NOREDIR_RAX \ 1716 VALGRIND_RESTORE_STACK \ 1717 VALGRIND_CFI_EPILOGUE \ 1718 : /*out*/ "=a" (_res) \ 1719 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1720 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1721 ); \ 1722 lval = (__typeof__(lval)) _res; \ 1723 } while (0) 1724 1725#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \ 1726 do { \ 1727 volatile OrigFn _orig = (orig); \ 1728 volatile unsigned long _argvec[6]; \ 1729 volatile unsigned long _res; \ 1730 _argvec[0] = (unsigned long)_orig.nraddr; \ 1731 _argvec[1] = (unsigned long)(arg1); \ 1732 _argvec[2] = (unsigned long)(arg2); \ 1733 _argvec[3] = (unsigned long)(arg3); \ 1734 _argvec[4] = (unsigned long)(arg4); \ 1735 _argvec[5] = (unsigned long)(arg5); \ 1736 __asm__ volatile( \ 1737 VALGRIND_CFI_PROLOGUE \ 1738 VALGRIND_ALIGN_STACK \ 1739 "subq $128,%%rsp\n\t" \ 1740 "movq 40(%%rax), %%r8\n\t" \ 1741 "movq 32(%%rax), %%rcx\n\t" \ 1742 "movq 24(%%rax), %%rdx\n\t" \ 1743 "movq 16(%%rax), %%rsi\n\t" \ 1744 "movq 8(%%rax), %%rdi\n\t" \ 1745 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1746 VALGRIND_CALL_NOREDIR_RAX \ 1747 VALGRIND_RESTORE_STACK \ 1748 VALGRIND_CFI_EPILOGUE \ 1749 : /*out*/ "=a" (_res) \ 1750 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1751 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1752 ); \ 1753 lval = (__typeof__(lval)) _res; \ 1754 } while (0) 1755 1756#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \ 1757 do { \ 1758 volatile OrigFn _orig = (orig); \ 1759 volatile unsigned long _argvec[7]; \ 1760 volatile unsigned long _res; \ 1761 _argvec[0] = (unsigned long)_orig.nraddr; \ 1762 _argvec[1] = (unsigned long)(arg1); \ 1763 _argvec[2] = 
(unsigned long)(arg2); \ 1764 _argvec[3] = (unsigned long)(arg3); \ 1765 _argvec[4] = (unsigned long)(arg4); \ 1766 _argvec[5] = (unsigned long)(arg5); \ 1767 _argvec[6] = (unsigned long)(arg6); \ 1768 __asm__ volatile( \ 1769 VALGRIND_CFI_PROLOGUE \ 1770 VALGRIND_ALIGN_STACK \ 1771 "subq $128,%%rsp\n\t" \ 1772 "movq 48(%%rax), %%r9\n\t" \ 1773 "movq 40(%%rax), %%r8\n\t" \ 1774 "movq 32(%%rax), %%rcx\n\t" \ 1775 "movq 24(%%rax), %%rdx\n\t" \ 1776 "movq 16(%%rax), %%rsi\n\t" \ 1777 "movq 8(%%rax), %%rdi\n\t" \ 1778 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1779 VALGRIND_CALL_NOREDIR_RAX \ 1780 VALGRIND_RESTORE_STACK \ 1781 VALGRIND_CFI_EPILOGUE \ 1782 : /*out*/ "=a" (_res) \ 1783 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1784 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1785 ); \ 1786 lval = (__typeof__(lval)) _res; \ 1787 } while (0) 1788 1789#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 1790 arg7) \ 1791 do { \ 1792 volatile OrigFn _orig = (orig); \ 1793 volatile unsigned long _argvec[8]; \ 1794 volatile unsigned long _res; \ 1795 _argvec[0] = (unsigned long)_orig.nraddr; \ 1796 _argvec[1] = (unsigned long)(arg1); \ 1797 _argvec[2] = (unsigned long)(arg2); \ 1798 _argvec[3] = (unsigned long)(arg3); \ 1799 _argvec[4] = (unsigned long)(arg4); \ 1800 _argvec[5] = (unsigned long)(arg5); \ 1801 _argvec[6] = (unsigned long)(arg6); \ 1802 _argvec[7] = (unsigned long)(arg7); \ 1803 __asm__ volatile( \ 1804 VALGRIND_CFI_PROLOGUE \ 1805 VALGRIND_ALIGN_STACK \ 1806 "subq $136,%%rsp\n\t" \ 1807 "pushq 56(%%rax)\n\t" \ 1808 "movq 48(%%rax), %%r9\n\t" \ 1809 "movq 40(%%rax), %%r8\n\t" \ 1810 "movq 32(%%rax), %%rcx\n\t" \ 1811 "movq 24(%%rax), %%rdx\n\t" \ 1812 "movq 16(%%rax), %%rsi\n\t" \ 1813 "movq 8(%%rax), %%rdi\n\t" \ 1814 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1815 VALGRIND_CALL_NOREDIR_RAX \ 1816 VALGRIND_RESTORE_STACK \ 1817 VALGRIND_CFI_EPILOGUE \ 1818 : /*out*/ "=a" (_res) \ 1819 : /*in*/ "a" (&_argvec[0]) 
__FRAME_POINTER \ 1820 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1821 ); \ 1822 lval = (__typeof__(lval)) _res; \ 1823 } while (0) 1824 1825#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 1826 arg7,arg8) \ 1827 do { \ 1828 volatile OrigFn _orig = (orig); \ 1829 volatile unsigned long _argvec[9]; \ 1830 volatile unsigned long _res; \ 1831 _argvec[0] = (unsigned long)_orig.nraddr; \ 1832 _argvec[1] = (unsigned long)(arg1); \ 1833 _argvec[2] = (unsigned long)(arg2); \ 1834 _argvec[3] = (unsigned long)(arg3); \ 1835 _argvec[4] = (unsigned long)(arg4); \ 1836 _argvec[5] = (unsigned long)(arg5); \ 1837 _argvec[6] = (unsigned long)(arg6); \ 1838 _argvec[7] = (unsigned long)(arg7); \ 1839 _argvec[8] = (unsigned long)(arg8); \ 1840 __asm__ volatile( \ 1841 VALGRIND_CFI_PROLOGUE \ 1842 VALGRIND_ALIGN_STACK \ 1843 "subq $128,%%rsp\n\t" \ 1844 "pushq 64(%%rax)\n\t" \ 1845 "pushq 56(%%rax)\n\t" \ 1846 "movq 48(%%rax), %%r9\n\t" \ 1847 "movq 40(%%rax), %%r8\n\t" \ 1848 "movq 32(%%rax), %%rcx\n\t" \ 1849 "movq 24(%%rax), %%rdx\n\t" \ 1850 "movq 16(%%rax), %%rsi\n\t" \ 1851 "movq 8(%%rax), %%rdi\n\t" \ 1852 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1853 VALGRIND_CALL_NOREDIR_RAX \ 1854 VALGRIND_RESTORE_STACK \ 1855 VALGRIND_CFI_EPILOGUE \ 1856 : /*out*/ "=a" (_res) \ 1857 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1858 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1859 ); \ 1860 lval = (__typeof__(lval)) _res; \ 1861 } while (0) 1862 1863#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 1864 arg7,arg8,arg9) \ 1865 do { \ 1866 volatile OrigFn _orig = (orig); \ 1867 volatile unsigned long _argvec[10]; \ 1868 volatile unsigned long _res; \ 1869 _argvec[0] = (unsigned long)_orig.nraddr; \ 1870 _argvec[1] = (unsigned long)(arg1); \ 1871 _argvec[2] = (unsigned long)(arg2); \ 1872 _argvec[3] = (unsigned long)(arg3); \ 1873 _argvec[4] = (unsigned long)(arg4); \ 1874 _argvec[5] = (unsigned long)(arg5); \ 1875 
_argvec[6] = (unsigned long)(arg6); \ 1876 _argvec[7] = (unsigned long)(arg7); \ 1877 _argvec[8] = (unsigned long)(arg8); \ 1878 _argvec[9] = (unsigned long)(arg9); \ 1879 __asm__ volatile( \ 1880 VALGRIND_CFI_PROLOGUE \ 1881 VALGRIND_ALIGN_STACK \ 1882 "subq $136,%%rsp\n\t" \ 1883 "pushq 72(%%rax)\n\t" \ 1884 "pushq 64(%%rax)\n\t" \ 1885 "pushq 56(%%rax)\n\t" \ 1886 "movq 48(%%rax), %%r9\n\t" \ 1887 "movq 40(%%rax), %%r8\n\t" \ 1888 "movq 32(%%rax), %%rcx\n\t" \ 1889 "movq 24(%%rax), %%rdx\n\t" \ 1890 "movq 16(%%rax), %%rsi\n\t" \ 1891 "movq 8(%%rax), %%rdi\n\t" \ 1892 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1893 VALGRIND_CALL_NOREDIR_RAX \ 1894 VALGRIND_RESTORE_STACK \ 1895 VALGRIND_CFI_EPILOGUE \ 1896 : /*out*/ "=a" (_res) \ 1897 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1898 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1899 ); \ 1900 lval = (__typeof__(lval)) _res; \ 1901 } while (0) 1902 1903#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 1904 arg7,arg8,arg9,arg10) \ 1905 do { \ 1906 volatile OrigFn _orig = (orig); \ 1907 volatile unsigned long _argvec[11]; \ 1908 volatile unsigned long _res; \ 1909 _argvec[0] = (unsigned long)_orig.nraddr; \ 1910 _argvec[1] = (unsigned long)(arg1); \ 1911 _argvec[2] = (unsigned long)(arg2); \ 1912 _argvec[3] = (unsigned long)(arg3); \ 1913 _argvec[4] = (unsigned long)(arg4); \ 1914 _argvec[5] = (unsigned long)(arg5); \ 1915 _argvec[6] = (unsigned long)(arg6); \ 1916 _argvec[7] = (unsigned long)(arg7); \ 1917 _argvec[8] = (unsigned long)(arg8); \ 1918 _argvec[9] = (unsigned long)(arg9); \ 1919 _argvec[10] = (unsigned long)(arg10); \ 1920 __asm__ volatile( \ 1921 VALGRIND_CFI_PROLOGUE \ 1922 VALGRIND_ALIGN_STACK \ 1923 "subq $128,%%rsp\n\t" \ 1924 "pushq 80(%%rax)\n\t" \ 1925 "pushq 72(%%rax)\n\t" \ 1926 "pushq 64(%%rax)\n\t" \ 1927 "pushq 56(%%rax)\n\t" \ 1928 "movq 48(%%rax), %%r9\n\t" \ 1929 "movq 40(%%rax), %%r8\n\t" \ 1930 "movq 32(%%rax), %%rcx\n\t" \ 1931 "movq 24(%%rax), 
%%rdx\n\t" \ 1932 "movq 16(%%rax), %%rsi\n\t" \ 1933 "movq 8(%%rax), %%rdi\n\t" \ 1934 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1935 VALGRIND_CALL_NOREDIR_RAX \ 1936 VALGRIND_RESTORE_STACK \ 1937 VALGRIND_CFI_EPILOGUE \ 1938 : /*out*/ "=a" (_res) \ 1939 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1940 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1941 ); \ 1942 lval = (__typeof__(lval)) _res; \ 1943 } while (0) 1944 1945#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 1946 arg7,arg8,arg9,arg10,arg11) \ 1947 do { \ 1948 volatile OrigFn _orig = (orig); \ 1949 volatile unsigned long _argvec[12]; \ 1950 volatile unsigned long _res; \ 1951 _argvec[0] = (unsigned long)_orig.nraddr; \ 1952 _argvec[1] = (unsigned long)(arg1); \ 1953 _argvec[2] = (unsigned long)(arg2); \ 1954 _argvec[3] = (unsigned long)(arg3); \ 1955 _argvec[4] = (unsigned long)(arg4); \ 1956 _argvec[5] = (unsigned long)(arg5); \ 1957 _argvec[6] = (unsigned long)(arg6); \ 1958 _argvec[7] = (unsigned long)(arg7); \ 1959 _argvec[8] = (unsigned long)(arg8); \ 1960 _argvec[9] = (unsigned long)(arg9); \ 1961 _argvec[10] = (unsigned long)(arg10); \ 1962 _argvec[11] = (unsigned long)(arg11); \ 1963 __asm__ volatile( \ 1964 VALGRIND_CFI_PROLOGUE \ 1965 VALGRIND_ALIGN_STACK \ 1966 "subq $136,%%rsp\n\t" \ 1967 "pushq 88(%%rax)\n\t" \ 1968 "pushq 80(%%rax)\n\t" \ 1969 "pushq 72(%%rax)\n\t" \ 1970 "pushq 64(%%rax)\n\t" \ 1971 "pushq 56(%%rax)\n\t" \ 1972 "movq 48(%%rax), %%r9\n\t" \ 1973 "movq 40(%%rax), %%r8\n\t" \ 1974 "movq 32(%%rax), %%rcx\n\t" \ 1975 "movq 24(%%rax), %%rdx\n\t" \ 1976 "movq 16(%%rax), %%rsi\n\t" \ 1977 "movq 8(%%rax), %%rdi\n\t" \ 1978 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 1979 VALGRIND_CALL_NOREDIR_RAX \ 1980 VALGRIND_RESTORE_STACK \ 1981 VALGRIND_CFI_EPILOGUE \ 1982 : /*out*/ "=a" (_res) \ 1983 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 1984 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 1985 ); \ 1986 lval = 
(__typeof__(lval)) _res; \ 1987 } while (0) 1988 1989#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 1990 arg7,arg8,arg9,arg10,arg11,arg12) \ 1991 do { \ 1992 volatile OrigFn _orig = (orig); \ 1993 volatile unsigned long _argvec[13]; \ 1994 volatile unsigned long _res; \ 1995 _argvec[0] = (unsigned long)_orig.nraddr; \ 1996 _argvec[1] = (unsigned long)(arg1); \ 1997 _argvec[2] = (unsigned long)(arg2); \ 1998 _argvec[3] = (unsigned long)(arg3); \ 1999 _argvec[4] = (unsigned long)(arg4); \ 2000 _argvec[5] = (unsigned long)(arg5); \ 2001 _argvec[6] = (unsigned long)(arg6); \ 2002 _argvec[7] = (unsigned long)(arg7); \ 2003 _argvec[8] = (unsigned long)(arg8); \ 2004 _argvec[9] = (unsigned long)(arg9); \ 2005 _argvec[10] = (unsigned long)(arg10); \ 2006 _argvec[11] = (unsigned long)(arg11); \ 2007 _argvec[12] = (unsigned long)(arg12); \ 2008 __asm__ volatile( \ 2009 VALGRIND_CFI_PROLOGUE \ 2010 VALGRIND_ALIGN_STACK \ 2011 "subq $128,%%rsp\n\t" \ 2012 "pushq 96(%%rax)\n\t" \ 2013 "pushq 88(%%rax)\n\t" \ 2014 "pushq 80(%%rax)\n\t" \ 2015 "pushq 72(%%rax)\n\t" \ 2016 "pushq 64(%%rax)\n\t" \ 2017 "pushq 56(%%rax)\n\t" \ 2018 "movq 48(%%rax), %%r9\n\t" \ 2019 "movq 40(%%rax), %%r8\n\t" \ 2020 "movq 32(%%rax), %%rcx\n\t" \ 2021 "movq 24(%%rax), %%rdx\n\t" \ 2022 "movq 16(%%rax), %%rsi\n\t" \ 2023 "movq 8(%%rax), %%rdi\n\t" \ 2024 "movq (%%rax), %%rax\n\t" /* target->%rax */ \ 2025 VALGRIND_CALL_NOREDIR_RAX \ 2026 VALGRIND_RESTORE_STACK \ 2027 VALGRIND_CFI_EPILOGUE \ 2028 : /*out*/ "=a" (_res) \ 2029 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 2030 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \ 2031 ); \ 2032 lval = (__typeof__(lval)) _res; \ 2033 } while (0) 2034 2035#endif /* PLAT_amd64_linux || PLAT_amd64_darwin */ 2036 2037/* ------------------------ ppc32-linux ------------------------ */ 2038 2039#if defined(PLAT_ppc32_linux) 2040 2041/* This is useful for finding out about the on-stack stuff: 2042 2043 extern int f9 ( 
int,int,int,int,int,int,int,int,int ); 2044 extern int f10 ( int,int,int,int,int,int,int,int,int,int ); 2045 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int ); 2046 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int ); 2047 2048 int g9 ( void ) { 2049 return f9(11,22,33,44,55,66,77,88,99); 2050 } 2051 int g10 ( void ) { 2052 return f10(11,22,33,44,55,66,77,88,99,110); 2053 } 2054 int g11 ( void ) { 2055 return f11(11,22,33,44,55,66,77,88,99,110,121); 2056 } 2057 int g12 ( void ) { 2058 return f12(11,22,33,44,55,66,77,88,99,110,121,132); 2059 } 2060*/ 2061 2062/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */ 2063 2064/* These regs are trashed by the hidden call. */ 2065#define __CALLER_SAVED_REGS \ 2066 "lr", "ctr", "xer", \ 2067 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \ 2068 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \ 2069 "r11", "r12", "r13" 2070 2071/* Macros to save and align the stack before making a function 2072 call and restore it afterwards as gcc may not keep the stack 2073 pointer aligned if it doesn't realise calls are being made 2074 to other functions. */ 2075 2076#define VALGRIND_ALIGN_STACK \ 2077 "mr 28,1\n\t" \ 2078 "rlwinm 1,1,0,0,27\n\t" 2079#define VALGRIND_RESTORE_STACK \ 2080 "mr 1,28\n\t" 2081 2082/* These CALL_FN_ macros assume that on ppc32-linux, 2083 sizeof(unsigned long) == 4. 
*/ 2084 2085#define CALL_FN_W_v(lval, orig) \ 2086 do { \ 2087 volatile OrigFn _orig = (orig); \ 2088 volatile unsigned long _argvec[1]; \ 2089 volatile unsigned long _res; \ 2090 _argvec[0] = (unsigned long)_orig.nraddr; \ 2091 __asm__ volatile( \ 2092 VALGRIND_ALIGN_STACK \ 2093 "mr 11,%1\n\t" \ 2094 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2095 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2096 VALGRIND_RESTORE_STACK \ 2097 "mr %0,3" \ 2098 : /*out*/ "=r" (_res) \ 2099 : /*in*/ "r" (&_argvec[0]) \ 2100 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2101 ); \ 2102 lval = (__typeof__(lval)) _res; \ 2103 } while (0) 2104 2105#define CALL_FN_W_W(lval, orig, arg1) \ 2106 do { \ 2107 volatile OrigFn _orig = (orig); \ 2108 volatile unsigned long _argvec[2]; \ 2109 volatile unsigned long _res; \ 2110 _argvec[0] = (unsigned long)_orig.nraddr; \ 2111 _argvec[1] = (unsigned long)arg1; \ 2112 __asm__ volatile( \ 2113 VALGRIND_ALIGN_STACK \ 2114 "mr 11,%1\n\t" \ 2115 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2116 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2117 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2118 VALGRIND_RESTORE_STACK \ 2119 "mr %0,3" \ 2120 : /*out*/ "=r" (_res) \ 2121 : /*in*/ "r" (&_argvec[0]) \ 2122 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2123 ); \ 2124 lval = (__typeof__(lval)) _res; \ 2125 } while (0) 2126 2127#define CALL_FN_W_WW(lval, orig, arg1,arg2) \ 2128 do { \ 2129 volatile OrigFn _orig = (orig); \ 2130 volatile unsigned long _argvec[3]; \ 2131 volatile unsigned long _res; \ 2132 _argvec[0] = (unsigned long)_orig.nraddr; \ 2133 _argvec[1] = (unsigned long)arg1; \ 2134 _argvec[2] = (unsigned long)arg2; \ 2135 __asm__ volatile( \ 2136 VALGRIND_ALIGN_STACK \ 2137 "mr 11,%1\n\t" \ 2138 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2139 "lwz 4,8(11)\n\t" \ 2140 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2141 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2142 VALGRIND_RESTORE_STACK \ 2143 "mr %0,3" \ 2144 : /*out*/ "=r" (_res) \ 2145 : /*in*/ "r" (&_argvec[0]) \ 
2146 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2147 ); \ 2148 lval = (__typeof__(lval)) _res; \ 2149 } while (0) 2150 2151#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \ 2152 do { \ 2153 volatile OrigFn _orig = (orig); \ 2154 volatile unsigned long _argvec[4]; \ 2155 volatile unsigned long _res; \ 2156 _argvec[0] = (unsigned long)_orig.nraddr; \ 2157 _argvec[1] = (unsigned long)arg1; \ 2158 _argvec[2] = (unsigned long)arg2; \ 2159 _argvec[3] = (unsigned long)arg3; \ 2160 __asm__ volatile( \ 2161 VALGRIND_ALIGN_STACK \ 2162 "mr 11,%1\n\t" \ 2163 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2164 "lwz 4,8(11)\n\t" \ 2165 "lwz 5,12(11)\n\t" \ 2166 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2167 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2168 VALGRIND_RESTORE_STACK \ 2169 "mr %0,3" \ 2170 : /*out*/ "=r" (_res) \ 2171 : /*in*/ "r" (&_argvec[0]) \ 2172 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2173 ); \ 2174 lval = (__typeof__(lval)) _res; \ 2175 } while (0) 2176 2177#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \ 2178 do { \ 2179 volatile OrigFn _orig = (orig); \ 2180 volatile unsigned long _argvec[5]; \ 2181 volatile unsigned long _res; \ 2182 _argvec[0] = (unsigned long)_orig.nraddr; \ 2183 _argvec[1] = (unsigned long)arg1; \ 2184 _argvec[2] = (unsigned long)arg2; \ 2185 _argvec[3] = (unsigned long)arg3; \ 2186 _argvec[4] = (unsigned long)arg4; \ 2187 __asm__ volatile( \ 2188 VALGRIND_ALIGN_STACK \ 2189 "mr 11,%1\n\t" \ 2190 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2191 "lwz 4,8(11)\n\t" \ 2192 "lwz 5,12(11)\n\t" \ 2193 "lwz 6,16(11)\n\t" /* arg4->r6 */ \ 2194 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2195 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2196 VALGRIND_RESTORE_STACK \ 2197 "mr %0,3" \ 2198 : /*out*/ "=r" (_res) \ 2199 : /*in*/ "r" (&_argvec[0]) \ 2200 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2201 ); \ 2202 lval = (__typeof__(lval)) _res; \ 2203 } while (0) 2204 2205#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \ 
2206 do { \ 2207 volatile OrigFn _orig = (orig); \ 2208 volatile unsigned long _argvec[6]; \ 2209 volatile unsigned long _res; \ 2210 _argvec[0] = (unsigned long)_orig.nraddr; \ 2211 _argvec[1] = (unsigned long)arg1; \ 2212 _argvec[2] = (unsigned long)arg2; \ 2213 _argvec[3] = (unsigned long)arg3; \ 2214 _argvec[4] = (unsigned long)arg4; \ 2215 _argvec[5] = (unsigned long)arg5; \ 2216 __asm__ volatile( \ 2217 VALGRIND_ALIGN_STACK \ 2218 "mr 11,%1\n\t" \ 2219 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2220 "lwz 4,8(11)\n\t" \ 2221 "lwz 5,12(11)\n\t" \ 2222 "lwz 6,16(11)\n\t" /* arg4->r6 */ \ 2223 "lwz 7,20(11)\n\t" \ 2224 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2225 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2226 VALGRIND_RESTORE_STACK \ 2227 "mr %0,3" \ 2228 : /*out*/ "=r" (_res) \ 2229 : /*in*/ "r" (&_argvec[0]) \ 2230 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2231 ); \ 2232 lval = (__typeof__(lval)) _res; \ 2233 } while (0) 2234 2235#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \ 2236 do { \ 2237 volatile OrigFn _orig = (orig); \ 2238 volatile unsigned long _argvec[7]; \ 2239 volatile unsigned long _res; \ 2240 _argvec[0] = (unsigned long)_orig.nraddr; \ 2241 _argvec[1] = (unsigned long)arg1; \ 2242 _argvec[2] = (unsigned long)arg2; \ 2243 _argvec[3] = (unsigned long)arg3; \ 2244 _argvec[4] = (unsigned long)arg4; \ 2245 _argvec[5] = (unsigned long)arg5; \ 2246 _argvec[6] = (unsigned long)arg6; \ 2247 __asm__ volatile( \ 2248 VALGRIND_ALIGN_STACK \ 2249 "mr 11,%1\n\t" \ 2250 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2251 "lwz 4,8(11)\n\t" \ 2252 "lwz 5,12(11)\n\t" \ 2253 "lwz 6,16(11)\n\t" /* arg4->r6 */ \ 2254 "lwz 7,20(11)\n\t" \ 2255 "lwz 8,24(11)\n\t" \ 2256 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2257 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2258 VALGRIND_RESTORE_STACK \ 2259 "mr %0,3" \ 2260 : /*out*/ "=r" (_res) \ 2261 : /*in*/ "r" (&_argvec[0]) \ 2262 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2263 ); \ 2264 lval = 
(__typeof__(lval)) _res; \ 2265 } while (0) 2266 2267#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2268 arg7) \ 2269 do { \ 2270 volatile OrigFn _orig = (orig); \ 2271 volatile unsigned long _argvec[8]; \ 2272 volatile unsigned long _res; \ 2273 _argvec[0] = (unsigned long)_orig.nraddr; \ 2274 _argvec[1] = (unsigned long)arg1; \ 2275 _argvec[2] = (unsigned long)arg2; \ 2276 _argvec[3] = (unsigned long)arg3; \ 2277 _argvec[4] = (unsigned long)arg4; \ 2278 _argvec[5] = (unsigned long)arg5; \ 2279 _argvec[6] = (unsigned long)arg6; \ 2280 _argvec[7] = (unsigned long)arg7; \ 2281 __asm__ volatile( \ 2282 VALGRIND_ALIGN_STACK \ 2283 "mr 11,%1\n\t" \ 2284 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2285 "lwz 4,8(11)\n\t" \ 2286 "lwz 5,12(11)\n\t" \ 2287 "lwz 6,16(11)\n\t" /* arg4->r6 */ \ 2288 "lwz 7,20(11)\n\t" \ 2289 "lwz 8,24(11)\n\t" \ 2290 "lwz 9,28(11)\n\t" \ 2291 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2292 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2293 VALGRIND_RESTORE_STACK \ 2294 "mr %0,3" \ 2295 : /*out*/ "=r" (_res) \ 2296 : /*in*/ "r" (&_argvec[0]) \ 2297 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2298 ); \ 2299 lval = (__typeof__(lval)) _res; \ 2300 } while (0) 2301 2302#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2303 arg7,arg8) \ 2304 do { \ 2305 volatile OrigFn _orig = (orig); \ 2306 volatile unsigned long _argvec[9]; \ 2307 volatile unsigned long _res; \ 2308 _argvec[0] = (unsigned long)_orig.nraddr; \ 2309 _argvec[1] = (unsigned long)arg1; \ 2310 _argvec[2] = (unsigned long)arg2; \ 2311 _argvec[3] = (unsigned long)arg3; \ 2312 _argvec[4] = (unsigned long)arg4; \ 2313 _argvec[5] = (unsigned long)arg5; \ 2314 _argvec[6] = (unsigned long)arg6; \ 2315 _argvec[7] = (unsigned long)arg7; \ 2316 _argvec[8] = (unsigned long)arg8; \ 2317 __asm__ volatile( \ 2318 VALGRIND_ALIGN_STACK \ 2319 "mr 11,%1\n\t" \ 2320 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2321 "lwz 4,8(11)\n\t" \ 2322 "lwz 5,12(11)\n\t" \ 2323 "lwz 6,16(11)\n\t" 
/* arg4->r6 */ \ 2324 "lwz 7,20(11)\n\t" \ 2325 "lwz 8,24(11)\n\t" \ 2326 "lwz 9,28(11)\n\t" \ 2327 "lwz 10,32(11)\n\t" /* arg8->r10 */ \ 2328 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2329 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2330 VALGRIND_RESTORE_STACK \ 2331 "mr %0,3" \ 2332 : /*out*/ "=r" (_res) \ 2333 : /*in*/ "r" (&_argvec[0]) \ 2334 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2335 ); \ 2336 lval = (__typeof__(lval)) _res; \ 2337 } while (0) 2338 2339#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2340 arg7,arg8,arg9) \ 2341 do { \ 2342 volatile OrigFn _orig = (orig); \ 2343 volatile unsigned long _argvec[10]; \ 2344 volatile unsigned long _res; \ 2345 _argvec[0] = (unsigned long)_orig.nraddr; \ 2346 _argvec[1] = (unsigned long)arg1; \ 2347 _argvec[2] = (unsigned long)arg2; \ 2348 _argvec[3] = (unsigned long)arg3; \ 2349 _argvec[4] = (unsigned long)arg4; \ 2350 _argvec[5] = (unsigned long)arg5; \ 2351 _argvec[6] = (unsigned long)arg6; \ 2352 _argvec[7] = (unsigned long)arg7; \ 2353 _argvec[8] = (unsigned long)arg8; \ 2354 _argvec[9] = (unsigned long)arg9; \ 2355 __asm__ volatile( \ 2356 VALGRIND_ALIGN_STACK \ 2357 "mr 11,%1\n\t" \ 2358 "addi 1,1,-16\n\t" \ 2359 /* arg9 */ \ 2360 "lwz 3,36(11)\n\t" \ 2361 "stw 3,8(1)\n\t" \ 2362 /* args1-8 */ \ 2363 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2364 "lwz 4,8(11)\n\t" \ 2365 "lwz 5,12(11)\n\t" \ 2366 "lwz 6,16(11)\n\t" /* arg4->r6 */ \ 2367 "lwz 7,20(11)\n\t" \ 2368 "lwz 8,24(11)\n\t" \ 2369 "lwz 9,28(11)\n\t" \ 2370 "lwz 10,32(11)\n\t" /* arg8->r10 */ \ 2371 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2372 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2373 VALGRIND_RESTORE_STACK \ 2374 "mr %0,3" \ 2375 : /*out*/ "=r" (_res) \ 2376 : /*in*/ "r" (&_argvec[0]) \ 2377 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2378 ); \ 2379 lval = (__typeof__(lval)) _res; \ 2380 } while (0) 2381 2382#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2383 arg7,arg8,arg9,arg10) \ 2384 do { 
\ 2385 volatile OrigFn _orig = (orig); \ 2386 volatile unsigned long _argvec[11]; \ 2387 volatile unsigned long _res; \ 2388 _argvec[0] = (unsigned long)_orig.nraddr; \ 2389 _argvec[1] = (unsigned long)arg1; \ 2390 _argvec[2] = (unsigned long)arg2; \ 2391 _argvec[3] = (unsigned long)arg3; \ 2392 _argvec[4] = (unsigned long)arg4; \ 2393 _argvec[5] = (unsigned long)arg5; \ 2394 _argvec[6] = (unsigned long)arg6; \ 2395 _argvec[7] = (unsigned long)arg7; \ 2396 _argvec[8] = (unsigned long)arg8; \ 2397 _argvec[9] = (unsigned long)arg9; \ 2398 _argvec[10] = (unsigned long)arg10; \ 2399 __asm__ volatile( \ 2400 VALGRIND_ALIGN_STACK \ 2401 "mr 11,%1\n\t" \ 2402 "addi 1,1,-16\n\t" \ 2403 /* arg10 */ \ 2404 "lwz 3,40(11)\n\t" \ 2405 "stw 3,12(1)\n\t" \ 2406 /* arg9 */ \ 2407 "lwz 3,36(11)\n\t" \ 2408 "stw 3,8(1)\n\t" \ 2409 /* args1-8 */ \ 2410 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2411 "lwz 4,8(11)\n\t" \ 2412 "lwz 5,12(11)\n\t" \ 2413 "lwz 6,16(11)\n\t" /* arg4->r6 */ \ 2414 "lwz 7,20(11)\n\t" \ 2415 "lwz 8,24(11)\n\t" \ 2416 "lwz 9,28(11)\n\t" \ 2417 "lwz 10,32(11)\n\t" /* arg8->r10 */ \ 2418 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2419 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2420 VALGRIND_RESTORE_STACK \ 2421 "mr %0,3" \ 2422 : /*out*/ "=r" (_res) \ 2423 : /*in*/ "r" (&_argvec[0]) \ 2424 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2425 ); \ 2426 lval = (__typeof__(lval)) _res; \ 2427 } while (0) 2428 2429#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2430 arg7,arg8,arg9,arg10,arg11) \ 2431 do { \ 2432 volatile OrigFn _orig = (orig); \ 2433 volatile unsigned long _argvec[12]; \ 2434 volatile unsigned long _res; \ 2435 _argvec[0] = (unsigned long)_orig.nraddr; \ 2436 _argvec[1] = (unsigned long)arg1; \ 2437 _argvec[2] = (unsigned long)arg2; \ 2438 _argvec[3] = (unsigned long)arg3; \ 2439 _argvec[4] = (unsigned long)arg4; \ 2440 _argvec[5] = (unsigned long)arg5; \ 2441 _argvec[6] = (unsigned long)arg6; \ 2442 _argvec[7] = (unsigned long)arg7; \ 
2443 _argvec[8] = (unsigned long)arg8; \ 2444 _argvec[9] = (unsigned long)arg9; \ 2445 _argvec[10] = (unsigned long)arg10; \ 2446 _argvec[11] = (unsigned long)arg11; \ 2447 __asm__ volatile( \ 2448 VALGRIND_ALIGN_STACK \ 2449 "mr 11,%1\n\t" \ 2450 "addi 1,1,-32\n\t" \ 2451 /* arg11 */ \ 2452 "lwz 3,44(11)\n\t" \ 2453 "stw 3,16(1)\n\t" \ 2454 /* arg10 */ \ 2455 "lwz 3,40(11)\n\t" \ 2456 "stw 3,12(1)\n\t" \ 2457 /* arg9 */ \ 2458 "lwz 3,36(11)\n\t" \ 2459 "stw 3,8(1)\n\t" \ 2460 /* args1-8 */ \ 2461 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2462 "lwz 4,8(11)\n\t" \ 2463 "lwz 5,12(11)\n\t" \ 2464 "lwz 6,16(11)\n\t" /* arg4->r6 */ \ 2465 "lwz 7,20(11)\n\t" \ 2466 "lwz 8,24(11)\n\t" \ 2467 "lwz 9,28(11)\n\t" \ 2468 "lwz 10,32(11)\n\t" /* arg8->r10 */ \ 2469 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2470 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2471 VALGRIND_RESTORE_STACK \ 2472 "mr %0,3" \ 2473 : /*out*/ "=r" (_res) \ 2474 : /*in*/ "r" (&_argvec[0]) \ 2475 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2476 ); \ 2477 lval = (__typeof__(lval)) _res; \ 2478 } while (0) 2479 2480#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2481 arg7,arg8,arg9,arg10,arg11,arg12) \ 2482 do { \ 2483 volatile OrigFn _orig = (orig); \ 2484 volatile unsigned long _argvec[13]; \ 2485 volatile unsigned long _res; \ 2486 _argvec[0] = (unsigned long)_orig.nraddr; \ 2487 _argvec[1] = (unsigned long)arg1; \ 2488 _argvec[2] = (unsigned long)arg2; \ 2489 _argvec[3] = (unsigned long)arg3; \ 2490 _argvec[4] = (unsigned long)arg4; \ 2491 _argvec[5] = (unsigned long)arg5; \ 2492 _argvec[6] = (unsigned long)arg6; \ 2493 _argvec[7] = (unsigned long)arg7; \ 2494 _argvec[8] = (unsigned long)arg8; \ 2495 _argvec[9] = (unsigned long)arg9; \ 2496 _argvec[10] = (unsigned long)arg10; \ 2497 _argvec[11] = (unsigned long)arg11; \ 2498 _argvec[12] = (unsigned long)arg12; \ 2499 __asm__ volatile( \ 2500 VALGRIND_ALIGN_STACK \ 2501 "mr 11,%1\n\t" \ 2502 "addi 1,1,-32\n\t" \ 2503 /* arg12 */ \ 2504 
"lwz 3,48(11)\n\t" \ 2505 "stw 3,20(1)\n\t" \ 2506 /* arg11 */ \ 2507 "lwz 3,44(11)\n\t" \ 2508 "stw 3,16(1)\n\t" \ 2509 /* arg10 */ \ 2510 "lwz 3,40(11)\n\t" \ 2511 "stw 3,12(1)\n\t" \ 2512 /* arg9 */ \ 2513 "lwz 3,36(11)\n\t" \ 2514 "stw 3,8(1)\n\t" \ 2515 /* args1-8 */ \ 2516 "lwz 3,4(11)\n\t" /* arg1->r3 */ \ 2517 "lwz 4,8(11)\n\t" \ 2518 "lwz 5,12(11)\n\t" \ 2519 "lwz 6,16(11)\n\t" /* arg4->r6 */ \ 2520 "lwz 7,20(11)\n\t" \ 2521 "lwz 8,24(11)\n\t" \ 2522 "lwz 9,28(11)\n\t" \ 2523 "lwz 10,32(11)\n\t" /* arg8->r10 */ \ 2524 "lwz 11,0(11)\n\t" /* target->r11 */ \ 2525 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2526 VALGRIND_RESTORE_STACK \ 2527 "mr %0,3" \ 2528 : /*out*/ "=r" (_res) \ 2529 : /*in*/ "r" (&_argvec[0]) \ 2530 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2531 ); \ 2532 lval = (__typeof__(lval)) _res; \ 2533 } while (0) 2534 2535#endif /* PLAT_ppc32_linux */ 2536 2537/* ------------------------ ppc64-linux ------------------------ */ 2538 2539#if defined(PLAT_ppc64_linux) 2540 2541/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */ 2542 2543/* These regs are trashed by the hidden call. */ 2544#define __CALLER_SAVED_REGS \ 2545 "lr", "ctr", "xer", \ 2546 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \ 2547 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \ 2548 "r11", "r12", "r13" 2549 2550/* Macros to save and align the stack before making a function 2551 call and restore it afterwards as gcc may not keep the stack 2552 pointer aligned if it doesn't realise calls are being made 2553 to other functions. */ 2554 2555#define VALGRIND_ALIGN_STACK \ 2556 "mr 28,1\n\t" \ 2557 "rldicr 1,1,0,59\n\t" 2558#define VALGRIND_RESTORE_STACK \ 2559 "mr 1,28\n\t" 2560 2561/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned 2562 long) == 8. 
*/

/* ppc64 CALL_FN_W_* pattern: _argvec[1] saves the current TOC pointer
   (r2) across the call and _argvec[2] holds the target address, so the
   asm is handed &_argvec[2].  The target's TOC pointer (stashed at
   -8(r11)) is installed before the call and the caller's restored
   afterwards; args go in r3..r10 as in the 64-bit ELF ABI. */

#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+0]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+1]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */ \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+2]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */ \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */ \
         "ld   4, 16(11)\n\t" /* arg2->r4 */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+3]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */ \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */ \
         "ld   4, 16(11)\n\t" /* arg2->r4 */ \
         "ld   5, 24(11)\n\t" /* arg3->r5 */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+4]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */ \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */ \
         "ld   4, 16(11)\n\t" /* arg2->r4 */ \
         "ld   5, 24(11)\n\t" /* arg3->r5 */ \
         "ld   6, 32(11)\n\t" /* arg4->r6 */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+5]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */ \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */ \
         "ld   4, 16(11)\n\t" /* arg2->r4 */ \
         "ld   5, 24(11)\n\t" /* arg3->r5 */ \
         "ld   6, 32(11)\n\t" /* arg4->r6 */ \
         "ld   7, 40(11)\n\t" /* arg5->r7 */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+6]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */ \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */ \
         "ld   4, 16(11)\n\t" /* arg2->r4 */ \
         "ld   5, 24(11)\n\t" /* arg3->r5 */ \
         "ld   6, 32(11)\n\t" /* arg4->r6 */ \
         "ld   7, 40(11)\n\t" /* arg5->r7 */ \
         "ld   8, 48(11)\n\t" /* arg6->r8 */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+7]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */ \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */ \
         "ld   4, 16(11)\n\t" /* arg2->r4 */ \
         "ld   5, 24(11)\n\t" /* arg3->r5 */ \
         "ld   6, 32(11)\n\t" /* arg4->r6 */ \
         "ld   7, 40(11)\n\t" /* arg5->r7 */ \
         "ld   8, 48(11)\n\t" /* arg6->r8 */ \
         "ld   9, 56(11)\n\t" /* arg7->r9 */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+8]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
_argvec[1] = (unsigned long)_orig.r2; \ 2837 _argvec[2] = (unsigned long)_orig.nraddr; \ 2838 _argvec[2+1] = (unsigned long)arg1; \ 2839 _argvec[2+2] = (unsigned long)arg2; \ 2840 _argvec[2+3] = (unsigned long)arg3; \ 2841 _argvec[2+4] = (unsigned long)arg4; \ 2842 _argvec[2+5] = (unsigned long)arg5; \ 2843 _argvec[2+6] = (unsigned long)arg6; \ 2844 _argvec[2+7] = (unsigned long)arg7; \ 2845 _argvec[2+8] = (unsigned long)arg8; \ 2846 __asm__ volatile( \ 2847 VALGRIND_ALIGN_STACK \ 2848 "mr 11,%1\n\t" \ 2849 "std 2,-16(11)\n\t" /* save tocptr */ \ 2850 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \ 2851 "ld 3, 8(11)\n\t" /* arg1->r3 */ \ 2852 "ld 4, 16(11)\n\t" /* arg2->r4 */ \ 2853 "ld 5, 24(11)\n\t" /* arg3->r5 */ \ 2854 "ld 6, 32(11)\n\t" /* arg4->r6 */ \ 2855 "ld 7, 40(11)\n\t" /* arg5->r7 */ \ 2856 "ld 8, 48(11)\n\t" /* arg6->r8 */ \ 2857 "ld 9, 56(11)\n\t" /* arg7->r9 */ \ 2858 "ld 10, 64(11)\n\t" /* arg8->r10 */ \ 2859 "ld 11, 0(11)\n\t" /* target->r11 */ \ 2860 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2861 "mr 11,%1\n\t" \ 2862 "mr %0,3\n\t" \ 2863 "ld 2,-16(11)\n\t" /* restore tocptr */ \ 2864 VALGRIND_RESTORE_STACK \ 2865 : /*out*/ "=r" (_res) \ 2866 : /*in*/ "r" (&_argvec[2]) \ 2867 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2868 ); \ 2869 lval = (__typeof__(lval)) _res; \ 2870 } while (0) 2871 2872#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2873 arg7,arg8,arg9) \ 2874 do { \ 2875 volatile OrigFn _orig = (orig); \ 2876 volatile unsigned long _argvec[3+9]; \ 2877 volatile unsigned long _res; \ 2878 /* _argvec[0] holds current r2 across the call */ \ 2879 _argvec[1] = (unsigned long)_orig.r2; \ 2880 _argvec[2] = (unsigned long)_orig.nraddr; \ 2881 _argvec[2+1] = (unsigned long)arg1; \ 2882 _argvec[2+2] = (unsigned long)arg2; \ 2883 _argvec[2+3] = (unsigned long)arg3; \ 2884 _argvec[2+4] = (unsigned long)arg4; \ 2885 _argvec[2+5] = (unsigned long)arg5; \ 2886 _argvec[2+6] = (unsigned long)arg6; \ 2887 _argvec[2+7] = 
(unsigned long)arg7; \ 2888 _argvec[2+8] = (unsigned long)arg8; \ 2889 _argvec[2+9] = (unsigned long)arg9; \ 2890 __asm__ volatile( \ 2891 VALGRIND_ALIGN_STACK \ 2892 "mr 11,%1\n\t" \ 2893 "std 2,-16(11)\n\t" /* save tocptr */ \ 2894 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \ 2895 "addi 1,1,-128\n\t" /* expand stack frame */ \ 2896 /* arg9 */ \ 2897 "ld 3,72(11)\n\t" \ 2898 "std 3,112(1)\n\t" \ 2899 /* args1-8 */ \ 2900 "ld 3, 8(11)\n\t" /* arg1->r3 */ \ 2901 "ld 4, 16(11)\n\t" /* arg2->r4 */ \ 2902 "ld 5, 24(11)\n\t" /* arg3->r5 */ \ 2903 "ld 6, 32(11)\n\t" /* arg4->r6 */ \ 2904 "ld 7, 40(11)\n\t" /* arg5->r7 */ \ 2905 "ld 8, 48(11)\n\t" /* arg6->r8 */ \ 2906 "ld 9, 56(11)\n\t" /* arg7->r9 */ \ 2907 "ld 10, 64(11)\n\t" /* arg8->r10 */ \ 2908 "ld 11, 0(11)\n\t" /* target->r11 */ \ 2909 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2910 "mr 11,%1\n\t" \ 2911 "mr %0,3\n\t" \ 2912 "ld 2,-16(11)\n\t" /* restore tocptr */ \ 2913 VALGRIND_RESTORE_STACK \ 2914 : /*out*/ "=r" (_res) \ 2915 : /*in*/ "r" (&_argvec[2]) \ 2916 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2917 ); \ 2918 lval = (__typeof__(lval)) _res; \ 2919 } while (0) 2920 2921#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2922 arg7,arg8,arg9,arg10) \ 2923 do { \ 2924 volatile OrigFn _orig = (orig); \ 2925 volatile unsigned long _argvec[3+10]; \ 2926 volatile unsigned long _res; \ 2927 /* _argvec[0] holds current r2 across the call */ \ 2928 _argvec[1] = (unsigned long)_orig.r2; \ 2929 _argvec[2] = (unsigned long)_orig.nraddr; \ 2930 _argvec[2+1] = (unsigned long)arg1; \ 2931 _argvec[2+2] = (unsigned long)arg2; \ 2932 _argvec[2+3] = (unsigned long)arg3; \ 2933 _argvec[2+4] = (unsigned long)arg4; \ 2934 _argvec[2+5] = (unsigned long)arg5; \ 2935 _argvec[2+6] = (unsigned long)arg6; \ 2936 _argvec[2+7] = (unsigned long)arg7; \ 2937 _argvec[2+8] = (unsigned long)arg8; \ 2938 _argvec[2+9] = (unsigned long)arg9; \ 2939 _argvec[2+10] = (unsigned long)arg10; \ 2940 __asm__ volatile( \ 
2941 VALGRIND_ALIGN_STACK \ 2942 "mr 11,%1\n\t" \ 2943 "std 2,-16(11)\n\t" /* save tocptr */ \ 2944 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \ 2945 "addi 1,1,-128\n\t" /* expand stack frame */ \ 2946 /* arg10 */ \ 2947 "ld 3,80(11)\n\t" \ 2948 "std 3,120(1)\n\t" \ 2949 /* arg9 */ \ 2950 "ld 3,72(11)\n\t" \ 2951 "std 3,112(1)\n\t" \ 2952 /* args1-8 */ \ 2953 "ld 3, 8(11)\n\t" /* arg1->r3 */ \ 2954 "ld 4, 16(11)\n\t" /* arg2->r4 */ \ 2955 "ld 5, 24(11)\n\t" /* arg3->r5 */ \ 2956 "ld 6, 32(11)\n\t" /* arg4->r6 */ \ 2957 "ld 7, 40(11)\n\t" /* arg5->r7 */ \ 2958 "ld 8, 48(11)\n\t" /* arg6->r8 */ \ 2959 "ld 9, 56(11)\n\t" /* arg7->r9 */ \ 2960 "ld 10, 64(11)\n\t" /* arg8->r10 */ \ 2961 "ld 11, 0(11)\n\t" /* target->r11 */ \ 2962 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 2963 "mr 11,%1\n\t" \ 2964 "mr %0,3\n\t" \ 2965 "ld 2,-16(11)\n\t" /* restore tocptr */ \ 2966 VALGRIND_RESTORE_STACK \ 2967 : /*out*/ "=r" (_res) \ 2968 : /*in*/ "r" (&_argvec[2]) \ 2969 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 2970 ); \ 2971 lval = (__typeof__(lval)) _res; \ 2972 } while (0) 2973 2974#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 2975 arg7,arg8,arg9,arg10,arg11) \ 2976 do { \ 2977 volatile OrigFn _orig = (orig); \ 2978 volatile unsigned long _argvec[3+11]; \ 2979 volatile unsigned long _res; \ 2980 /* _argvec[0] holds current r2 across the call */ \ 2981 _argvec[1] = (unsigned long)_orig.r2; \ 2982 _argvec[2] = (unsigned long)_orig.nraddr; \ 2983 _argvec[2+1] = (unsigned long)arg1; \ 2984 _argvec[2+2] = (unsigned long)arg2; \ 2985 _argvec[2+3] = (unsigned long)arg3; \ 2986 _argvec[2+4] = (unsigned long)arg4; \ 2987 _argvec[2+5] = (unsigned long)arg5; \ 2988 _argvec[2+6] = (unsigned long)arg6; \ 2989 _argvec[2+7] = (unsigned long)arg7; \ 2990 _argvec[2+8] = (unsigned long)arg8; \ 2991 _argvec[2+9] = (unsigned long)arg9; \ 2992 _argvec[2+10] = (unsigned long)arg10; \ 2993 _argvec[2+11] = (unsigned long)arg11; \ 2994 __asm__ volatile( \ 2995 
VALGRIND_ALIGN_STACK \ 2996 "mr 11,%1\n\t" \ 2997 "std 2,-16(11)\n\t" /* save tocptr */ \ 2998 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \ 2999 "addi 1,1,-144\n\t" /* expand stack frame */ \ 3000 /* arg11 */ \ 3001 "ld 3,88(11)\n\t" \ 3002 "std 3,128(1)\n\t" \ 3003 /* arg10 */ \ 3004 "ld 3,80(11)\n\t" \ 3005 "std 3,120(1)\n\t" \ 3006 /* arg9 */ \ 3007 "ld 3,72(11)\n\t" \ 3008 "std 3,112(1)\n\t" \ 3009 /* args1-8 */ \ 3010 "ld 3, 8(11)\n\t" /* arg1->r3 */ \ 3011 "ld 4, 16(11)\n\t" /* arg2->r4 */ \ 3012 "ld 5, 24(11)\n\t" /* arg3->r5 */ \ 3013 "ld 6, 32(11)\n\t" /* arg4->r6 */ \ 3014 "ld 7, 40(11)\n\t" /* arg5->r7 */ \ 3015 "ld 8, 48(11)\n\t" /* arg6->r8 */ \ 3016 "ld 9, 56(11)\n\t" /* arg7->r9 */ \ 3017 "ld 10, 64(11)\n\t" /* arg8->r10 */ \ 3018 "ld 11, 0(11)\n\t" /* target->r11 */ \ 3019 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 3020 "mr 11,%1\n\t" \ 3021 "mr %0,3\n\t" \ 3022 "ld 2,-16(11)\n\t" /* restore tocptr */ \ 3023 VALGRIND_RESTORE_STACK \ 3024 : /*out*/ "=r" (_res) \ 3025 : /*in*/ "r" (&_argvec[2]) \ 3026 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 3027 ); \ 3028 lval = (__typeof__(lval)) _res; \ 3029 } while (0) 3030 3031#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 3032 arg7,arg8,arg9,arg10,arg11,arg12) \ 3033 do { \ 3034 volatile OrigFn _orig = (orig); \ 3035 volatile unsigned long _argvec[3+12]; \ 3036 volatile unsigned long _res; \ 3037 /* _argvec[0] holds current r2 across the call */ \ 3038 _argvec[1] = (unsigned long)_orig.r2; \ 3039 _argvec[2] = (unsigned long)_orig.nraddr; \ 3040 _argvec[2+1] = (unsigned long)arg1; \ 3041 _argvec[2+2] = (unsigned long)arg2; \ 3042 _argvec[2+3] = (unsigned long)arg3; \ 3043 _argvec[2+4] = (unsigned long)arg4; \ 3044 _argvec[2+5] = (unsigned long)arg5; \ 3045 _argvec[2+6] = (unsigned long)arg6; \ 3046 _argvec[2+7] = (unsigned long)arg7; \ 3047 _argvec[2+8] = (unsigned long)arg8; \ 3048 _argvec[2+9] = (unsigned long)arg9; \ 3049 _argvec[2+10] = (unsigned long)arg10; \ 3050 
_argvec[2+11] = (unsigned long)arg11; \ 3051 _argvec[2+12] = (unsigned long)arg12; \ 3052 __asm__ volatile( \ 3053 VALGRIND_ALIGN_STACK \ 3054 "mr 11,%1\n\t" \ 3055 "std 2,-16(11)\n\t" /* save tocptr */ \ 3056 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \ 3057 "addi 1,1,-144\n\t" /* expand stack frame */ \ 3058 /* arg12 */ \ 3059 "ld 3,96(11)\n\t" \ 3060 "std 3,136(1)\n\t" \ 3061 /* arg11 */ \ 3062 "ld 3,88(11)\n\t" \ 3063 "std 3,128(1)\n\t" \ 3064 /* arg10 */ \ 3065 "ld 3,80(11)\n\t" \ 3066 "std 3,120(1)\n\t" \ 3067 /* arg9 */ \ 3068 "ld 3,72(11)\n\t" \ 3069 "std 3,112(1)\n\t" \ 3070 /* args1-8 */ \ 3071 "ld 3, 8(11)\n\t" /* arg1->r3 */ \ 3072 "ld 4, 16(11)\n\t" /* arg2->r4 */ \ 3073 "ld 5, 24(11)\n\t" /* arg3->r5 */ \ 3074 "ld 6, 32(11)\n\t" /* arg4->r6 */ \ 3075 "ld 7, 40(11)\n\t" /* arg5->r7 */ \ 3076 "ld 8, 48(11)\n\t" /* arg6->r8 */ \ 3077 "ld 9, 56(11)\n\t" /* arg7->r9 */ \ 3078 "ld 10, 64(11)\n\t" /* arg8->r10 */ \ 3079 "ld 11, 0(11)\n\t" /* target->r11 */ \ 3080 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \ 3081 "mr 11,%1\n\t" \ 3082 "mr %0,3\n\t" \ 3083 "ld 2,-16(11)\n\t" /* restore tocptr */ \ 3084 VALGRIND_RESTORE_STACK \ 3085 : /*out*/ "=r" (_res) \ 3086 : /*in*/ "r" (&_argvec[2]) \ 3087 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \ 3088 ); \ 3089 lval = (__typeof__(lval)) _res; \ 3090 } while (0) 3091 3092#endif /* PLAT_ppc64_linux */ 3093 3094/* ------------------------- arm-linux ------------------------- */ 3095 3096#if defined(PLAT_arm_linux) 3097 3098/* These regs are trashed by the hidden call. */ 3099#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14" 3100 3101/* Macros to save and align the stack before making a function 3102 call and restore it afterwards as gcc may not keep the stack 3103 pointer aligned if it doesn't realise calls are being made 3104 to other functions. */ 3105 3106/* This is a bit tricky. We store the original stack pointer in r10 3107 as it is callee-saves. 
gcc doesn't allow the use of r11 for some
   reason.  Also, we can't directly "bic" the stack pointer in thumb
   mode since r13 isn't an allowed register number in that context.
   So use r4 as a temporary, since that is about to get trashed
   anyway, just after each use of this macro.  Side effect is we need
   to be very careful about any future changes, since
   VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
#define VALGRIND_ALIGN_STACK \
   "mov r10, sp\n\t" \
   "mov r4, sp\n\t" \
   "bic r4, r4, #7\n\t" \
   "mov sp, r4\n\t"
#define VALGRIND_RESTORE_STACK \
   "mov sp, r10\n\t"

/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
   long) == 4. */

/* NOTE(review): common scheme for the arm CALL_FN_W_* macros below.
   _argvec[0] holds the target address (loaded into r4 and called via
   the trampoline); args sit at 4-byte offsets after it.  Args 1..4 go
   in r0..r3, the rest are pushed onto the stack.  VALGRIND_ALIGN_STACK
   rounds sp down to 8 bytes ("bic #7"); the extra "sub sp, sp, #4"
   seen before an odd number of pushed words keeps sp 8-byte aligned
   across the pushes.  r10 holds the saved sp, hence its presence in
   the clobber list. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #4 \n\t" \
         "ldr r0, [%1, #20] \n\t" \
         "push {r0} \n\t" \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #20] \n\t" \
         "ldr r1, [%1, #24] \n\t" \
         "push {r0, r1} \n\t" \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #4 \n\t" \
         "ldr r0, [%1, #20] \n\t" \
         "ldr r1, [%1, #24] \n\t" \
         "ldr r2, [%1, #28] \n\t" \
         "push {r0, r1, r2} \n\t" \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #20] \n\t" \
         "ldr r1, [%1, #24] \n\t" \
         "ldr r2, [%1, #28] \n\t" \
         "ldr r3, [%1, #32] \n\t" \
         "push {r0, r1, r2, r3} \n\t" \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #4 \n\t" \
         "ldr r0, [%1, #20] \n\t" \
         "ldr r1, [%1, #24] \n\t" \
         "ldr r2, [%1, #28] \n\t" \
         "ldr r3, [%1, #32] \n\t" \
         "ldr r4, [%1, #36] \n\t" \
         "push {r0, r1, r2, r3, r4} \n\t" \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #40] \n\t" \
         "push {r0} \n\t" \
         "ldr r0, [%1, #20] \n\t" \
         "ldr r1, [%1, #24] \n\t" \
         "ldr r2, [%1, #28] \n\t" \
         "ldr r3, [%1, #32] \n\t" \
         "ldr r4, [%1, #36] \n\t" \
         "push {r0, r1, r2, r3, r4} \n\t" \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #4 \n\t" \
         "ldr r0, [%1, #40] \n\t" \
         "ldr r1, [%1, #44] \n\t" \
         "push {r0, r1} \n\t" \
         "ldr r0, [%1, #20] \n\t" \
         "ldr r1, [%1, #24] \n\t" \
         "ldr r2, [%1, #28] \n\t" \
         "ldr r3, [%1, #32] \n\t" \
         "ldr r4, [%1, #36] \n\t" \
         "push {r0, r1, r2, r3, r4} \n\t" \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #40] \n\t" \
         "ldr r1, [%1, #44] \n\t" \
         "ldr r2, [%1, #48] \n\t" \
         "push {r0, r1, r2} \n\t" \
         "ldr r0, [%1, #20] \n\t" \
         "ldr r1, [%1, #24] \n\t" \
         "ldr r2, [%1, #28] \n\t" \
         "ldr r3, [%1, #32] \n\t" \
         "ldr r4, [%1, #36] \n\t" \
         "push {r0, r1, r2, r3, r4} \n\t" \
         "ldr r0, [%1, #4] \n\t" \
         "ldr r1, [%1, #8] \n\t" \
         "ldr r2, [%1, #12] \n\t" \
         "ldr r3, [%1, #16] \n\t" \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#endif /* PLAT_arm_linux */

/* ------------------------ arm64-linux ------------------------ */

#if defined(PLAT_arm64_linux)

/* These regs are trashed by the hidden call.
*/
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
     "x18", "x19", "x20", "x30", \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
     "v26", "v27", "v28", "v29", "v30", "v31"

/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call. */
#define VALGRIND_ALIGN_STACK \
   "mov x21, sp\n\t" \
   "bic sp, x21, #15\n\t"
#define VALGRIND_RESTORE_STACK \
   "mov sp, x21\n\t"

/* These CALL_FN_ macros assume that on arm64-linux,
   sizeof(unsigned long) == 8. */

/* NOTE(review): common scheme for the arm64 CALL_FN_W_* macros below.
   _argvec[0] holds the target address (loaded into x8 and called via
   the trampoline); args at 8-byte offsets after it go into x0..x7.
   VALGRIND_ALIGN_STACK rounds sp down to 16 bytes ("bic #15") and
   parks the old sp in x21, which is therefore in the clobber list. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x6, [%1, #56] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x6, [%1, #56] \n\t" \
         "ldr x7, [%1, #64] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #0x20 \n\t" \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, 
[%1, #24] \n\t" \ 3845 "ldr x3, [%1, #32] \n\t" \ 3846 "ldr x4, [%1, #40] \n\t" \ 3847 "ldr x5, [%1, #48] \n\t" \ 3848 "ldr x6, [%1, #56] \n\t" \ 3849 "ldr x7, [%1, #64] \n\t" \ 3850 "ldr x8, [%1, #72] \n\t" \ 3851 "str x8, [sp, #0] \n\t" \ 3852 "ldr x8, [%1] \n\t" /* target->x8 */ \ 3853 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \ 3854 VALGRIND_RESTORE_STACK \ 3855 "mov %0, x0" \ 3856 : /*out*/ "=r" (_res) \ 3857 : /*in*/ "0" (&_argvec[0]) \ 3858 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \ 3859 ); \ 3860 lval = (__typeof__(lval)) _res; \ 3861 } while (0) 3862 3863#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 3864 arg7,arg8,arg9,arg10) \ 3865 do { \ 3866 volatile OrigFn _orig = (orig); \ 3867 volatile unsigned long _argvec[11]; \ 3868 volatile unsigned long _res; \ 3869 _argvec[0] = (unsigned long)_orig.nraddr; \ 3870 _argvec[1] = (unsigned long)(arg1); \ 3871 _argvec[2] = (unsigned long)(arg2); \ 3872 _argvec[3] = (unsigned long)(arg3); \ 3873 _argvec[4] = (unsigned long)(arg4); \ 3874 _argvec[5] = (unsigned long)(arg5); \ 3875 _argvec[6] = (unsigned long)(arg6); \ 3876 _argvec[7] = (unsigned long)(arg7); \ 3877 _argvec[8] = (unsigned long)(arg8); \ 3878 _argvec[9] = (unsigned long)(arg9); \ 3879 _argvec[10] = (unsigned long)(arg10); \ 3880 __asm__ volatile( \ 3881 VALGRIND_ALIGN_STACK \ 3882 "sub sp, sp, #0x20 \n\t" \ 3883 "ldr x0, [%1, #8] \n\t" \ 3884 "ldr x1, [%1, #16] \n\t" \ 3885 "ldr x2, [%1, #24] \n\t" \ 3886 "ldr x3, [%1, #32] \n\t" \ 3887 "ldr x4, [%1, #40] \n\t" \ 3888 "ldr x5, [%1, #48] \n\t" \ 3889 "ldr x6, [%1, #56] \n\t" \ 3890 "ldr x7, [%1, #64] \n\t" \ 3891 "ldr x8, [%1, #72] \n\t" \ 3892 "str x8, [sp, #0] \n\t" \ 3893 "ldr x8, [%1, #80] \n\t" \ 3894 "str x8, [sp, #8] \n\t" \ 3895 "ldr x8, [%1] \n\t" /* target->x8 */ \ 3896 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \ 3897 VALGRIND_RESTORE_STACK \ 3898 "mov %0, x0" \ 3899 : /*out*/ "=r" (_res) \ 3900 : /*in*/ "0" (&_argvec[0]) \ 3901 : /*trash*/ "cc", "memory", 
__CALLER_SAVED_REGS, "x21" \ 3902 ); \ 3903 lval = (__typeof__(lval)) _res; \ 3904 } while (0) 3905 3906#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 3907 arg7,arg8,arg9,arg10,arg11) \ 3908 do { \ 3909 volatile OrigFn _orig = (orig); \ 3910 volatile unsigned long _argvec[12]; \ 3911 volatile unsigned long _res; \ 3912 _argvec[0] = (unsigned long)_orig.nraddr; \ 3913 _argvec[1] = (unsigned long)(arg1); \ 3914 _argvec[2] = (unsigned long)(arg2); \ 3915 _argvec[3] = (unsigned long)(arg3); \ 3916 _argvec[4] = (unsigned long)(arg4); \ 3917 _argvec[5] = (unsigned long)(arg5); \ 3918 _argvec[6] = (unsigned long)(arg6); \ 3919 _argvec[7] = (unsigned long)(arg7); \ 3920 _argvec[8] = (unsigned long)(arg8); \ 3921 _argvec[9] = (unsigned long)(arg9); \ 3922 _argvec[10] = (unsigned long)(arg10); \ 3923 _argvec[11] = (unsigned long)(arg11); \ 3924 __asm__ volatile( \ 3925 VALGRIND_ALIGN_STACK \ 3926 "sub sp, sp, #0x30 \n\t" \ 3927 "ldr x0, [%1, #8] \n\t" \ 3928 "ldr x1, [%1, #16] \n\t" \ 3929 "ldr x2, [%1, #24] \n\t" \ 3930 "ldr x3, [%1, #32] \n\t" \ 3931 "ldr x4, [%1, #40] \n\t" \ 3932 "ldr x5, [%1, #48] \n\t" \ 3933 "ldr x6, [%1, #56] \n\t" \ 3934 "ldr x7, [%1, #64] \n\t" \ 3935 "ldr x8, [%1, #72] \n\t" \ 3936 "str x8, [sp, #0] \n\t" \ 3937 "ldr x8, [%1, #80] \n\t" \ 3938 "str x8, [sp, #8] \n\t" \ 3939 "ldr x8, [%1, #88] \n\t" \ 3940 "str x8, [sp, #16] \n\t" \ 3941 "ldr x8, [%1] \n\t" /* target->x8 */ \ 3942 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \ 3943 VALGRIND_RESTORE_STACK \ 3944 "mov %0, x0" \ 3945 : /*out*/ "=r" (_res) \ 3946 : /*in*/ "0" (&_argvec[0]) \ 3947 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \ 3948 ); \ 3949 lval = (__typeof__(lval)) _res; \ 3950 } while (0) 3951 3952#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 3953 arg7,arg8,arg9,arg10,arg11, \ 3954 arg12) \ 3955 do { \ 3956 volatile OrigFn _orig = (orig); \ 3957 volatile unsigned long _argvec[13]; \ 3958 volatile unsigned long _res; \ 3959 _argvec[0] = 
(unsigned long)_orig.nraddr; \ 3960 _argvec[1] = (unsigned long)(arg1); \ 3961 _argvec[2] = (unsigned long)(arg2); \ 3962 _argvec[3] = (unsigned long)(arg3); \ 3963 _argvec[4] = (unsigned long)(arg4); \ 3964 _argvec[5] = (unsigned long)(arg5); \ 3965 _argvec[6] = (unsigned long)(arg6); \ 3966 _argvec[7] = (unsigned long)(arg7); \ 3967 _argvec[8] = (unsigned long)(arg8); \ 3968 _argvec[9] = (unsigned long)(arg9); \ 3969 _argvec[10] = (unsigned long)(arg10); \ 3970 _argvec[11] = (unsigned long)(arg11); \ 3971 _argvec[12] = (unsigned long)(arg12); \ 3972 __asm__ volatile( \ 3973 VALGRIND_ALIGN_STACK \ 3974 "sub sp, sp, #0x30 \n\t" \ 3975 "ldr x0, [%1, #8] \n\t" \ 3976 "ldr x1, [%1, #16] \n\t" \ 3977 "ldr x2, [%1, #24] \n\t" \ 3978 "ldr x3, [%1, #32] \n\t" \ 3979 "ldr x4, [%1, #40] \n\t" \ 3980 "ldr x5, [%1, #48] \n\t" \ 3981 "ldr x6, [%1, #56] \n\t" \ 3982 "ldr x7, [%1, #64] \n\t" \ 3983 "ldr x8, [%1, #72] \n\t" \ 3984 "str x8, [sp, #0] \n\t" \ 3985 "ldr x8, [%1, #80] \n\t" \ 3986 "str x8, [sp, #8] \n\t" \ 3987 "ldr x8, [%1, #88] \n\t" \ 3988 "str x8, [sp, #16] \n\t" \ 3989 "ldr x8, [%1, #96] \n\t" \ 3990 "str x8, [sp, #24] \n\t" \ 3991 "ldr x8, [%1] \n\t" /* target->x8 */ \ 3992 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \ 3993 VALGRIND_RESTORE_STACK \ 3994 "mov %0, x0" \ 3995 : /*out*/ "=r" (_res) \ 3996 : /*in*/ "0" (&_argvec[0]) \ 3997 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \ 3998 ); \ 3999 lval = (__typeof__(lval)) _res; \ 4000 } while (0) 4001 4002#endif /* PLAT_arm64_linux */ 4003 4004/* ------------------------- s390x-linux ------------------------- */ 4005 4006#if defined(PLAT_s390x_linux) 4007 4008/* Similar workaround as amd64 (see above), but we use r11 as frame 4009 pointer and save the old r11 in r7. r11 might be used for 4010 argvec, therefore we copy argvec in r1 since r1 is clobbered 4011 after the call anyway. 
*/ 4012#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM) 4013# define __FRAME_POINTER \ 4014 ,"d"(__builtin_dwarf_cfa()) 4015# define VALGRIND_CFI_PROLOGUE \ 4016 ".cfi_remember_state\n\t" \ 4017 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \ 4018 "lgr 7,11\n\t" \ 4019 "lgr 11,%2\n\t" \ 4020 ".cfi_def_cfa r11, 0\n\t" 4021# define VALGRIND_CFI_EPILOGUE \ 4022 "lgr 11, 7\n\t" \ 4023 ".cfi_restore_state\n\t" 4024#else 4025# define __FRAME_POINTER 4026# define VALGRIND_CFI_PROLOGUE \ 4027 "lgr 1,%1\n\t" 4028# define VALGRIND_CFI_EPILOGUE 4029#endif 4030 4031/* Nb: On s390 the stack pointer is properly aligned *at all times* 4032 according to the s390 GCC maintainer. (The ABI specification is not 4033 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and 4034 VALGRIND_RESTORE_STACK are not defined here. */ 4035 4036/* These regs are trashed by the hidden call. Note that we overwrite 4037 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the 4038 function a proper return address. All others are ABI defined call 4039 clobbers. 
*/ 4040#define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \ 4041 "f0","f1","f2","f3","f4","f5","f6","f7" 4042 4043/* Nb: Although r11 is modified in the asm snippets below (inside 4044 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for 4045 two reasons: 4046 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not 4047 modified 4048 (2) GCC will complain that r11 cannot appear inside a clobber section, 4049 when compiled with -O -fno-omit-frame-pointer 4050 */ 4051 4052#define CALL_FN_W_v(lval, orig) \ 4053 do { \ 4054 volatile OrigFn _orig = (orig); \ 4055 volatile unsigned long _argvec[1]; \ 4056 volatile unsigned long _res; \ 4057 _argvec[0] = (unsigned long)_orig.nraddr; \ 4058 __asm__ volatile( \ 4059 VALGRIND_CFI_PROLOGUE \ 4060 "aghi 15,-160\n\t" \ 4061 "lg 1, 0(1)\n\t" /* target->r1 */ \ 4062 VALGRIND_CALL_NOREDIR_R1 \ 4063 "lgr %0, 2\n\t" \ 4064 "aghi 15,160\n\t" \ 4065 VALGRIND_CFI_EPILOGUE \ 4066 : /*out*/ "=d" (_res) \ 4067 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \ 4068 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4069 ); \ 4070 lval = (__typeof__(lval)) _res; \ 4071 } while (0) 4072 4073/* The call abi has the arguments in r2-r6 and stack */ 4074#define CALL_FN_W_W(lval, orig, arg1) \ 4075 do { \ 4076 volatile OrigFn _orig = (orig); \ 4077 volatile unsigned long _argvec[2]; \ 4078 volatile unsigned long _res; \ 4079 _argvec[0] = (unsigned long)_orig.nraddr; \ 4080 _argvec[1] = (unsigned long)arg1; \ 4081 __asm__ volatile( \ 4082 VALGRIND_CFI_PROLOGUE \ 4083 "aghi 15,-160\n\t" \ 4084 "lg 2, 8(1)\n\t" \ 4085 "lg 1, 0(1)\n\t" \ 4086 VALGRIND_CALL_NOREDIR_R1 \ 4087 "lgr %0, 2\n\t" \ 4088 "aghi 15,160\n\t" \ 4089 VALGRIND_CFI_EPILOGUE \ 4090 : /*out*/ "=d" (_res) \ 4091 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4092 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4093 ); \ 4094 lval = (__typeof__(lval)) _res; \ 4095 } while (0) 4096 4097#define CALL_FN_W_WW(lval, orig, arg1, arg2) \ 4098 do { \ 4099 
volatile OrigFn _orig = (orig); \ 4100 volatile unsigned long _argvec[3]; \ 4101 volatile unsigned long _res; \ 4102 _argvec[0] = (unsigned long)_orig.nraddr; \ 4103 _argvec[1] = (unsigned long)arg1; \ 4104 _argvec[2] = (unsigned long)arg2; \ 4105 __asm__ volatile( \ 4106 VALGRIND_CFI_PROLOGUE \ 4107 "aghi 15,-160\n\t" \ 4108 "lg 2, 8(1)\n\t" \ 4109 "lg 3,16(1)\n\t" \ 4110 "lg 1, 0(1)\n\t" \ 4111 VALGRIND_CALL_NOREDIR_R1 \ 4112 "lgr %0, 2\n\t" \ 4113 "aghi 15,160\n\t" \ 4114 VALGRIND_CFI_EPILOGUE \ 4115 : /*out*/ "=d" (_res) \ 4116 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4117 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4118 ); \ 4119 lval = (__typeof__(lval)) _res; \ 4120 } while (0) 4121 4122#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \ 4123 do { \ 4124 volatile OrigFn _orig = (orig); \ 4125 volatile unsigned long _argvec[4]; \ 4126 volatile unsigned long _res; \ 4127 _argvec[0] = (unsigned long)_orig.nraddr; \ 4128 _argvec[1] = (unsigned long)arg1; \ 4129 _argvec[2] = (unsigned long)arg2; \ 4130 _argvec[3] = (unsigned long)arg3; \ 4131 __asm__ volatile( \ 4132 VALGRIND_CFI_PROLOGUE \ 4133 "aghi 15,-160\n\t" \ 4134 "lg 2, 8(1)\n\t" \ 4135 "lg 3,16(1)\n\t" \ 4136 "lg 4,24(1)\n\t" \ 4137 "lg 1, 0(1)\n\t" \ 4138 VALGRIND_CALL_NOREDIR_R1 \ 4139 "lgr %0, 2\n\t" \ 4140 "aghi 15,160\n\t" \ 4141 VALGRIND_CFI_EPILOGUE \ 4142 : /*out*/ "=d" (_res) \ 4143 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4144 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4145 ); \ 4146 lval = (__typeof__(lval)) _res; \ 4147 } while (0) 4148 4149#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \ 4150 do { \ 4151 volatile OrigFn _orig = (orig); \ 4152 volatile unsigned long _argvec[5]; \ 4153 volatile unsigned long _res; \ 4154 _argvec[0] = (unsigned long)_orig.nraddr; \ 4155 _argvec[1] = (unsigned long)arg1; \ 4156 _argvec[2] = (unsigned long)arg2; \ 4157 _argvec[3] = (unsigned long)arg3; \ 4158 _argvec[4] = (unsigned long)arg4; \ 4159 __asm__ volatile( \ 
4160 VALGRIND_CFI_PROLOGUE \ 4161 "aghi 15,-160\n\t" \ 4162 "lg 2, 8(1)\n\t" \ 4163 "lg 3,16(1)\n\t" \ 4164 "lg 4,24(1)\n\t" \ 4165 "lg 5,32(1)\n\t" \ 4166 "lg 1, 0(1)\n\t" \ 4167 VALGRIND_CALL_NOREDIR_R1 \ 4168 "lgr %0, 2\n\t" \ 4169 "aghi 15,160\n\t" \ 4170 VALGRIND_CFI_EPILOGUE \ 4171 : /*out*/ "=d" (_res) \ 4172 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4173 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \ 4174 ); \ 4175 lval = (__typeof__(lval)) _res; \ 4176 } while (0) 4177 4178#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \ 4179 do { \ 4180 volatile OrigFn _orig = (orig); \ 4181 volatile unsigned long _argvec[6]; \ 4182 volatile unsigned long _res; \ 4183 _argvec[0] = (unsigned long)_orig.nraddr; \ 4184 _argvec[1] = (unsigned long)arg1; \ 4185 _argvec[2] = (unsigned long)arg2; \ 4186 _argvec[3] = (unsigned long)arg3; \ 4187 _argvec[4] = (unsigned long)arg4; \ 4188 _argvec[5] = (unsigned long)arg5; \ 4189 __asm__ volatile( \ 4190 VALGRIND_CFI_PROLOGUE \ 4191 "aghi 15,-160\n\t" \ 4192 "lg 2, 8(1)\n\t" \ 4193 "lg 3,16(1)\n\t" \ 4194 "lg 4,24(1)\n\t" \ 4195 "lg 5,32(1)\n\t" \ 4196 "lg 6,40(1)\n\t" \ 4197 "lg 1, 0(1)\n\t" \ 4198 VALGRIND_CALL_NOREDIR_R1 \ 4199 "lgr %0, 2\n\t" \ 4200 "aghi 15,160\n\t" \ 4201 VALGRIND_CFI_EPILOGUE \ 4202 : /*out*/ "=d" (_res) \ 4203 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4204 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4205 ); \ 4206 lval = (__typeof__(lval)) _res; \ 4207 } while (0) 4208 4209#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4210 arg6) \ 4211 do { \ 4212 volatile OrigFn _orig = (orig); \ 4213 volatile unsigned long _argvec[7]; \ 4214 volatile unsigned long _res; \ 4215 _argvec[0] = (unsigned long)_orig.nraddr; \ 4216 _argvec[1] = (unsigned long)arg1; \ 4217 _argvec[2] = (unsigned long)arg2; \ 4218 _argvec[3] = (unsigned long)arg3; \ 4219 _argvec[4] = (unsigned long)arg4; \ 4220 _argvec[5] = (unsigned long)arg5; \ 4221 _argvec[6] = (unsigned long)arg6; \ 
4222 __asm__ volatile( \ 4223 VALGRIND_CFI_PROLOGUE \ 4224 "aghi 15,-168\n\t" \ 4225 "lg 2, 8(1)\n\t" \ 4226 "lg 3,16(1)\n\t" \ 4227 "lg 4,24(1)\n\t" \ 4228 "lg 5,32(1)\n\t" \ 4229 "lg 6,40(1)\n\t" \ 4230 "mvc 160(8,15), 48(1)\n\t" \ 4231 "lg 1, 0(1)\n\t" \ 4232 VALGRIND_CALL_NOREDIR_R1 \ 4233 "lgr %0, 2\n\t" \ 4234 "aghi 15,168\n\t" \ 4235 VALGRIND_CFI_EPILOGUE \ 4236 : /*out*/ "=d" (_res) \ 4237 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4238 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4239 ); \ 4240 lval = (__typeof__(lval)) _res; \ 4241 } while (0) 4242 4243#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4244 arg6, arg7) \ 4245 do { \ 4246 volatile OrigFn _orig = (orig); \ 4247 volatile unsigned long _argvec[8]; \ 4248 volatile unsigned long _res; \ 4249 _argvec[0] = (unsigned long)_orig.nraddr; \ 4250 _argvec[1] = (unsigned long)arg1; \ 4251 _argvec[2] = (unsigned long)arg2; \ 4252 _argvec[3] = (unsigned long)arg3; \ 4253 _argvec[4] = (unsigned long)arg4; \ 4254 _argvec[5] = (unsigned long)arg5; \ 4255 _argvec[6] = (unsigned long)arg6; \ 4256 _argvec[7] = (unsigned long)arg7; \ 4257 __asm__ volatile( \ 4258 VALGRIND_CFI_PROLOGUE \ 4259 "aghi 15,-176\n\t" \ 4260 "lg 2, 8(1)\n\t" \ 4261 "lg 3,16(1)\n\t" \ 4262 "lg 4,24(1)\n\t" \ 4263 "lg 5,32(1)\n\t" \ 4264 "lg 6,40(1)\n\t" \ 4265 "mvc 160(8,15), 48(1)\n\t" \ 4266 "mvc 168(8,15), 56(1)\n\t" \ 4267 "lg 1, 0(1)\n\t" \ 4268 VALGRIND_CALL_NOREDIR_R1 \ 4269 "lgr %0, 2\n\t" \ 4270 "aghi 15,176\n\t" \ 4271 VALGRIND_CFI_EPILOGUE \ 4272 : /*out*/ "=d" (_res) \ 4273 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4274 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4275 ); \ 4276 lval = (__typeof__(lval)) _res; \ 4277 } while (0) 4278 4279#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4280 arg6, arg7 ,arg8) \ 4281 do { \ 4282 volatile OrigFn _orig = (orig); \ 4283 volatile unsigned long _argvec[9]; \ 4284 volatile unsigned long _res; \ 4285 _argvec[0] = (unsigned 
long)_orig.nraddr; \ 4286 _argvec[1] = (unsigned long)arg1; \ 4287 _argvec[2] = (unsigned long)arg2; \ 4288 _argvec[3] = (unsigned long)arg3; \ 4289 _argvec[4] = (unsigned long)arg4; \ 4290 _argvec[5] = (unsigned long)arg5; \ 4291 _argvec[6] = (unsigned long)arg6; \ 4292 _argvec[7] = (unsigned long)arg7; \ 4293 _argvec[8] = (unsigned long)arg8; \ 4294 __asm__ volatile( \ 4295 VALGRIND_CFI_PROLOGUE \ 4296 "aghi 15,-184\n\t" \ 4297 "lg 2, 8(1)\n\t" \ 4298 "lg 3,16(1)\n\t" \ 4299 "lg 4,24(1)\n\t" \ 4300 "lg 5,32(1)\n\t" \ 4301 "lg 6,40(1)\n\t" \ 4302 "mvc 160(8,15), 48(1)\n\t" \ 4303 "mvc 168(8,15), 56(1)\n\t" \ 4304 "mvc 176(8,15), 64(1)\n\t" \ 4305 "lg 1, 0(1)\n\t" \ 4306 VALGRIND_CALL_NOREDIR_R1 \ 4307 "lgr %0, 2\n\t" \ 4308 "aghi 15,184\n\t" \ 4309 VALGRIND_CFI_EPILOGUE \ 4310 : /*out*/ "=d" (_res) \ 4311 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4312 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4313 ); \ 4314 lval = (__typeof__(lval)) _res; \ 4315 } while (0) 4316 4317#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4318 arg6, arg7 ,arg8, arg9) \ 4319 do { \ 4320 volatile OrigFn _orig = (orig); \ 4321 volatile unsigned long _argvec[10]; \ 4322 volatile unsigned long _res; \ 4323 _argvec[0] = (unsigned long)_orig.nraddr; \ 4324 _argvec[1] = (unsigned long)arg1; \ 4325 _argvec[2] = (unsigned long)arg2; \ 4326 _argvec[3] = (unsigned long)arg3; \ 4327 _argvec[4] = (unsigned long)arg4; \ 4328 _argvec[5] = (unsigned long)arg5; \ 4329 _argvec[6] = (unsigned long)arg6; \ 4330 _argvec[7] = (unsigned long)arg7; \ 4331 _argvec[8] = (unsigned long)arg8; \ 4332 _argvec[9] = (unsigned long)arg9; \ 4333 __asm__ volatile( \ 4334 VALGRIND_CFI_PROLOGUE \ 4335 "aghi 15,-192\n\t" \ 4336 "lg 2, 8(1)\n\t" \ 4337 "lg 3,16(1)\n\t" \ 4338 "lg 4,24(1)\n\t" \ 4339 "lg 5,32(1)\n\t" \ 4340 "lg 6,40(1)\n\t" \ 4341 "mvc 160(8,15), 48(1)\n\t" \ 4342 "mvc 168(8,15), 56(1)\n\t" \ 4343 "mvc 176(8,15), 64(1)\n\t" \ 4344 "mvc 184(8,15), 72(1)\n\t" \ 4345 "lg 1, 
0(1)\n\t" \ 4346 VALGRIND_CALL_NOREDIR_R1 \ 4347 "lgr %0, 2\n\t" \ 4348 "aghi 15,192\n\t" \ 4349 VALGRIND_CFI_EPILOGUE \ 4350 : /*out*/ "=d" (_res) \ 4351 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4352 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4353 ); \ 4354 lval = (__typeof__(lval)) _res; \ 4355 } while (0) 4356 4357#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4358 arg6, arg7 ,arg8, arg9, arg10) \ 4359 do { \ 4360 volatile OrigFn _orig = (orig); \ 4361 volatile unsigned long _argvec[11]; \ 4362 volatile unsigned long _res; \ 4363 _argvec[0] = (unsigned long)_orig.nraddr; \ 4364 _argvec[1] = (unsigned long)arg1; \ 4365 _argvec[2] = (unsigned long)arg2; \ 4366 _argvec[3] = (unsigned long)arg3; \ 4367 _argvec[4] = (unsigned long)arg4; \ 4368 _argvec[5] = (unsigned long)arg5; \ 4369 _argvec[6] = (unsigned long)arg6; \ 4370 _argvec[7] = (unsigned long)arg7; \ 4371 _argvec[8] = (unsigned long)arg8; \ 4372 _argvec[9] = (unsigned long)arg9; \ 4373 _argvec[10] = (unsigned long)arg10; \ 4374 __asm__ volatile( \ 4375 VALGRIND_CFI_PROLOGUE \ 4376 "aghi 15,-200\n\t" \ 4377 "lg 2, 8(1)\n\t" \ 4378 "lg 3,16(1)\n\t" \ 4379 "lg 4,24(1)\n\t" \ 4380 "lg 5,32(1)\n\t" \ 4381 "lg 6,40(1)\n\t" \ 4382 "mvc 160(8,15), 48(1)\n\t" \ 4383 "mvc 168(8,15), 56(1)\n\t" \ 4384 "mvc 176(8,15), 64(1)\n\t" \ 4385 "mvc 184(8,15), 72(1)\n\t" \ 4386 "mvc 192(8,15), 80(1)\n\t" \ 4387 "lg 1, 0(1)\n\t" \ 4388 VALGRIND_CALL_NOREDIR_R1 \ 4389 "lgr %0, 2\n\t" \ 4390 "aghi 15,200\n\t" \ 4391 VALGRIND_CFI_EPILOGUE \ 4392 : /*out*/ "=d" (_res) \ 4393 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4394 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4395 ); \ 4396 lval = (__typeof__(lval)) _res; \ 4397 } while (0) 4398 4399#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4400 arg6, arg7 ,arg8, arg9, arg10, arg11) \ 4401 do { \ 4402 volatile OrigFn _orig = (orig); \ 4403 volatile unsigned long _argvec[12]; \ 4404 volatile unsigned long _res; \ 4405 
_argvec[0] = (unsigned long)_orig.nraddr; \ 4406 _argvec[1] = (unsigned long)arg1; \ 4407 _argvec[2] = (unsigned long)arg2; \ 4408 _argvec[3] = (unsigned long)arg3; \ 4409 _argvec[4] = (unsigned long)arg4; \ 4410 _argvec[5] = (unsigned long)arg5; \ 4411 _argvec[6] = (unsigned long)arg6; \ 4412 _argvec[7] = (unsigned long)arg7; \ 4413 _argvec[8] = (unsigned long)arg8; \ 4414 _argvec[9] = (unsigned long)arg9; \ 4415 _argvec[10] = (unsigned long)arg10; \ 4416 _argvec[11] = (unsigned long)arg11; \ 4417 __asm__ volatile( \ 4418 VALGRIND_CFI_PROLOGUE \ 4419 "aghi 15,-208\n\t" \ 4420 "lg 2, 8(1)\n\t" \ 4421 "lg 3,16(1)\n\t" \ 4422 "lg 4,24(1)\n\t" \ 4423 "lg 5,32(1)\n\t" \ 4424 "lg 6,40(1)\n\t" \ 4425 "mvc 160(8,15), 48(1)\n\t" \ 4426 "mvc 168(8,15), 56(1)\n\t" \ 4427 "mvc 176(8,15), 64(1)\n\t" \ 4428 "mvc 184(8,15), 72(1)\n\t" \ 4429 "mvc 192(8,15), 80(1)\n\t" \ 4430 "mvc 200(8,15), 88(1)\n\t" \ 4431 "lg 1, 0(1)\n\t" \ 4432 VALGRIND_CALL_NOREDIR_R1 \ 4433 "lgr %0, 2\n\t" \ 4434 "aghi 15,208\n\t" \ 4435 VALGRIND_CFI_EPILOGUE \ 4436 : /*out*/ "=d" (_res) \ 4437 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4438 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4439 ); \ 4440 lval = (__typeof__(lval)) _res; \ 4441 } while (0) 4442 4443#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \ 4444 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\ 4445 do { \ 4446 volatile OrigFn _orig = (orig); \ 4447 volatile unsigned long _argvec[13]; \ 4448 volatile unsigned long _res; \ 4449 _argvec[0] = (unsigned long)_orig.nraddr; \ 4450 _argvec[1] = (unsigned long)arg1; \ 4451 _argvec[2] = (unsigned long)arg2; \ 4452 _argvec[3] = (unsigned long)arg3; \ 4453 _argvec[4] = (unsigned long)arg4; \ 4454 _argvec[5] = (unsigned long)arg5; \ 4455 _argvec[6] = (unsigned long)arg6; \ 4456 _argvec[7] = (unsigned long)arg7; \ 4457 _argvec[8] = (unsigned long)arg8; \ 4458 _argvec[9] = (unsigned long)arg9; \ 4459 _argvec[10] = (unsigned long)arg10; \ 4460 _argvec[11] = (unsigned 
long)arg11; \ 4461 _argvec[12] = (unsigned long)arg12; \ 4462 __asm__ volatile( \ 4463 VALGRIND_CFI_PROLOGUE \ 4464 "aghi 15,-216\n\t" \ 4465 "lg 2, 8(1)\n\t" \ 4466 "lg 3,16(1)\n\t" \ 4467 "lg 4,24(1)\n\t" \ 4468 "lg 5,32(1)\n\t" \ 4469 "lg 6,40(1)\n\t" \ 4470 "mvc 160(8,15), 48(1)\n\t" \ 4471 "mvc 168(8,15), 56(1)\n\t" \ 4472 "mvc 176(8,15), 64(1)\n\t" \ 4473 "mvc 184(8,15), 72(1)\n\t" \ 4474 "mvc 192(8,15), 80(1)\n\t" \ 4475 "mvc 200(8,15), 88(1)\n\t" \ 4476 "mvc 208(8,15), 96(1)\n\t" \ 4477 "lg 1, 0(1)\n\t" \ 4478 VALGRIND_CALL_NOREDIR_R1 \ 4479 "lgr %0, 2\n\t" \ 4480 "aghi 15,216\n\t" \ 4481 VALGRIND_CFI_EPILOGUE \ 4482 : /*out*/ "=d" (_res) \ 4483 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \ 4484 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \ 4485 ); \ 4486 lval = (__typeof__(lval)) _res; \ 4487 } while (0) 4488 4489 4490#endif /* PLAT_s390x_linux */ 4491 4492/* ------------------------- mips32-linux ----------------------- */ 4493 4494#if defined(PLAT_mips32_linux) 4495 4496/* These regs are trashed by the hidden call. */ 4497#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \ 4498"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \ 4499"$25", "$31" 4500 4501/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned 4502 long) == 4. 
*/ 4503 4504#define CALL_FN_W_v(lval, orig) \ 4505 do { \ 4506 volatile OrigFn _orig = (orig); \ 4507 volatile unsigned long _argvec[1]; \ 4508 volatile unsigned long _res; \ 4509 _argvec[0] = (unsigned long)_orig.nraddr; \ 4510 __asm__ volatile( \ 4511 "subu $29, $29, 8 \n\t" \ 4512 "sw $28, 0($29) \n\t" \ 4513 "sw $31, 4($29) \n\t" \ 4514 "subu $29, $29, 16 \n\t" \ 4515 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 4516 VALGRIND_CALL_NOREDIR_T9 \ 4517 "addu $29, $29, 16\n\t" \ 4518 "lw $28, 0($29) \n\t" \ 4519 "lw $31, 4($29) \n\t" \ 4520 "addu $29, $29, 8 \n\t" \ 4521 "move %0, $2\n" \ 4522 : /*out*/ "=r" (_res) \ 4523 : /*in*/ "0" (&_argvec[0]) \ 4524 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 4525 ); \ 4526 lval = (__typeof__(lval)) _res; \ 4527 } while (0) 4528 4529#define CALL_FN_W_W(lval, orig, arg1) \ 4530 do { \ 4531 volatile OrigFn _orig = (orig); \ 4532 volatile unsigned long _argvec[2]; \ 4533 volatile unsigned long _res; \ 4534 _argvec[0] = (unsigned long)_orig.nraddr; \ 4535 _argvec[1] = (unsigned long)(arg1); \ 4536 __asm__ volatile( \ 4537 "subu $29, $29, 8 \n\t" \ 4538 "sw $28, 0($29) \n\t" \ 4539 "sw $31, 4($29) \n\t" \ 4540 "subu $29, $29, 16 \n\t" \ 4541 "lw $4, 4(%1) \n\t" /* arg1*/ \ 4542 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 4543 VALGRIND_CALL_NOREDIR_T9 \ 4544 "addu $29, $29, 16 \n\t" \ 4545 "lw $28, 0($29) \n\t" \ 4546 "lw $31, 4($29) \n\t" \ 4547 "addu $29, $29, 8 \n\t" \ 4548 "move %0, $2\n" \ 4549 : /*out*/ "=r" (_res) \ 4550 : /*in*/ "0" (&_argvec[0]) \ 4551 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 4552 ); \ 4553 lval = (__typeof__(lval)) _res; \ 4554 } while (0) 4555 4556#define CALL_FN_W_WW(lval, orig, arg1,arg2) \ 4557 do { \ 4558 volatile OrigFn _orig = (orig); \ 4559 volatile unsigned long _argvec[3]; \ 4560 volatile unsigned long _res; \ 4561 _argvec[0] = (unsigned long)_orig.nraddr; \ 4562 _argvec[1] = (unsigned long)(arg1); \ 4563 _argvec[2] = (unsigned long)(arg2); \ 4564 __asm__ volatile( \ 4565 "subu $29, $29, 8 \n\t" \ 4566 
"sw $28, 0($29) \n\t" \ 4567 "sw $31, 4($29) \n\t" \ 4568 "subu $29, $29, 16 \n\t" \ 4569 "lw $4, 4(%1) \n\t" \ 4570 "lw $5, 8(%1) \n\t" \ 4571 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 4572 VALGRIND_CALL_NOREDIR_T9 \ 4573 "addu $29, $29, 16 \n\t" \ 4574 "lw $28, 0($29) \n\t" \ 4575 "lw $31, 4($29) \n\t" \ 4576 "addu $29, $29, 8 \n\t" \ 4577 "move %0, $2\n" \ 4578 : /*out*/ "=r" (_res) \ 4579 : /*in*/ "0" (&_argvec[0]) \ 4580 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 4581 ); \ 4582 lval = (__typeof__(lval)) _res; \ 4583 } while (0) 4584 4585#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \ 4586 do { \ 4587 volatile OrigFn _orig = (orig); \ 4588 volatile unsigned long _argvec[4]; \ 4589 volatile unsigned long _res; \ 4590 _argvec[0] = (unsigned long)_orig.nraddr; \ 4591 _argvec[1] = (unsigned long)(arg1); \ 4592 _argvec[2] = (unsigned long)(arg2); \ 4593 _argvec[3] = (unsigned long)(arg3); \ 4594 __asm__ volatile( \ 4595 "subu $29, $29, 8 \n\t" \ 4596 "sw $28, 0($29) \n\t" \ 4597 "sw $31, 4($29) \n\t" \ 4598 "subu $29, $29, 16 \n\t" \ 4599 "lw $4, 4(%1) \n\t" \ 4600 "lw $5, 8(%1) \n\t" \ 4601 "lw $6, 12(%1) \n\t" \ 4602 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 4603 VALGRIND_CALL_NOREDIR_T9 \ 4604 "addu $29, $29, 16 \n\t" \ 4605 "lw $28, 0($29) \n\t" \ 4606 "lw $31, 4($29) \n\t" \ 4607 "addu $29, $29, 8 \n\t" \ 4608 "move %0, $2\n" \ 4609 : /*out*/ "=r" (_res) \ 4610 : /*in*/ "0" (&_argvec[0]) \ 4611 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 4612 ); \ 4613 lval = (__typeof__(lval)) _res; \ 4614 } while (0) 4615 4616#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \ 4617 do { \ 4618 volatile OrigFn _orig = (orig); \ 4619 volatile unsigned long _argvec[5]; \ 4620 volatile unsigned long _res; \ 4621 _argvec[0] = (unsigned long)_orig.nraddr; \ 4622 _argvec[1] = (unsigned long)(arg1); \ 4623 _argvec[2] = (unsigned long)(arg2); \ 4624 _argvec[3] = (unsigned long)(arg3); \ 4625 _argvec[4] = (unsigned long)(arg4); \ 4626 __asm__ volatile( \ 4627 "subu $29, $29, 8 
\n\t" \ 4628 "sw $28, 0($29) \n\t" \ 4629 "sw $31, 4($29) \n\t" \ 4630 "subu $29, $29, 16 \n\t" \ 4631 "lw $4, 4(%1) \n\t" \ 4632 "lw $5, 8(%1) \n\t" \ 4633 "lw $6, 12(%1) \n\t" \ 4634 "lw $7, 16(%1) \n\t" \ 4635 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 4636 VALGRIND_CALL_NOREDIR_T9 \ 4637 "addu $29, $29, 16 \n\t" \ 4638 "lw $28, 0($29) \n\t" \ 4639 "lw $31, 4($29) \n\t" \ 4640 "addu $29, $29, 8 \n\t" \ 4641 "move %0, $2\n" \ 4642 : /*out*/ "=r" (_res) \ 4643 : /*in*/ "0" (&_argvec[0]) \ 4644 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 4645 ); \ 4646 lval = (__typeof__(lval)) _res; \ 4647 } while (0) 4648 4649#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \ 4650 do { \ 4651 volatile OrigFn _orig = (orig); \ 4652 volatile unsigned long _argvec[6]; \ 4653 volatile unsigned long _res; \ 4654 _argvec[0] = (unsigned long)_orig.nraddr; \ 4655 _argvec[1] = (unsigned long)(arg1); \ 4656 _argvec[2] = (unsigned long)(arg2); \ 4657 _argvec[3] = (unsigned long)(arg3); \ 4658 _argvec[4] = (unsigned long)(arg4); \ 4659 _argvec[5] = (unsigned long)(arg5); \ 4660 __asm__ volatile( \ 4661 "subu $29, $29, 8 \n\t" \ 4662 "sw $28, 0($29) \n\t" \ 4663 "sw $31, 4($29) \n\t" \ 4664 "lw $4, 20(%1) \n\t" \ 4665 "subu $29, $29, 24\n\t" \ 4666 "sw $4, 16($29) \n\t" \ 4667 "lw $4, 4(%1) \n\t" \ 4668 "lw $5, 8(%1) \n\t" \ 4669 "lw $6, 12(%1) \n\t" \ 4670 "lw $7, 16(%1) \n\t" \ 4671 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 4672 VALGRIND_CALL_NOREDIR_T9 \ 4673 "addu $29, $29, 24 \n\t" \ 4674 "lw $28, 0($29) \n\t" \ 4675 "lw $31, 4($29) \n\t" \ 4676 "addu $29, $29, 8 \n\t" \ 4677 "move %0, $2\n" \ 4678 : /*out*/ "=r" (_res) \ 4679 : /*in*/ "0" (&_argvec[0]) \ 4680 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 4681 ); \ 4682 lval = (__typeof__(lval)) _res; \ 4683 } while (0) 4684#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \ 4685 do { \ 4686 volatile OrigFn _orig = (orig); \ 4687 volatile unsigned long _argvec[7]; \ 4688 volatile unsigned long _res; \ 4689 _argvec[0] = 
(unsigned long)_orig.nraddr; \ 4690 _argvec[1] = (unsigned long)(arg1); \ 4691 _argvec[2] = (unsigned long)(arg2); \ 4692 _argvec[3] = (unsigned long)(arg3); \ 4693 _argvec[4] = (unsigned long)(arg4); \ 4694 _argvec[5] = (unsigned long)(arg5); \ 4695 _argvec[6] = (unsigned long)(arg6); \ 4696 __asm__ volatile( \ 4697 "subu $29, $29, 8 \n\t" \ 4698 "sw $28, 0($29) \n\t" \ 4699 "sw $31, 4($29) \n\t" \ 4700 "lw $4, 20(%1) \n\t" \ 4701 "subu $29, $29, 32\n\t" \ 4702 "sw $4, 16($29) \n\t" \ 4703 "lw $4, 24(%1) \n\t" \ 4704 "nop\n\t" \ 4705 "sw $4, 20($29) \n\t" \ 4706 "lw $4, 4(%1) \n\t" \ 4707 "lw $5, 8(%1) \n\t" \ 4708 "lw $6, 12(%1) \n\t" \ 4709 "lw $7, 16(%1) \n\t" \ 4710 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 4711 VALGRIND_CALL_NOREDIR_T9 \ 4712 "addu $29, $29, 32 \n\t" \ 4713 "lw $28, 0($29) \n\t" \ 4714 "lw $31, 4($29) \n\t" \ 4715 "addu $29, $29, 8 \n\t" \ 4716 "move %0, $2\n" \ 4717 : /*out*/ "=r" (_res) \ 4718 : /*in*/ "0" (&_argvec[0]) \ 4719 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 4720 ); \ 4721 lval = (__typeof__(lval)) _res; \ 4722 } while (0) 4723 4724#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 4725 arg7) \ 4726 do { \ 4727 volatile OrigFn _orig = (orig); \ 4728 volatile unsigned long _argvec[8]; \ 4729 volatile unsigned long _res; \ 4730 _argvec[0] = (unsigned long)_orig.nraddr; \ 4731 _argvec[1] = (unsigned long)(arg1); \ 4732 _argvec[2] = (unsigned long)(arg2); \ 4733 _argvec[3] = (unsigned long)(arg3); \ 4734 _argvec[4] = (unsigned long)(arg4); \ 4735 _argvec[5] = (unsigned long)(arg5); \ 4736 _argvec[6] = (unsigned long)(arg6); \ 4737 _argvec[7] = (unsigned long)(arg7); \ 4738 __asm__ volatile( \ 4739 "subu $29, $29, 8 \n\t" \ 4740 "sw $28, 0($29) \n\t" \ 4741 "sw $31, 4($29) \n\t" \ 4742 "lw $4, 20(%1) \n\t" \ 4743 "subu $29, $29, 32\n\t" \ 4744 "sw $4, 16($29) \n\t" \ 4745 "lw $4, 24(%1) \n\t" \ 4746 "sw $4, 20($29) \n\t" \ 4747 "lw $4, 28(%1) \n\t" \ 4748 "sw $4, 24($29) \n\t" \ 4749 "lw $4, 4(%1) \n\t" \ 4750 "lw $5, 
8(%1) \n\t" \ 4751 "lw $6, 12(%1) \n\t" \ 4752 "lw $7, 16(%1) \n\t" \ 4753 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 4754 VALGRIND_CALL_NOREDIR_T9 \ 4755 "addu $29, $29, 32 \n\t" \ 4756 "lw $28, 0($29) \n\t" \ 4757 "lw $31, 4($29) \n\t" \ 4758 "addu $29, $29, 8 \n\t" \ 4759 "move %0, $2\n" \ 4760 : /*out*/ "=r" (_res) \ 4761 : /*in*/ "0" (&_argvec[0]) \ 4762 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 4763 ); \ 4764 lval = (__typeof__(lval)) _res; \ 4765 } while (0) 4766 4767#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 4768 arg7,arg8) \ 4769 do { \ 4770 volatile OrigFn _orig = (orig); \ 4771 volatile unsigned long _argvec[9]; \ 4772 volatile unsigned long _res; \ 4773 _argvec[0] = (unsigned long)_orig.nraddr; \ 4774 _argvec[1] = (unsigned long)(arg1); \ 4775 _argvec[2] = (unsigned long)(arg2); \ 4776 _argvec[3] = (unsigned long)(arg3); \ 4777 _argvec[4] = (unsigned long)(arg4); \ 4778 _argvec[5] = (unsigned long)(arg5); \ 4779 _argvec[6] = (unsigned long)(arg6); \ 4780 _argvec[7] = (unsigned long)(arg7); \ 4781 _argvec[8] = (unsigned long)(arg8); \ 4782 __asm__ volatile( \ 4783 "subu $29, $29, 8 \n\t" \ 4784 "sw $28, 0($29) \n\t" \ 4785 "sw $31, 4($29) \n\t" \ 4786 "lw $4, 20(%1) \n\t" \ 4787 "subu $29, $29, 40\n\t" \ 4788 "sw $4, 16($29) \n\t" \ 4789 "lw $4, 24(%1) \n\t" \ 4790 "sw $4, 20($29) \n\t" \ 4791 "lw $4, 28(%1) \n\t" \ 4792 "sw $4, 24($29) \n\t" \ 4793 "lw $4, 32(%1) \n\t" \ 4794 "sw $4, 28($29) \n\t" \ 4795 "lw $4, 4(%1) \n\t" \ 4796 "lw $5, 8(%1) \n\t" \ 4797 "lw $6, 12(%1) \n\t" \ 4798 "lw $7, 16(%1) \n\t" \ 4799 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 4800 VALGRIND_CALL_NOREDIR_T9 \ 4801 "addu $29, $29, 40 \n\t" \ 4802 "lw $28, 0($29) \n\t" \ 4803 "lw $31, 4($29) \n\t" \ 4804 "addu $29, $29, 8 \n\t" \ 4805 "move %0, $2\n" \ 4806 : /*out*/ "=r" (_res) \ 4807 : /*in*/ "0" (&_argvec[0]) \ 4808 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 4809 ); \ 4810 lval = (__typeof__(lval)) _res; \ 4811 } while (0) 4812 4813#define 
CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 4814 arg7,arg8,arg9) \ 4815 do { \ 4816 volatile OrigFn _orig = (orig); \ 4817 volatile unsigned long _argvec[10]; \ 4818 volatile unsigned long _res; \ 4819 _argvec[0] = (unsigned long)_orig.nraddr; \ 4820 _argvec[1] = (unsigned long)(arg1); \ 4821 _argvec[2] = (unsigned long)(arg2); \ 4822 _argvec[3] = (unsigned long)(arg3); \ 4823 _argvec[4] = (unsigned long)(arg4); \ 4824 _argvec[5] = (unsigned long)(arg5); \ 4825 _argvec[6] = (unsigned long)(arg6); \ 4826 _argvec[7] = (unsigned long)(arg7); \ 4827 _argvec[8] = (unsigned long)(arg8); \ 4828 _argvec[9] = (unsigned long)(arg9); \ 4829 __asm__ volatile( \ 4830 "subu $29, $29, 8 \n\t" \ 4831 "sw $28, 0($29) \n\t" \ 4832 "sw $31, 4($29) \n\t" \ 4833 "lw $4, 20(%1) \n\t" \ 4834 "subu $29, $29, 40\n\t" \ 4835 "sw $4, 16($29) \n\t" \ 4836 "lw $4, 24(%1) \n\t" \ 4837 "sw $4, 20($29) \n\t" \ 4838 "lw $4, 28(%1) \n\t" \ 4839 "sw $4, 24($29) \n\t" \ 4840 "lw $4, 32(%1) \n\t" \ 4841 "sw $4, 28($29) \n\t" \ 4842 "lw $4, 36(%1) \n\t" \ 4843 "sw $4, 32($29) \n\t" \ 4844 "lw $4, 4(%1) \n\t" \ 4845 "lw $5, 8(%1) \n\t" \ 4846 "lw $6, 12(%1) \n\t" \ 4847 "lw $7, 16(%1) \n\t" \ 4848 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 4849 VALGRIND_CALL_NOREDIR_T9 \ 4850 "addu $29, $29, 40 \n\t" \ 4851 "lw $28, 0($29) \n\t" \ 4852 "lw $31, 4($29) \n\t" \ 4853 "addu $29, $29, 8 \n\t" \ 4854 "move %0, $2\n" \ 4855 : /*out*/ "=r" (_res) \ 4856 : /*in*/ "0" (&_argvec[0]) \ 4857 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 4858 ); \ 4859 lval = (__typeof__(lval)) _res; \ 4860 } while (0) 4861 4862#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \ 4863 arg7,arg8,arg9,arg10) \ 4864 do { \ 4865 volatile OrigFn _orig = (orig); \ 4866 volatile unsigned long _argvec[11]; \ 4867 volatile unsigned long _res; \ 4868 _argvec[0] = (unsigned long)_orig.nraddr; \ 4869 _argvec[1] = (unsigned long)(arg1); \ 4870 _argvec[2] = (unsigned long)(arg2); \ 4871 _argvec[3] = (unsigned long)(arg3); \ 
4872 _argvec[4] = (unsigned long)(arg4); \ 4873 _argvec[5] = (unsigned long)(arg5); \ 4874 _argvec[6] = (unsigned long)(arg6); \ 4875 _argvec[7] = (unsigned long)(arg7); \ 4876 _argvec[8] = (unsigned long)(arg8); \ 4877 _argvec[9] = (unsigned long)(arg9); \ 4878 _argvec[10] = (unsigned long)(arg10); \ 4879 __asm__ volatile( \ 4880 "subu $29, $29, 8 \n\t" \ 4881 "sw $28, 0($29) \n\t" \ 4882 "sw $31, 4($29) \n\t" \ 4883 "lw $4, 20(%1) \n\t" \ 4884 "subu $29, $29, 48\n\t" \ 4885 "sw $4, 16($29) \n\t" \ 4886 "lw $4, 24(%1) \n\t" \ 4887 "sw $4, 20($29) \n\t" \ 4888 "lw $4, 28(%1) \n\t" \ 4889 "sw $4, 24($29) \n\t" \ 4890 "lw $4, 32(%1) \n\t" \ 4891 "sw $4, 28($29) \n\t" \ 4892 "lw $4, 36(%1) \n\t" \ 4893 "sw $4, 32($29) \n\t" \ 4894 "lw $4, 40(%1) \n\t" \ 4895 "sw $4, 36($29) \n\t" \ 4896 "lw $4, 4(%1) \n\t" \ 4897 "lw $5, 8(%1) \n\t" \ 4898 "lw $6, 12(%1) \n\t" \ 4899 "lw $7, 16(%1) \n\t" \ 4900 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 4901 VALGRIND_CALL_NOREDIR_T9 \ 4902 "addu $29, $29, 48 \n\t" \ 4903 "lw $28, 0($29) \n\t" \ 4904 "lw $31, 4($29) \n\t" \ 4905 "addu $29, $29, 8 \n\t" \ 4906 "move %0, $2\n" \ 4907 : /*out*/ "=r" (_res) \ 4908 : /*in*/ "0" (&_argvec[0]) \ 4909 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 4910 ); \ 4911 lval = (__typeof__(lval)) _res; \ 4912 } while (0) 4913 4914#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \ 4915 arg6,arg7,arg8,arg9,arg10, \ 4916 arg11) \ 4917 do { \ 4918 volatile OrigFn _orig = (orig); \ 4919 volatile unsigned long _argvec[12]; \ 4920 volatile unsigned long _res; \ 4921 _argvec[0] = (unsigned long)_orig.nraddr; \ 4922 _argvec[1] = (unsigned long)(arg1); \ 4923 _argvec[2] = (unsigned long)(arg2); \ 4924 _argvec[3] = (unsigned long)(arg3); \ 4925 _argvec[4] = (unsigned long)(arg4); \ 4926 _argvec[5] = (unsigned long)(arg5); \ 4927 _argvec[6] = (unsigned long)(arg6); \ 4928 _argvec[7] = (unsigned long)(arg7); \ 4929 _argvec[8] = (unsigned long)(arg8); \ 4930 _argvec[9] = (unsigned long)(arg9); \ 4931 
_argvec[10] = (unsigned long)(arg10); \ 4932 _argvec[11] = (unsigned long)(arg11); \ 4933 __asm__ volatile( \ 4934 "subu $29, $29, 8 \n\t" \ 4935 "sw $28, 0($29) \n\t" \ 4936 "sw $31, 4($29) \n\t" \ 4937 "lw $4, 20(%1) \n\t" \ 4938 "subu $29, $29, 48\n\t" \ 4939 "sw $4, 16($29) \n\t" \ 4940 "lw $4, 24(%1) \n\t" \ 4941 "sw $4, 20($29) \n\t" \ 4942 "lw $4, 28(%1) \n\t" \ 4943 "sw $4, 24($29) \n\t" \ 4944 "lw $4, 32(%1) \n\t" \ 4945 "sw $4, 28($29) \n\t" \ 4946 "lw $4, 36(%1) \n\t" \ 4947 "sw $4, 32($29) \n\t" \ 4948 "lw $4, 40(%1) \n\t" \ 4949 "sw $4, 36($29) \n\t" \ 4950 "lw $4, 44(%1) \n\t" \ 4951 "sw $4, 40($29) \n\t" \ 4952 "lw $4, 4(%1) \n\t" \ 4953 "lw $5, 8(%1) \n\t" \ 4954 "lw $6, 12(%1) \n\t" \ 4955 "lw $7, 16(%1) \n\t" \ 4956 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 4957 VALGRIND_CALL_NOREDIR_T9 \ 4958 "addu $29, $29, 48 \n\t" \ 4959 "lw $28, 0($29) \n\t" \ 4960 "lw $31, 4($29) \n\t" \ 4961 "addu $29, $29, 8 \n\t" \ 4962 "move %0, $2\n" \ 4963 : /*out*/ "=r" (_res) \ 4964 : /*in*/ "0" (&_argvec[0]) \ 4965 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 4966 ); \ 4967 lval = (__typeof__(lval)) _res; \ 4968 } while (0) 4969 4970#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \ 4971 arg6,arg7,arg8,arg9,arg10, \ 4972 arg11,arg12) \ 4973 do { \ 4974 volatile OrigFn _orig = (orig); \ 4975 volatile unsigned long _argvec[13]; \ 4976 volatile unsigned long _res; \ 4977 _argvec[0] = (unsigned long)_orig.nraddr; \ 4978 _argvec[1] = (unsigned long)(arg1); \ 4979 _argvec[2] = (unsigned long)(arg2); \ 4980 _argvec[3] = (unsigned long)(arg3); \ 4981 _argvec[4] = (unsigned long)(arg4); \ 4982 _argvec[5] = (unsigned long)(arg5); \ 4983 _argvec[6] = (unsigned long)(arg6); \ 4984 _argvec[7] = (unsigned long)(arg7); \ 4985 _argvec[8] = (unsigned long)(arg8); \ 4986 _argvec[9] = (unsigned long)(arg9); \ 4987 _argvec[10] = (unsigned long)(arg10); \ 4988 _argvec[11] = (unsigned long)(arg11); \ 4989 _argvec[12] = (unsigned long)(arg12); \ 4990 __asm__ volatile( \ 4991 "subu 
$29, $29, 8 \n\t" \ 4992 "sw $28, 0($29) \n\t" \ 4993 "sw $31, 4($29) \n\t" \ 4994 "lw $4, 20(%1) \n\t" \ 4995 "subu $29, $29, 56\n\t" \ 4996 "sw $4, 16($29) \n\t" \ 4997 "lw $4, 24(%1) \n\t" \ 4998 "sw $4, 20($29) \n\t" \ 4999 "lw $4, 28(%1) \n\t" \ 5000 "sw $4, 24($29) \n\t" \ 5001 "lw $4, 32(%1) \n\t" \ 5002 "sw $4, 28($29) \n\t" \ 5003 "lw $4, 36(%1) \n\t" \ 5004 "sw $4, 32($29) \n\t" \ 5005 "lw $4, 40(%1) \n\t" \ 5006 "sw $4, 36($29) \n\t" \ 5007 "lw $4, 44(%1) \n\t" \ 5008 "sw $4, 40($29) \n\t" \ 5009 "lw $4, 48(%1) \n\t" \ 5010 "sw $4, 44($29) \n\t" \ 5011 "lw $4, 4(%1) \n\t" \ 5012 "lw $5, 8(%1) \n\t" \ 5013 "lw $6, 12(%1) \n\t" \ 5014 "lw $7, 16(%1) \n\t" \ 5015 "lw $25, 0(%1) \n\t" /* target->t9 */ \ 5016 VALGRIND_CALL_NOREDIR_T9 \ 5017 "addu $29, $29, 56 \n\t" \ 5018 "lw $28, 0($29) \n\t" \ 5019 "lw $31, 4($29) \n\t" \ 5020 "addu $29, $29, 8 \n\t" \ 5021 "move %0, $2\n" \ 5022 : /*out*/ "=r" (_res) \ 5023 : /*in*/ "r" (&_argvec[0]) \ 5024 : /*trash*/ "memory", __CALLER_SAVED_REGS \ 5025 ); \ 5026 lval = (__typeof__(lval)) _res; \ 5027 } while (0) 5028 5029#endif /* PLAT_mips32_linux */ 5030 5031/* ------------------------- mips64-linux ------------------------- */ 5032 5033#if defined(PLAT_mips64_linux) 5034 5035/* These regs are trashed by the hidden call. */ 5036#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \ 5037"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \ 5038"$25", "$31" 5039 5040/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned 5041 long) == 4. 
*/

/* NOTE(review): comments added during review; code unchanged.  Same
   pattern as the mips32 macros: args 1..8 are loaded into $4..$11,
   args 9..12 are stored on the stack below $29 (dsubu/daddu adjust
   it), the target goes in $25 (t9) and is called via
   VALGRIND_CALL_NOREDIR_T9, and the result is read from $2. */

/* Call a 0-arg function; only the target address is in _argvec. */
#define CALL_FN_W_v(lval, orig)                          \
   do {                                                  \
      volatile OrigFn        _orig = (orig);             \
      volatile unsigned long _argvec[1];                 \
      volatile unsigned long _res;                       \
      _argvec[0] = (unsigned long)_orig.nraddr;          \
      __asm__ volatile(                                  \
         "ld $25, 0(%1)\n\t"  /* target->t9 */           \
         VALGRIND_CALL_NOREDIR_T9                        \
         "move %0, $2\n"                                 \
         : /*out*/ "=r" (_res)                           \
         : /*in*/ "0" (&_argvec[0])                      \
         : /*trash*/ "memory", __CALLER_SAVED_REGS       \
      );                                                 \
      lval = (__typeof__(lval)) _res;                    \
   } while (0)

/* Call a 1-arg function; arg1 in $4. */
#define CALL_FN_W_W(lval, orig, arg1)                    \
   do {                                                  \
      volatile OrigFn        _orig = (orig);             \
      volatile unsigned long _argvec[2];                 \
      volatile unsigned long _res;                       \
      _argvec[0] = (unsigned long)_orig.nraddr;          \
      _argvec[1] = (unsigned long)(arg1);                \
      __asm__ volatile(                                  \
         "ld $4, 8(%1)\n\t"   /* arg1*/                  \
         "ld $25, 0(%1)\n\t"  /* target->t9 */           \
         VALGRIND_CALL_NOREDIR_T9                        \
         "move %0, $2\n"                                 \
         : /*out*/ "=r" (_res)                           \
         : /*in*/ "r" (&_argvec[0])                      \
         : /*trash*/ "memory", __CALLER_SAVED_REGS       \
      );                                                 \
      lval = (__typeof__(lval)) _res;                    \
   } while (0)

/* Call a 2-arg function; args in $4..$5. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)              \
   do {                                                  \
      volatile OrigFn        _orig = (orig);             \
      volatile unsigned long _argvec[3];                 \
      volatile unsigned long _res;                       \
      _argvec[0] = (unsigned long)_orig.nraddr;          \
      _argvec[1] = (unsigned long)(arg1);                \
      _argvec[2] = (unsigned long)(arg2);                \
      __asm__ volatile(                                  \
         "ld $4, 8(%1)\n\t"                              \
         "ld $5, 16(%1)\n\t"                             \
         "ld $25, 0(%1)\n\t"  /* target->t9 */           \
         VALGRIND_CALL_NOREDIR_T9                        \
         "move %0, $2\n"                                 \
         : /*out*/ "=r" (_res)                           \
         : /*in*/ "r" (&_argvec[0])                      \
         : /*trash*/ "memory", __CALLER_SAVED_REGS       \
      );                                                 \
      lval = (__typeof__(lval)) _res;                    \
   } while (0)

/* Call a 3-arg function; args in $4..$6. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)        \
   do {                                                  \
      volatile OrigFn        _orig = (orig);             \
      volatile unsigned long _argvec[4];                 \
      volatile unsigned long _res;                       \
      _argvec[0] = (unsigned long)_orig.nraddr;          \
      _argvec[1] = (unsigned long)(arg1);                \
      _argvec[2] = (unsigned long)(arg2);                \
      _argvec[3] = (unsigned long)(arg3);                \
      __asm__ volatile(                                  \
         "ld $4, 8(%1)\n\t"                              \
         "ld $5, 16(%1)\n\t"                             \
         "ld $6, 24(%1)\n\t"                             \
         "ld $25, 0(%1)\n\t"  /* target->t9 */           \
         VALGRIND_CALL_NOREDIR_T9                        \
         "move %0, $2\n"                                 \
         : /*out*/ "=r" (_res)                           \
         : /*in*/ "r" (&_argvec[0])                      \
         : /*trash*/ "memory", __CALLER_SAVED_REGS       \
      );                                                 \
      lval = (__typeof__(lval)) _res;                    \
   } while (0)

/* Call a 4-arg function; args in $4..$7. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)  \
   do {                                                  \
      volatile OrigFn        _orig = (orig);             \
      volatile unsigned long _argvec[5];                 \
      volatile unsigned long _res;                       \
      _argvec[0] = (unsigned long)_orig.nraddr;          \
      _argvec[1] = (unsigned long)(arg1);                \
      _argvec[2] = (unsigned long)(arg2);                \
      _argvec[3] = (unsigned long)(arg3);                \
      _argvec[4] = (unsigned long)(arg4);                \
      __asm__ volatile(                                  \
         "ld $4, 8(%1)\n\t"                              \
         "ld $5, 16(%1)\n\t"                             \
         "ld $6, 24(%1)\n\t"                             \
         "ld $7, 32(%1)\n\t"                             \
         "ld $25, 0(%1)\n\t"  /* target->t9 */           \
         VALGRIND_CALL_NOREDIR_T9                        \
         "move %0, $2\n"                                 \
         : /*out*/ "=r" (_res)                           \
         : /*in*/ "r" (&_argvec[0])                      \
         : /*trash*/ "memory", __CALLER_SAVED_REGS       \
      );                                                 \
      lval = (__typeof__(lval)) _res;                    \
   } while (0)

/* Call a 5-arg function; args in $4..$8. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)  \
   do {                                                  \
      volatile OrigFn        _orig = (orig);             \
      volatile unsigned long _argvec[6];                 \
      volatile unsigned long _res;                       \
      _argvec[0] = (unsigned long)_orig.nraddr;          \
      _argvec[1] = (unsigned long)(arg1);                \
      _argvec[2] = (unsigned long)(arg2);                \
      _argvec[3] = (unsigned long)(arg3);                \
      _argvec[4] = (unsigned long)(arg4);                \
      _argvec[5] = (unsigned long)(arg5);                \
      __asm__ volatile(                                  \
         "ld $4, 8(%1)\n\t"                              \
         "ld $5, 16(%1)\n\t"                             \
         "ld $6, 24(%1)\n\t"                             \
         "ld $7, 32(%1)\n\t"                             \
         "ld $8, 40(%1)\n\t"                             \
         "ld $25, 0(%1)\n\t"  /* target->t9 */           \
         VALGRIND_CALL_NOREDIR_T9                        \
         "move %0, $2\n"                                 \
         : /*out*/ "=r" (_res)                           \
         : /*in*/ "r" (&_argvec[0])                      \
         : /*trash*/ "memory", __CALLER_SAVED_REGS       \
      );                                                 \
      lval = (__typeof__(lval)) _res;                    \
   } while (0)

/* Call a 6-arg function; args in $4..$9. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                  \
      volatile OrigFn        _orig = (orig);             \
      volatile unsigned long _argvec[7];                 \
      volatile unsigned long _res;                       \
      _argvec[0] = (unsigned long)_orig.nraddr;          \
      _argvec[1] = (unsigned long)(arg1);                \
      _argvec[2] = (unsigned long)(arg2);                \
      _argvec[3] = (unsigned long)(arg3);                \
      _argvec[4] = (unsigned long)(arg4);                \
      _argvec[5] = (unsigned long)(arg5);                \
      _argvec[6] = (unsigned long)(arg6);                \
      __asm__ volatile(                                  \
         "ld $4, 8(%1)\n\t"                              \
         "ld $5, 16(%1)\n\t"                             \
         "ld $6, 24(%1)\n\t"                             \
         "ld $7, 32(%1)\n\t"                             \
         "ld $8, 40(%1)\n\t"                             \
         "ld $9, 48(%1)\n\t"                             \
         "ld $25, 0(%1)\n\t"  /* target->t9 */           \
         VALGRIND_CALL_NOREDIR_T9                        \
         "move %0, $2\n"                                 \
         : /*out*/ "=r" (_res)                           \
         : /*in*/ "r" (&_argvec[0])                      \
         : /*trash*/ "memory", __CALLER_SAVED_REGS       \
      );                                                 \
      lval = (__typeof__(lval)) _res;                    \
   } while (0)

/* Call a 7-arg function; args in $4..$10. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7)                               \
   do {                                                  \
      volatile OrigFn        _orig = (orig);             \
      volatile unsigned long _argvec[8];                 \
      volatile unsigned long _res;                       \
      _argvec[0] = (unsigned long)_orig.nraddr;          \
      _argvec[1] = (unsigned long)(arg1);                \
      _argvec[2] = (unsigned long)(arg2);                \
      _argvec[3] = (unsigned long)(arg3);                \
      _argvec[4] = (unsigned long)(arg4);                \
      _argvec[5] = (unsigned long)(arg5);                \
      _argvec[6] = (unsigned long)(arg6);                \
      _argvec[7] = (unsigned long)(arg7);                \
      __asm__ volatile(                                  \
         "ld $4, 8(%1)\n\t"                              \
         "ld $5, 16(%1)\n\t"                             \
         "ld $6, 24(%1)\n\t"                             \
         "ld $7, 32(%1)\n\t"                             \
         "ld $8, 40(%1)\n\t"                             \
         "ld $9, 48(%1)\n\t"                             \
         "ld $10, 56(%1)\n\t"                            \
         "ld $25, 0(%1) \n\t"  /* target->t9 */          \
         VALGRIND_CALL_NOREDIR_T9                        \
         "move %0, $2\n"                                 \
         : /*out*/ "=r" (_res)                           \
         : /*in*/ "r" (&_argvec[0])                      \
         : /*trash*/ "memory", __CALLER_SAVED_REGS       \
      );                                                 \
      lval = (__typeof__(lval)) _res;                    \
   } while (0)

/* Call an 8-arg function; args in $4..$11. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8)                          \
   do {                                                  \
      volatile OrigFn        _orig = (orig);             \
      volatile unsigned long _argvec[9];                 \
      volatile unsigned long _res;                       \
      _argvec[0] = (unsigned long)_orig.nraddr;          \
      _argvec[1] = (unsigned long)(arg1);                \
      _argvec[2] = (unsigned long)(arg2);                \
      _argvec[3] = (unsigned long)(arg3);                \
      _argvec[4] = (unsigned long)(arg4);                \
      _argvec[5] = (unsigned long)(arg5);                \
      _argvec[6] = (unsigned long)(arg6);                \
      _argvec[7] = (unsigned long)(arg7);                \
      _argvec[8] = (unsigned long)(arg8);                \
      __asm__ volatile(                                  \
         "ld $4, 8(%1)\n\t"                              \
         "ld $5, 16(%1)\n\t"                             \
         "ld $6, 24(%1)\n\t"                             \
         "ld $7, 32(%1)\n\t"                             \
         "ld $8, 40(%1)\n\t"                             \
         "ld $9, 48(%1)\n\t"                             \
         "ld $10, 56(%1)\n\t"                            \
         "ld $11, 64(%1)\n\t"                            \
         "ld $25, 0(%1) \n\t"  /* target->t9 */          \
         VALGRIND_CALL_NOREDIR_T9                        \
         "move %0, $2\n"                                 \
         : /*out*/ "=r" (_res)                           \
         : /*in*/ "r" (&_argvec[0])                      \
         : /*trash*/ "memory", __CALLER_SAVED_REGS       \
      );                                                 \
      lval = (__typeof__(lval)) _res;                    \
   } while (0)

/* Call a 9-arg function; arg9 goes on the stack at 0($29). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9)                     \
   do {                                                  \
      volatile OrigFn        _orig = (orig);             \
      volatile unsigned long _argvec[10];                \
      volatile unsigned long _res;                       \
      _argvec[0] = (unsigned long)_orig.nraddr;          \
      _argvec[1] = (unsigned long)(arg1);                \
      _argvec[2] = (unsigned long)(arg2);                \
      _argvec[3] = (unsigned long)(arg3);                \
      _argvec[4] = (unsigned long)(arg4);                \
      _argvec[5] = (unsigned long)(arg5);                \
      _argvec[6] = (unsigned long)(arg6);                \
      _argvec[7] = (unsigned long)(arg7);                \
      _argvec[8] = (unsigned long)(arg8);                \
      _argvec[9] = (unsigned long)(arg9);                \
      __asm__ volatile(                                  \
         "dsubu $29, $29, 8\n\t"                         \
         "ld $4, 72(%1)\n\t"                             \
         "sd $4, 0($29)\n\t"                             \
         "ld $4, 8(%1)\n\t"                              \
         "ld $5, 16(%1)\n\t"                             \
         "ld $6, 24(%1)\n\t"                             \
         "ld $7, 32(%1)\n\t"                             \
         "ld $8, 40(%1)\n\t"                             \
         "ld $9, 48(%1)\n\t"                             \
         "ld $10, 56(%1)\n\t"                            \
         "ld $11, 64(%1)\n\t"                            \
         "ld $25, 0(%1)\n\t"  /* target->t9 */           \
         VALGRIND_CALL_NOREDIR_T9                        \
         "daddu $29, $29, 8\n\t"                         \
         "move %0, $2\n"                                 \
         : /*out*/ "=r" (_res)                           \
         : /*in*/ "r" (&_argvec[0])                      \
         : /*trash*/ "memory", __CALLER_SAVED_REGS       \
      );                                                 \
      lval = (__typeof__(lval)) _res;                    \
   } while (0)

/* Call a 10-arg function; args 9..10 go on the stack. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10)              \
   do {                                                  \
      volatile OrigFn        _orig = (orig);             \
      volatile unsigned long _argvec[11];                \
      volatile unsigned long _res;                       \
      _argvec[0] = (unsigned long)_orig.nraddr;          \
      _argvec[1] = (unsigned long)(arg1);                \
      _argvec[2] = (unsigned long)(arg2);                \
      _argvec[3] = (unsigned long)(arg3);                \
      _argvec[4] = (unsigned long)(arg4);                \
      _argvec[5] = (unsigned long)(arg5);                \
      _argvec[6] = (unsigned long)(arg6);                \
      _argvec[7] = (unsigned long)(arg7);                \
      _argvec[8] = (unsigned long)(arg8);                \
      _argvec[9] = (unsigned long)(arg9);                \
      _argvec[10] = (unsigned long)(arg10);              \
      __asm__ volatile(                                  \
         "dsubu $29, $29, 16\n\t"                        \
         "ld $4, 72(%1)\n\t"                             \
         "sd $4, 0($29)\n\t"                             \
         "ld $4, 80(%1)\n\t"                             \
         "sd $4, 8($29)\n\t"                             \
         "ld $4, 8(%1)\n\t"                              \
         "ld $5, 16(%1)\n\t"                             \
         "ld $6, 24(%1)\n\t"                             \
         "ld $7, 32(%1)\n\t"                             \
         "ld $8, 40(%1)\n\t"                             \
         "ld $9, 48(%1)\n\t"                             \
         "ld $10, 56(%1)\n\t"                            \
         "ld $11, 64(%1)\n\t"                            \
         "ld $25, 0(%1)\n\t"  /* target->t9 */           \
         VALGRIND_CALL_NOREDIR_T9                        \
         "daddu $29, $29, 16\n\t"                        \
         "move %0, $2\n"                                 \
         : /*out*/ "=r" (_res)                           \
         : /*in*/ "r" (&_argvec[0])                      \
         : /*trash*/ "memory", __CALLER_SAVED_REGS       \
      );                                                 \
      lval = (__typeof__(lval)) _res;                    \
   } while (0)

/* Call an 11-arg function; args 9..11 go on the stack. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10,         \
                      arg11)                             \
   do {                                                  \
      volatile OrigFn        _orig = (orig);             \
      volatile unsigned long _argvec[12];                \
      volatile unsigned long _res;                       \
      _argvec[0] = (unsigned long)_orig.nraddr;          \
      _argvec[1] = (unsigned long)(arg1);                \
      _argvec[2] = (unsigned long)(arg2);                \
      _argvec[3] = (unsigned long)(arg3);                \
      _argvec[4] = (unsigned long)(arg4);                \
      _argvec[5] = (unsigned long)(arg5);                \
      _argvec[6] = (unsigned long)(arg6);                \
      _argvec[7] = (unsigned long)(arg7);                \
      _argvec[8] = (unsigned long)(arg8);                \
      _argvec[9] = (unsigned long)(arg9);                \
      _argvec[10] = (unsigned long)(arg10);              \
      _argvec[11] = (unsigned long)(arg11);              \
      __asm__ volatile(                                  \
         "dsubu $29, $29, 24\n\t"                        \
         "ld $4, 72(%1)\n\t"                             \
         "sd $4, 0($29)\n\t"                             \
         "ld $4, 80(%1)\n\t"                             \
         "sd $4, 8($29)\n\t"                             \
         "ld $4, 88(%1)\n\t"                             \
         "sd $4, 16($29)\n\t"                            \
         "ld $4, 8(%1)\n\t"                              \
         "ld $5, 16(%1)\n\t"                             \
         "ld $6, 24(%1)\n\t"                             \
         "ld $7, 32(%1)\n\t"                             \
         "ld $8, 40(%1)\n\t"                             \
         "ld $9, 48(%1)\n\t"                             \
         "ld $10, 56(%1)\n\t"                            \
         "ld $11, 64(%1)\n\t"                            \
         "ld $25, 0(%1)\n\t"  /* target->t9 */           \
         VALGRIND_CALL_NOREDIR_T9                        \
         "daddu $29, $29, 24\n\t"                        \
         "move %0, $2\n"                                 \
         : /*out*/ "=r" (_res)                           \
         : /*in*/ "r" (&_argvec[0])                      \
         : /*trash*/ "memory", __CALLER_SAVED_REGS       \
      );                                                 \
      lval = (__typeof__(lval)) _res;                    \
   } while (0)

/* Call a 12-arg function; args 9..12 go on the stack. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10,         \
                      arg11,arg12)                       \
   do {                                                  \
      volatile OrigFn        _orig = (orig);             \
      volatile unsigned long _argvec[13];                \
      volatile unsigned long _res;                       \
      _argvec[0] = (unsigned long)_orig.nraddr;          \
      _argvec[1] = (unsigned long)(arg1);                \
      _argvec[2] = (unsigned long)(arg2);                \
      _argvec[3] = (unsigned long)(arg3);                \
      _argvec[4] = (unsigned long)(arg4);                \
      _argvec[5] = (unsigned long)(arg5);                \
      _argvec[6] = (unsigned long)(arg6);                \
      _argvec[7] = (unsigned long)(arg7);                \
      _argvec[8] = (unsigned long)(arg8);                \
      _argvec[9] = (unsigned long)(arg9);                \
      _argvec[10] = (unsigned long)(arg10);              \
      _argvec[11] = (unsigned long)(arg11);              \
      _argvec[12] = (unsigned long)(arg12);              \
      __asm__ volatile(                                  \
         "dsubu $29, $29, 32\n\t"                        \
         "ld $4, 72(%1)\n\t"                             \
         "sd $4, 0($29)\n\t"                             \
         "ld $4, 80(%1)\n\t"                             \
         "sd $4, 8($29)\n\t"                             \
         "ld $4, 88(%1)\n\t"                             \
         "sd $4, 16($29)\n\t"                            \
         "ld $4, 96(%1)\n\t"                             \
         "sd $4, 24($29)\n\t"                            \
         "ld $4, 8(%1)\n\t"                              \
         "ld $5, 16(%1)\n\t"                             \
         "ld $6, 24(%1)\n\t"                             \
         "ld $7, 32(%1)\n\t"                             \
         "ld $8, 40(%1)\n\t"                             \
         "ld $9, 48(%1)\n\t"                             \
         "ld $10, 56(%1)\n\t"                            \
         "ld $11, 64(%1)\n\t"                            \
         "ld $25, 0(%1)\n\t"  /* target->t9 */           \
         VALGRIND_CALL_NOREDIR_T9                        \
         "daddu $29, $29, 32\n\t"                        \
         "move %0, $2\n"                                 \
         : /*out*/ "=r" (_res)                           \
         : /*in*/ "r" (&_argvec[0])                      \
         : /*trash*/ "memory", __CALLER_SAVED_REGS       \
      );                                                 \
      lval = (__typeof__(lval)) _res;                    \
   } while (0)

#endif /* PLAT_mips64_linux */


/* ------------------------------------------------------------------ */
/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS.               */
/*                                                                    */
/* ------------------------------------------------------------------ */

/* Some request codes.  There are many more of these, but most are not
   exposed to end-user view.  These are the public ones, all of the
   form 0x1000 + small_number.

   Core ones are in the range 0x00000000--0x0000ffff.  The non-public
   ones start at 0x2000.
*/

/* These macros are used by tools -- they must be public, but don't
   embed them into other programs. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))

/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
   This enum comprises an ABI exported by Valgrind to programs
   which use client requests.  DO NOT CHANGE THE ORDER OF THESE
   ENTRIES, NOR DELETE ANY -- add new ones at the end. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK   = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901
   } Vg_ClientRequest;

/* Non-GNU compilers have no __extension__ keyword; make it a no-op. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif


/* Returns the number of Valgrinds this code is running under.  That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc.
   NOTE(review): the trailing backslash after the final line below is
   present in the original and simply continues the macro onto the
   following blank line; kept as-is. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)                    \


/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1].  Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area.  Returns no value. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
                                    _qzz_addr, _qzz_len, 0, 0, 0)


/* These requests are for getting Valgrind itself to print something.
   Possibly with a backtrace.  This is a really ugly hack.  The return value
   is the number of characters printed, excluding the "**<pid>** " part at the
   start and the backtrace (if present). */

/* NOTE(review): '&&' binds tighter than '||' in the #if below, so it
   reads as __GNUC__ || (__INTEL_COMPILER && !_MSC_VER); kept as
   upstream. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it. */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF(const char *format, ...)
5586{ 5587#if defined(NVALGRIND) 5588 return 0; 5589#else /* NVALGRIND */ 5590#if defined(_MSC_VER) || defined(__MINGW64__) 5591 uintptr_t _qzz_res; 5592#else 5593 unsigned long _qzz_res; 5594#endif 5595 va_list vargs; 5596 va_start(vargs, format); 5597#if defined(_MSC_VER) || defined(__MINGW64__) 5598 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0, 5599 VG_USERREQ__PRINTF_VALIST_BY_REF, 5600 (uintptr_t)format, 5601 (uintptr_t)&vargs, 5602 0, 0, 0); 5603#else 5604 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0, 5605 VG_USERREQ__PRINTF_VALIST_BY_REF, 5606 (unsigned long)format, 5607 (unsigned long)&vargs, 5608 0, 0, 0); 5609#endif 5610 va_end(vargs); 5611 return (int)_qzz_res; 5612#endif /* NVALGRIND */ 5613} 5614 5615#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER) 5616static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...) 5617 __attribute__((format(__printf__, 1, 2), __unused__)); 5618#endif 5619static int 5620#if defined(_MSC_VER) 5621__inline 5622#endif 5623VALGRIND_PRINTF_BACKTRACE(const char *format, ...) 5624{ 5625#if defined(NVALGRIND) 5626 return 0; 5627#else /* NVALGRIND */ 5628#if defined(_MSC_VER) || defined(__MINGW64__) 5629 uintptr_t _qzz_res; 5630#else 5631 unsigned long _qzz_res; 5632#endif 5633 va_list vargs; 5634 va_start(vargs, format); 5635#if defined(_MSC_VER) || defined(__MINGW64__) 5636 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0, 5637 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF, 5638 (uintptr_t)format, 5639 (uintptr_t)&vargs, 5640 0, 0, 0); 5641#else 5642 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0, 5643 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF, 5644 (unsigned long)format, 5645 (unsigned long)&vargs, 5646 0, 0, 0); 5647#endif 5648 va_end(vargs); 5649 return (int)_qzz_res; 5650#endif /* NVALGRIND */ 5651} 5652 5653 5654/* These requests allow control to move from the simulated CPU to the 5655 real CPU, calling an arbitary function. 5656 5657 Note that the current ThreadId is inserted as the first argument. 
   So this call:

     VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)

   requires f to have this signature:

     Word f(Word tid, Word arg1, Word arg2)

   where "Word" is a word-sized type.

   Note that these client requests are not entirely reliable.  For example,
   if you call a function with them that subsequently calls printf(),
   there's a high chance Valgrind will crash.  Generally, your prospects of
   these working are made higher if the called function does not refer to
   any global variables, and does not refer to any libc or other functions
   (printf et al).  Any kind of entanglement with libc or dynamic linking is
   likely to have a bad outcome, for tricky reasons which we've grappled
   with a lot in the past.
*/
/* Call _qyy_fn (no arguments beyond the implicit tid) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

/* Call _qyy_fn(tid, _qyy_arg1) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

/* Call _qyy_fn(tid, _qyy_arg1, _qyy_arg2) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

/* Call _qyy_fn(tid, _qyy_arg1, _qyy_arg2, _qyy_arg3) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL3,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2,         \
                                    _qyy_arg3, 0)


/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted.
*/
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)

/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
   when heap blocks are allocated in order to give accurate results.  This
   happens automatically for the standard allocator functions such as
   malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
   delete[], etc.

   But if your program uses a custom allocator, this doesn't automatically
   happen, and Valgrind will not do as well.  For example, if you allocate
   superblocks with mmap() and then allocate chunks of the superblocks, all
   Valgrind's observations will be at the mmap() level and it won't know that
   the chunks should be considered separate entities.  In Memcheck's case,
   that means you probably won't get heap block overrun detection (because
   there won't be redzones marked as unaddressable) and you definitely won't
   get any leak detection.

   The following client requests allow a custom allocator to be annotated so
   that it can be handled accurately by Valgrind.

   VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
   by a malloc()-like function.  For Memcheck (an illustrative case), this
   does two things:

   - It records that the block has been allocated.  This means any addresses
     within the block mentioned in error messages will be
     identified as belonging to the block.  It also means that if the block
     isn't freed it will be detected by the leak checker.

   - It marks the block as being addressable and undefined (if 'is_zeroed' is
     not set), or addressable and defined (if 'is_zeroed' is set).  This
     controls how accesses to the block by the program are handled.

   'addr' is the start of the usable block (ie. after any
   redzone), 'sizeB' is its size.
   'rzB' is the redzone size if the allocator
   can apply redzones -- these are blocks of padding at the start and end of
   each block.  Adding redzones is recommended as it makes it much more likely
   Valgrind will spot block overruns.  `is_zeroed' indicates if the memory is
   zeroed (or filled with another predictable value), as is the case for
   calloc().

   VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
   heap block -- that will be used by the client program -- is allocated.
   It's best to put it at the outermost level of the allocator if possible;
   for example, if you have a function my_alloc() which calls
   internal_alloc(), and the client request is put inside internal_alloc(),
   stack traces relating to the heap block will contain entries for both
   my_alloc() and internal_alloc(), which is probably not what you want.

   For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
   custom blocks from within a heap block, B, that has been allocated with
   malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
   -- the custom blocks will take precedence.

   VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK.  For
   Memcheck, it does two things:

   - It records that the block has been deallocated.  This assumes that the
     block was annotated as having been allocated via
     VALGRIND_MALLOCLIKE_BLOCK.  Otherwise, an error will be issued.

   - It marks the block as being unaddressable.

   VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
   heap block is deallocated.

   VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation.  For
   Memcheck, it does four things:

   - It records that the size of a block has been changed.  This assumes that
     the block was annotated as having been allocated via
     VALGRIND_MALLOCLIKE_BLOCK.  Otherwise, an error will be issued.

   - If the block shrunk, it marks the freed memory as being unaddressable.

   - If the block grew, it marks the new area as undefined and defines a red
     zone past the end of the new block.

   - The V-bits of the overlap between the old and the new block are preserved.

   VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
   and before deallocation of the old block.

   In many cases, these three client requests will not be enough to get your
   allocator working well with Memcheck.  More specifically, if your allocator
   writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
   will be necessary to mark the memory as addressable just before the zeroing
   occurs, otherwise you'll get a lot of invalid write errors.  For example,
   you'll need to do this if your allocator recycles freed blocks, but it
   zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
   Alternatively, if your allocator reuses freed blocks for allocator-internal
   data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.

   Really, what's happening is a blurring of the lines between the client
   program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
   memory should be considered unaddressable to the client program, but the
   allocator knows more than the rest of the client program and so may be able
   to safely access it.  Extra client requests are necessary for Valgrind to
   understand the distinction between the allocator and the rest of the
   program.

   Ignored if addr == 0.
*/
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)          \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,       \
                                    addr, sizeB, rzB, is_zeroed, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,         \
                                    addr, rzB, 0, 0, 0)

/* Create a memory pool.  'rzB' and 'is_zeroed' have the same meaning as
   for VALGRIND_MALLOCLIKE_BLOCK above. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,         \
                                    pool, rzB, is_zeroed, 0, 0)

/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,        \
                                    pool, 0, 0, 0, 0)

/* Associate a piece of memory with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                        \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,          \
                                    pool, addr, size, 0, 0)

/* Disassociate a piece of memory from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                               \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,           \
                                    pool, addr, 0, 0, 0)

/* Disassociate any pieces outside a particular range. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,           \
                                    pool, addr, size, 0, 0)

/* Move a memory pool: the pool previously anchored at address poolA is
   now anchored at address poolB.  (The old comment here was a copy of
   VALGRIND_MEMPOOL_CHANGE's and described the wrong request.) */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,           \
                                    poolA, poolB, 0, 0, 0)

/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)               \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,         \
                                    pool, addrA, addrB, size, 0)

/* Return 1 if a mempool exists, else 0.
*/
#define VALGRIND_MEMPOOL_EXISTS(pool)                              \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                   \
                               VG_USERREQ__MEMPOOL_EXISTS,         \
                               pool, 0, 0, 0, 0)

/* Mark a piece of memory as being a stack.  Returns a stack id. */
#define VALGRIND_STACK_REGISTER(start, end)                        \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                   \
                               VG_USERREQ__STACK_REGISTER,         \
                               start, end, 0, 0, 0)

/* Unmark the piece of memory associated with a stack id as being a
   stack.  'id' is a value previously returned by
   VALGRIND_STACK_REGISTER. */
#define VALGRIND_STACK_DEREGISTER(id)                              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER,  \
                                    id, 0, 0, 0, 0)

/* Change the start and end address of the stack id. */
#define VALGRIND_STACK_CHANGE(id, start, end)                      \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,      \
                                    id, start, end, 0, 0)

/* Load PDB debug info for Wine PE image_map. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)    \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)

/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                   \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,       \
                               addr, buf64, 0, 0, 0)

/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled.
   (Disable passes +1, enable passes -1, to the same
   VG_USERREQ__CHANGE_ERR_DISABLEMENT request.) */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)

/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING. */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)

/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise. */
#define VALGRIND_MONITOR_COMMAND(command)                               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                    command, 0, 0, 0, 0)


/* Tidy up: remove the PLAT_* platform-selection macros (presumably
   defined earlier in this header -- they are internal to valgrind.h)
   so they do not leak into client code. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux

#endif   /* __VALGRIND_H */