1 //===-- tsan_platform.h -----------------------------------------*- C++ -*-===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file is a part of ThreadSanitizer (TSan), a race detector.
11 //
12 // Platform-specific code.
13 //===----------------------------------------------------------------------===//
14
15 #ifndef TSAN_PLATFORM_H
16 #define TSAN_PLATFORM_H
17
18 #if !defined(__LP64__) && !defined(_WIN64)
19 # error "Only 64-bit is supported"
20 #endif
21
22 #include "tsan_defs.h"
23 #include "tsan_trace.h"
24
25 namespace __tsan {
26
27 #if !defined(SANITIZER_GO)
28
29 #if defined(__x86_64__)
/*
C/C++ on linux/x86_64 and freebsd/x86_64
0000 0000 1000 - 0100 0000 0000: main binary and/or MAP_32BIT mappings
0100 0000 0000 - 0200 0000 0000: -
0200 0000 0000 - 1000 0000 0000: shadow
1000 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 7d00 0000 0000: -
7d00 0000 0000 - 7e00 0000 0000: heap
7e00 0000 0000 - 7e80 0000 0000: -
7e80 0000 0000 - 8000 0000 0000: modules and main thread stack
*/
// Address-space layout for linux/freebsd x86_64.
// kAppMemMsk/kAppMemXor are the mask/xor constants consumed by
// MemToShadowImpl()/MemToMetaImpl() below to fold the app ranges into the
// shadow/metainfo ranges; change them only together with the ranges above.
struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x020000000000ull;
  static const uptr kShadowEnd     = 0x100000000000ull;
  static const uptr kHeapMemBeg    = 0x7d0000000000ull;
  static const uptr kHeapMemEnd    = 0x7e0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000001000ull;
  static const uptr kLoAppMemEnd   = 0x010000000000ull;
  static const uptr kHiAppMemBeg   = 0x7e8000000000ull;
  static const uptr kHiAppMemEnd   = 0x800000000000ull;
  static const uptr kAppMemMsk     = 0x7c0000000000ull;
  static const uptr kAppMemXor     = 0x020000000000ull;
  static const uptr kVdsoBeg       = 0xf000000000000000ull;
};
61 #elif defined(__mips64)
/*
C/C++ on linux/mips64
0100 0000 00 - 0200 0000 00: main binary
0200 0000 00 - 1400 0000 00: -
1400 0000 00 - 2400 0000 00: shadow
2400 0000 00 - 3000 0000 00: -
3000 0000 00 - 4000 0000 00: metainfo (memory blocks and sync objects)
4000 0000 00 - 6000 0000 00: -
6000 0000 00 - 6200 0000 00: traces
6200 0000 00 - fe00 0000 00: -
fe00 0000 00 - ff00 0000 00: heap
ff00 0000 00 - ff80 0000 00: -
ff80 0000 00 - ffff ffff ff: modules and main thread stack
*/
// Address-space layout for linux/mips64 (40-bit addresses in the table
// above). Same constant roles as the x86_64 Mapping.
struct Mapping {
  static const uptr kMetaShadowBeg = 0x3000000000ull;
  static const uptr kMetaShadowEnd = 0x4000000000ull;
  static const uptr kTraceMemBeg   = 0x6000000000ull;
  static const uptr kTraceMemEnd   = 0x6200000000ull;
  static const uptr kShadowBeg     = 0x1400000000ull;
  static const uptr kShadowEnd     = 0x2400000000ull;
  static const uptr kHeapMemBeg    = 0xfe00000000ull;
  static const uptr kHeapMemEnd    = 0xff00000000ull;
  static const uptr kLoAppMemBeg   = 0x0100000000ull;
  static const uptr kLoAppMemEnd   = 0x0200000000ull;
  static const uptr kHiAppMemBeg   = 0xff80000000ull;
  static const uptr kHiAppMemEnd   = 0xffffffffffull;
  static const uptr kAppMemMsk     = 0xfc00000000ull;
  static const uptr kAppMemXor     = 0x0400000000ull;
  static const uptr kVdsoBeg       = 0xfffff00000ull;
};
93 #elif defined(__aarch64__)
// AArch64 supports multiple VMA sizes, which leads to multiple address
// transformation functions. To support these multiple VMA transformations
// and mappings, the TSan runtime for AArch64 uses an external memory read
// (vmaSize) to select which mapping to use. Although slower, this makes the
// same instrumented binary run on kernels configured with different VMA
// sizes.
99
/*
C/C++ on linux/aarch64 (39-bit VMA)
0000 0010 00 - 0100 0000 00: main binary
0100 0000 00 - 0800 0000 00: -
0800 0000 00 - 2000 0000 00: shadow memory
2000 0000 00 - 3100 0000 00: -
3100 0000 00 - 3400 0000 00: metainfo
3400 0000 00 - 5500 0000 00: -
5500 0000 00 - 5600 0000 00: main binary (PIE)
5600 0000 00 - 6000 0000 00: -
6000 0000 00 - 6200 0000 00: traces
6200 0000 00 - 7c00 0000 00: -
7c00 0000 00 - 7d00 0000 00: heap
7d00 0000 00 - 7fff ffff ff: modules and main thread stack
*/
// Layout for aarch64 with a 39-bit VMA; selected at runtime via vmaSize.
// The kMid* constants describe the PIE main-binary range; kMidShadowOff is
// added back by ShadowToMemImpl() when reversing the mid-range mapping.
// NOTE(review): the table above says modules start at 7d00 0000 00, but
// kHiAppMemBeg below is 0x7e00000000 -- confirm which is authoritative.
struct Mapping39 {
  static const uptr kLoAppMemBeg   = 0x0000001000ull;
  static const uptr kLoAppMemEnd   = 0x0100000000ull;
  static const uptr kShadowBeg     = 0x0800000000ull;
  static const uptr kShadowEnd     = 0x2000000000ull;
  static const uptr kMetaShadowBeg = 0x3100000000ull;
  static const uptr kMetaShadowEnd = 0x3400000000ull;
  static const uptr kMidAppMemBeg  = 0x5500000000ull;
  static const uptr kMidAppMemEnd  = 0x5600000000ull;
  static const uptr kMidShadowOff  = 0x5000000000ull;
  static const uptr kTraceMemBeg   = 0x6000000000ull;
  static const uptr kTraceMemEnd   = 0x6200000000ull;
  static const uptr kHeapMemBeg    = 0x7c00000000ull;
  static const uptr kHeapMemEnd    = 0x7d00000000ull;
  static const uptr kHiAppMemBeg   = 0x7e00000000ull;
  static const uptr kHiAppMemEnd   = 0x7fffffffffull;
  static const uptr kAppMemMsk     = 0x7800000000ull;
  static const uptr kAppMemXor     = 0x0200000000ull;
  static const uptr kVdsoBeg       = 0x7f00000000ull;
};
135
/*
C/C++ on linux/aarch64 (42-bit VMA)
00000 0010 00 - 01000 0000 00: main binary
01000 0000 00 - 10000 0000 00: -
10000 0000 00 - 20000 0000 00: shadow memory
20000 0000 00 - 26000 0000 00: -
26000 0000 00 - 28000 0000 00: metainfo
28000 0000 00 - 2aa00 0000 00: -
2aa00 0000 00 - 2ab00 0000 00: main binary (PIE)
2ab00 0000 00 - 36200 0000 00: -
36200 0000 00 - 36240 0000 00: traces
36240 0000 00 - 3e000 0000 00: -
3e000 0000 00 - 3f000 0000 00: heap
3f000 0000 00 - 3ffff ffff ff: modules and main thread stack
*/
// Layout for aarch64 with a 42-bit VMA; selected at runtime via vmaSize.
// NOTE(review): the table above ends traces at 36240 0000 00, but
// kTraceMemEnd below is 0x36400000000 -- confirm which is authoritative.
struct Mapping42 {
  static const uptr kLoAppMemBeg   = 0x00000001000ull;
  static const uptr kLoAppMemEnd   = 0x01000000000ull;
  static const uptr kShadowBeg     = 0x10000000000ull;
  static const uptr kShadowEnd     = 0x20000000000ull;
  static const uptr kMetaShadowBeg = 0x26000000000ull;
  static const uptr kMetaShadowEnd = 0x28000000000ull;
  static const uptr kMidAppMemBeg  = 0x2aa00000000ull;
  static const uptr kMidAppMemEnd  = 0x2ab00000000ull;
  static const uptr kMidShadowOff  = 0x28000000000ull;
  static const uptr kTraceMemBeg   = 0x36200000000ull;
  static const uptr kTraceMemEnd   = 0x36400000000ull;
  static const uptr kHeapMemBeg    = 0x3e000000000ull;
  static const uptr kHeapMemEnd    = 0x3f000000000ull;
  static const uptr kHiAppMemBeg   = 0x3f000000000ull;
  static const uptr kHiAppMemEnd   = 0x3ffffffffffull;
  static const uptr kAppMemMsk     = 0x3c000000000ull;
  static const uptr kAppMemXor     = 0x04000000000ull;
  static const uptr kVdsoBeg       = 0x37f00000000ull;
};

// Indicates the runtime will define the memory regions at runtime
// (the mapping is chosen from vmaSize instead of being fixed at build time).
#define TSAN_RUNTIME_VMA 1
// Indicates that the mapping defines a mid-range memory segment
// (the PIE main binary range above).
#define TSAN_MID_APP_RANGE 1
176 #elif defined(__powerpc64__)
// PPC64 supports multiple VMA sizes, which leads to multiple address
// transformation functions. To support these multiple VMA transformations
// and mappings, the TSan runtime for PPC64 uses an external memory read
// (vmaSize) to select which mapping to use. Although slower, this makes the
// same instrumented binary run on kernels configured with different VMA
// sizes.
182
/*
C/C++ on linux/powerpc64 (44-bit VMA)
0000 0000 0100 - 0001 0000 0000: main binary
0001 0000 0000 - 0001 0000 0000: -
0001 0000 0000 - 0b00 0000 0000: shadow
0b00 0000 0000 - 0b00 0000 0000: -
0b00 0000 0000 - 0d00 0000 0000: metainfo (memory blocks and sync objects)
0d00 0000 0000 - 0d00 0000 0000: -
0d00 0000 0000 - 0f00 0000 0000: traces
0f00 0000 0000 - 0f00 0000 0000: -
0f00 0000 0000 - 0f50 0000 0000: heap
0f50 0000 0000 - 0f60 0000 0000: -
0f60 0000 0000 - 1000 0000 0000: modules and main thread stack
*/
// Layout for powerpc64 with a 44-bit VMA; selected at runtime via vmaSize.
// The zero-length "-" rows above indicate adjacent regions with no gap.
struct Mapping44 {
  static const uptr kMetaShadowBeg = 0x0b0000000000ull;
  static const uptr kMetaShadowEnd = 0x0d0000000000ull;
  static const uptr kTraceMemBeg   = 0x0d0000000000ull;
  static const uptr kTraceMemEnd   = 0x0f0000000000ull;
  static const uptr kShadowBeg     = 0x000100000000ull;
  static const uptr kShadowEnd     = 0x0b0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000000100ull;
  static const uptr kLoAppMemEnd   = 0x000100000000ull;
  static const uptr kHeapMemBeg    = 0x0f0000000000ull;
  static const uptr kHeapMemEnd    = 0x0f5000000000ull;
  static const uptr kHiAppMemBeg   = 0x0f6000000000ull;
  static const uptr kHiAppMemEnd   = 0x100000000000ull; // 44 bits
  static const uptr kAppMemMsk     = 0x0f0000000000ull;
  static const uptr kAppMemXor     = 0x002100000000ull;
  static const uptr kVdsoBeg       = 0x3c0000000000000ull;
};
214
/*
C/C++ on linux/powerpc64 (46-bit VMA)
0000 0000 1000 - 0100 0000 0000: main binary
0100 0000 0000 - 0200 0000 0000: -
0100 0000 0000 - 1000 0000 0000: shadow
1000 0000 0000 - 1000 0000 0000: -
1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
2000 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2200 0000 0000: traces
2200 0000 0000 - 3d00 0000 0000: -
3d00 0000 0000 - 3e00 0000 0000: heap
3e00 0000 0000 - 3e80 0000 0000: -
3e80 0000 0000 - 4000 0000 0000: modules and main thread stack
*/
// Layout for powerpc64 with a 46-bit VMA; selected at runtime via vmaSize.
struct Mapping46 {
  static const uptr kMetaShadowBeg = 0x100000000000ull;
  static const uptr kMetaShadowEnd = 0x200000000000ull;
  static const uptr kTraceMemBeg   = 0x200000000000ull;
  static const uptr kTraceMemEnd   = 0x220000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x100000000000ull;
  static const uptr kHeapMemBeg    = 0x3d0000000000ull;
  static const uptr kHeapMemEnd    = 0x3e0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000001000ull;
  static const uptr kLoAppMemEnd   = 0x010000000000ull;
  static const uptr kHiAppMemBeg   = 0x3e8000000000ull;
  static const uptr kHiAppMemEnd   = 0x400000000000ull; // 46 bits
  static const uptr kAppMemMsk     = 0x3c0000000000ull;
  static const uptr kAppMemXor     = 0x020000000000ull;
  static const uptr kVdsoBeg       = 0x7800000000000000ull;
};

// Indicates the runtime will define the memory regions at runtime
// (the mapping is chosen from vmaSize instead of being fixed at build time).
#define TSAN_RUNTIME_VMA 1
249 #endif
250
251 #elif defined(SANITIZER_GO) && !SANITIZER_WINDOWS
252
/* Go on linux, darwin and freebsd
0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
00c0 0000 0000 - 00e0 0000 0000: heap
00e0 0000 0000 - 2000 0000 0000: -
2000 0000 0000 - 2380 0000 0000: shadow
2380 0000 0000 - 3000 0000 0000: -
3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
4000 0000 0000 - 6000 0000 0000: -
6000 0000 0000 - 6200 0000 0000: traces
6200 0000 0000 - 8000 0000 0000: -
*/

// Go layout (non-Windows): a single app range covers executable + heap,
// so there are no Lo/Hi/Heap constants here; see the SANITIZER_GO branches
// of MappingImpl()/MemToShadowImpl() below.
struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x238000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};
276
277 #elif defined(SANITIZER_GO) && SANITIZER_WINDOWS
278
279 /* Go on windows
280 0000 0000 1000 - 0000 1000 0000: executable
281 0000 1000 0000 - 00f8 0000 0000: -
282 00c0 0000 0000 - 00e0 0000 0000: heap
283 00e0 0000 0000 - 0100 0000 0000: -
284 0100 0000 0000 - 0500 0000 0000: shadow
285 0500 0000 0000 - 0560 0000 0000: -
286 0560 0000 0000 - 0760 0000 0000: traces
287 0760 0000 0000 - 07d0 0000 0000: metainfo (memory blocks and sync objects)
288 07d0 0000 0000 - 8000 0000 0000: -
289 */
290
291 struct Mapping {
292 static const uptr kMetaShadowBeg = 0x076000000000ull;
293 static const uptr kMetaShadowEnd = 0x07d000000000ull;
294 static const uptr kTraceMemBeg = 0x056000000000ull;
295 static const uptr kTraceMemEnd = 0x076000000000ull;
296 static const uptr kShadowBeg = 0x010000000000ull;
297 static const uptr kShadowEnd = 0x050000000000ull;
298 static const uptr kAppMemBeg = 0x000000001000ull;
299 static const uptr kAppMemEnd = 0x00e000000000ull;
300 }
301
302 #else
303 # error "Unknown platform"
304 #endif
305
306
#ifdef TSAN_RUNTIME_VMA
// Virtual address space size detected at startup; the dispatchers below
// compare it against 39/42 (aarch64) or 44/46 (powerpc64) to pick the
// active Mapping struct.
extern uptr vmaSize;
#endif
310
311
// Selector for MappingImpl()/MappingArchImpl(): names the Mapping constant
// to fetch, so a single template handles all region boundaries.
enum MappingType {
  // Boundaries of the C/C++ application ranges.
  MAPPING_LO_APP_BEG,
  MAPPING_LO_APP_END,
  MAPPING_HI_APP_BEG,
  MAPPING_HI_APP_END,
#ifdef TSAN_MID_APP_RANGE
  // Mid-range (PIE main binary) boundaries; only on mappings that have one.
  MAPPING_MID_APP_BEG,
  MAPPING_MID_APP_END,
#endif
  MAPPING_HEAP_BEG,
  MAPPING_HEAP_END,
  // Single application range (Go builds).
  MAPPING_APP_BEG,
  MAPPING_APP_END,
  // Shadow, metainfo and trace region boundaries.
  MAPPING_SHADOW_BEG,
  MAPPING_SHADOW_END,
  MAPPING_META_SHADOW_BEG,
  MAPPING_META_SHADOW_END,
  MAPPING_TRACE_BEG,
  MAPPING_TRACE_END,
  MAPPING_VDSO_BEG,
};
333
// Returns the Mapping constant named by the compile-time selector Type.
// Type is a template parameter, so the switch is resolved at compile time
// and every valid instantiation hits exactly one case. NOTE(review): there
// is no default/unreachable after the switch, so an instantiation with a
// Type excluded by the preprocessor (e.g. MAPPING_APP_BEG in a non-Go
// build) would fall off the end -- callers must not do that.
template<typename Mapping, int Type>
uptr MappingImpl(void) {
  switch (Type) {
#ifndef SANITIZER_GO
    case MAPPING_LO_APP_BEG: return Mapping::kLoAppMemBeg;
    case MAPPING_LO_APP_END: return Mapping::kLoAppMemEnd;
# ifdef TSAN_MID_APP_RANGE
    case MAPPING_MID_APP_BEG: return Mapping::kMidAppMemBeg;
    case MAPPING_MID_APP_END: return Mapping::kMidAppMemEnd;
# endif
    case MAPPING_HI_APP_BEG: return Mapping::kHiAppMemBeg;
    case MAPPING_HI_APP_END: return Mapping::kHiAppMemEnd;
    case MAPPING_HEAP_BEG: return Mapping::kHeapMemBeg;
    case MAPPING_HEAP_END: return Mapping::kHeapMemEnd;
    case MAPPING_VDSO_BEG: return Mapping::kVdsoBeg;
#else
    case MAPPING_APP_BEG: return Mapping::kAppMemBeg;
    case MAPPING_APP_END: return Mapping::kAppMemEnd;
#endif
    case MAPPING_SHADOW_BEG: return Mapping::kShadowBeg;
    case MAPPING_SHADOW_END: return Mapping::kShadowEnd;
    case MAPPING_META_SHADOW_BEG: return Mapping::kMetaShadowBeg;
    case MAPPING_META_SHADOW_END: return Mapping::kMetaShadowEnd;
    case MAPPING_TRACE_BEG: return Mapping::kTraceMemBeg;
    case MAPPING_TRACE_END: return Mapping::kTraceMemEnd;
  }
}
361
// Dispatches MappingImpl() to the Mapping struct matching the runtime VMA
// size on architectures with TSAN_RUNTIME_VMA; otherwise uses the single
// compile-time Mapping. The DCHECK(0) lines are unreachable (both branches
// return) and serve only as documentation of intent.
template<int Type>
uptr MappingArchImpl(void) {
#ifdef __aarch64__
  if (vmaSize == 39)
    return MappingImpl<Mapping39, Type>();
  else
    return MappingImpl<Mapping42, Type>();
  DCHECK(0);
#elif defined(__powerpc64__)
  if (vmaSize == 44)
    return MappingImpl<Mapping44, Type>();
  else
    return MappingImpl<Mapping46, Type>();
  DCHECK(0);
#else
  return MappingImpl<Mapping, Type>();
#endif
}
380
#ifndef SANITIZER_GO
// Boundary accessors for the C/C++ application memory regions. Each is a
// thin wrapper that forwards to the VMA-aware dispatcher above.
ALWAYS_INLINE
uptr LoAppMemBeg(void) {
  return MappingArchImpl<MAPPING_LO_APP_BEG>();
}
ALWAYS_INLINE
uptr LoAppMemEnd(void) {
  return MappingArchImpl<MAPPING_LO_APP_END>();
}

#ifdef TSAN_MID_APP_RANGE
ALWAYS_INLINE
uptr MidAppMemBeg(void) {
  return MappingArchImpl<MAPPING_MID_APP_BEG>();
}
ALWAYS_INLINE
uptr MidAppMemEnd(void) {
  return MappingArchImpl<MAPPING_MID_APP_END>();
}
#endif

ALWAYS_INLINE
uptr HeapMemBeg(void) {
  return MappingArchImpl<MAPPING_HEAP_BEG>();
}
ALWAYS_INLINE
uptr HeapMemEnd(void) {
  return MappingArchImpl<MAPPING_HEAP_END>();
}

ALWAYS_INLINE
uptr HiAppMemBeg(void) {
  return MappingArchImpl<MAPPING_HI_APP_BEG>();
}
ALWAYS_INLINE
uptr HiAppMemEnd(void) {
  return MappingArchImpl<MAPPING_HI_APP_END>();
}

ALWAYS_INLINE
uptr VdsoBeg(void) {
  return MappingArchImpl<MAPPING_VDSO_BEG>();
}

#else

// Go builds expose a single application range instead of lo/mid/hi/heap.
ALWAYS_INLINE
uptr AppMemBeg(void) {
  return MappingArchImpl<MAPPING_APP_BEG>();
}
ALWAYS_INLINE
uptr AppMemEnd(void) {
  return MappingArchImpl<MAPPING_APP_END>();
}

#endif
437
438 static inline
GetUserRegion(int i,uptr * start,uptr * end)439 bool GetUserRegion(int i, uptr *start, uptr *end) {
440 switch (i) {
441 default:
442 return false;
443 #ifndef SANITIZER_GO
444 case 0:
445 *start = LoAppMemBeg();
446 *end = LoAppMemEnd();
447 return true;
448 case 1:
449 *start = HiAppMemBeg();
450 *end = HiAppMemEnd();
451 return true;
452 case 2:
453 *start = HeapMemBeg();
454 *end = HeapMemEnd();
455 return true;
456 # ifdef TSAN_MID_APP_RANGE
457 case 3:
458 *start = MidAppMemBeg();
459 *end = MidAppMemEnd();
460 return true;
461 # endif
462 #else
463 case 0:
464 *start = AppMemBeg();
465 *end = AppMemEnd();
466 return true;
467 #endif
468 }
469 }
470
// Boundary accessors for the shadow, metainfo and trace regions; present in
// all build configurations.
ALWAYS_INLINE
uptr ShadowBeg(void) {
  return MappingArchImpl<MAPPING_SHADOW_BEG>();
}
ALWAYS_INLINE
uptr ShadowEnd(void) {
  return MappingArchImpl<MAPPING_SHADOW_END>();
}

ALWAYS_INLINE
uptr MetaShadowBeg(void) {
  return MappingArchImpl<MAPPING_META_SHADOW_BEG>();
}
ALWAYS_INLINE
uptr MetaShadowEnd(void) {
  return MappingArchImpl<MAPPING_META_SHADOW_END>();
}

ALWAYS_INLINE
uptr TraceMemBeg(void) {
  return MappingArchImpl<MAPPING_TRACE_BEG>();
}
ALWAYS_INLINE
uptr TraceMemEnd(void) {
  return MappingArchImpl<MAPPING_TRACE_END>();
}
497
498
// True iff |mem| lies in any application range of the given Mapping.
// The C/C++ expression is one boolean chain spliced by the preprocessor:
// heap, (optionally mid,) lo and hi ranges, each half-open [Beg, End).
template<typename Mapping>
bool IsAppMemImpl(uptr mem) {
#ifndef SANITIZER_GO
  return (mem >= Mapping::kHeapMemBeg && mem < Mapping::kHeapMemEnd) ||
# ifdef TSAN_MID_APP_RANGE
         (mem >= Mapping::kMidAppMemBeg && mem < Mapping::kMidAppMemEnd) ||
# endif
         (mem >= Mapping::kLoAppMemBeg && mem < Mapping::kLoAppMemEnd) ||
         (mem >= Mapping::kHiAppMemBeg && mem < Mapping::kHiAppMemEnd);
#else
  return mem >= Mapping::kAppMemBeg && mem < Mapping::kAppMemEnd;
#endif
}
512
513 ALWAYS_INLINE
IsAppMem(uptr mem)514 bool IsAppMem(uptr mem) {
515 #ifdef __aarch64__
516 if (vmaSize == 39)
517 return IsAppMemImpl<Mapping39>(mem);
518 else
519 return IsAppMemImpl<Mapping42>(mem);
520 DCHECK(0);
521 #elif defined(__powerpc64__)
522 if (vmaSize == 44)
523 return IsAppMemImpl<Mapping44>(mem);
524 else
525 return IsAppMemImpl<Mapping46>(mem);
526 DCHECK(0);
527 #else
528 return IsAppMemImpl<Mapping>(mem);
529 #endif
530 }
531
532
533 template<typename Mapping>
IsShadowMemImpl(uptr mem)534 bool IsShadowMemImpl(uptr mem) {
535 return mem >= Mapping::kShadowBeg && mem <= Mapping::kShadowEnd;
536 }
537
538 ALWAYS_INLINE
IsShadowMem(uptr mem)539 bool IsShadowMem(uptr mem) {
540 #ifdef __aarch64__
541 if (vmaSize == 39)
542 return IsShadowMemImpl<Mapping39>(mem);
543 else
544 return IsShadowMemImpl<Mapping42>(mem);
545 DCHECK(0);
546 #elif defined(__powerpc64__)
547 if (vmaSize == 44)
548 return IsShadowMemImpl<Mapping44>(mem);
549 else
550 return IsShadowMemImpl<Mapping46>(mem);
551 DCHECK(0);
552 #else
553 return IsShadowMemImpl<Mapping>(mem);
554 #endif
555 }
556
557
558 template<typename Mapping>
IsMetaMemImpl(uptr mem)559 bool IsMetaMemImpl(uptr mem) {
560 return mem >= Mapping::kMetaShadowBeg && mem <= Mapping::kMetaShadowEnd;
561 }
562
563 ALWAYS_INLINE
IsMetaMem(uptr mem)564 bool IsMetaMem(uptr mem) {
565 #ifdef __aarch64__
566 if (vmaSize == 39)
567 return IsMetaMemImpl<Mapping39>(mem);
568 else
569 return IsMetaMemImpl<Mapping42>(mem);
570 DCHECK(0);
571 #elif defined(__powerpc64__)
572 if (vmaSize == 44)
573 return IsMetaMemImpl<Mapping44>(mem);
574 else
575 return IsMetaMemImpl<Mapping46>(mem);
576 DCHECK(0);
577 #else
578 return IsMetaMemImpl<Mapping>(mem);
579 #endif
580 }
581
582
// Maps an application address to the start of its shadow cells.
// C/C++: clear the high app-range bits (kAppMemMsk) and the in-cell low
// bits, xor with kAppMemXor to relocate into the shadow range, and scale by
// kShadowCnt since each kShadowCell-byte app cell owns kShadowCnt slots.
// Go: no mask/xor; scale and OR in the shadow base instead.
template<typename Mapping>
uptr MemToShadowImpl(uptr x) {
  DCHECK(IsAppMem(x));
#ifndef SANITIZER_GO
  return (((x) & ~(Mapping::kAppMemMsk | (kShadowCell - 1)))
      ^ Mapping::kAppMemXor) * kShadowCnt;
#else
  return ((x & ~(kShadowCell - 1)) * kShadowCnt) | Mapping::kShadowBeg;
#endif
}
593
594 ALWAYS_INLINE
MemToShadow(uptr x)595 uptr MemToShadow(uptr x) {
596 #ifdef __aarch64__
597 if (vmaSize == 39)
598 return MemToShadowImpl<Mapping39>(x);
599 else
600 return MemToShadowImpl<Mapping42>(x);
601 DCHECK(0);
602 #elif defined(__powerpc64__)
603 if (vmaSize == 44)
604 return MemToShadowImpl<Mapping44>(x);
605 else
606 return MemToShadowImpl<Mapping46>(x);
607 DCHECK(0);
608 #else
609 return MemToShadowImpl<Mapping>(x);
610 #endif
611 }
612
613
// Maps an application address to its metainfo shadow cell (u32*).
// Same mask/xor relocation as MemToShadowImpl() but with kMetaShadowCell
// granularity and kMetaShadowSize per-cell size, OR'd into the metainfo
// base address.
template<typename Mapping>
u32 *MemToMetaImpl(uptr x) {
  DCHECK(IsAppMem(x));
#ifndef SANITIZER_GO
  return (u32*)(((((x) & ~(Mapping::kAppMemMsk | (kMetaShadowCell - 1)))
      ^ Mapping::kAppMemXor) / kMetaShadowCell * kMetaShadowSize)
      | Mapping::kMetaShadowBeg);
#else
  return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
      kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
#endif
}
626
627 ALWAYS_INLINE
MemToMeta(uptr x)628 u32 *MemToMeta(uptr x) {
629 #ifdef __aarch64__
630 if (vmaSize == 39)
631 return MemToMetaImpl<Mapping39>(x);
632 else
633 return MemToMetaImpl<Mapping42>(x);
634 DCHECK(0);
635 #elif defined(__powerpc64__)
636 if (vmaSize == 44)
637 return MemToMetaImpl<Mapping44>(x);
638 else
639 return MemToMetaImpl<Mapping46>(x);
640 DCHECK(0);
641 #else
642 return MemToMetaImpl<Mapping>(x);
643 #endif
644 }
645
646
647 template<typename Mapping>
ShadowToMemImpl(uptr s)648 uptr ShadowToMemImpl(uptr s) {
649 DCHECK(IsShadowMem(s));
650 #ifndef SANITIZER_GO
651 if (s >= MemToShadow(Mapping::kLoAppMemBeg)
652 && s <= MemToShadow(Mapping::kLoAppMemEnd - 1))
653 return (s / kShadowCnt) ^ Mapping::kAppMemXor;
654 # ifdef TSAN_MID_APP_RANGE
655 if (s >= MemToShadow(Mapping::kMidAppMemBeg)
656 && s <= MemToShadow(Mapping::kMidAppMemEnd - 1))
657 return ((s / kShadowCnt) ^ Mapping::kAppMemXor) + Mapping::kMidShadowOff;
658 # endif
659 else
660 return ((s / kShadowCnt) ^ Mapping::kAppMemXor) | Mapping::kAppMemMsk;
661 #else
662 # ifndef SANITIZER_WINDOWS
663 return (s & ~Mapping::kShadowBeg) / kShadowCnt;
664 # else
665 // FIXME(dvyukov): this is most likely wrong as the mapping is not bijection.
666 return (s - Mapping::kShadowBeg) / kShadowCnt;
667 # endif // SANITIZER_WINDOWS
668 #endif
669 }
670
671 ALWAYS_INLINE
ShadowToMem(uptr s)672 uptr ShadowToMem(uptr s) {
673 #ifdef __aarch64__
674 if (vmaSize == 39)
675 return ShadowToMemImpl<Mapping39>(s);
676 else
677 return ShadowToMemImpl<Mapping42>(s);
678 DCHECK(0);
679 #elif defined(__powerpc64__)
680 if (vmaSize == 44)
681 return ShadowToMemImpl<Mapping44>(s);
682 else
683 return ShadowToMemImpl<Mapping46>(s);
684 DCHECK(0);
685 #else
686 return ShadowToMemImpl<Mapping>(s);
687 #endif
688 }
689
690
691
// Per-thread slot size in the trace region: the event buffer plus its Trace
// header plus one extra 64K chunk (the additional page is to catch shadow
// stack overflow as a paging fault), rounded up to 64K because Windows
// wants 64K alignment for mmaps.
const uptr kTotalTraceSize = (kTraceSize * sizeof(Event) + sizeof(Trace)
    + (64 << 10) + (64 << 10) - 1) & ~((64 << 10) - 1);
696
697 template<typename Mapping>
GetThreadTraceImpl(int tid)698 uptr GetThreadTraceImpl(int tid) {
699 uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize;
700 DCHECK_LT(p, Mapping::kTraceMemEnd);
701 return p;
702 }
703
704 ALWAYS_INLINE
GetThreadTrace(int tid)705 uptr GetThreadTrace(int tid) {
706 #ifdef __aarch64__
707 if (vmaSize == 39)
708 return GetThreadTraceImpl<Mapping39>(tid);
709 else
710 return GetThreadTraceImpl<Mapping42>(tid);
711 DCHECK(0);
712 #elif defined(__powerpc64__)
713 if (vmaSize == 44)
714 return GetThreadTraceImpl<Mapping44>(tid);
715 else
716 return GetThreadTraceImpl<Mapping46>(tid);
717 DCHECK(0);
718 #else
719 return GetThreadTraceImpl<Mapping>(tid);
720 #endif
721 }
722
723
724 template<typename Mapping>
GetThreadTraceHeaderImpl(int tid)725 uptr GetThreadTraceHeaderImpl(int tid) {
726 uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize
727 + kTraceSize * sizeof(Event);
728 DCHECK_LT(p, Mapping::kTraceMemEnd);
729 return p;
730 }
731
732 ALWAYS_INLINE
GetThreadTraceHeader(int tid)733 uptr GetThreadTraceHeader(int tid) {
734 #ifdef __aarch64__
735 if (vmaSize == 39)
736 return GetThreadTraceHeaderImpl<Mapping39>(tid);
737 else
738 return GetThreadTraceHeaderImpl<Mapping42>(tid);
739 DCHECK(0);
740 #elif defined(__powerpc64__)
741 if (vmaSize == 44)
742 return GetThreadTraceHeaderImpl<Mapping44>(tid);
743 else
744 return GetThreadTraceHeaderImpl<Mapping46>(tid);
745 DCHECK(0);
746 #else
747 return GetThreadTraceHeaderImpl<Mapping>(tid);
748 #endif
749 }
750
// Platform-specific entry points implemented in the per-OS .cc files.
void InitializePlatform();
void InitializePlatformEarly();
// Verifies/mprotects the address-space gaps described by the Mapping.
void CheckAndProtect();
void InitializeShadowMemoryPlatform();
void FlushShadowMemory();
void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive);

// Says whether the addr relates to a global var.
// Guesses with high probability, may yield both false positives and negatives.
bool IsGlobalVar(uptr addr);
// Extract file descriptors from resolver state / a recvmsg control message.
int ExtractResolvFDs(void *state, int *fds, int nfd);
int ExtractRecvmsgFDs(void *msg, int *fds, int nfd);

// Invokes fn(c, m, abstime) with pthread cancellation enabled, running
// cleanup(arg) if the calling thread is cancelled.
int call_pthread_cancel_with_cleanup(int(*fn)(void *c, void *m,
    void *abstime), void *c, void *m, void *abstime,
    void(*cleanup)(void *arg), void *arg);

void DestroyThreadState();
769
770 } // namespace __tsan
771
772 #endif // TSAN_PLATFORM_H
773