1 //===-- msan.h --------------------------------------------------*- C++ -*-===//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file is a part of MemorySanitizer.
11 //
12 // Private MSan header.
13 //===----------------------------------------------------------------------===//
14 
15 #ifndef MSAN_H
16 #define MSAN_H
17 
18 #include "sanitizer_common/sanitizer_flags.h"
19 #include "sanitizer_common/sanitizer_internal_defs.h"
20 #include "sanitizer_common/sanitizer_stacktrace.h"
21 #include "msan_interface_internal.h"
22 #include "msan_flags.h"
23 
// Default to intercepting operator new/delete; a build may predefine this to 0
// to keep the program's own (or another runtime's) operators.
#ifndef MSAN_REPLACE_OPERATORS_NEW_AND_DELETE
# define MSAN_REPLACE_OPERATORS_NEW_AND_DELETE 1
#endif
27 
// Describes one contiguous region of the address space and its role in the
// MSan memory layout (see the per-platform kMemoryLayout tables below).
struct MappingDesc {
  uptr start;  // First address of the region (inclusive).
  uptr end;    // One past the last address (exclusive; checks use addr < end).
  enum Type {
    INVALID, APP, SHADOW, ORIGIN
  } type;
  const char *name;  // Human-readable name for this region.
};
36 
37 
38 #if SANITIZER_LINUX && defined(__mips64)
39 
40 // Everything is above 0x00e000000000.
41 const MappingDesc kMemoryLayout[] = {
42     {0x000000000000ULL, 0x00a000000000ULL, MappingDesc::INVALID, "invalid"},
43     {0x00a000000000ULL, 0x00c000000000ULL, MappingDesc::SHADOW, "shadow"},
44     {0x00c000000000ULL, 0x00e000000000ULL, MappingDesc::ORIGIN, "origin"},
45     {0x00e000000000ULL, 0x010000000000ULL, MappingDesc::APP, "app"}};
46 
47 #define MEM_TO_SHADOW(mem) (((uptr)(mem)) & ~0x4000000000ULL)
48 #define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x002000000000)
49 
50 #elif SANITIZER_FREEBSD && SANITIZER_WORDSIZE == 64
51 
52 // Low memory: main binary, MAP_32BIT mappings and modules
53 // High memory: heap, modules and main thread stack
54 const MappingDesc kMemoryLayout[] = {
55     {0x000000000000ULL, 0x010000000000ULL, MappingDesc::APP, "low memory"},
56     {0x010000000000ULL, 0x100000000000ULL, MappingDesc::INVALID, "invalid"},
57     {0x100000000000ULL, 0x310000000000ULL, MappingDesc::SHADOW, "shadow"},
58     {0x310000000000ULL, 0x380000000000ULL, MappingDesc::INVALID, "invalid"},
59     {0x380000000000ULL, 0x590000000000ULL, MappingDesc::ORIGIN, "origin"},
60     {0x590000000000ULL, 0x600000000000ULL, MappingDesc::INVALID, "invalid"},
61     {0x600000000000ULL, 0x800000000000ULL, MappingDesc::APP, "high memory"}};
62 
63 // Maps low and high app ranges to contiguous space with zero base:
64 //   Low:  0000 0000 0000 - 00ff ffff ffff  ->  2000 0000 0000 - 20ff ffff ffff
65 //   High: 6000 0000 0000 - 7fff ffff ffff  ->  0000 0000 0000 - 1fff ffff ffff
66 #define LINEARIZE_MEM(mem) \
67   (((uptr)(mem) & ~0xc00000000000ULL) ^ 0x200000000000ULL)
68 #define MEM_TO_SHADOW(mem) (LINEARIZE_MEM((mem)) + 0x100000000000ULL)
69 #define SHADOW_TO_ORIGIN(shadow) (((uptr)(shadow)) + 0x280000000000)
70 
71 #elif SANITIZER_LINUX && SANITIZER_WORDSIZE == 64
72 
73 // Requries PIE binary and ASLR enabled.
74 // Main thread stack and DSOs at 0x7f0000000000 (sometimes 0x7e0000000000).
75 // Heap at 0x600000000000.
76 const MappingDesc kMemoryLayout[] = {
77     {0x000000000000ULL, 0x200000000000ULL, MappingDesc::INVALID, "invalid"},
78     {0x200000000000ULL, 0x400000000000ULL, MappingDesc::SHADOW, "shadow"},
79     {0x400000000000ULL, 0x600000000000ULL, MappingDesc::ORIGIN, "origin"},
80     {0x600000000000ULL, 0x800000000000ULL, MappingDesc::APP, "app"}};
81 
82 #define MEM_TO_SHADOW(mem) (((uptr)(mem)) & ~0x400000000000ULL)
83 #define SHADOW_TO_ORIGIN(mem) (((uptr)(mem)) + 0x200000000000ULL)
84 
85 #else
86 #error "Unsupported platform"
87 #endif
88 
// Number of entries in the platform's kMemoryLayout table.
const uptr kMemoryLayoutSize = sizeof(kMemoryLayout) / sizeof(kMemoryLayout[0]);

// App address -> origin address, composed from the two per-platform mappings.
#define MEM_TO_ORIGIN(mem) (SHADOW_TO_ORIGIN(MEM_TO_SHADOW((mem))))
92 
93 #ifndef __clang__
94 __attribute__((optimize("unroll-loops")))
95 #endif
addr_is_type(uptr addr,MappingDesc::Type mapping_type)96 inline bool addr_is_type(uptr addr, MappingDesc::Type mapping_type) {
97 // It is critical for performance that this loop is unrolled (because then it is
98 // simplified into just a few constant comparisons).
99 #ifdef __clang__
100 #pragma unroll
101 #endif
102   for (unsigned i = 0; i < kMemoryLayoutSize; ++i)
103     if (kMemoryLayout[i].type == mapping_type &&
104         addr >= kMemoryLayout[i].start && addr < kMemoryLayout[i].end)
105       return true;
106   return false;
107 }
108 
// Region-classification predicates built on addr_is_type().
#define MEM_IS_APP(mem) addr_is_type((uptr)(mem), MappingDesc::APP)
#define MEM_IS_SHADOW(mem) addr_is_type((uptr)(mem), MappingDesc::SHADOW)
#define MEM_IS_ORIGIN(mem) addr_is_type((uptr)(mem), MappingDesc::ORIGIN)

// Sizes (in bytes) of the TLS buffers for function parameters / return values.
// These constants must be kept in sync with the ones in MemorySanitizer.cc.
const int kMsanParamTlsSize = 800;
const int kMsanRetvalTlsSize = 800;
116 
namespace __msan {
extern int msan_inited;            // Nonzero once the runtime is initialized.
extern bool msan_init_is_running;  // True while initialization is in progress.
extern int msan_report_count;      // Count of reports produced so far.

// Shadow/origin mapping setup.
bool ProtectRange(uptr beg, uptr end);
bool InitShadow(bool map_shadow, bool init_origins);
char *GetProcSelfMaps();
void InitializeInterceptors();

// Allocator entry points.
void MsanAllocatorThreadFinish();
void *MsanCalloc(StackTrace *stack, uptr nmemb, uptr size);
void *MsanReallocate(StackTrace *stack, void *oldp, uptr size,
                     uptr alignment, bool zeroise);
void MsanDeallocate(StackTrace *stack, void *ptr);
void InstallTrapHandler();
void InstallAtExitHandler();
void ReplaceOperatorsNewAndDelete();

// Returns the description string of a stack origin id; *pc receives the
// associated program counter.
const char *GetStackOriginDescr(u32 id, uptr *pc);

// Mark the current thread as entering/leaving the symbolizer (queried via
// IsInSymbolizer()).
void EnterSymbolizer();
void ExitSymbolizer();
bool IsInSymbolizer();
141 
// RAII guard: marks the current thread as being inside the symbolizer for
// the duration of the scope.
struct SymbolizerScope {
  SymbolizerScope() { EnterSymbolizer(); }
  ~SymbolizerScope() { ExitSymbolizer(); }
};
146 
// Terminate the process after a fatal error.
void MsanDie();
// Report a use of uninitialized value at (pc, bp), optionally with the
// origin id of the uninitialized memory.
void PrintWarning(uptr pc, uptr bp);
void PrintWarningWithOrigin(uptr pc, uptr bp, u32 origin);

// Unwind at most max_s frames starting from (pc, bp) into *stack.
void GetStackTrace(BufferedStackTrace *stack, uptr max_s, uptr pc, uptr bp,
                   bool request_fast_unwind);

// Report-formatting entry points.
void ReportUMR(StackTrace *stack, u32 origin);
void ReportExpectedUMRNotFound(StackTrace *stack);
void ReportStats();
void ReportAtExitStatistics();
void DescribeMemoryRange(const void *x, uptr size);
void ReportUMRInsideAddressRange(const char *what, const void *start, uptr size,
                                 uptr offset);

// Unpoison first n function arguments.
void UnpoisonParam(uptr n);
void UnpoisonThreadLocalState();

// Returns a "chained" origin id, pointing to the given stack trace followed by
// the previous origin id.
u32 ChainOrigin(u32 id, StackTrace *stack);

// Stack-trace tag marking poisoning events in origin chains.
const int STACK_TRACE_TAG_POISON = StackTrace::TAG_CUSTOM + 1;
171 
// Declares a local BufferedStackTrace named "stack" and fills it at the
// current location, but only when origin tracking is on and the runtime is
// initialized; otherwise "stack" is left empty.
#define GET_MALLOC_STACK_TRACE                                                 \
  BufferedStackTrace stack;                                                    \
  if (__msan_get_track_origins() && msan_inited)                               \
  GetStackTrace(&stack, common_flags()->malloc_context_size,                   \
                StackTrace::GetCurrentPc(), GET_CURRENT_FRAME(),               \
                common_flags()->fast_unwind_on_malloc)

// Same, for store events at an explicit (pc, bp); only active when the origin
// tracking level is greater than 1.
#define GET_STORE_STACK_TRACE_PC_BP(pc, bp)                                    \
  BufferedStackTrace stack;                                                    \
  if (__msan_get_track_origins() > 1 && msan_inited)                           \
  GetStackTrace(&stack, flags()->store_context_size, pc, bp,                   \
                common_flags()->fast_unwind_on_malloc)

// Full-depth trace at (pc, bp) for fatal reports.
#define GET_FATAL_STACK_TRACE_PC_BP(pc, bp)                                    \
  BufferedStackTrace stack;                                                    \
  if (msan_inited)                                                             \
  GetStackTrace(&stack, kStackTraceMax, pc, bp,                                \
                common_flags()->fast_unwind_on_fatal)

// Store trace at the caller's current pc/frame.
#define GET_STORE_STACK_TRACE \
  GET_STORE_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME())
193 
// RAII helper that saves a piece of MSan's per-thread state on construction
// and restores it on destruction (currently the va_arg overflow size TLS
// value).
class ScopedThreadLocalStateBackup {
 public:
  ScopedThreadLocalStateBackup() { Backup(); }
  ~ScopedThreadLocalStateBackup() { Restore(); }
  void Backup();
  void Restore();
 private:
  u64 va_arg_overflow_size_tls;  // Saved TLS value, restored by Restore().
};
203 
// Optional callback invoked when the runtime is about to die.
extern void (*death_callback)(void);

// Thread-specific-data management for per-thread MSan state.
void MsanTSDInit(void (*destructor)(void *tsd));
void *MsanTSDGet();
void MsanTSDSet(void *tsd);
void MsanTSDDtor(void *tsd);

}  // namespace __msan
212 
// Invoke user-supplied allocation hooks if present. The address check tests
// whether the symbol was actually provided (the hooks are presumably declared
// as weak symbols elsewhere — confirm against the interface header).
#define MSAN_MALLOC_HOOK(ptr, size) \
  if (&__sanitizer_malloc_hook) __sanitizer_malloc_hook(ptr, size)
#define MSAN_FREE_HOOK(ptr) \
  if (&__sanitizer_free_hook) __sanitizer_free_hook(ptr)
217 
218 #endif  // MSAN_H
219