//===- subzero/src/IceGlobalContext.h - Global context defs -----*- C++ -*-===//
//
//                        The Subzero Code Generator
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// \brief Declares aspects of the compilation that persist across multiple
/// functions.
///
//===----------------------------------------------------------------------===//

#ifndef SUBZERO_SRC_ICEGLOBALCONTEXT_H
#define SUBZERO_SRC_ICEGLOBALCONTEXT_H

#include "IceClFlags.h"
#include "IceDefs.h"
#include "IceInstrumentation.h"
#include "IceIntrinsics.h"
#include "IceStringPool.h"
#include "IceSwitchLowering.h"
#include "IceTargetLowering.def"
#include "IceThreading.h"
#include "IceTimerTree.h"
#include "IceTypes.h"
#include "IceUtils.h"

#include <array>
#include <atomic>
#include <cassert>
#include <functional>
#include <memory>
#include <mutex>
#include <thread>
#include <type_traits>
#include <utility>
#include <vector>

namespace Ice {

class ConstantPool;
class EmitterWorkItem;
class FuncSigType;
class Instrumentation;

// Runtime helper function IDs

enum class RuntimeHelper {
#define X(Tag, Name) H_##Tag,
  RUNTIME_HELPER_FUNCTIONS_TABLE
#undef X
      H_Num
};

/// OptWorkItem is a simple wrapper used to pass parse information on a
/// function block to a translator thread.
class OptWorkItem {
  OptWorkItem(const OptWorkItem &) = delete;
  OptWorkItem &operator=(const OptWorkItem &) = delete;

public:
  // Get the Cfg for the function to translate.
  virtual std::unique_ptr<Cfg> getParsedCfg() = 0;
  virtual ~OptWorkItem() = default;

protected:
  OptWorkItem() = default;
};
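
// A rough sketch of a concrete work item (hypothetical class, not part of
// this header): a parser can wrap an already-constructed Cfg and hand it to
// GlobalContext::optQueueBlockingPush() for translation.
//
//   class CfgReadyWorkItem final : public OptWorkItem {
//   public:
//     explicit CfgReadyWorkItem(std::unique_ptr<Cfg> Func)
//         : Func(std::move(Func)) {}
//     std::unique_ptr<Cfg> getParsedCfg() override { return std::move(Func); }
//
//   private:
//     std::unique_ptr<Cfg> Func;
//   };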

class GlobalContext {
  GlobalContext() = delete;
  GlobalContext(const GlobalContext &) = delete;
  GlobalContext &operator=(const GlobalContext &) = delete;

  /// CodeStats collects rudimentary statistics during translation.
  class CodeStats {
    CodeStats(const CodeStats &) = delete;
    CodeStats &operator=(const CodeStats &) = default;
#define CODESTATS_TABLE                                                        \
  /* dump string, enum value */                                                \
  X("Inst Count  ", InstCount)                                                 \
  X("Regs Saved  ", RegsSaved)                                                 \
  X("Frame Bytes ", FrameByte)                                                 \
  X("Spills      ", NumSpills)                                                 \
  X("Fills       ", NumFills)                                                  \
  X("R/P Imms    ", NumRPImms)
    //#define X(str, tag)

  public:
    enum CSTag {
#define X(str, tag) CS_##tag,
      CODESTATS_TABLE
#undef X
          CS_NUM
    };
    CodeStats() { reset(); }
    void reset() { Stats.fill(0); }
    void update(CSTag Tag, uint32_t Count = 1) {
      assert(static_cast<SizeT>(Tag) < Stats.size());
      Stats[Tag] += Count;
    }
    void add(const CodeStats &Other) {
      for (uint32_t i = 0; i < Stats.size(); ++i)
        Stats[i] += Other.Stats[i];
    }
    /// Dumps the stats for the given Cfg. If Func==nullptr, the stats are
    /// labeled as the "final" cumulative stats rather than attributed to a
    /// specific function.
    void dump(const Cfg *Func, GlobalContext *Ctx);

  private:
    std::array<uint32_t, CS_NUM> Stats;
  };

  /// TimerList is a vector of TimerStack objects, with extra methods
  /// to initialize and merge these vectors.
  class TimerList : public std::vector<TimerStack> {
    TimerList(const TimerList &) = delete;
    TimerList &operator=(const TimerList &) = delete;

  public:
    TimerList() = default;
    /// initInto() initializes a target list of timers based on the
    /// current list.  In particular, it creates the same number of
    /// timers, in the same order, with the same names, but initially
    /// empty of timing data.
    void initInto(TimerList &Dest) const {
      if (!BuildDefs::timers())
        return;
      Dest.clear();
      for (const TimerStack &Stack : *this) {
        Dest.push_back(TimerStack(Stack.getName()));
      }
    }
    void mergeFrom(TimerList &Src) {
      if (!BuildDefs::timers())
        return;
      assert(size() == Src.size());
      size_type i = 0;
      for (TimerStack &Stack : *this) {
        assert(Stack.getName() == Src[i].getName());
        Stack.mergeFrom(Src[i]);
        ++i;
      }
    }
  };

  /// ThreadContext contains thread-local data.  This data can be
  /// combined/reduced as needed after all threads complete.
  class ThreadContext {
    ThreadContext(const ThreadContext &) = delete;
    ThreadContext &operator=(const ThreadContext &) = delete;

  public:
    ThreadContext() = default;
    CodeStats StatsFunction;
    CodeStats StatsCumulative;
    TimerList Timers;
  };

public:
  /// The dump stream is a log stream, while the emit stream is where
  /// generated code is written. The error stream is strictly for logging
  /// errors.
  GlobalContext(Ostream *OsDump, Ostream *OsEmit, Ostream *OsError,
                ELFStreamer *ELFStreamer);
  ~GlobalContext();

  void dumpStrings();
  ///
  /// The dump, error, and emit streams need to be used by only one
  /// thread at a time.  This is done by exclusively reserving the
  /// streams via lockStr() and unlockStr().  The OstreamLocker class
  /// can be used to conveniently manage this.
  ///
  /// The model is that a thread grabs the stream lock, then does an
  /// arbitrary amount of work during which far-away callees may grab
  /// the stream and do something with it, and finally the thread
  /// releases the stream lock.  This allows large chunks of output to
  /// be dumped or emitted without risking interleaving from multiple
  /// threads.
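  ///
  /// A rough usage sketch (not taken from the implementation):
  /// \code
  ///   {
  ///     OstreamLocker L(Ctx);   // lockStr() runs in the constructor
  ///     Ctx->getStrDump() << "large chunk of output...\n";
  ///   }                         // unlockStr() runs in the destructor
  /// \endcode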
  void lockStr() { StrLock.lock(); }
  void unlockStr() { StrLock.unlock(); }
  Ostream &getStrDump() { return *StrDump; }
  Ostream &getStrError() { return *StrError; }
  Ostream &getStrEmit() { return *StrEmit; }
  void setStrEmit(Ostream &NewStrEmit) { StrEmit = &NewStrEmit; }

  LockedPtr<ErrorCode> getErrorStatus() {
    return LockedPtr<ErrorCode>(&ErrorStatus, &ErrorStatusLock);
  }

  /// \name Manage Constants.
  /// @{
  // getConstant*() functions are not const because they might add something to
  // the constant pool.
  Constant *getConstantInt(Type Ty, int64_t Value);
  Constant *getConstantInt1(int8_t ConstantInt1) {
    ConstantInt1 &= INT8_C(1);
    switch (ConstantInt1) {
    case 0:
      return getConstantZero(IceType_i1);
    case 1:
      return ConstantTrue;
    default:
      assert(false && "getConstantInt1 not on true/false");
      return getConstantInt1Internal(ConstantInt1);
    }
  }
  Constant *getConstantInt8(int8_t ConstantInt8) {
    switch (ConstantInt8) {
    case 0:
      return getConstantZero(IceType_i8);
    default:
      return getConstantInt8Internal(ConstantInt8);
    }
  }
  Constant *getConstantInt16(int16_t ConstantInt16) {
    switch (ConstantInt16) {
    case 0:
      return getConstantZero(IceType_i16);
    default:
      return getConstantInt16Internal(ConstantInt16);
    }
  }
  Constant *getConstantInt32(int32_t ConstantInt32) {
    switch (ConstantInt32) {
    case 0:
      return getConstantZero(IceType_i32);
    default:
      return getConstantInt32Internal(ConstantInt32);
    }
  }
  Constant *getConstantInt64(int64_t ConstantInt64) {
    switch (ConstantInt64) {
    case 0:
      return getConstantZero(IceType_i64);
    default:
      return getConstantInt64Internal(ConstantInt64);
    }
  }
  Constant *getConstantFloat(float Value);
  Constant *getConstantDouble(double Value);
  /// Returns a symbolic constant.
  Constant *getConstantSymWithEmitString(const RelocOffsetT Offset,
                                         const RelocOffsetArray &OffsetExpr,
                                         GlobalString Name,
                                         const std::string &EmitString);
  Constant *getConstantSym(RelocOffsetT Offset, GlobalString Name);
  Constant *getConstantExternSym(GlobalString Name);
  /// Returns an undef.
  Constant *getConstantUndef(Type Ty);
  /// Returns a zero value.
  Constant *getConstantZero(Type Ty);
  /// getConstantPool() returns a copy of the constant pool for constants of a
  /// given type.
  ConstantList getConstantPool(Type Ty);
  /// Returns a copy of the list of external symbols.
  ConstantList getConstantExternSyms();
  /// @}
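  // A rough usage sketch (not from the implementation). Constants are pooled,
  // so repeated requests for the same value are expected to yield the same
  // Constant pointer:
  //
  //   Constant *Four = Ctx->getConstantInt32(4);
  //   Constant *Pi = Ctx->getConstantFloat(3.14159f);
  //   Constant *Zero = Ctx->getConstantZero(IceType_i64);
  //   assert(Four == Ctx->getConstantInt32(4)); // pooled: same pointer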
  Constant *getRuntimeHelperFunc(RuntimeHelper FuncID) const {
    assert(FuncID < RuntimeHelper::H_Num);
    Constant *Result = RuntimeHelperFunc[static_cast<size_t>(FuncID)];
    assert(Result != nullptr && "No such runtime helper function");
    return Result;
  }
  GlobalString getGlobalString(const std::string &Name);

  /// Returns the list of registered jump tables.
  JumpTableDataList getJumpTables();
  /// Adds JumpTable to the list of known jump tables, for a posteriori
  /// emission.
  void addJumpTableData(JumpTableData JumpTable);

  /// Allocate data of type T using the global allocator. We allow entities
  /// allocated from this global allocator to be either trivially or
  /// non-trivially destructible. We optimize the case when T is trivially
  /// destructible by not registering a destructor. Destructors will be invoked
  /// during GlobalContext destruction in the reverse object creation order.
  template <typename T>
  typename std::enable_if<std::is_trivially_destructible<T>::value, T>::type *
  allocate() {
    return getAllocator()->Allocate<T>();
  }

  template <typename T>
  typename std::enable_if<!std::is_trivially_destructible<T>::value, T>::type *
  allocate() {
    T *Ret = getAllocator()->Allocate<T>();
    getDestructors()->emplace_back([Ret]() { Ret->~T(); });
    return Ret;
  }
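
  // A rough usage sketch (hypothetical types, not part of this header): the
  // trivially destructible overload only bumps the arena pointer, while the
  // other overload also registers a destructor to run when the GlobalContext
  // is destroyed.
  //
  //   struct PodData { int X; };             // trivially destructible
  //   struct Handle { ~Handle(); };          // needs its destructor run
  //   PodData *P = Ctx->allocate<PodData>(); // no destructor registered
  //   Handle *H = Ctx->allocate<Handle>();   // destructor registered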

  ELFObjectWriter *getObjectWriter() const { return ObjectWriter.get(); }

  /// Reset stats at the beginning of a function.
  void resetStats();
  void dumpStats(const Cfg *Func = nullptr);
  void statsUpdateEmitted(uint32_t InstCount);
  void statsUpdateRegistersSaved(uint32_t Num);
  void statsUpdateFrameBytes(uint32_t Bytes);
  void statsUpdateSpills();
  void statsUpdateFills();

  /// Number of Randomized or Pooled Immediates
  void statsUpdateRPImms();

  /// These are predefined TimerStackIdT values.
  enum TimerStackKind { TSK_Default = 0, TSK_Funcs, TSK_Num };

  /// newTimerStackID() creates a new TimerStack in the global space. It does
  /// not affect any TimerStack objects in TLS.
  TimerStackIdT newTimerStackID(const std::string &Name);
  /// dumpTimers() dumps the global timer data.  This assumes all the
  /// thread-local copies of timer data have been merged into the global timer
  /// data.
  void dumpTimers(TimerStackIdT StackID = TSK_Default,
                  bool DumpCumulative = true);
  void dumpLocalTimers(const std::string &TimerNameOverride,
                       TimerStackIdT StackID = TSK_Default,
                       bool DumpCumulative = true);
  /// The following methods affect only the calling thread's TLS timer data.
  TimerIdT getTimerID(TimerStackIdT StackID, const std::string &Name);
  void pushTimer(TimerIdT ID, TimerStackIdT StackID);
  void popTimer(TimerIdT ID, TimerStackIdT StackID);
  void resetTimer(TimerStackIdT StackID);
  std::string getTimerName(TimerStackIdT StackID);
  void setTimerName(TimerStackIdT StackID, const std::string &NewName);
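  // A rough usage sketch (not from the implementation): manually timing a
  // region on the calling thread's TLS timer stack. The TimerMarker class
  // declared later in this file wraps this push/pop pattern in an RAII helper.
  //
  //   TimerIdT ID = Ctx->getTimerID(GlobalContext::TSK_Default, "lowering");
  //   Ctx->pushTimer(ID, GlobalContext::TSK_Default);
  //   // ... timed work ...
  //   Ctx->popTimer(ID, GlobalContext::TSK_Default);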

  /// This is the first work item sequence number that the parser produces, and
  /// correspondingly the first sequence number that the emitter thread will
  /// wait for. Start numbering at 1 to leave room for a sentinel, in case e.g.
  /// we wish to inject items with a special sequence number that may be
  /// executed out of order.
  static constexpr uint32_t getFirstSequenceNumber() { return 1; }
  /// Adds a newly parsed and constructed function to the Cfg work queue.
  /// Notifies any idle workers that a new function is available for
  /// translating. May block if the work queue is too large, in order to
  /// control memory footprint.
  void optQueueBlockingPush(std::unique_ptr<OptWorkItem> Item);
  /// Takes a Cfg from the work queue for translating. May block if the work
  /// queue is currently empty. Returns nullptr if there is no more work - the
  /// queue is empty and either optQueueNotifyEnd() has been called or the
  /// Sequential flag was set.
  std::unique_ptr<OptWorkItem> optQueueBlockingPop();
  /// Notifies that no more work will be added to the work queue.
  void optQueueNotifyEnd() { OptQ.notifyEnd(); }
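  // A rough consumer-side sketch (not the actual translateFunctions() body):
  // worker threads pop items until the queue signals that no more work will
  // arrive, at which point optQueueBlockingPop() returns nullptr.
  //
  //   while (std::unique_ptr<OptWorkItem> Item = Ctx->optQueueBlockingPop()) {
  //     std::unique_ptr<Cfg> Func = Item->getParsedCfg();
  //     // ... translate Func ...
  //   }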

  /// Emit file header for output file.
  void emitFileHeader();

  void lowerConstants();

  void lowerJumpTables();

  /// Emit target specific read-only data sections if any. E.g., for MIPS this
  /// generates a .MIPS.abiflags section.
  void emitTargetRODataSections();

  void emitQueueBlockingPush(std::unique_ptr<EmitterWorkItem> Item);
  std::unique_ptr<EmitterWorkItem> emitQueueBlockingPop();
  void emitQueueNotifyEnd() { EmitQ.notifyEnd(); }

  void initParserThread();
  void startWorkerThreads();

  void waitForWorkerThreads();

  /// Sets the instrumentation object to use.
  void setInstrumentation(std::unique_ptr<Instrumentation> Instr) {
    if (!BuildDefs::minimal())
      Instrumentor = std::move(Instr);
  }

  void instrumentFunc(Cfg *Func) {
    if (!BuildDefs::minimal() && Instrumentor)
      Instrumentor->instrumentFunc(Func);
  }

  /// Translation thread startup routine.
  void translateFunctionsWrapper(ThreadContext *MyTLS);
  /// Translate functions from the Cfg queue until the queue is empty.
  void translateFunctions();

  /// Emitter thread startup routine.
  void emitterWrapper(ThreadContext *MyTLS);
  /// Emit functions and global initializers from the emitter queue until the
  /// queue is empty.
  void emitItems();

  /// Uses DataLowering to lower Globals. Side effects:
  ///  - discards the initializer list for each global variable in Globals.
  ///  - clears the Globals array.
  void lowerGlobals(const std::string &SectionSuffix);

  void dumpConstantLookupCounts();

  /// DisposeGlobalVariablesAfterLowering controls whether the memory used by
  /// global variables can be reclaimed right after they have been lowered.
  /// @{
  bool getDisposeGlobalVariablesAfterLowering() const {
    return DisposeGlobalVariablesAfterLowering;
  }

  void setDisposeGlobalVariablesAfterLowering(bool Value) {
    DisposeGlobalVariablesAfterLowering = Value;
  }
  /// @}

  LockedPtr<StringPool> getStrings() const {
    return LockedPtr<StringPool>(Strings.get(), &StringsLock);
  }

  LockedPtr<VariableDeclarationList> getGlobals() {
    return LockedPtr<VariableDeclarationList>(&Globals, &InitAllocLock);
  }

  /// Number of function blocks that can be queued before waiting for
  /// translation threads to consume them.
  static constexpr size_t MaxOptQSize = 1 << 16;

private:
  // Try to ensure mutexes are allocated on separate cache lines.

  // Destructors collaborate with Allocator
  ICE_CACHELINE_BOUNDARY;
  // Managed by getAllocator()
  mutable GlobalLockType AllocLock;
  ArenaAllocator Allocator;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getInitializerAllocator()
  mutable GlobalLockType InitAllocLock;
  VariableDeclarationList Globals;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getDestructors()
  using DestructorArray = std::vector<std::function<void()>>;
  mutable GlobalLockType DestructorsLock;
  DestructorArray Destructors;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getStrings()
  mutable GlobalLockType StringsLock;
  std::unique_ptr<StringPool> Strings;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getConstPool()
  mutable GlobalLockType ConstPoolLock;
  std::unique_ptr<ConstantPool> ConstPool;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getJumpTableList()
  mutable GlobalLockType JumpTablesLock;
  JumpTableDataList JumpTableList;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getErrorStatus()
  mutable GlobalLockType ErrorStatusLock;
  ErrorCode ErrorStatus;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getStatsCumulative()
  mutable GlobalLockType StatsLock;
  CodeStats StatsCumulative;

  ICE_CACHELINE_BOUNDARY;
  // Managed by getTimers()
  mutable GlobalLockType TimerLock;
  TimerList Timers;

  ICE_CACHELINE_BOUNDARY;
  /// StrLock is a global lock on the dump and emit output streams.
  using StrLockType = std::mutex;
  StrLockType StrLock;
  Ostream *StrDump;  /// Stream for dumping / diagnostics
  Ostream *StrEmit;  /// Stream for code emission
  Ostream *StrError; /// Stream for logging errors.

  // True if waitForWorkerThreads() has been called.
  std::atomic_bool WaitForWorkerThreadsCalled;

  ICE_CACHELINE_BOUNDARY;

  // TODO(jpp): move to EmitterContext.
  std::unique_ptr<ELFObjectWriter> ObjectWriter;
  // Value defining when to wake up the main parse thread.
  const size_t OptQWakeupSize;
  BoundedProducerConsumerQueue<OptWorkItem, MaxOptQSize> OptQ;
  BoundedProducerConsumerQueue<EmitterWorkItem> EmitQ;
  // DataLowering is only ever used by a single thread at a time (either in
  // emitItems(), or in IceCompiler::run before the compilation is over.)
  // TODO(jpp): move to EmitterContext.
  std::unique_ptr<TargetDataLowering> DataLowering;
  /// If !HasSeenCode, Subzero will accumulate all Globals (which are "true"
  /// program global variables) until the first code WorkItem is seen.
  // TODO(jpp): move to EmitterContext.
  bool HasSeenCode = false;
  // If Instrumentor is not empty then it will be used to instrument globals
  // and CFGs.
  std::unique_ptr<Instrumentation> Instrumentor = nullptr;
  /// Indicates if global variable declarations can be disposed of right after
  /// lowering.
  bool DisposeGlobalVariablesAfterLowering = true;
  Constant *ConstZeroForType[IceType_NUM];
  Constant *ConstantTrue;
  // Holds the constants representing each runtime helper function.
  Constant *RuntimeHelperFunc[static_cast<size_t>(RuntimeHelper::H_Num)];

  Constant *getConstantZeroInternal(Type Ty);
  Constant *getConstantIntInternal(Type Ty, int64_t Value);
  Constant *getConstantInt1Internal(int8_t ConstantInt1);
  Constant *getConstantInt8Internal(int8_t ConstantInt8);
  Constant *getConstantInt16Internal(int16_t ConstantInt16);
  Constant *getConstantInt32Internal(int32_t ConstantInt32);
  Constant *getConstantInt64Internal(int64_t ConstantInt64);
  LockedPtr<ArenaAllocator> getAllocator() {
    return LockedPtr<ArenaAllocator>(&Allocator, &AllocLock);
  }
  LockedPtr<VariableDeclarationList> getInitializerAllocator() {
    return LockedPtr<VariableDeclarationList>(&Globals, &InitAllocLock);
  }
  LockedPtr<ConstantPool> getConstPool() {
    return LockedPtr<ConstantPool>(ConstPool.get(), &ConstPoolLock);
  }
  LockedPtr<JumpTableDataList> getJumpTableList() {
    return LockedPtr<JumpTableDataList>(&JumpTableList, &JumpTablesLock);
  }
  LockedPtr<CodeStats> getStatsCumulative() {
    return LockedPtr<CodeStats>(&StatsCumulative, &StatsLock);
  }
  LockedPtr<TimerList> getTimers() {
    return LockedPtr<TimerList>(&Timers, &TimerLock);
  }
  LockedPtr<DestructorArray> getDestructors() {
    return LockedPtr<DestructorArray>(&Destructors, &DestructorsLock);
  }

  void accumulateGlobals(std::unique_ptr<VariableDeclarationList> Globls) {
    LockedPtr<VariableDeclarationList> _(&Globals, &InitAllocLock);
    if (Globls != nullptr) {
      Globals.merge(Globls.get());
      if (!BuildDefs::minimal() && Instrumentor != nullptr)
        Instrumentor->setHasSeenGlobals();
    }
  }

  void lowerGlobalsIfNoCodeHasBeenSeen() {
    if (HasSeenCode)
      return;
    constexpr char NoSuffix[] = "";
    lowerGlobals(NoSuffix);
    HasSeenCode = true;
  }

  llvm::SmallVector<ThreadContext *, 128> AllThreadContexts;
  llvm::SmallVector<std::thread, 128> TranslationThreads;
  llvm::SmallVector<std::thread, 128> EmitterThreads;
  // Each thread has its own TLS pointer which is also held in
  // AllThreadContexts.
  ICE_TLS_DECLARE_FIELD(ThreadContext *, TLS);

public:
  static void TlsInit();
};

/// Helper class to push and pop a timer marker. The constructor pushes a
/// marker, and the destructor pops it. This is for convenient timing of
/// regions of code.
class TimerMarker {
  TimerMarker() = delete;
  TimerMarker(const TimerMarker &) = delete;
  TimerMarker &operator=(const TimerMarker &) = delete;

public:
  TimerMarker(TimerIdT ID, GlobalContext *Ctx,
              TimerStackIdT StackID = GlobalContext::TSK_Default)
      : ID(ID), Ctx(Ctx), StackID(StackID) {
    if (BuildDefs::timers())
      push();
  }
  TimerMarker(TimerIdT ID, const Cfg *Func,
              TimerStackIdT StackID = GlobalContext::TSK_Default)
      : ID(ID), Ctx(nullptr), StackID(StackID) {
    // Ctx gets set at the beginning of pushCfg().
    if (BuildDefs::timers())
      pushCfg(Func);
  }
  TimerMarker(GlobalContext *Ctx, const std::string &FuncName)
      : ID(getTimerIdFromFuncName(Ctx, FuncName)), Ctx(Ctx),
        StackID(GlobalContext::TSK_Funcs) {
    if (BuildDefs::timers())
      push();
  }

  ~TimerMarker() {
    if (BuildDefs::timers() && Active)
      Ctx->popTimer(ID, StackID);
  }

private:
  void push();
  void pushCfg(const Cfg *Func);
  static TimerIdT getTimerIdFromFuncName(GlobalContext *Ctx,
                                         const std::string &FuncName);
  const TimerIdT ID;
  GlobalContext *Ctx;
  const TimerStackIdT StackID;
  bool Active = false;
};
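
// A rough usage sketch (hypothetical function, not part of this header): the
// marker pushes its timer when constructed and pops it when the enclosing
// scope exits.
//
//   void lowerSomething(GlobalContext *Ctx, TimerIdT ID) {
//     TimerMarker T(ID, Ctx); // pushes onto the default timer stack
//     // ... work being timed ...
//   }                         // popped here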

/// Helper class for locking the streams and then automatically unlocking them.
class OstreamLocker {
private:
  OstreamLocker() = delete;
  OstreamLocker(const OstreamLocker &) = delete;
  OstreamLocker &operator=(const OstreamLocker &) = delete;

public:
  explicit OstreamLocker(GlobalContext *Ctx) : Ctx(Ctx) { Ctx->lockStr(); }
  ~OstreamLocker() { Ctx->unlockStr(); }

private:
  GlobalContext *const Ctx;
};

} // end of namespace Ice

#endif // SUBZERO_SRC_ICEGLOBALCONTEXT_H