/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

16 #include "tensorflow/compiler/xla/service/dump.h"
17 
18 #include "absl/strings/ascii.h"
19 #include "tensorflow/compiler/xla/service/hlo_graph_dumper.h"
20 #include "tensorflow/compiler/xla/service/hlo_module.h"
21 #include "tensorflow/compiler/xla/service/hlo_proto_util.h"
22 #include "tensorflow/compiler/xla/util.h"
23 #include "tensorflow/core/lib/core/status.h"
24 #include "tensorflow/core/lib/io/path.h"
25 #include "tensorflow/core/lib/strings/proto_serialization.h"
26 #include "tensorflow/core/platform/env.h"
27 #include "tensorflow/core/platform/path.h"
28 #include "tensorflow/core/platform/regexp.h"
29 
namespace xla {

namespace {

using absl::StrCat;
using absl::StrFormat;
using absl::string_view;

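// Canonicalized dumping options derived from a DebugOptions proto.  The
// constructor fills in values the user implied but did not set explicitly.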
struct CanonicalDebugOptions {
  explicit CanonicalDebugOptions(const DebugOptions& opts)
      : dump_to(opts.xla_dump_to()),
        dump_as_text(opts.xla_dump_hlo_as_text()),
        dump_as_proto(opts.xla_dump_hlo_as_proto()),
        dump_as_dot(opts.xla_dump_hlo_as_dot()),
        dump_as_html(opts.xla_dump_hlo_as_html()),
        dump_as_url(opts.xla_dump_hlo_as_url()),
        dump_fusion_visualization(opts.xla_dump_fusion_visualization()),
        dump_snapshots(opts.xla_dump_hlo_snapshots()),
        dump_include_timestamp(opts.xla_dump_include_timestamp()),
        dump_max_hlo_modules(opts.xla_dump_max_hlo_modules()),
        dump_module_metadata(opts.xla_dump_module_metadata()) {
    // This constructor examines the values in `opts` and turns on other flags
    // based on what we think is the user's intent.  To reduce confusion about
    // what was a user-specified value versus an extrapolated value, within this
    // function we treat this struct's members as write-only, and read only from
    // `opts`.

    // Did the user specify an explicit format for dumping?
    bool output_format_other_than_url_specified =
        opts.xla_dump_hlo_as_text() || opts.xla_dump_hlo_as_proto() ||
        opts.xla_dump_hlo_as_dot() || opts.xla_dump_hlo_as_html() ||
        opts.xla_dump_hlo_snapshots();
    bool output_format_specified =
        output_format_other_than_url_specified || opts.xla_dump_hlo_as_url();

    // If we haven't specified an output format, default to dumping as text.
    if (!output_format_specified) {
      dump_as_text = true;
    }

    // If dump_to is empty, default to dumping to stdout, so long as some dump
    // format other than dump-as-url was specified.  If the user only specified
    // --xla_dump_hlo_as_url, then don't dump to stdout; that is likely noise
    // they don't want.
    if (opts.xla_dump_to().empty() && output_format_other_than_url_specified) {
      dump_to = "-";
    }

    // If we specified a regular expression restricting which modules to dump,
    // respect that.
    //
    // If we didn't specify which modules to dump but we passed some other flag
    // which implies dumping modules, dump all modules.
    //
    // Otherwise, don't dump any HLO modules.
    if (!opts.xla_dump_hlo_module_re().empty()) {
      // An RE2 object is not copyable, and we can't capture "by move", so we
      // resort to this hack.
      string pattern = opts.xla_dump_hlo_module_re();
      should_dump_module = [pattern](string_view module_name) {
        return RE2::PartialMatch(module_name, pattern);
      };
    } else if (!opts.xla_dump_hlo_pass_re().empty() ||
               !opts.xla_dump_to().empty() || output_format_specified) {
      should_dump_module = [](string_view) { return true; };
    } else {
      should_dump_module = [](string_view) { return false; };
    }

    // Initialize should_dump_pass.  This one is easy: We only dump per-pass
    // data if the user asked for it explicitly.
    if (!opts.xla_dump_hlo_pass_re().empty()) {
      string pattern = opts.xla_dump_hlo_pass_re();
      should_dump_pass = [pattern](string_view pass_name) {
        return RE2::PartialMatch(pass_name, pattern);
      };
    } else {
      should_dump_pass = [](string_view) { return false; };
    }

    // Output dirs "sponge" and "test_undeclared_outputs_dir" (case-insensitive)
    // have a special meaning: Dump into the directory specified by the
    // environment variable TEST_UNDECLARED_OUTPUTS_DIR.
    string dump_to_lower = absl::AsciiStrToLower(opts.xla_dump_to());
    if (dump_to_lower == "sponge" ||
        dump_to_lower == "test_undeclared_outputs_dir") {
      if (!tensorflow::io::GetTestUndeclaredOutputsDir(&dump_to)) {
        LOG(ERROR) << "--xla_dump_to=" << opts.xla_dump_to()
                   << ", but environment variable TEST_UNDECLARED_OUTPUTS_DIR "
                      "is not set, so cannot dump anywhere.";
        should_dump_module = [](string_view) { return false; };
        should_dump_pass = [](string_view) { return false; };
      }
    }
  }

  bool dumping_to_stdout() const { return dump_to == "-"; }

  string dump_to;
  std::function<bool(string_view module_name)> should_dump_module;
  std::function<bool(string_view pass_name)> should_dump_pass;

  // dump_ir isn't present here because this file is mostly concerned with
  // dumping HLO.
  bool dump_as_text;
  bool dump_as_proto;
  bool dump_as_dot;
  bool dump_as_html;
  bool dump_as_url;
  bool dump_fusion_visualization;
  bool dump_snapshots;
  bool dump_include_timestamp;
  int64 dump_max_hlo_modules;
  bool dump_module_metadata;
};

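// Writes `contents` to a file named `filename` inside opts.dump_to.  Returns
// the path of the dump file, or nullopt if the dump was skipped (dumping to
// stdout, no dump directory, or the module limit was reached).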
absl::optional<std::string> DumpToFileInDirImpl(
    string_view filename, string_view contents,
    const CanonicalDebugOptions& opts) {
  if (opts.dumping_to_stdout()) {
    LOG(ERROR) << "Refusing to write " << filename
               << " to stdout.  Pass --xla_dump_to=<path> to write to a file.";
    return absl::nullopt;
  }

  if (opts.dump_to.empty()) {
    return absl::nullopt;
  }

  const string& dir = opts.dump_to;
  VLOG(1) << "Dumping " << filename << " to " << dir;

  tensorflow::Env* env = tensorflow::Env::Default();
  // Two threads can race to observe the absence of the dump directory and
  // simultaneously try to create it, causing the "losing" thread to get a
  // "directory already exists" error.  We can work around this by checking
  // again whether the dir exists.
  if (!env->IsDirectory(dir).ok()) {
    auto status = env->RecursivelyCreateDir(dir);
    if (!status.ok() && !env->IsDirectory(dir).ok()) {
      LOG(ERROR) << "Could not create directory " << dir
                 << " for dumping XLA debug data: " << status;
      return absl::nullopt;
    }
  }

  // Make sure we are not going to dump more modules than the user has asked
  // for.
  if (opts.dump_max_hlo_modules > 0) {
    std::vector<string> matches;
    auto pattern = tensorflow::io::JoinPath(dir, "*module_*.0000.*");
    auto status = env->GetMatchingPaths(pattern, &matches);
    if (!status.ok()) {
      LOG(ERROR) << "Could not get matching paths for pattern " << pattern
                 << ": " << status;
    }
    if (matches.size() > opts.dump_max_hlo_modules) {
      LOG(ERROR) << "Have already dumped " << matches.size()
                 << " modules, more than the limit of "
                 << opts.dump_max_hlo_modules;
      return absl::nullopt;
    }
  }

  string file_path =
      tensorflow::io::JoinPath(dir, SanitizeFileName(string(filename)));
  auto status = tensorflow::WriteStringToFile(env, file_path, contents);
  if (!status.ok()) {
    LOG(ERROR) << "Could not write XLA debug data to " << file_path << ": "
               << status;
  }

  return file_path;
}

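// Like DumpToFileInDirImpl, but writes `contents` to stdout (and returns
// nullopt) when opts.dump_to is "-".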
absl::optional<std::string> DumpToFileInDirOrStdoutImpl(
    string_view filename, string_view contents,
    const CanonicalDebugOptions& opts) {
  // Dump to stdout if that's called for.
  if (opts.dumping_to_stdout()) {
    std::cout << "*** Begin " << filename << " ***\n"
              << contents << "\n*** End " << filename << " ***" << std::endl;
    return absl::nullopt;
  }

  // Otherwise, dump to a file.
  return DumpToFileInDirImpl(filename, contents, opts);
}

// Returns full file paths of all dumps of the module.
std::vector<std::string> DumpHloModuleImpl(const HloModule& module,
                                           const BufferAssignment* buffer_assn,
                                           const HloExecutionProfile* profile,
                                           string_view prefix,
                                           string_view suffix,
                                           const CanonicalDebugOptions& opts) {
  string filename = FilenameFor(module, prefix, suffix);

  std::vector<absl::optional<std::string>> file_paths;

  if (opts.dump_as_text) {
    file_paths.push_back(DumpToFileInDirOrStdoutImpl(StrCat(filename, ".txt"),
                                                     module.ToString(), opts));
    if (buffer_assn) {
      file_paths.push_back(DumpToFileInDirOrStdoutImpl(
          StrCat(filename, "-buffer-assignment.txt"), buffer_assn->ToString(),
          opts));
    }
  }

  if (opts.dump_as_proto) {
    HloProto module_proto =
        buffer_assn ? MakeHloProto(module, *buffer_assn) : MakeHloProto(module);
    string pb;
    if (!tensorflow::SerializeToStringDeterministic(module_proto, &pb)) {
      pb = "Failed to serialize HLO module proto.";
    }
    file_paths.push_back(
        DumpToFileInDirImpl(StrCat(filename, ".hlo.pb"), pb, opts));
  }

  auto render_graph = [&](RenderedGraphFormat format) {
    StatusOr<string> rendered_graph = RenderGraph(
        *module.entry_computation(),
        /*label=*/filename, module.config().debug_options(), format, profile);
    if (rendered_graph.ok()) {
      return std::move(rendered_graph).ValueOrDie();
    }
    return StrFormat("Error rendering graph: %s",
                     rendered_graph.status().ToString());
  };

  if (opts.dump_as_dot) {
    file_paths.push_back(
        DumpToFileInDirImpl(StrFormat("%s.dot", filename),
                            render_graph(RenderedGraphFormat::kDot), opts));
  }

  if (opts.dump_as_html) {
    file_paths.push_back(
        DumpToFileInDirImpl(StrFormat("%s.html", filename),
                            render_graph(RenderedGraphFormat::kHtml), opts));
  }

  if (opts.dump_fusion_visualization) {
    for (const HloComputation* computation :
         module.MakeNonfusionComputations()) {
      StatusOr<string> rendered_graph = RenderGraph(
          *computation,
          /*label=*/absl::StrCat(filename, "_", computation->name()),
          module.config().debug_options(),
          RenderedGraphFormat::kFusionVisualization, profile);
      file_paths.push_back(DumpToFileInDirImpl(
          StrFormat("%s_%s_fusion_visualization.html", filename,
                    computation->name()),
          rendered_graph.ok() ? *rendered_graph
                              : StrFormat("Error rendering graph: %s",
                                          rendered_graph.status().ToString()),
          opts));
    }
  }

  // Special case for rendering graphs as URLs.  We'll dump them to a file
  // because why not, but we always log them to stdout as well.
  if (opts.dump_as_url) {
    string url = render_graph(RenderedGraphFormat::kUrl);
    std::cout << filename << " --> " << url << std::endl;
    if (!opts.dumping_to_stdout()) {
      file_paths.push_back(
          DumpToFileInDirImpl(StrFormat("%s.url", filename), url, opts));
    }
  }

  std::vector<std::string> dumped_file_paths;
  for (const absl::optional<std::string>& path : file_paths) {
    if (path.has_value()) {
      dumped_file_paths.push_back(*path);
    }
  }
  return dumped_file_paths;
}

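// Dumps `metadata` as a textproto, at most once per canonical module id.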
void DumpHloModuleMetadata(const HloModuleMetadataProto& metadata,
                           const CanonicalDebugOptions& opts,
                           absl::flat_hash_set<int64>* dumped_module_ids) {
  // Return if metadata for this module has already been dumped.
  if (!dumped_module_ids->insert(metadata.canonical_module_id()).second) {
    return;
  }
  std::string filename = absl::StrFormat("module_%04d.metadata.textproto",
                                         metadata.canonical_module_id());
  std::string content;
  if (tensorflow::protobuf::TextFormat::PrintToString(metadata, &content)) {
    DumpToFileInDirImpl(filename, content, opts);
  } else {
    LOG(ERROR) << "Failed to convert HloModuleMetadataProto to text.";
  }
}

static tensorflow::mutex mu(tensorflow::LINKER_INITIALIZED);

// Maps a module's unique ID to a counter indicating how many times we've
// dumped this module during the compilation pipeline.  This lets us keep the
// filenames ordered nicely.
//
// Entries added here leak forever; we have no way to GC them when a module
// dies.  But we only add an entry if dumping is enabled for this module, and
// dumping a module leaks buffer space in stdout or bytes on disk *way* faster
// than this hashtable leaks memory.
static auto& module_id_to_step_number TF_GUARDED_BY(mu) =
    *new absl::flat_hash_map<int64, int64>();

// Maps a module's unique ID to a timestamp indicating when we first dumped
// this module during the compilation pipeline and when we first started
// compiling this module.  This lets us keep the filenames ordered nicely.
//
// Entries added here leak forever; we have no way to GC them when a module
// dies.  But we only add an entry if dumping is enabled for this module, and
// dumping a module leaks buffer space in stdout or bytes on disk *way* faster
// than this hashtable leaks memory.
static auto& module_id_to_timestamp TF_GUARDED_BY(mu) =
    *new absl::flat_hash_map<int64, uint64>();

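// Returns the step number to use for the next dump of `module`, incrementing
// the per-module counter.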
int64 StepNumberForModule(const HloModule& module) {
  tensorflow::mutex_lock lock(mu);
  return module_id_to_step_number[module.unique_id()]++;
}

}  // namespace

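// Returns a timestamp (microseconds since the epoch) recorded the first time
// this is called for `module`, or "" if timestamps are disabled in its debug
// options.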
string TimestampFor(const HloModule& module) {
  if (!module.config().debug_options().xla_dump_include_timestamp()) {
    return "";
  }
  tensorflow::mutex_lock lock(mu);
  auto timestamp_emplace = module_id_to_timestamp.try_emplace(
      module.unique_id(), tensorflow::Env::Default()->NowMicros());
  return std::to_string(timestamp_emplace.first->second);
}

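// Builds a filename of the form "<prefix>.module_<unique_id>.<suffix>"; the
// leading "<prefix>." is omitted when `prefix` is empty.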
string FilenameFor(const HloModule& module, string_view prefix,
                   string_view suffix) {
  return StrFormat("%s%smodule_%04d.%s", prefix, prefix.empty() ? "" : ".",
                   module.unique_id(), suffix);
}

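// Dumps `contents` to a file named after `module` in the configured dump
// directory.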
void DumpToFileInDir(const HloModule& module, string_view file_prefix,
                     string_view file_suffix, string_view contents) {
  DumpToFileInDirImpl(FilenameFor(module, file_prefix, file_suffix), contents,
                      CanonicalDebugOptions(module.config().debug_options()));
}

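// As above, but writes to stdout instead when --xla_dump_to=- is set.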
void DumpToFileInDirOrStdout(const HloModule& module, string_view file_prefix,
                             string_view file_suffix, string_view contents) {
  DumpToFileInDirOrStdoutImpl(
      FilenameFor(module, file_prefix, file_suffix), contents,
      CanonicalDebugOptions(module.config().debug_options()));
}

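// Dumps `execution_options` into the dump directory (if it exists) as a text
// or binary proto, depending on the requested dump format.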
void DumpExecutionOptions(const ExecutionOptions& execution_options,
                          const DebugOptions& debug_options) {
  CanonicalDebugOptions opts(debug_options);
  tensorflow::Env* env = tensorflow::Env::Default();
  const string& dir = opts.dump_to;
  if (env->IsDirectory(dir).ok()) {
    string filename = tensorflow::io::JoinPath(dir, "execution_options");
    Status status;
    if (opts.dump_as_text) {
      status = tensorflow::WriteTextProto(env, absl::StrCat(filename, ".txt"),
                                          execution_options);
    } else {
      status = tensorflow::WriteBinaryProto(env, absl::StrCat(filename, ".pb"),
                                            execution_options);
    }
    if (!status.ok()) {
      LOG(ERROR) << "Could not write XLA debug data to " << filename << ": "
                 << status;
    }
  }
}

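// Dumps `module` under the filename suffix `name` if dumping is enabled for
// it, optionally attaching a buffer assignment or execution profile.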
void DumpHloModuleIfEnabled(const HloModule& module, string_view name) {
  CanonicalDebugOptions opts(module.config().debug_options());
  if (opts.should_dump_module(module.name())) {
    DumpHloModuleImpl(module, /*buffer_assn=*/nullptr, /*profile=*/nullptr,
                      TimestampFor(module), name, opts);
  }
}

void DumpHloModuleIfEnabled(const HloModule& module,
                            const BufferAssignment& buffer_assn,
                            string_view name) {
  CanonicalDebugOptions opts(module.config().debug_options());
  if (opts.should_dump_module(module.name())) {
    DumpHloModuleImpl(module, &buffer_assn, /*profile=*/nullptr,
                      TimestampFor(module), name, opts);
  }
}

void DumpHloModuleIfEnabled(const HloModule& module,
                            const HloExecutionProfile& profile,
                            string_view name) {
  CanonicalDebugOptions opts(module.config().debug_options());
  if (opts.should_dump_module(module.name())) {
    DumpHloModuleImpl(module, /*buffer_assn=*/nullptr, &profile,
                      TimestampFor(module), name, opts);
  }
}

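// Returns true if `opts` enables dumping for the module with the given name.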
bool DumpingEnabledForHloModule(string_view hlo_module_name,
                                const DebugOptions& opts) {
  return CanonicalDebugOptions(opts).should_dump_module(hlo_module_name);
}

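// Returns true if dumps would go to stdout (i.e. --xla_dump_to=-).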
bool DumpingToStdout(const DebugOptions& opts) {
  return CanonicalDebugOptions(opts).dumping_to_stdout();
}

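// Dumps `module` between two passes of a pipeline if dumping is enabled for
// the module and for at least one of the two passes.  Returns the paths of
// the dumped files.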
std::vector<std::string> DumpHloModuleBetweenPassesIfEnabled(
    string_view pipeline_name, string_view before_pass_name,
    string_view after_pass_name, const HloModule& module) {
  CanonicalDebugOptions opts(module.config().debug_options());
  if (!opts.should_dump_module(module.name())) {
    return {};
  }

  if (!opts.should_dump_pass(before_pass_name) &&
      !opts.should_dump_pass(after_pass_name)) {
    return {};
  }

  int64 step_number = StepNumberForModule(module);
  std::string timestamp = TimestampFor(module);

  string filename_suffix =
      StrFormat("%04d.%s.after_%s.before_%s", step_number, pipeline_name,
                after_pass_name, before_pass_name);
  return DumpHloModuleImpl(module, /*buffer_assn=*/nullptr, /*profile=*/nullptr,
                           timestamp, filename_suffix, opts);
}

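// Dumps `module` at step `step_name` within `pass_name` if dumping is enabled
// for both the module and the pass.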
void DumpHloModuleDuringPassIfEnabled(string_view pass_name,
                                      string_view step_name,
                                      const HloModule& module) {
  CanonicalDebugOptions opts(module.config().debug_options());
  if (!opts.should_dump_module(module.name()) ||
      !opts.should_dump_pass(pass_name)) {
    return;
  }

  int64 step_number = StepNumberForModule(module);
  std::string timestamp = TimestampFor(module);

  string filename_suffix =
      StrFormat("%04d.%s.%s", step_number, pass_name, step_name);
  DumpHloModuleImpl(module, /*buffer_assn=*/nullptr, /*profile=*/nullptr,
                    timestamp, filename_suffix, opts);
}

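// Dumps `snapshot` as an .hlo_snapshot.pb file if snapshot dumping is enabled
// for `module`.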
void DumpHloSnapshotIfEnabled(const HloModule& module,
                              const HloSnapshot& snapshot) {
  CanonicalDebugOptions opts(module.config().debug_options());
  if (!opts.should_dump_module(module.name()) || !opts.dump_snapshots) {
    return;
  }
  int64 execution_count;
  uint64 timestamp;
  {
    static auto& module_id_to_execution_count TF_GUARDED_BY(mu) =
        *new absl::flat_hash_map<int64, int64>();
    tensorflow::mutex_lock lock(mu);
    execution_count = module_id_to_execution_count[module.unique_id()]++;
    auto timestamp_emplace = module_id_to_timestamp.try_emplace(
        module.unique_id(), tensorflow::Env::Default()->NowMicros());
    timestamp = timestamp_emplace.first->second;
  }
  string filename =
      StrCat(FilenameFor(module, std::to_string(timestamp),
                         StrFormat("execution_%04d", execution_count)),
             ".hlo_snapshot.pb");
  if (opts.dumping_to_stdout()) {
    LOG(ERROR) << "Refusing to write HLO snapshot proto for " << filename
               << " to stdout.  Pass --xla_dump_to=<path> to write to a file.";
    return;
  }
  string pb;
  if (!tensorflow::SerializeToStringDeterministic(snapshot, &pb)) {
    LOG(ERROR) << "Failed to serialize HLO snapshot proto " << filename;
  }
  DumpToFileInDirImpl(filename, pb, opts);
}

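// Overload for when no HloModule is available; executions are counted by the
// snapshot's module name rather than a unique module id.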
void DumpHloSnapshotIfEnabled(const HloSnapshot& snapshot,
                              const DebugOptions& opts) {
  CanonicalDebugOptions canonical_opts(opts);
  string name = snapshot.hlo().hlo_module().name();
  if (!canonical_opts.should_dump_module(name) ||
      !canonical_opts.dump_snapshots) {
    return;
  }

  // We don't have a unique id for an HloSnapshot, so in this overload we just
  // have to use its name.
  int64 execution_count;
  {
    static auto& module_name_to_execution_count TF_GUARDED_BY(mu) =
        *new absl::flat_hash_map<string, int64>();
    tensorflow::mutex_lock lock(mu);
    execution_count = module_name_to_execution_count[name]++;
  }
  string filename = StrFormat("module_%s.execution_%04d.hlo_snapshot.pb", name,
                              execution_count);
  if (canonical_opts.dumping_to_stdout()) {
    LOG(ERROR) << "Refusing to write HLO snapshot proto for " << filename
               << " to stdout.  Pass --xla_dump_to=<path> to write to a file.";
    return;
  }
  string pb;
  if (!tensorflow::SerializeToStringDeterministic(snapshot, &pb)) {
    LOG(ERROR) << "Failed to serialize HLO snapshot proto " << filename;
  }
  DumpToFileInDirImpl(filename, pb, canonical_opts);
}

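// For each module whose debug options enable metadata dumping, dumps its
// metadata (and any prepartitioning metadata), at most once per canonical
// module id.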
void DumpHloModuleMetadataIfEnabled(const std::vector<HloModule*>& modules) {
  absl::flat_hash_set<int64> dumped_module_ids;
  for (const HloModule* module : modules) {
    CanonicalDebugOptions opts(module->config().debug_options());
    if (!opts.dump_module_metadata) {
      continue;
    }
    DumpHloModuleMetadata(module->metadata().proto(), opts, &dumped_module_ids);
    const absl::optional<HloModuleMetadataProto>& prepartitioning_metadata =
        module->metadata().prepartitioning_metadata();
    if (prepartitioning_metadata.has_value()) {
      DumpHloModuleMetadata(*prepartitioning_metadata, opts,
                            &dumped_module_ids);
    }
  }
}

}  // namespace xla