CIRCT 22.0.0git
PrintLongestPathAnalysis.cpp
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass prints the longest path analysis results to a file.
//
//===----------------------------------------------------------------------===//
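//
// Example invocation (illustrative only): the pass name and option spellings
// shown here are assumptions inferred from the option members used below and
// the usual tablegen naming conventions; consult SynthPasses.td for the
// authoritative names.
//
//   circt-opt design.mlir \
//     --synth-print-longest-path-analysis="show-top-k-percent=5 emit-json=true output-file=paths.json"
//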

#include "circt/Support/LLVM.h"
#include "mlir/Support/FileUtilities.h"
#include "mlir/Support/LLVM.h"
#include "llvm/ADT/DenseMapInfoVariant.h"
#include "llvm/ADT/ScopeExit.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/JSON.h"
#include "llvm/Support/ToolOutputFile.h"
#include "llvm/Support/raw_ostream.h"
#include <memory>

#define DEBUG_TYPE "synth-longest-path-analysis"
using namespace circt;
using namespace synth;

namespace circt {
namespace synth {
#define GEN_PASS_DEF_PRINTLONGESTPATHANALYSIS
#include "circt/Dialect/Synth/Transforms/SynthPasses.h.inc"
} // namespace synth
} // namespace circt

//===----------------------------------------------------------------------===//
// PrintLongestPathAnalysisPass
//===----------------------------------------------------------------------===//

namespace {
struct PrintLongestPathAnalysisPass
    : public impl::PrintLongestPathAnalysisBase<PrintLongestPathAnalysisPass> {
  void runOnOperation() override;
  using PrintLongestPathAnalysisBase::PrintLongestPathAnalysisBase;
  LogicalResult printAnalysisResult(const LongestPathAnalysis &analysis,
                                    igraph::InstancePathCache &pathCache,
                                    hw::HWModuleOp top, llvm::raw_ostream *os,
                                    llvm::json::OStream *jsonOS);

private:
  /// Print timing level statistics showing the delay distribution.
  void printTimingLevelStatistics(SmallVectorImpl<DataflowPath> &allTimingPaths,
                                  llvm::raw_ostream *os,
                                  llvm::json::OStream *jsonOS);

  /// Print detailed information for the top K critical paths.
  void printTopKPathDetails(SmallVectorImpl<DataflowPath> &allTimingPaths,
                            hw::HWModuleOp top, llvm::raw_ostream *os,
                            llvm::json::OStream *jsonOS);

  /// Print detailed history of a timing path showing intermediate debug points.
  void printPathHistory(const OpenPath &timingPath, llvm::raw_ostream &os);
};

} // namespace

/// Main method to print comprehensive longest path analysis results.
LogicalResult PrintLongestPathAnalysisPass::printAnalysisResult(
    const LongestPathAnalysis &analysis, igraph::InstancePathCache &pathCache,
    hw::HWModuleOp top, llvm::raw_ostream *os, llvm::json::OStream *jsonOS) {
  auto moduleName = top.getModuleNameAttr();

  LongestPathCollection collection(top.getContext());

  // Get all timing paths with full hierarchical elaboration.
  if (failed(analysis.getAllPaths(moduleName, collection.paths, true)))
    return failure();

  // Emit diagnostics if testing is enabled (for regression testing).
  if (test) {
    for (auto &result : collection.paths) {
      auto endPointLoc = result.getEndPointLoc();
      auto diag = mlir::emitRemark(endPointLoc);
      SmallString<128> buf;
      llvm::raw_svector_ostream os(buf);
      result.print(os);
      diag << buf;
    }
  }

  size_t oldPathCount = collection.paths.size();
  collection.sortInDescendingOrder();
  collection.dropNonCriticalPaths(/*perEndPoint=*/true);
  auto &longestPathForEachEndPoint = collection.paths;

  // Print the analysis header.
  if (os) {
    *os << "# Longest Path Analysis result for " << top.getModuleNameAttr()
        << "\n"
        << "Found " << oldPathCount << " paths\n"
        << "Found " << longestPathForEachEndPoint.size()
        << " unique end points\n"
        << "Maximum path delay: "
        << (longestPathForEachEndPoint.empty()
                ? 0
                : longestPathForEachEndPoint.front().getDelay())
        << "\n";

    *os << "## Showing Levels\n";
  }

  // Handle JSON output.
  if (jsonOS) {
    jsonOS->objectBegin();
    jsonOS->attribute("module_name", top.getModuleNameAttr().getValue());
  }

  auto deferClose = llvm::make_scope_exit([&]() {
    if (jsonOS)
      jsonOS->objectEnd();
  });

  // Print timing distribution statistics (histogram of delay levels).
  printTimingLevelStatistics(longestPathForEachEndPoint, os, jsonOS);

  // Print detailed information for the top K critical paths if requested.
  if (showTopKPercent.getValue() > 0)
    printTopKPathDetails(longestPathForEachEndPoint, top, os, jsonOS);

  return success();
}
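
// For reference, a sketch of the JSON emitted for one top module by the code
// above. The field names come from the attribute() calls in this file; the
// values are purely illustrative. Note that runOnOperation() passes a null
// text stream when JSON output is enabled, so in that mode only JSON is
// written to the output file.
//
//   {
//     "module_name": "MyTop",
//     "timing_levels": [
//       {"level": 3, "count": 120, "percentage": 75.0},
//       {"level": 7, "count": 40, "percentage": 100.0}
//     ],
//     "top_paths": [ ... ]   // only present when showTopKPercent > 0
//   }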

/// Print timing level statistics showing the delay distribution across all
/// paths. This provides a histogram-like view of timing levels in the design,
/// showing how many paths exist at each delay level and the cumulative
/// percentage. This is useful for understanding the overall timing
/// characteristics of the design.
void PrintLongestPathAnalysisPass::printTimingLevelStatistics(
    SmallVectorImpl<DataflowPath> &allTimingPaths, llvm::raw_ostream *os,
    llvm::json::OStream *jsonOS) {

  int64_t totalTimingPoints = allTimingPaths.size();
  int64_t cumulativeCount = 0;

  if (jsonOS) {
    jsonOS->attributeBegin("timing_levels");
    jsonOS->arrayBegin();
  }
  auto closeJson = llvm::make_scope_exit([&]() {
    if (jsonOS) {
      jsonOS->arrayEnd();
      jsonOS->attributeEnd();
    }
  });

  // Process paths grouped by delay level. Paths are sorted in descending
  // order of delay, so iterate from the back to visit levels from the lowest
  // delay to the highest.
  if (allTimingPaths.empty())
    return;
  for (int64_t index = allTimingPaths.size() - 1; index >= 0;) {
    int64_t oldIndex = index;
    auto currentDelay = allTimingPaths[index--].getDelay();

    // Count all paths with the same delay value to create histogram bins.
    while (index >= 0 && allTimingPaths[index].getDelay() == currentDelay)
      --index;

    int64_t pathsWithSameDelay = oldIndex - index;

    cumulativeCount += pathsWithSameDelay;

    // Calculate the cumulative percentage to show the timing distribution.
    double cumulativePercentage =
        (double)cumulativeCount / totalTimingPoints * 100.0;

    // Print formatted timing level statistics in tabular form.
    // Format: Level = <delay>. Count = <path count>. <cumulative percentage>%
    if (os)
      *os << llvm::format("Level = %-10d. Count = %-10d. %-10.2f%%\n",
                          currentDelay, pathsWithSameDelay,
                          cumulativePercentage);
    if (jsonOS) {
      jsonOS->objectBegin();
      jsonOS->attribute("level", currentDelay);
      jsonOS->attribute("count", pathsWithSameDelay);
      jsonOS->attribute("percentage", cumulativePercentage);
      jsonOS->objectEnd();
    }
  }
}
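
// Example of the text output produced above (values are illustrative; the
// column widths come from the "%-10d" / "%-10.2f" format specifiers, and the
// percentage column is cumulative):
//
//   Level = 3         . Count = 120       . 75.00     %
//   Level = 7         . Count = 40        . 100.00    %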

/// Print detailed information for the top K critical paths.
/// This shows the most critical timing paths in the design, providing detailed
/// information about each path, including its end/start points and path
/// history.
void PrintLongestPathAnalysisPass::printTopKPathDetails(
    SmallVectorImpl<DataflowPath> &allTimingPaths, hw::HWModuleOp top,
    llvm::raw_ostream *os, llvm::json::OStream *jsonOS) {

  auto topKCount = static_cast<uint64_t>(allTimingPaths.size()) *
                   std::clamp(showTopKPercent.getValue(), 0, 100) / 100;

  if (os)
    *os << "## Top " << topKCount << " (out of " << allTimingPaths.size()
        << ") end points\n\n";

  if (jsonOS) {
    jsonOS->attributeBegin("top_paths");
    jsonOS->arrayBegin();
  }
  auto closeJson = llvm::make_scope_exit([&]() {
    if (jsonOS) {
      jsonOS->arrayEnd();
      jsonOS->attributeEnd();
    }
  });

  // Process paths from highest delay to lowest. Paths are sorted in descending
  // order, so the first topKCount entries are the most critical ones.
  for (size_t i = 0; i < std::min<size_t>(topKCount, allTimingPaths.size());
       ++i) {
    auto &path = allTimingPaths[i];
    if (jsonOS)
      jsonOS->value(toJSON(path));

    if (!os)
      continue;
    // Print the path header with ranking and delay information.
    *os << "==============================================\n"
        << "#" << i + 1 << ": Distance=" << path.getDelay() << "\n";

    // Print the end point; the trace printed below starts from here.
    *os << "EndPoint=";
    path.printEndPoint(*os);

    // Print the start point, where the printed trace ends.
    *os << "\n"
        << "StartPoint=";
    path.getStartPoint().print(*os);
    *os << "\n";

    // Print detailed path history showing intermediate logic stages.
    printPathHistory(path.getPath(), *os);
  }
}
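
// Example of the per-path text output produced above (names and numbers are
// illustrative placeholders; the exact rendering of the end and start points
// depends on printEndPoint() and the start point's print() method). Each
// entry is followed by the path history printed by printPathHistory below:
//
//   ==============================================
//   #1: Distance=7
//   EndPoint=...
//   StartPoint=...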

/// Print detailed history of a timing path showing intermediate debug points.
/// This traces the path from end point to start point, showing each logic stage
/// and the delay contribution of each stage. This is crucial for understanding
/// where delay is being accumulated along the critical path and identifying
/// optimization opportunities.
void PrintLongestPathAnalysisPass::printPathHistory(const OpenPath &timingPath,
                                                    llvm::raw_ostream &os) {
  int64_t remainingDelay = timingPath.getDelay();

  if (!timingPath.getHistory().isEmpty()) {
    os << "== History Start (closer to end point) ==\n";

    // Walk through debug points in order from end point to start point.
    for (auto &debugPoint : timingPath.getHistory()) {
      // Calculate delay contribution of this logic stage.
      int64_t stepDelay = remainingDelay - debugPoint.delay;
      remainingDelay = debugPoint.delay;

      // Show the delay contribution and the debug point information.
      os << "<--- (logic delay " << stepDelay << ") ---\n";
      debugPoint.print(os);
      os << "\n";
    }

    os << "== History End (closer to start point) ==\n";
  }
}
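
// Example of the history trace produced above for a path with two recorded
// debug points (delays are illustrative; the content printed for each debug
// point depends on its print() method):
//
//   == History Start (closer to end point) ==
//   <--- (logic delay 2) ---
//   ...
//   <--- (logic delay 3) ---
//   ...
//   == History End (closer to start point) ==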

void PrintLongestPathAnalysisPass::runOnOperation() {
  auto am = getAnalysisManager();
  LongestPathAnalysis analysis(
      getOperation(), am,
      LongestPathAnalysisOptions(
          /*collectDebugInfo=*/showTopKPercent.getValue() > 0,
          /*lazyComputation=*/false,
          /*keepOnlyMaxDelayPaths=*/
          !test, StringAttr::get(&getContext(), topModuleName.getValue())));

  igraph::InstancePathCache pathCache(
      getAnalysis<circt::igraph::InstanceGraph>());
  auto outputFileVal = outputFile.getValue();

  std::string error;
  auto file = mlir::openOutputFile(outputFileVal, &error);
  if (!file) {
    llvm::errs() << error;
    return signalPassFailure();
  }

  auto &os = file->os();
  std::unique_ptr<llvm::json::OStream> jsonOS;
  if (emitJSON.getValue()) {
    jsonOS = std::make_unique<llvm::json::OStream>(os);
    jsonOS->arrayBegin();
  }

  auto closeJson = llvm::make_scope_exit([&]() {
    if (jsonOS)
      jsonOS->arrayEnd();
  });

  for (auto top : analysis.getTopModules())
    if (failed(printAnalysisResult(analysis, pathCache, top,
                                   jsonOS ? nullptr : &os, jsonOS.get())))
      return signalPassFailure();
  file->keep();
  return markAllAnalysesPreserved();
}