diff --git a/docs/contrib/DeveloperGuide.md b/docs/contrib/DeveloperGuide.md index fad3e338ea..395c95ab3c 100644 --- a/docs/contrib/DeveloperGuide.md +++ b/docs/contrib/DeveloperGuide.md @@ -5,3 +5,63 @@ - Continuous Integration: [Guide](./CI.md). - How do I update the codebase? There are different ways to update your codebase depending on the method you installed it. We provide detailed instructions in this [guide](../user/FAQS.md). - How do I contribute? Follow our Git Quickstart guide [here](./GitGuide.md). + +## Timing and Logging (`run_command.py`) + +Every flow stage (synthesis, floorplan, CTS, routing, etc.) is wrapped by +`flow/scripts/run_command.py`, which replaces the previous GNU `time` + `tee` +shell pipeline with a pure-Python implementation. + +### What it does + +`run_command.py` runs a command and: + +- Measures **wall-clock time**, **CPU time** (user + sys), and **peak memory** + using Python's `time.monotonic()` and `resource.getrusage()`. +- Streams output **line-by-line** to both the console and a log file + (replacing `tee`), flushing after every line for real-time `tail -f` + observability. +- Appends an `Elapsed time: ...` line in the format expected by + `genElapsedTime.py` and `genMetrics.py`. + +### Usage + +``` +python3 flow/scripts/run_command.py [--log FILE] [--append] [--tee] -- command [args...] 
+``` + +| Flag | Effect | +|------------|--------| +| `--log FILE` | Write command output + timing line to FILE | +| `--append` | Append to log file instead of overwriting | +| `--tee` | Also write output to stdout (like the `tee` command) | + +### Monitoring long-running stages + +When running under Bazel (`bazelisk test ...`) or other batch systems that +hide console output, you can monitor progress by finding and tailing the log: + +```bash +# Find the running stage's log file +ps -Af | grep run_command +# or +ps -Af | grep tmp.log + +# Watch it in real time +tail -f /path/to/logs/4_cts.tmp.log +``` + +Output appears immediately in the log file (line-buffered with flush), +so `tail -f` shows real-time progress. + +### Cross-platform support + +Works on both Linux and macOS using only Python standard library modules. +Peak memory is normalized automatically (`ru_maxrss` is KB on Linux, +bytes on macOS). + +### Testing + +```bash +python3 -m pytest flow/test/test_run_command.py -v +``` diff --git a/flow/Makefile b/flow/Makefile index 462d37bb04..dbb4692d96 100644 --- a/flow/Makefile +++ b/flow/Makefile @@ -241,7 +241,7 @@ synth-report: synth .PHONY: do-synth-report do-synth-report: - ($(TIME_CMD) $(OPENROAD_CMD) $(SCRIPTS_DIR)/synth_metrics.tcl) 2>&1 | tee $(abspath $(LOG_DIR)/1_2_yosys_metrics.log) + $(RUN_CMD) --log $(abspath $(LOG_DIR)/1_2_yosys_metrics.log) --tee -- $(OPENROAD_CMD) $(SCRIPTS_DIR)/synth_metrics.tcl .PHONY: memory memory: @@ -641,7 +641,7 @@ generate_abstract: $(RESULTS_DIR)/6_final.gds $(RESULTS_DIR)/6_final.def $(RESU .PHONY: do-generate_abstract do-generate_abstract: mkdir -p $(LOG_DIR) $(REPORTS_DIR) - ($(TIME_CMD) $(OPENROAD_CMD) $(SCRIPTS_DIR)/generate_abstract.tcl -metrics $(LOG_DIR)/generate_abstract.json) 2>&1 | tee $(abspath $(LOG_DIR)/generate_abstract.log) + $(RUN_CMD) --log $(abspath $(LOG_DIR)/generate_abstract.log) --tee -- $(OPENROAD_CMD) $(SCRIPTS_DIR)/generate_abstract.tcl -metrics $(LOG_DIR)/generate_abstract.json .PHONY: 
clean_abstract clean_abstract: @@ -661,7 +661,8 @@ gds: $(GDS_FINAL_FILE) # Merge wrapped macros using Klayout #------------------------------------------------------------------------------- $(WRAPPED_GDSOAS): $(OBJECTS_DIR)/klayout_wrap.lyt $(WRAPPED_LEFS) - ($(TIME_CMD) $(SCRIPTS_DIR)/klayout.sh -zz -rd design_name=$(basename $(notdir $@)) \ + $(RUN_CMD) --log $(abspath $(LOG_DIR)/6_merge_$(basename $(notdir $@)).log) --tee -- \ + $(SCRIPTS_DIR)/klayout.sh -zz -rd design_name=$(basename $(notdir $@)) \ -rd in_def=$(OBJECTS_DIR)/def/$(notdir $(@:$(STREAM_SYSTEM_EXT)=def)) \ -rd in_files="$(ADDITIONAL_GDSOAS)" \ -rd config_file=$(FILL_CONFIG) \ @@ -669,7 +670,7 @@ $(WRAPPED_GDSOAS): $(OBJECTS_DIR)/klayout_wrap.lyt $(WRAPPED_LEFS) -rd out_file=$@ \ -rd tech_file=$(OBJECTS_DIR)/klayout_wrap.lyt \ -rd layer_map=$(GDS_LAYER_MAP) \ - -r $(UTILS_DIR)/def2stream.py) 2>&1 | tee $(abspath $(LOG_DIR)/6_merge_$(basename $(notdir $@)).log) + -r $(UTILS_DIR)/def2stream.py # Merge GDS using Klayout #------------------------------------------------------------------------------- @@ -678,14 +679,15 @@ $(GDS_MERGED_FILE): check-klayout $(RESULTS_DIR)/6_final.def $(OBJECTS_DIR)/klay .PHONY: do-gds-merged do-gds-merged: - ($(TIME_CMD) $(STDBUF_CMD) $(SCRIPTS_DIR)/klayout.sh -zz -rd design_name=$(DESIGN_NAME) \ + $(RUN_CMD) --log $(abspath $(LOG_DIR)/6_1_merge.log) --tee -- \ + $(SCRIPTS_DIR)/klayout.sh -zz -rd design_name=$(DESIGN_NAME) \ -rd in_def=$(RESULTS_DIR)/6_final.def \ -rd in_files="$(GDSOAS_FILES) $(WRAPPED_GDSOAS)" \ -rd seal_file="$(SEAL_GDSOAS)" \ -rd out_file=$(GDS_MERGED_FILE) \ -rd tech_file=$(OBJECTS_DIR)/klayout.lyt \ -rd layer_map=$(GDS_LAYER_MAP) \ - -r $(UTILS_DIR)/def2stream.py) 2>&1 | tee $(abspath $(LOG_DIR)/6_1_merge.log) + -r $(UTILS_DIR)/def2stream.py $(RESULTS_DIR)/6_final.v: $(LOG_DIR)/6_report.log @@ -702,9 +704,10 @@ drc: $(REPORTS_DIR)/6_drc.lyrdb $(REPORTS_DIR)/6_drc.lyrdb: $(GDS_FINAL_FILE) $(KLAYOUT_DRC_FILE) ifneq ($(KLAYOUT_DRC_FILE),) - 
($(TIME_CMD) $(SCRIPTS_DIR)/klayout.sh -zz -rd in_gds="$<" \ + $(RUN_CMD) --log $(abspath $(LOG_DIR)/6_drc.log) --tee -- \ + $(SCRIPTS_DIR)/klayout.sh -zz -rd in_gds="$<" \ -rd report_file=$(abspath $@) \ - -r $(KLAYOUT_DRC_FILE)) 2>&1 | tee $(abspath $(LOG_DIR)/6_drc.log) + -r $(KLAYOUT_DRC_FILE) # Hacky way of getting DRV count (don't error on no matches) grep -c "" $@ > $(REPORTS_DIR)/6_drc_count.rpt || [[ $$? == 1 ]] else @@ -712,7 +715,7 @@ else endif $(RESULTS_DIR)/6_final.cdl: $(RESULTS_DIR)/6_final.v - ($(TIME_CMD) $(OPENROAD_CMD) $(SCRIPTS_DIR)/cdl.tcl) 2>&1 | tee $(abspath $(LOG_DIR)/6_cdl.log) + $(RUN_CMD) --log $(abspath $(LOG_DIR)/6_cdl.log) --tee -- $(OPENROAD_CMD) $(SCRIPTS_DIR)/cdl.tcl $(OBJECTS_DIR)/6_final_concat.cdl: $(RESULTS_DIR)/6_final.cdl $(CDL_FILE) cat $^ > $@ @@ -722,10 +725,11 @@ lvs: $(RESULTS_DIR)/6_lvs.lvsdb $(RESULTS_DIR)/6_lvs.lvsdb: $(GDS_FINAL_FILE) $(KLAYOUT_LVS_FILE) $(OBJECTS_DIR)/6_final_concat.cdl ifneq ($(KLAYOUT_LVS_FILE),) - ($(TIME_CMD) $(SCRIPTS_DIR)/klayout.sh -b -rd in_gds="$<" \ + $(RUN_CMD) --log $(abspath $(LOG_DIR)/6_lvs.log) --tee -- \ + $(SCRIPTS_DIR)/klayout.sh -b -rd in_gds="$<" \ -rd cdl_file=$(abspath $(OBJECTS_DIR)/6_final_concat.cdl) \ -rd report_file=$(abspath $@) \ - -r $(KLAYOUT_LVS_FILE)) 2>&1 | tee $(abspath $(LOG_DIR)/6_lvs.log) + -r $(KLAYOUT_LVS_FILE) else echo "LVS not supported on this platform" > $@ endif diff --git a/flow/scripts/deleteNonClkNets.tcl b/flow/scripts/deleteNonClkNets.tcl index bad10a4772..f7e36f3a6c 100644 --- a/flow/scripts/deleteNonClkNets.tcl +++ b/flow/scripts/deleteNonClkNets.tcl @@ -1,3 +1,5 @@ +source $::env(SCRIPTS_DIR)/util.tcl + read_lef $::env(TECH_LEF) read_lef $::env(SC_LEF) if { [info exist ::env(ADDITIONAL_LEFS)] } { diff --git a/flow/scripts/deletePowerNets.tcl b/flow/scripts/deletePowerNets.tcl index e1e6def4e1..a64fb189dd 100644 --- a/flow/scripts/deletePowerNets.tcl +++ b/flow/scripts/deletePowerNets.tcl @@ -1,3 +1,5 @@ +source $::env(SCRIPTS_DIR)/util.tcl + 
read_lef $::env(TECH_LEF) read_lef $::env(SC_LEF) if { [info exist ::env(ADDITIONAL_LEFS)] } { diff --git a/flow/scripts/flow.sh b/flow/scripts/flow.sh index 10f065394f..241e92fe6c 100755 --- a/flow/scripts/flow.sh +++ b/flow/scripts/flow.sh @@ -11,8 +11,8 @@ echo "Running $2.tcl, stage $1" eval "$OPENROAD_EXE $OPENROAD_ARGS -exit \"$SCRIPTS_DIR/noop.tcl\"" \ >"$LOG_DIR/$1.tmp.log" 2>&1 - eval "$TIME_CMD $OPENROAD_CMD -no_splash \"$SCRIPTS_DIR/$2.tcl\" -metrics \"$LOG_DIR/$1.json\"" \ - 2>&1 | tee -a "$(realpath "$LOG_DIR/$1.tmp.log")" + $PYTHON_EXE "$SCRIPTS_DIR/run_command.py" --log "$(realpath "$LOG_DIR/$1.tmp.log")" --append --tee -- \ + $OPENROAD_CMD -no_splash "$SCRIPTS_DIR/$2.tcl" -metrics "$LOG_DIR/$1.json" ) # Log the hash for this step. The summary "make elapsed" in "make finish", diff --git a/flow/scripts/run_command.py b/flow/scripts/run_command.py new file mode 100644 index 0000000000..fdc7315881 --- /dev/null +++ b/flow/scripts/run_command.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python3 +"""Run a command with timing and optional log file output. + +Replaces the GNU `time` + `tee` shell pipeline with a pure-Python +implementation that works on both Linux and macOS without external +dependencies. + +Usage: + run_command.py [--log FILE] [--append] [--tee] -- command [args...] + +Output is streamed line-by-line and flushed after every line so that +`tail -f <logfile>` works in real time. The log path is visible in +`ps` output for discoverability (replaces `ps -Af | grep tee`). + +On completion an "Elapsed time: ..." line is appended to stderr (and +the log file) in the same format that the rest of the ORFS +infrastructure expects. 
+""" + +import argparse +import resource +import subprocess +import sys +import time + + +def _maxrss_kb(ru_maxrss: int) -> int: + """Normalize ru_maxrss to kilobytes (Linux reports KB, macOS reports bytes).""" + if sys.platform == "darwin": + return ru_maxrss // 1024 + return ru_maxrss + + +def _format_elapsed(seconds: float) -> str: + """Format seconds as [H:]MM:SS.ff matching GNU time %E output.""" + h = int(seconds // 3600) + m = int((seconds % 3600) // 60) + s = seconds % 60 + if h: + return f"{h}:{m:02d}:{s:05.2f}" + return f"{m}:{s:05.2f}" + + +def _build_timing_line( + wall: float, + user: float, + sys_: float, + peak_kb: int, +) -> str: + cpu_pct = int(100 * (user + sys_) / wall) if wall > 0 else 0 + return ( + f"Elapsed time: {_format_elapsed(wall)}[h:]min:sec. " + f"CPU time: user {user:.2f} sys {sys_:.2f} ({cpu_pct}%). " + f"Peak memory: {peak_kb}KB." + ) + + +def run(argv: list[str] | None = None) -> int: + parser = argparse.ArgumentParser( + description="Run a command with timing, optional tee-to-log." + ) + parser.add_argument("--log", help="Log file path") + parser.add_argument( + "--append", action="store_true", help="Append to log file instead of overwrite" + ) + parser.add_argument( + "--tee", + action="store_true", + help="Also write output to stdout (like tee)", + ) + parser.add_argument("command", nargs=argparse.REMAINDER) + args = parser.parse_args(argv) + + cmd = args.command + if cmd and cmd[0] == "--": + cmd = cmd[1:] + if not cmd: + parser.error("No command specified") + + # Snapshot children resource usage before the subprocess. 
+ before = resource.getrusage(resource.RUSAGE_CHILDREN) + + log_file = None + if args.log: + log_file = open(args.log, "a" if args.append else "w") + + wall_start = time.monotonic() + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + + try: + for line in iter(proc.stdout.readline, b""): + if args.tee: + sys.stdout.buffer.write(line) + sys.stdout.buffer.flush() + if log_file: + log_file.write(line.decode(errors="replace")) + log_file.flush() + proc.wait() + except BaseException: + proc.kill() + proc.wait() + raise + finally: + wall = time.monotonic() - wall_start + + after = resource.getrusage(resource.RUSAGE_CHILDREN) + user = after.ru_utime - before.ru_utime + sys_ = after.ru_stime - before.ru_stime + peak_kb = _maxrss_kb(after.ru_maxrss) + + timing_line = _build_timing_line(wall, user, sys_, peak_kb) + + sys.stderr.write(timing_line + "\n") + sys.stderr.flush() + if log_file: + log_file.write(timing_line + "\n") + log_file.flush() + log_file.close() + + return proc.returncode + + +if __name__ == "__main__": + sys.exit(run()) diff --git a/flow/scripts/synth.sh b/flow/scripts/synth.sh index 2fc500e060..113a5f1263 100755 --- a/flow/scripts/synth.sh +++ b/flow/scripts/synth.sh @@ -2,4 +2,5 @@ set -u -eo pipefail mkdir -p $RESULTS_DIR $LOG_DIR $REPORTS_DIR $OBJECTS_DIR $YOSYS_EXE -V > $(realpath $2) -eval "$TIME_CMD $YOSYS_EXE $YOSYS_FLAGS -c $1" 2>&1 | tee --append $(realpath $2) +$PYTHON_EXE "$SCRIPTS_DIR/run_command.py" --log "$(realpath $2)" --append --tee -- \ + $YOSYS_EXE $YOSYS_FLAGS -c $1 diff --git a/flow/scripts/variables.mk b/flow/scripts/variables.mk index d6f0712079..a9efa1c67c 100644 --- a/flow/scripts/variables.mk +++ b/flow/scripts/variables.mk @@ -72,13 +72,7 @@ export NUM_CORES # setup all commands used within this flow export PYTHON_EXE ?= $(shell command -v python3) -export TIME_BIN ?= env time -TIME_CMD = $(TIME_BIN) -f 'Elapsed time: %E[h:]min:sec. CPU time: user %U sys %S (%P). Peak memory: %MKB.' 
-TIME_TEST = $(shell $(TIME_CMD) echo foo 2>/dev/null) -ifeq (,$(strip $(TIME_TEST))) - TIME_CMD = $(TIME_BIN) -endif -export TIME_CMD +export RUN_CMD = $(PYTHON_EXE) $(FLOW_HOME)/scripts/run_command.py # The following determine the executable location for each tool used by this flow. # Priority is given to @@ -128,9 +122,6 @@ export KLAYOUT_CMD := $(shell command -v klayout) endif endif -ifneq ($(shell command -v stdbuf),) - STDBUF_CMD ?= stdbuf -o L -endif #------------------------------------------------------------------------------- WRAPPED_LEFS = $(foreach lef,$(notdir $(WRAP_LEFS)),$(OBJECTS_DIR)/lef/$(lef:.lef=_mod.lef)) @@ -191,7 +182,7 @@ export RESULTS_V = $(notdir $(sort $(wildcard $(RESULTS_DIR)/*.v))) export GDS_MERGED_FILE = $(RESULTS_DIR)/6_1_merged.$(STREAM_SYSTEM_EXT) define get_variables -$(foreach V, $(.VARIABLES),$(if $(filter-out $(1), $(origin $V)), $(if $(filter-out .% %QT_QPA_PLATFORM% %TIME_CMD% KLAYOUT% GENERATE_ABSTRACT_RULE% do-step% do-copy% OPEN_GUI% OPEN_GUI_SHORTCUT% SUB_MAKE% UNSET_VARS% export%, $(V)), $V$ ))) +$(foreach V, $(.VARIABLES),$(if $(filter-out $(1), $(origin $V)), $(if $(filter-out .% %QT_QPA_PLATFORM% KLAYOUT% GENERATE_ABSTRACT_RULE% do-step% do-copy% OPEN_GUI% OPEN_GUI_SHORTCUT% SUB_MAKE% UNSET_VARS% export%, $(V)), $V$ ))) endef export UNSET_VARIABLES_NAMES := $(call get_variables,command% line environment% default automatic) diff --git a/flow/test/test_run_command.py b/flow/test/test_run_command.py new file mode 100644 index 0000000000..c71f3c8b0e --- /dev/null +++ b/flow/test/test_run_command.py @@ -0,0 +1,289 @@ +#!/usr/bin/env python3 + +import os +import re +import subprocess +import sys +import tempfile +import threading +import time +import unittest + +sys.path.append( + os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "scripts") +) + +import run_command + +TIMING_RE = re.compile( + r"^Elapsed time: (\S+)\[h:\]min:sec\. " + r"CPU time: user (\S+) sys (\S+) \((\d+)%\)\. 
" + r"Peak memory: (\d+)KB\.$", + re.MULTILINE, +) + + +class TestRunCommand(unittest.TestCase): + def setUp(self): + self.tmp_dir = tempfile.TemporaryDirectory() + self.log_path = os.path.join(self.tmp_dir.name, "test.log") + + def tearDown(self): + self.tmp_dir.cleanup() + + def test_basic_timing(self): + rc = run_command.run(["--log", self.log_path, "--", "sleep", "0.1"]) + self.assertEqual(rc, 0) + with open(self.log_path) as f: + content = f.read() + m = TIMING_RE.search(content) + self.assertIsNotNone(m, f"Timing line not found in: {content!r}") + + def test_output_format(self): + run_command.run(["--log", self.log_path, "--", "echo", "hello world"]) + with open(self.log_path) as f: + content = f.read() + self.assertIn("hello world", content) + m = TIMING_RE.search(content) + self.assertIsNotNone(m, f"Timing line not found in: {content!r}") + # Verify time fields are parseable floats + float(m.group(2)) # user + float(m.group(3)) # sys + + def test_log_file_created(self): + self.assertFalse(os.path.exists(self.log_path)) + run_command.run(["--log", self.log_path, "--", "echo", "test"]) + self.assertTrue(os.path.exists(self.log_path)) + + def test_append_mode(self): + with open(self.log_path, "w") as f: + f.write("pre-existing content\n") + run_command.run(["--log", self.log_path, "--append", "--", "echo", "appended"]) + with open(self.log_path) as f: + content = f.read() + self.assertIn("pre-existing content", content) + self.assertIn("appended", content) + + def test_overwrite_mode(self): + with open(self.log_path, "w") as f: + f.write("old content\n") + run_command.run(["--log", self.log_path, "--", "echo", "new"]) + with open(self.log_path) as f: + content = f.read() + self.assertNotIn("old content", content) + self.assertIn("new", content) + + def test_tee_output(self): + """Verify --tee sends output to stdout as well as the log file.""" + result = subprocess.run( + [ + sys.executable, + os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "..", 
+ "scripts", + "run_command.py", + ), + "--log", + self.log_path, + "--tee", + "--", + "echo", + "visible", + ], + capture_output=True, + text=True, + ) + self.assertIn("visible", result.stdout) + with open(self.log_path) as f: + self.assertIn("visible", f.read()) + + def test_exit_code_propagation(self): + rc = run_command.run(["--log", self.log_path, "--", "false"]) + self.assertNotEqual(rc, 0) + + def test_exit_code_success(self): + rc = run_command.run(["--log", self.log_path, "--", "true"]) + self.assertEqual(rc, 0) + + def test_peak_memory(self): + """Peak memory should be reported and > 0 for a real command.""" + run_command.run( + ["--log", self.log_path, "--", sys.executable, "-c", "x = 'a' * 1000000"] + ) + with open(self.log_path) as f: + content = f.read() + m = TIMING_RE.search(content) + self.assertIsNotNone(m) + peak_kb = int(m.group(5)) + self.assertGreater(peak_kb, 0) + + def test_cpu_time(self): + """CPU time fields should be present and parseable.""" + run_command.run( + [ + "--log", + self.log_path, + "--", + sys.executable, + "-c", + "sum(range(100000))", + ] + ) + with open(self.log_path) as f: + content = f.read() + m = TIMING_RE.search(content) + self.assertIsNotNone(m) + user = float(m.group(2)) + sys_ = float(m.group(3)) + self.assertGreaterEqual(user, 0) + self.assertGreaterEqual(sys_, 0) + + def test_stderr_captured(self): + """Stderr from the subprocess should appear in the log.""" + run_command.run( + [ + "--log", + self.log_path, + "--", + sys.executable, + "-c", + "import sys; sys.stderr.write('err_msg\\n')", + ] + ) + with open(self.log_path) as f: + content = f.read() + self.assertIn("err_msg", content) + + def test_cross_platform_memory_units(self): + """Verify _maxrss_kb normalizes correctly for current platform.""" + if sys.platform == "darwin": + # macOS: bytes -> KB + self.assertEqual(run_command._maxrss_kb(1024000), 1000) + else: + # Linux: already KB + self.assertEqual(run_command._maxrss_kb(1000), 1000) + + def 
test_streaming_flush(self): + """Verify log file is written incrementally, not buffered until exit. + + This validates the tail -f use case: users must see output in the + log file while the subprocess is still running. + """ + script = ( + "import sys, time; " + "print('line1', flush=True); " + "time.sleep(0.5); " + "print('line2', flush=True); " + "time.sleep(1)" + ) + proc = subprocess.Popen( + [ + sys.executable, + os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "..", + "scripts", + "run_command.py", + ), + "--log", + self.log_path, + "--", + sys.executable, + "-c", + script, + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + # Wait a bit for first line to be flushed + time.sleep(0.3) + with open(self.log_path) as f: + content = f.read() + self.assertIn( + "line1", content, "line1 should appear before subprocess finishes" + ) + proc.wait() + + def test_log_path_in_cmdline(self): + """Verify the log path is visible in ps output (for discoverability). + + Users find logs via `ps -Af | grep run_command` or `ps -Af | grep tmp.log`. + The log path must appear in /proc/<pid>/cmdline (ps -Af may truncate). 
+ """ + script = "import time; time.sleep(2)" + proc = subprocess.Popen( + [ + sys.executable, + os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "..", + "scripts", + "run_command.py", + ), + "--log", + self.log_path, + "--", + sys.executable, + "-c", + script, + ], + ) + time.sleep(0.3) + try: + # Read full cmdline from /proc (not truncated like ps -Af) + cmdline_path = f"/proc/{proc.pid}/cmdline" + if os.path.exists(cmdline_path): + with open(cmdline_path) as f: + cmdline = f.read() + self.assertIn( + self.log_path, + cmdline, + "Log path should be visible in /proc//cmdline", + ) + else: + # Fallback for non-Linux: use ps with wide output + ps = subprocess.run( + ["ps", "-ww", "-p", str(proc.pid), "-o", "args="], + capture_output=True, + text=True, + ) + self.assertIn(self.log_path, ps.stdout) + finally: + proc.kill() + proc.wait() + + def test_no_command_error(self): + with self.assertRaises(SystemExit): + run_command.run(["--log", self.log_path]) + + def test_format_elapsed_minutes_seconds(self): + self.assertEqual(run_command._format_elapsed(65.5), "1:05.50") + + def test_format_elapsed_hours(self): + self.assertEqual(run_command._format_elapsed(3661.0), "1:01:01.00") + + def test_format_elapsed_subsecond(self): + self.assertEqual(run_command._format_elapsed(0.5), "0:00.50") + + def test_genElapsedTime_parses_output(self): + """Verify genElapsedTime.py can parse the timing line produced by run_command.""" + sys.path.insert( + 0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "util") + ) + import genElapsedTime + from io import StringIO + from unittest.mock import patch + + log_dir = self.tmp_dir.name + log_file = os.path.join(log_dir, "1_test.log") + # Run a real command to generate a timing line + run_command.run(["--log", log_file, "--", "echo", "hello"]) + with patch("sys.stdout", new_callable=StringIO) as mock_out: + genElapsedTime.scan_logs(["--logDir", log_dir, "--noHeader"]) + output = mock_out.getvalue() + 
self.assertIn("1_test", output) + + +if __name__ == "__main__": + unittest.main() diff --git a/flow/util/utils.mk b/flow/util/utils.mk index 25c1cfc2ac..8e6136d981 100644 --- a/flow/util/utils.mk +++ b/flow/util/utils.mk @@ -89,13 +89,13 @@ write_net_rc: $(RESULTS_DIR)/6_net_rc.csv #$(RESULTS_DIR)/6_net_rc.csv: $(RESULTS_DIR)/4_cts.odb $(RESULTS_DIR)/6_final.spef $(RESULTS_DIR)/6_net_rc.csv: - ($(TIME_CMD) $(OPENROAD_CMD) $(UTILS_DIR)/write_net_rc_script.tcl) 2>&1 | tee $(LOG_DIR)/6_write_net_rc.log + $(RUN_CMD) --log $(LOG_DIR)/6_write_net_rc.log --tee -- $(OPENROAD_CMD) $(UTILS_DIR)/write_net_rc_script.tcl .PHONY: write_segment_rc write_segment_rc: $(RESULTS_DIR)/6_segment_rc.csv $(RESULTS_DIR)/6_segment_rc.csv: - ($(TIME_CMD) $(OPENROAD_CMD) $(UTILS_DIR)/write_segment_rc_script.tcl) 2>&1 | tee $(LOG_DIR)/6_write_segment_rc.log + $(RUN_CMD) --log $(LOG_DIR)/6_write_segment_rc.log --tee -- $(OPENROAD_CMD) $(UTILS_DIR)/write_segment_rc_script.tcl .PHONY: correlate_rc correlate_rc: $(RESULTS_DIR)/6_net_rc.csv @@ -150,18 +150,19 @@ clean_issues: rm -f vars*.sh vars*.tcl vars*.gdb run-me*.sh $(RESULTS_DIR)/6_final_only_clk.def: $(RESULTS_DIR)/6_final.def - $(TIME_CMD) $(OPENROAD_CMD) $(SCRIPTS_DIR)/deleteNonClkNets.tcl + $(RUN_CMD) --tee -- $(OPENROAD_CMD) $(SCRIPTS_DIR)/deleteNonClkNets.tcl $(RESULTS_DIR)/6_final_no_power.def: $(RESULTS_DIR)/6_final.def - $(TIME_CMD) $(OPENROAD_CMD) $(SCRIPTS_DIR)/deletePowerNets.tcl + $(RUN_CMD) --tee -- $(OPENROAD_CMD) $(SCRIPTS_DIR)/deletePowerNets.tcl .PHONY: gallery gallery: check-klayout $(RESULTS_DIR)/6_final_no_power.def $(RESULTS_DIR)/6_final_only_clk.def - ($(TIME_CMD) klayout -z -nc -rx -rd gallery_json=util/gallery.json \ + $(RUN_CMD) --log $(LOG_DIR)/6_1_merge.log --tee -- \ + klayout -z -nc -rx -rd gallery_json=util/gallery.json \ -rd results_path=$(RESULTS_DIR) \ -rd tech_file=$(OBJECTS_DIR)/klayout.lyt \ - -rm $(UTILS_DIR)/createGallery.py) 2>&1 | tee $(LOG_DIR)/6_1_merge.log + -rm $(UTILS_DIR)/createGallery.py 
.PHONY: view_cells view_cells: