Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
60 changes: 60 additions & 0 deletions docs/contrib/DeveloperGuide.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,3 +5,63 @@
- Continuous Integration: [Guide](./CI.md).
- How do I update the codebase? There are different ways to update your codebase depending on the method you installed it. We provide detailed instructions in this [guide](../user/FAQS.md).
- How do I contribute? Follow our Git Quickstart guide [here](./GitGuide.md).

## Timing and Logging (`run_command.py`)

Every flow stage (synthesis, floorplan, CTS, routing, etc.) is wrapped by
`flow/scripts/run_command.py`, which replaces the previous GNU `time` + `tee`
shell pipeline with a pure-Python implementation.

### What it does

`run_command.py` runs a command and:

- Measures **wall-clock time**, **CPU time** (user + sys), and **peak memory**
using Python's `time.monotonic()` and `resource.getrusage()`.
- Streams output **line-by-line** to both the console and a log file
(replacing `tee`), flushing after every line for real-time `tail -f`
observability.
- Appends an `Elapsed time: ...` line in the format expected by
`genElapsedTime.py` and `genMetrics.py`.

### Usage

```bash
python3 flow/scripts/run_command.py [--log FILE] [--append] [--tee] -- command [args...]
```

| Flag | Effect |
|------------|--------|
| `--log FILE` | Write command output + timing line to FILE |
| `--append` | Append to log file instead of overwriting |
| `--tee` | Also write output to stdout (like the `tee` command) |

### Monitoring long-running stages

When running under Bazel (`bazelisk test ...`) or other batch systems that
hide console output, you can monitor progress by finding and tailing the log:

```bash
# Find the running stage's log file
ps -Af | grep run_command
# or
ps -Af | grep tmp.log

# Watch it in real time
tail -f /path/to/logs/4_cts.tmp.log
```

Output appears immediately in the log file (line-buffered with flush),
so `tail -f` shows real-time progress.

### Cross-platform support

Works on both Linux and macOS using only Python standard library modules.
Peak memory is normalized automatically (`ru_maxrss` is KB on Linux,
bytes on macOS).

### Testing

```bash
python3 -m pytest flow/test/test_run_command.py -v
```
26 changes: 15 additions & 11 deletions flow/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,7 @@ synth-report: synth

.PHONY: do-synth-report
do-synth-report:
($(TIME_CMD) $(OPENROAD_CMD) $(SCRIPTS_DIR)/synth_metrics.tcl) 2>&1 | tee $(abspath $(LOG_DIR)/1_2_yosys_metrics.log)
$(RUN_CMD) --log $(abspath $(LOG_DIR)/1_2_yosys_metrics.log) --tee -- $(OPENROAD_CMD) $(SCRIPTS_DIR)/synth_metrics.tcl

.PHONY: memory
memory:
Expand Down Expand Up @@ -641,7 +641,7 @@ generate_abstract: $(RESULTS_DIR)/6_final.gds $(RESULTS_DIR)/6_final.def $(RESU
.PHONY: do-generate_abstract
do-generate_abstract:
mkdir -p $(LOG_DIR) $(REPORTS_DIR)
($(TIME_CMD) $(OPENROAD_CMD) $(SCRIPTS_DIR)/generate_abstract.tcl -metrics $(LOG_DIR)/generate_abstract.json) 2>&1 | tee $(abspath $(LOG_DIR)/generate_abstract.log)
$(RUN_CMD) --log $(abspath $(LOG_DIR)/generate_abstract.log) --tee -- $(OPENROAD_CMD) $(SCRIPTS_DIR)/generate_abstract.tcl -metrics $(LOG_DIR)/generate_abstract.json

.PHONY: clean_abstract
clean_abstract:
Expand All @@ -661,15 +661,16 @@ gds: $(GDS_FINAL_FILE)
# Merge wrapped macros using Klayout
#-------------------------------------------------------------------------------
$(WRAPPED_GDSOAS): $(OBJECTS_DIR)/klayout_wrap.lyt $(WRAPPED_LEFS)
($(TIME_CMD) $(SCRIPTS_DIR)/klayout.sh -zz -rd design_name=$(basename $(notdir $@)) \
$(RUN_CMD) --log $(abspath $(LOG_DIR)/6_merge_$(basename $(notdir $@)).log) --tee -- \
$(SCRIPTS_DIR)/klayout.sh -zz -rd design_name=$(basename $(notdir $@)) \
-rd in_def=$(OBJECTS_DIR)/def/$(notdir $(@:$(STREAM_SYSTEM_EXT)=def)) \
-rd in_files="$(ADDITIONAL_GDSOAS)" \
-rd config_file=$(FILL_CONFIG) \
-rd seal_file="" \
-rd out_file=$@ \
-rd tech_file=$(OBJECTS_DIR)/klayout_wrap.lyt \
-rd layer_map=$(GDS_LAYER_MAP) \
-r $(UTILS_DIR)/def2stream.py) 2>&1 | tee $(abspath $(LOG_DIR)/6_merge_$(basename $(notdir $@)).log)
-r $(UTILS_DIR)/def2stream.py

# Merge GDS using Klayout
#-------------------------------------------------------------------------------
Expand All @@ -678,14 +679,15 @@ $(GDS_MERGED_FILE): check-klayout $(RESULTS_DIR)/6_final.def $(OBJECTS_DIR)/klay

.PHONY: do-gds-merged
do-gds-merged:
($(TIME_CMD) $(STDBUF_CMD) $(SCRIPTS_DIR)/klayout.sh -zz -rd design_name=$(DESIGN_NAME) \
$(RUN_CMD) --log $(abspath $(LOG_DIR)/6_1_merge.log) --tee -- \
$(SCRIPTS_DIR)/klayout.sh -zz -rd design_name=$(DESIGN_NAME) \
-rd in_def=$(RESULTS_DIR)/6_final.def \
-rd in_files="$(GDSOAS_FILES) $(WRAPPED_GDSOAS)" \
-rd seal_file="$(SEAL_GDSOAS)" \
-rd out_file=$(GDS_MERGED_FILE) \
-rd tech_file=$(OBJECTS_DIR)/klayout.lyt \
-rd layer_map=$(GDS_LAYER_MAP) \
-r $(UTILS_DIR)/def2stream.py) 2>&1 | tee $(abspath $(LOG_DIR)/6_1_merge.log)
-r $(UTILS_DIR)/def2stream.py

$(RESULTS_DIR)/6_final.v: $(LOG_DIR)/6_report.log

Expand All @@ -702,17 +704,18 @@ drc: $(REPORTS_DIR)/6_drc.lyrdb

$(REPORTS_DIR)/6_drc.lyrdb: $(GDS_FINAL_FILE) $(KLAYOUT_DRC_FILE)
ifneq ($(KLAYOUT_DRC_FILE),)
($(TIME_CMD) $(SCRIPTS_DIR)/klayout.sh -zz -rd in_gds="$<" \
$(RUN_CMD) --log $(abspath $(LOG_DIR)/6_drc.log) --tee -- \
$(SCRIPTS_DIR)/klayout.sh -zz -rd in_gds="$<" \
-rd report_file=$(abspath $@) \
-r $(KLAYOUT_DRC_FILE)) 2>&1 | tee $(abspath $(LOG_DIR)/6_drc.log)
-r $(KLAYOUT_DRC_FILE)
# Hacky way of getting DRV count (don't error on no matches)
grep -c "<value>" $@ > $(REPORTS_DIR)/6_drc_count.rpt || [[ $$? == 1 ]]
else
echo "DRC not supported on this platform" > $@
endif

$(RESULTS_DIR)/6_final.cdl: $(RESULTS_DIR)/6_final.v
($(TIME_CMD) $(OPENROAD_CMD) $(SCRIPTS_DIR)/cdl.tcl) 2>&1 | tee $(abspath $(LOG_DIR)/6_cdl.log)
$(RUN_CMD) --log $(abspath $(LOG_DIR)/6_cdl.log) --tee -- $(OPENROAD_CMD) $(SCRIPTS_DIR)/cdl.tcl

$(OBJECTS_DIR)/6_final_concat.cdl: $(RESULTS_DIR)/6_final.cdl $(CDL_FILE)
cat $^ > $@
Expand All @@ -722,10 +725,11 @@ lvs: $(RESULTS_DIR)/6_lvs.lvsdb

$(RESULTS_DIR)/6_lvs.lvsdb: $(GDS_FINAL_FILE) $(KLAYOUT_LVS_FILE) $(OBJECTS_DIR)/6_final_concat.cdl
ifneq ($(KLAYOUT_LVS_FILE),)
($(TIME_CMD) $(SCRIPTS_DIR)/klayout.sh -b -rd in_gds="$<" \
$(RUN_CMD) --log $(abspath $(LOG_DIR)/6_lvs.log) --tee -- \
$(SCRIPTS_DIR)/klayout.sh -b -rd in_gds="$<" \
-rd cdl_file=$(abspath $(OBJECTS_DIR)/6_final_concat.cdl) \
-rd report_file=$(abspath $@) \
-r $(KLAYOUT_LVS_FILE)) 2>&1 | tee $(abspath $(LOG_DIR)/6_lvs.log)
-r $(KLAYOUT_LVS_FILE)
else
echo "LVS not supported on this platform" > $@
endif
Expand Down
4 changes: 2 additions & 2 deletions flow/scripts/flow.sh
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@ echo "Running $2.tcl, stage $1"
eval "$OPENROAD_EXE $OPENROAD_ARGS -exit \"$SCRIPTS_DIR/noop.tcl\"" \
>"$LOG_DIR/$1.tmp.log" 2>&1

eval "$TIME_CMD $OPENROAD_CMD -no_splash \"$SCRIPTS_DIR/$2.tcl\" -metrics \"$LOG_DIR/$1.json\"" \
2>&1 | tee -a "$(realpath "$LOG_DIR/$1.tmp.log")"
$PYTHON_EXE "$SCRIPTS_DIR/run_command.py" --log "$(realpath "$LOG_DIR/$1.tmp.log")" --append --tee -- \
$OPENROAD_CMD -no_splash "$SCRIPTS_DIR/$2.tcl" -metrics "$LOG_DIR/$1.json"
)

# Log the hash for this step. The summary "make elapsed" in "make finish",
Expand Down
124 changes: 124 additions & 0 deletions flow/scripts/run_command.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,124 @@
#!/usr/bin/env python3
"""Run a command with timing and optional log file output.

Replaces the GNU `time` + `tee` shell pipeline with a pure-Python
implementation that works on both Linux and macOS without external
dependencies.

Usage:
run_command.py [--log FILE] [--append] [--tee] -- command [args...]

Output is streamed line-by-line and flushed after every line so that
`tail -f <logfile>` works in real time. The log path is visible in
`ps` output for discoverability (replaces `ps -Af | grep tee`).

On completion an "Elapsed time: ..." line is appended to stderr (and
the log file) in the same format that the rest of the ORFS
infrastructure expects.
"""

import argparse
import resource
import subprocess
import sys
import time


def _maxrss_kb(ru_maxrss: int) -> int:
"""Normalize ru_maxrss to kilobytes (Linux reports KB, macOS reports bytes)."""
if sys.platform == "darwin":
return ru_maxrss // 1024
return ru_maxrss


def _format_elapsed(seconds: float) -> str:
"""Format seconds as [H:]MM:SS.ff matching GNU time %E output."""
h = int(seconds // 3600)
m = int((seconds % 3600) // 60)
s = seconds % 60
if h:
return f"{h}:{m:02d}:{s:05.2f}"
return f"{m}:{s:05.2f}"


def _build_timing_line(
wall: float,
user: float,
sys_: float,
peak_kb: int,
) -> str:
cpu_pct = int(100 * (user + sys_) / wall) if wall > 0 else 0
return (
f"Elapsed time: {_format_elapsed(wall)}[h:]min:sec. "
f"CPU time: user {user:.2f} sys {sys_:.2f} ({cpu_pct}%). "
f"Peak memory: {peak_kb}KB."
)


def run(argv: "list[str] | None" = None) -> int:
    """Run a command, stream its output, and append a timing summary.

    Args:
        argv: Argument list (defaults to ``sys.argv[1:]`` via argparse).
            Recognized flags: ``--log FILE``, ``--append``, ``--tee``,
            followed by ``--`` and the command to execute.

    Returns:
        The wrapped command's exit code.

    The command's stdout and stderr are merged, streamed line-by-line and
    flushed after every line (so ``tail -f`` shows real-time progress),
    and optionally copied to a log file. An ``Elapsed time: ...`` line is
    written to stderr (and the log) on completion, even if the command is
    interrupted.

    Note: the annotation is quoted because ``list[str] | None`` is
    evaluated at definition time and would raise on Python < 3.10.
    """
    parser = argparse.ArgumentParser(
        description="Run a command with timing, optional tee-to-log."
    )
    parser.add_argument("--log", help="Log file path")
    parser.add_argument(
        "--append", action="store_true", help="Append to log file instead of overwrite"
    )
    parser.add_argument(
        "--tee",
        action="store_true",
        help="Also write output to stdout (like tee)",
    )
    parser.add_argument("command", nargs=argparse.REMAINDER)
    args = parser.parse_args(argv)

    cmd = args.command
    # argparse.REMAINDER keeps the leading "--" separator; drop it.
    if cmd and cmd[0] == "--":
        cmd = cmd[1:]
    if not cmd:
        parser.error("No command specified")

    # Snapshot children CPU usage so prior children don't inflate the
    # numbers. (ru_maxrss is a high-water mark over all children and
    # cannot be differenced the same way.)
    before = resource.getrusage(resource.RUSAGE_CHILDREN)

    log_file = None
    if args.log:
        log_file = open(args.log, "a" if args.append else "w")

    wall_start = time.monotonic()
    try:
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    except BaseException:
        # Popen failed (e.g. command not found): don't leak the log handle.
        if log_file:
            log_file.close()
        raise

    try:
        for line in iter(proc.stdout.readline, b""):
            if args.tee:
                sys.stdout.buffer.write(line)
                sys.stdout.buffer.flush()
            if log_file:
                log_file.write(line.decode(errors="replace"))
                log_file.flush()
        proc.wait()
    except BaseException:
        # Ctrl-C / errors: terminate the child so we don't orphan it,
        # then re-raise after the finally block writes the timing line.
        proc.kill()
        proc.wait()
        raise
    finally:
        proc.stdout.close()  # release the pipe fd (was leaked before)

        wall = time.monotonic() - wall_start

        after = resource.getrusage(resource.RUSAGE_CHILDREN)
        user = after.ru_utime - before.ru_utime
        sys_ = after.ru_stime - before.ru_stime
        peak_kb = _maxrss_kb(after.ru_maxrss)

        timing_line = _build_timing_line(wall, user, sys_, peak_kb)

        sys.stderr.write(timing_line + "\n")
        sys.stderr.flush()
        if log_file:
            log_file.write(timing_line + "\n")
            log_file.flush()
            log_file.close()

    return proc.returncode


# Allow direct invocation: propagate the wrapped command's exit code.
if __name__ == "__main__":
    sys.exit(run())
3 changes: 2 additions & 1 deletion flow/scripts/synth.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,5 @@
set -u -eo pipefail
mkdir -p $RESULTS_DIR $LOG_DIR $REPORTS_DIR $OBJECTS_DIR
$YOSYS_EXE -V > $(realpath $2)
eval "$TIME_CMD $YOSYS_EXE $YOSYS_FLAGS -c $1" 2>&1 | tee --append $(realpath $2)
$PYTHON_EXE "$SCRIPTS_DIR/run_command.py" --log "$(realpath $2)" --append --tee -- \
$YOSYS_EXE $YOSYS_FLAGS -c $1
13 changes: 2 additions & 11 deletions flow/scripts/variables.mk
Original file line number Diff line number Diff line change
Expand Up @@ -72,13 +72,7 @@ export NUM_CORES
# setup all commands used within this flow
export PYTHON_EXE ?= $(shell command -v python3)

export TIME_BIN ?= env time
TIME_CMD = $(TIME_BIN) -f 'Elapsed time: %E[h:]min:sec. CPU time: user %U sys %S (%P). Peak memory: %MKB.'
TIME_TEST = $(shell $(TIME_CMD) echo foo 2>/dev/null)
ifeq (,$(strip $(TIME_TEST)))
TIME_CMD = $(TIME_BIN)
endif
export TIME_CMD
export RUN_CMD = $(PYTHON_EXE) $(FLOW_HOME)/scripts/run_command.py

# The following determine the executable location for each tool used by this flow.
# Priority is given to
Expand Down Expand Up @@ -128,9 +122,6 @@ export KLAYOUT_CMD := $(shell command -v klayout)
endif
endif

ifneq ($(shell command -v stdbuf),)
STDBUF_CMD ?= stdbuf -o L
endif

#-------------------------------------------------------------------------------
WRAPPED_LEFS = $(foreach lef,$(notdir $(WRAP_LEFS)),$(OBJECTS_DIR)/lef/$(lef:.lef=_mod.lef))
Expand Down Expand Up @@ -191,7 +182,7 @@ export RESULTS_V = $(notdir $(sort $(wildcard $(RESULTS_DIR)/*.v)))
export GDS_MERGED_FILE = $(RESULTS_DIR)/6_1_merged.$(STREAM_SYSTEM_EXT)

define get_variables
$(foreach V, $(.VARIABLES),$(if $(filter-out $(1), $(origin $V)), $(if $(filter-out .% %QT_QPA_PLATFORM% %TIME_CMD% KLAYOUT% GENERATE_ABSTRACT_RULE% do-step% do-copy% OPEN_GUI% OPEN_GUI_SHORTCUT% SUB_MAKE% UNSET_VARS% export%, $(V)), $V$ )))
$(foreach V, $(.VARIABLES),$(if $(filter-out $(1), $(origin $V)), $(if $(filter-out .% %QT_QPA_PLATFORM% KLAYOUT% GENERATE_ABSTRACT_RULE% do-step% do-copy% OPEN_GUI% OPEN_GUI_SHORTCUT% SUB_MAKE% UNSET_VARS% export%, $(V)), $V$ )))
endef

export UNSET_VARIABLES_NAMES := $(call get_variables,command% line environment% default automatic)
Expand Down
Loading