Browse Source

s_len param for vcd

devel
stefan 15 hours ago
parent
commit
32e0766d41
  1 changed file with 8 changed lines
      src/kyupy/vcd.py

src/kyupy/vcd.py

@@ -154,12 +154,13 @@ GRAMMAR = r"""
 """
 
-def load(file, var_locs = lambda _: [], step_filter = lambda *_: True):
+def load(file, var_locs = lambda _: [], step_filter = lambda *_: True, s_len = None):
     """Parses the contents of ``file`` as Verilog Change Dump (VCD).
 
     :param file: A file name or a file handle. Files with `.gz`-suffix are decompressed on-the-fly.
     :param var_locs: A callback ``(var) -> list[int]`` mapping each variable to ndarray column indices. Empty list drops the variable.
     :param step_filter: A callback ``(time, values, var_map) -> bool`` to select which timesteps to include.
+    :param s_len: length of the first axis of returned pattern data.
     :return: A VcdData object with metadata and an ndarray with all values.
     """
     vcd = readtext(file)
@@ -169,10 +170,11 @@ def load(file, var_locs = lambda _: [], step_filter = lambda *_: True):
     vcd_header : VcdHeader = Lark(GRAMMAR, parser="lalr", lexer='contextual', transformer=VcdHeaderTransformer()).parse(vcd_header_str)  # type: ignore
     vcd_data = vcd[header_size:].splitlines()
     var_map = VcdVarMap(vcd_header, var_locs)
+    width = s_len if s_len is not None else var_map.total_width
 
     chunk_size = 10240
     chunks = []
-    chunk = np.full((chunk_size, var_map.total_width), logic.UNASSIGNED, dtype=np.uint8)
+    chunk = np.full((chunk_size, width), logic.UNASSIGNED, dtype=np.uint8)
     _val_map = {'0': logic.ZERO, '1': logic.ONE,
                 'x': logic.UNKNOWN, 'X': logic.UNKNOWN,
@@ -191,7 +193,7 @@ def load(file, var_locs = lambda _: [], step_filter = lambda *_: True):
             steps.append(current_time)
             if step_idx >= chunk_size:
                 chunks.append(chunk)
-                chunk = np.empty((chunk_size, var_map.total_width), dtype=np.uint8)
+                chunk = np.empty((chunk_size, width), dtype=np.uint8)
                 chunk[0] = chunks[-1][-1]
                 step_idx = 0
             else:

Loading…
Cancel
Save