[difftest] refactor some output format
Clo91eaf committed Sep 9, 2024
1 parent 76d9fde commit d66f28a
Showing 6 changed files with 59 additions and 73 deletions.
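
The change repeated across these files replaces ad-hoc `pc`/`disasm`/`inst_bits` interpolation in log and panic messages with a single `se.describe_insn()` call and a uniform `[{cycle}] EventName:` prefix. A minimal sketch of what such a helper could look like follows; the struct fields and the exact output string are assumptions inferred from the format strings removed in this diff, not the repository's actual `SpikeEvent` definition:

```rust
// Hypothetical sketch, not the repository's actual code: a SpikeEvent-like
// struct with a describe_insn() helper so every log/panic message formats
// the instruction description in exactly one place.
struct SpikeEvent {
    pc: u64,
    disasm: String,
    inst_bits: u32,
}

impl SpikeEvent {
    /// Render the instruction description appended to log messages as "(...)".
    fn describe_insn(&self) -> String {
        format!(
            "pc={:#x}, disasm={}, bits={:#x}",
            self.pc, self.disasm, self.inst_bits
        )
    }
}

fn main() {
    let se = SpikeEvent {
        pc: 0x8000_1000,
        disasm: "vadd.vv v0, v1, v2".to_string(),
        inst_bits: 0x0220_8057,
    };
    // Call sites then emit a consistent "[{cycle}] Event: ... ({})" line.
    println!("[42] PeekIssue: issue_idx=3 ({})", se.describe_insn());
}
```

Keeping the cycle count and event name at the front of every message makes the log easier to grep; the rest of the diff applies that convention to the existing messages.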
2 changes: 1 addition & 1 deletion difftest/offline_t1/src/difftest.rs
@@ -1,4 +1,4 @@
use spike_rs::runner::*;
use spike_rs::runner::{SpikeArgs, SpikeRunner};
use std::path::Path;

use crate::dut::Dut;
44 changes: 20 additions & 24 deletions difftest/offline_t1/src/json_events.rs
@@ -163,8 +163,10 @@ impl JsonEventRunner for SpikeRunner {
se.issue_idx = issue.idx as u8;

info!(
"[{}] SpikePeekIssue: issue_idx={}, pc={:#x}, inst={}",
issue.cycle, issue.idx, se.pc, se.disasm
"[{}] PeekIssue: issue_idx={} ({})",
issue.cycle,
issue.idx,
se.describe_insn()
);

Ok(())
@@ -181,10 +183,10 @@ impl JsonEventRunner for SpikeRunner {
.rev()
.find(|se| (se.is_vload() || se.is_vstore()) && se.lsu_idx == LSU_IDX_DEFAULT)
{
let index = enq.trailing_zeros() as u8;
se.lsu_idx = index;
se.lsu_idx = enq.trailing_zeros() as u8;
info!(
"[{cycle}] UpdateLSUIdx: instr ({}) is allocated with lsu_idx: {index}",
"[{cycle}] UpdateLSUIdx: allocated with lsu_idx: {} ({})",
se.lsu_idx,
se.describe_insn()
);
}
@@ -204,8 +206,7 @@ impl JsonEventRunner for SpikeRunner {
self.commit_queue.iter_mut().rev().find(|se| se.issue_idx == vrf_write.issue_idx)
{
debug!(
"[{}] VrfWrite: lane={}, vd={}, idx_base={}, issue_idx={}, offset={}, mask={}, data={:x?} ({})",
vrf_write.cycle,
"[{cycle}] VrfWrite: lane={}, vd={}, idx_base={}, issue_idx={}, offset={}, mask={}, data={:x?} ({})",
vrf_write.lane,
record_idx_base,
vrf_write.vd,
@@ -219,8 +220,7 @@ impl JsonEventRunner for SpikeRunner {
if let Some(unretired_writes) = se.vrf_access_record.unretired_writes {
assert!(
unretired_writes > 0,
"[{}] unretired_writes should be greater than 0, issue_idx={} ({})",
vrf_write.cycle,
"[{cycle}] VrfWrite: unretired_writes should be greater than 0, issue_idx={} ({})",
vrf_write.issue_idx,
se.describe_insn()
);
@@ -239,10 +239,9 @@ impl JsonEventRunner for SpikeRunner {
assert_eq!(
record.byte,
written_byte,
"[{}] {offset}th byte incorrect ({:02x} record != {written_byte:02x} written) \
"[{cycle}] VrfWrite: {offset}th byte incorrect ({:02x} record != {written_byte:02x} written) \
for vrf write (lane={}, vd={}, offset={}, mask={}, data={:x?}) \
issue_idx={} [vrf_idx={}] (disasm: {}, pc: {:#x}, bits: {:#x})",
vrf_write.cycle,
issue_idx={} [vrf_idx={}] ({})",
record.byte,
vrf_write.lane,
vrf_write.vd,
@@ -251,15 +250,12 @@ impl JsonEventRunner for SpikeRunner {
vrf_write.data,
se.issue_idx,
record_idx_base + offset,
se.disasm,
se.pc,
se.inst_bits
se.describe_insn()
);
record.executed = true;
} else {
debug!(
"[{}] cannot find vrf write record, maybe not changed (lane={}, vd={}, idx={}, offset={}, mask={}, data={:x?})",
vrf_write.cycle,
"[{cycle}] VrfWrite: cannot find vrf write record, maybe not changed (lane={}, vd={}, idx={}, offset={}, mask={}, data={:x?})",
vrf_write.lane,
vrf_write.vd,
record_idx_base + offset,
@@ -271,7 +267,7 @@ impl JsonEventRunner for SpikeRunner {
})
} else {
info!(
"[{cycle}] RecordRFAccess: rtl detect vrf write on lane={}, vd={} \
"[{cycle}] VrfWrite: rtl detect vrf write on lane={}, vd={} \
with no matched se (issue_idx={}), \
maybe from committed load insn",
vrf_write.lane, vrf_write.vd, vrf_write.issue_idx
@@ -293,7 +289,7 @@ impl JsonEventRunner for SpikeRunner {
let lsu_idx = memory_write.lsu_idx;

if let Some(se) = self.commit_queue.iter_mut().find(|se| se.lsu_idx == lsu_idx) {
info!("[{cycle}] MemoryWrite: address={base_addr:08x}, size={}, data={data:x?}, mask={}, pc = {:#x}, disasm = {}", data.len(), mask_display(&mask), se.pc, se.disasm);
info!("[{cycle}] MemoryWrite: address={base_addr:#x}, size={}, data={data:x?}, mask={} ({})", data.len(), mask_display(&mask), se.describe_insn());
// compare with spike event record
mask.iter().enumerate()
.filter(|(_, &mask)| mask)
@@ -302,7 +298,7 @@ impl JsonEventRunner for SpikeRunner {
let data_byte = *data.get(offset).unwrap_or(&0);
let mem_write =
se.mem_access_record.all_writes.get_mut(&byte_addr).unwrap_or_else(|| {
panic!("[{cycle}] cannot find mem write of byte_addr {byte_addr:08x}")
panic!("[{cycle}] MemoryWrite: cannot find mem write of byte_addr {byte_addr:#x}")
});
let single_mem_write_val = mem_write.writes[mem_write.num_completed_writes].val;
mem_write.num_completed_writes += 1;
@@ -311,7 +307,7 @@ impl JsonEventRunner for SpikeRunner {
return Ok(());
}

panic!("[{cycle}] cannot find se with instruction lsu_idx={lsu_idx}")
panic!("[{cycle}] MemoryWrite: cannot find se with instruction lsu_idx={lsu_idx}")
}

fn vrf_scoreboard_report(&mut self, report: &VrfScoreboardReportEvent) -> anyhow::Result<()> {
@@ -324,7 +320,7 @@ impl JsonEventRunner for SpikeRunner {
if let Some(se) = self.commit_queue.iter_mut().rev().find(|se| se.issue_idx == issue_idx) {
assert!(
se.vrf_access_record.retired_writes <= count,
"[{cycle}] retired_writes({}) should be less than count({count}), issue_idx={issue_idx} ({})",
"[{cycle}] VrfScoreboardReport: retired_writes({}) should be less than count({count}), issue_idx={issue_idx} ({})",
se.vrf_access_record.retired_writes, se.describe_insn()
);

@@ -341,7 +337,7 @@ impl JsonEventRunner for SpikeRunner {
se.describe_insn()
);
} else {
panic!("[{cycle}] cannot find se with instruction issue_idx={issue_idx}");
panic!("[{cycle}] VrfScoreboardReport: cannot find se with instruction issue_idx={issue_idx}");
}

if let Some(issue_idx) = should_retire {
@@ -369,7 +365,7 @@ impl JsonEventRunner for SpikeRunner {

let se =
self.commit_queue.iter_mut().find(|se| se.issue_idx == issue_idx).unwrap_or_else(|| {
panic!("[{cycle}] cannot find se with instruction issue_idx={issue_idx}")
panic!("[{cycle}] CheckRd: cannot find se with instruction issue_idx={issue_idx}")
});

info!("[{cycle}] CheckRd: issue_idx={issue_idx}, data={data:x?}");
2 changes: 1 addition & 1 deletion difftest/offline_t1rocket/src/difftest.rs
@@ -33,7 +33,7 @@ impl Difftest {
anyhow::bail!("error: simulation stopped at cycle {cycle}, reason {reason}")
}
JsonEvents::SimulationEnd { cycle } => {
anyhow::bail!("simulation quit successfullly cycle {cycle}");
anyhow::bail!("simulation quit successfullly cycle {cycle}")
}
JsonEvents::RegWrite { idx, data, cycle } => {
self.runner.cycle = *cycle;
53 changes: 26 additions & 27 deletions difftest/offline_t1rocket/src/json_events.rs
@@ -207,7 +207,7 @@ impl JsonEventRunner for SpikeRunner {

assert!(
data == board_data,
"rtl data({data:#x}) should be equal to board data({board_data:#x})"
"[{cycle}] RegWrite: rtl data({data:#x}) should be equal to board data({board_data:#x})"
);

self.rf_board[idx as usize] = None;
@@ -226,12 +226,12 @@ impl JsonEventRunner for SpikeRunner {

assert!(
idx as u32 == se.rd_idx,
"rtl idx({idx}) should be equal to spike idx({})",
"[{cycle}] RegWrite: rtl idx({idx}) should be equal to spike idx({})",
se.rd_idx
);
assert!(
data == se.rd_bits,
"rtl data({data:#x}) should be equal to spike data({:#x})",
"[{cycle}] RegWrite: rtl data({data:#x}) should be equal to spike data({:#x})",
se.rd_bits
);

@@ -253,7 +253,7 @@ impl JsonEventRunner for SpikeRunner {

assert!(
idx as u32 == se.rd_idx,
"rtl idx({idx}) should be equal to spike idx({})",
"[{cycle}] RegWriteWait: rtl idx({idx}) should be equal to spike idx({})",
se.rd_idx
);

@@ -274,7 +274,7 @@ impl JsonEventRunner for SpikeRunner {

assert!(
data == board_data,
"rtl data({data:#x}) should be equal to board data({board_data:#x})"
"[{cycle}] FregWrite: rtl data({data:#x}) should be equal to board data({board_data:#x})"
);

self.frf_board[idx as usize] = None;
@@ -293,7 +293,7 @@ impl JsonEventRunner for SpikeRunner {

assert!(
idx as u32 == se.rd_idx,
"rtl idx({idx}) should be equal to spike idx({})",
"[{cycle}] FregWrite: rtl idx({idx}) should be equal to spike idx({})",
se.rd_idx
);
assert!(
@@ -320,7 +320,7 @@ impl JsonEventRunner for SpikeRunner {

assert!(
idx as u32 == se.rd_idx,
"rtl idx({idx}) should be equal to spike idx({})",
"[{cycle}] FregWriteWait: rtl idx({idx}) should be equal to spike idx({})",
se.rd_idx
);

@@ -378,8 +378,7 @@ impl JsonEventRunner for SpikeRunner {
self.commit_queue.iter_mut().rev().find(|se| se.issue_idx == vrf_write.issue_idx)
{
debug!(
"[{}] VrfWrite: lane={}, vd={}, idx_base={}, issue_idx={}, offset={}, mask={}, data={:x?} ({})",
vrf_write.cycle,
"[{cycle}] VrfWrite: lane={}, vd={}, idx_base={}, issue_idx={}, offset={}, mask={}, data={:x?} ({})",
vrf_write.lane,
record_idx_base,
vrf_write.vd,
@@ -393,8 +392,7 @@ impl JsonEventRunner for SpikeRunner {
if let Some(unretired_writes) = se.vrf_access_record.unretired_writes {
assert!(
unretired_writes > 0,
"[{}] unretired_writes should be greater than 0, issue_idx={} ({})",
vrf_write.cycle,
"[{cycle}] VrfWrite: unretired_writes should be greater than 0, issue_idx={} ({})",
vrf_write.issue_idx,
se.describe_insn()
);
@@ -413,10 +411,9 @@ impl JsonEventRunner for SpikeRunner {
assert_eq!(
record.byte,
written_byte,
"[{}] {offset}th byte incorrect ({:#02x} record != {written_byte:#02x} written) \
"[{cycle}] VrfWrite: {offset}th byte incorrect ({} record != {written_byte} written) \
for vrf write (lane={}, vd={}, offset={}, mask={}, data={:x?}) \
issue_idx={} [vrf_idx={}] (disasm: {}, pc: {:#x}, bits: {:#x})",
vrf_write.cycle,
issue_idx={} [vrf_idx={}] ({})",
record.byte,
vrf_write.lane,
vrf_write.vd,
@@ -425,15 +422,12 @@ impl JsonEventRunner for SpikeRunner {
vrf_write.data,
se.issue_idx,
record_idx_base + offset,
se.disasm,
se.pc,
se.inst_bits
se.describe_insn()
);
record.executed = true;
} else {
debug!(
"[{}] cannot find vrf write record, maybe not changed (lane={}, vd={}, idx={}, offset={}, mask={}, data={:x?})",
vrf_write.cycle,
"[{cycle}] VrfWrite: cannot find vrf write record, maybe not changed (lane={}, vd={}, idx={}, offset={}, mask={}, data={:x?})",
vrf_write.lane,
vrf_write.vd,
record_idx_base + offset,
@@ -445,7 +439,7 @@ impl JsonEventRunner for SpikeRunner {
})
} else {
info!(
"[{cycle}] RecordRFAccess: rtl detect vrf write on lane={}, vd={} \
"[{cycle}] VrfWrite: rtl detect vrf write on lane={}, vd={} \
with no matched se (issue_idx={}), \
maybe from committed load insn",
vrf_write.lane, vrf_write.vd, vrf_write.issue_idx
@@ -467,7 +461,7 @@ impl JsonEventRunner for SpikeRunner {
let lsu_idx = memory_write.lsu_idx;

if let Some(se) = self.commit_queue.iter_mut().find(|se| se.lsu_idx == lsu_idx) {
info!("[{cycle}] MemoryWrite: address={base_addr:#x}, size={}, data={data:x?}, mask={}, pc = {:#x}, disasm = {}", data.len(), mask_display(&mask), se.pc, se.disasm);
info!(
"[{cycle}] MemoryWrite: address={base_addr:#x}, size={}, data={data:x?}, mask={} ({})",
data.len(),
mask_display(&mask),
se.describe_insn()
);
// compare with spike event record
mask.iter().enumerate()
.filter(|(_, &mask)| mask)
@@ -476,16 +475,16 @@ impl JsonEventRunner for SpikeRunner {
let data_byte = *data.get(offset).unwrap_or(&0);
let mem_write =
se.mem_access_record.all_writes.get_mut(&byte_addr).unwrap_or_else(|| {
panic!("[{cycle}] cannot find mem write of byte_addr {byte_addr:#x}")
panic!("[{cycle}] MemoryWrite: cannot find mem write of byte_addr {byte_addr:#x}")
});
let single_mem_write_val = mem_write.writes[mem_write.num_completed_writes].val;
mem_write.num_completed_writes += 1;
assert_eq!(single_mem_write_val, data_byte, "[{cycle}] expect mem write of byte {single_mem_write_val:#02x}, actual byte {data_byte:#02x} (byte_addr={byte_addr:#x}, pc = {:#x}, disasm = {})", se.pc, se.disasm);
assert_eq!(single_mem_write_val, data_byte, "[{cycle}] MemoryWrite: expect mem write of byte {single_mem_write_val:#02x}, actual byte {data_byte:#02x}, byte_addr={byte_addr:#x} ({})", se.describe_insn());
});
return Ok(());
}

panic!("[{cycle}] cannot find se with instruction lsu_idx={lsu_idx}")
panic!("[{cycle}] MemoryWrite: cannot find se with instruction lsu_idx={lsu_idx}")
}

fn vrf_scoreboard(&mut self, vrf_scoreboard: &VrfScoreboardEvent) -> anyhow::Result<()> {
@@ -498,7 +497,7 @@ impl JsonEventRunner for SpikeRunner {
if let Some(se) = self.commit_queue.iter_mut().rev().find(|se| se.issue_idx == issue_idx) {
assert!(
se.vrf_access_record.retired_writes <= count,
"[{cycle}] retired_writes({}) should be less than count({count}), issue_idx={issue_idx} ({})",
"[{cycle}] VrfScoreboardReport: retired_writes({}) should be less than count({count}), issue_idx={issue_idx} ({})",
se.vrf_access_record.retired_writes, se.describe_insn()
);

@@ -515,7 +514,7 @@ impl JsonEventRunner for SpikeRunner {
se.describe_insn()
);
} else {
panic!("[{cycle}] cannot find se with instruction issue_idx={issue_idx}");
panic!("[{cycle}] VrfScoreboardReport: cannot find se with instruction issue_idx={issue_idx}");
}

if let Some(issue_idx) = should_retire {
@@ -543,7 +542,7 @@ impl JsonEventRunner for SpikeRunner {

let se =
self.commit_queue.iter_mut().find(|se| se.issue_idx == issue_idx).unwrap_or_else(|| {
panic!("[{cycle}] cannot find se with instruction issue_idx={issue_idx}")
panic!("[{cycle}] CheckRd: cannot find se with instruction issue_idx={issue_idx}")
});

info!("[{cycle}] CheckRd: issue_idx={issue_idx}, data={data:x?}");
2 changes: 1 addition & 1 deletion difftest/spike_rs/src/lib.rs
@@ -65,7 +65,7 @@ impl Spike {
len: usize,
bytes: Vec<u8>,
) -> anyhow::Result<()> {
trace!("ld: addr: 0x{:x}, len: 0x{:x}", addr, len);
trace!("ld: addr: {:#x}, len: {:#x}", addr, len);
assert!(addr + len <= self.size);

let dst = &mut self.mem[addr..addr + len];