
Merge branch 'master' into debug_logs
Jacob Urbanczyk committed Mar 14, 2024
2 parents 60b91f4 + f53c30e commit fedd275
Showing 27 changed files with 1,621 additions and 302 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/benchmark.yml
@@ -63,7 +63,7 @@ jobs:
build-perf-benchmarks:
name: Build performance benchmarks
runs-on: ubuntu-latest
container: ghcr.io/kuznia-rdzeni/riscv-toolchain:2023.11.19_v
container: ghcr.io/kuznia-rdzeni/riscv-toolchain:2024.03.12
steps:
- name: Checkout
uses: actions/checkout@v3
90 changes: 61 additions & 29 deletions .github/workflows/main.yml
@@ -48,7 +48,7 @@ jobs:
build-riscof-tests:
name: Build regression tests (riscv-arch-test)
runs-on: ubuntu-latest
container: ghcr.io/kuznia-rdzeni/riscv-toolchain:2023.11.19_v
container: ghcr.io/kuznia-rdzeni/riscv-toolchain:2024.03.12
timeout-minutes: 10
env:
PYENV_ROOT: "/root/.pyenv"
@@ -61,8 +61,14 @@
steps:
- name: Checkout
uses: actions/checkout@v3
with:
submodules: recursive

- name: Get submodules HEAD hash
working-directory: .
run: |
# ownership workaround
git config --global --add safe.directory /__w/coreblocks/coreblocks
# paths in command are relative!
git submodule > .gitmodules-hash
- name: Cache compiled and reference riscv-arch-test
id: cache-riscv-arch-test
@@ -79,11 +85,17 @@
'**/test/external/riscof/coreblocks/**',
'**/test/external/riscof/spike_simple/**',
'**/test/external/riscof/config.ini',
'**/.git/modules/test/external/riscof/riscv-arch-test/HEAD',
'**/docker/riscv-toolchain.Dockerfile'
'**/.gitmodules-hash',
'**/docker/riscv-toolchain.Dockerfile',
'**/.github/workflows/main.yml'
) }}
restore-keys: |
${{ env.cache-name }}-${{ runner.os }}-
lookup-only: true

- if: ${{ steps.cache-riscv-arch-test.outputs.cache-hit != 'true' }}
name: Checkout with submodules
uses: actions/checkout@v3
with:
submodules: recursive

- if: ${{ steps.cache-riscv-arch-test.outputs.cache-hit != 'true' }}
name: Setup PATH
@@ -112,7 +124,9 @@ jobs:
name: Build tests for Coreblocks
run: |
MAKEFILE_PATH=riscof_work/Makefile.build-DUT-coreblocks ../../../ci/riscof_run_makefile.sh
- if: ${{ steps.cache-riscv-arch-test.outputs.cache-hit != 'true' }}
name: Upload compiled and reference tests artifact
uses: actions/upload-artifact@v3
with:
name: "riscof-tests"
@@ -130,8 +144,6 @@
steps:
- name: Checkout
uses: actions/checkout@v3
with:
submodules: recursive

- name: Set up Python
uses: actions/setup-python@v4
@@ -146,11 +158,18 @@
python3 -m pip install -r requirements-dev.txt
- uses: actions/download-artifact@v3
name: Download full verilog core
with:
name: "verilog-full-core"
path: .

- name: Get submodules HEAD hash
run: |
git config --global --add safe.directory /__w/coreblocks/coreblocks
git submodule > .gitmodules-hash
- uses: actions/cache@v3
name: Download tests from cache
env:
cache-name: cache-riscv-arch-test
with:
@@ -163,8 +182,9 @@
'**/test/external/riscof/coreblocks/**',
'**/test/external/riscof/spike_simple/**',
'**/test/external/riscof/config.ini',
'**/.git/modules/test/external/riscof/riscv-arch-test/HEAD',
'**/docker/riscv-toolchain.Dockerfile'
'**/.gitmodules-hash',
'**/docker/riscv-toolchain.Dockerfile',
'**/.github/workflows/main.yml'
) }}
fail-on-cache-miss: true

@@ -180,15 +200,16 @@
build-regression-tests:
name: Build regression tests (riscv-tests)
runs-on: ubuntu-latest
container: ghcr.io/kuznia-rdzeni/riscv-toolchain:2023.11.19_v
container: ghcr.io/kuznia-rdzeni/riscv-toolchain:2024.03.12
timeout-minutes: 10
outputs:
cache_hit: ${{ steps.cache-regression.outputs.cache-hit }}
steps:
- name: Checkout
uses: actions/checkout@v3
with:
submodules: recursive

- name: Get submodules HEAD hash
run: |
git config --global --add safe.directory /__w/coreblocks/coreblocks
git submodule > .gitmodules-hash
- name: Cache regression-tests
id: cache-regression
@@ -197,15 +218,20 @@
cache-name: cache-regression-tests
with:
path: test/external/riscv-tests/test-*

key: ${{ env.cache-name }}-${{ runner.os }}-${{ hashFiles(
'**/test/external/riscv-tests/environment/**',
'**/test/external/riscv-tests/environment/custom/**',
'**/test/external/riscv-tests/Makefile',
'**/.git/modules/test/external/riscv-tests/riscv-tests/HEAD',
'**/docker/riscv-toolchain.Dockerfile'
'**/.gitmodules-hash',
'**/docker/riscv-toolchain.Dockerfile',
'**/.github/workflows/main.yml'
) }}
restore-keys: |
${{ env.cache-name }}-${{ runner.os }}-
lookup-only: true

- if: ${{ steps.cache-regression.outputs.cache-hit != 'true' }}
name: Checkout with submodules
uses: actions/checkout@v3
with:
submodules: recursive

- if: ${{ steps.cache-regression.outputs.cache-hit != 'true' }}
run: cd test/external/riscv-tests && make
@@ -225,8 +251,6 @@
steps:
- name: Checkout
uses: actions/checkout@v3
with:
submodules: recursive

- name: Set up Python
uses: actions/setup-python@v4
@@ -241,21 +265,29 @@
python3 -m pip install -r requirements-dev.txt
- uses: actions/download-artifact@v3
name: Download full verilog core
with:
name: "verilog-full-core"
path: .

- name: Get submodules HEAD hash
run: |
git config --global --add safe.directory /__w/coreblocks/coreblocks
git submodule > .gitmodules-hash
- uses: actions/cache@v3
name: Download tests from cache
env:
cache-name: cache-regression-tests
with:
path: test/external/riscv-tests/test-*
key: ${{ env.cache-name }}-${{ runner.os }}-${{ hashFiles(
'**/test/external/riscv-tests/environment/**',
'**/test/external/riscv-tests/Makefile',
'**/.git/modules/test/external/riscv-tests/riscv-tests/HEAD',
'**/docker/riscv-toolchain.Dockerfile'
) }}
'**/test/external/riscv-tests/environment/custom/**',
'**/test/external/riscv-tests/Makefile',
'**/.gitmodules-hash',
'**/docker/riscv-toolchain.Dockerfile',
'**/.github/workflows/main.yml'
) }}
fail-on-cache-miss: true

- name: Run tests
28 changes: 15 additions & 13 deletions coreblocks/cache/icache.py
@@ -2,6 +2,7 @@
import operator

from amaranth import *
from amaranth.lib.data import View
from amaranth.utils import exact_log2

from transactron.core import def_method, Priority, TModule
@@ -12,6 +13,7 @@
from coreblocks.peripherals.bus_adapter import BusMasterInterface

from coreblocks.cache.iface import CacheInterface, CacheRefillerInterface
from transactron.utils.transactron_helpers import make_layout

__all__ = [
"ICache",
@@ -109,11 +111,11 @@ def __init__(self, layouts: ICacheLayouts, params: ICacheParameters, refiller: C
self.flush = Method()
self.flush.add_conflict(self.issue_req, Priority.LEFT)

self.addr_layout = [
self.addr_layout = make_layout(
("offset", self.params.offset_bits),
("index", self.params.index_bits),
("tag", self.params.tag_bits),
]
)

self.perf_loads = HwCounter("frontend.icache.loads", "Number of requests to the L1 Instruction Cache")
self.perf_hits = HwCounter("frontend.icache.hits")
@@ -131,7 +133,7 @@ def deserialize_addr(self, raw_addr: Value) -> dict[str, Value]:
"tag": raw_addr[-self.params.tag_bits :],
}

def serialize_addr(self, addr: Record) -> Value:
def serialize_addr(self, addr: View) -> Value:
return Cat(addr.offset, addr.index, addr.tag)

def elaborate(self, platform):
@@ -186,7 +188,7 @@ def elaborate(self, platform):

# Fast path - read requests
request_valid = self.req_fifo.read.ready
request_addr = Record(self.addr_layout)
request_addr = Signal(self.addr_layout)

tag_hit = [tag_data.valid & (tag_data.tag == request_addr.tag) for tag_data in self.mem.tag_rd_data]
tag_hit_any = reduce(operator.or_, tag_hit)
@@ -195,7 +197,7 @@
for i in OneHotSwitchDynamic(m, Cat(tag_hit)):
m.d.comb += mem_out.eq(self.mem.data_rd_data[i])

instr_out = extract_instr_from_word(m, self.params, mem_out, request_addr[:])
instr_out = extract_instr_from_word(m, self.params, mem_out, Value.cast(request_addr))

refill_error_saved = Signal()
m.d.comb += needs_refill.eq(request_valid & ~tag_hit_any & ~refill_error_saved)
@@ -214,7 +216,7 @@ def _():
self.req_latency.stop(m)
return self.res_fwd.read(m)

mem_read_addr = Record(self.addr_layout)
mem_read_addr = Signal(self.addr_layout)
m.d.comb += assign(mem_read_addr, request_addr)

@def_method(m, self.issue_req, ready=accepting_requests)
@@ -304,21 +306,21 @@ class ICacheMemory(Elaboratable):
def __init__(self, params: ICacheParameters) -> None:
self.params = params

self.tag_data_layout = [("valid", 1), ("tag", self.params.tag_bits)]
self.tag_data_layout = make_layout(("valid", 1), ("tag", self.params.tag_bits))

self.way_wr_en = Signal(self.params.num_of_ways)

self.tag_rd_index = Signal(self.params.index_bits)
self.tag_rd_data = Array([Record(self.tag_data_layout) for _ in range(self.params.num_of_ways)])
self.tag_rd_data = Array([Signal(self.tag_data_layout) for _ in range(self.params.num_of_ways)])
self.tag_wr_index = Signal(self.params.index_bits)
self.tag_wr_en = Signal()
self.tag_wr_data = Record(self.tag_data_layout)
self.tag_wr_data = Signal(self.tag_data_layout)

self.data_addr_layout = [("index", self.params.index_bits), ("offset", self.params.offset_bits)]
self.data_addr_layout = make_layout(("index", self.params.index_bits), ("offset", self.params.offset_bits))

self.data_rd_addr = Record(self.data_addr_layout)
self.data_rd_addr = Signal(self.data_addr_layout)
self.data_rd_data = Array([Signal(self.params.word_width) for _ in range(self.params.num_of_ways)])
self.data_wr_addr = Record(self.data_addr_layout)
self.data_wr_addr = Signal(self.data_addr_layout)
self.data_wr_en = Signal()
self.data_wr_data = Signal(self.params.word_width)

@@ -328,7 +330,7 @@ def elaborate(self, platform):
for i in range(self.params.num_of_ways):
way_wr = self.way_wr_en[i]

tag_mem = Memory(width=len(self.tag_wr_data), depth=self.params.num_of_sets)
tag_mem = Memory(width=len(Value.cast(self.tag_wr_data)), depth=self.params.num_of_sets)
tag_mem_rp = tag_mem.read_port()
tag_mem_wp = tag_mem.write_port()
m.submodules[f"tag_mem_{i}_rp"] = tag_mem_rp
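
The icache.py hunks above migrate from Amaranth's deprecated Record class to Signals created from data layouts (built in coreblocks with transactron's make_layout helper) and to amaranth.lib.data.View for typing. A minimal sketch of the same pattern, written directly against Amaranth's amaranth.lib.data.StructLayout API with hypothetical field widths, assuming Amaranth 0.4 or newer:

    from amaranth import Cat, Signal, Value
    from amaranth.lib.data import StructLayout

    # Hypothetical address layout; coreblocks builds an equivalent one with
    # transactron's make_layout helper and parameter-derived widths.
    addr_layout = StructLayout({"offset": 4, "index": 6, "tag": 22})

    # Signal(layout) returns a data.View, so fields stay attribute-accessible,
    # much like the old Record.
    request_addr = Signal(addr_layout)
    raw_addr = Cat(request_addr.offset, request_addr.index, request_addr.tag)

    # Where a plain Value is needed (slicing, Memory width), cast the View.
    width_bits = len(Value.cast(request_addr))
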
9 changes: 5 additions & 4 deletions coreblocks/core.py
@@ -1,4 +1,5 @@
from amaranth import *
from amaranth.lib.wiring import flipped, connect

from transactron.utils.dependencies import DependencyManager, DependencyContext
from coreblocks.stages.func_blocks_unifier import FuncBlocksUnifier
@@ -27,7 +28,7 @@
from coreblocks.stages.retirement import Retirement
from coreblocks.cache.icache import ICache, ICacheBypass
from coreblocks.peripherals.bus_adapter import WishboneMasterAdapter
from coreblocks.peripherals.wishbone import WishboneMaster, WishboneBus
from coreblocks.peripherals.wishbone import WishboneMaster, WishboneInterface
from coreblocks.cache.refiller import SimpleCommonBusCacheRefiller
from coreblocks.frontend.fetch import Fetch, UnalignedFetch
from transactron.lib.transformers import MethodMap, MethodProduct
@@ -38,7 +39,7 @@


class Core(Elaboratable):
def __init__(self, *, gen_params: GenParams, wb_instr_bus: WishboneBus, wb_data_bus: WishboneBus):
def __init__(self, *, gen_params: GenParams, wb_instr_bus: WishboneInterface, wb_data_bus: WishboneInterface):
self.gen_params = gen_params

dep_manager = DependencyContext.get()
@@ -117,8 +118,8 @@ def __init__(self, *, gen_params: GenParams, wb_instr_bus: WishboneBus, wb_data_
def elaborate(self, platform):
m = TModule()

m.d.comb += self.wb_master_instr.wb_master.connect(self.wb_instr_bus)
m.d.comb += self.wb_master_data.wb_master.connect(self.wb_data_bus)
connect(m, flipped(self.wb_instr_bus), self.wb_master_instr.wb_master)
connect(m, flipped(self.wb_data_bus), self.wb_master_data.wb_master)

m.submodules.wb_master_instr = self.wb_master_instr
m.submodules.wb_master_data = self.wb_master_data
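
The core.py hunk above replaces the removed Record.connect idiom with amaranth.lib.wiring's connect() and flipped(), and retypes the bus ports as WishboneInterface. A minimal sketch of that wiring API, using a hypothetical stream signature in place of the Wishbone one (Amaranth 0.4+ assumed):

    from amaranth import Module
    from amaranth.lib import wiring
    from amaranth.lib.wiring import In, Out, connect


    class StreamSig(wiring.Signature):
        # Hypothetical stand-in for WishboneInterface's signature.
        def __init__(self):
            super().__init__({"data": Out(8), "valid": Out(1), "ready": In(1)})


    class Producer(wiring.Component):
        bus: Out(StreamSig())

        def elaborate(self, platform):
            return Module()


    class Consumer(wiring.Component):
        bus: In(StreamSig())

        def elaborate(self, platform):
            return Module()


    m = Module()
    m.submodules.producer = producer = Producer()
    m.submodules.consumer = consumer = Consumer()

    # connect() pairs members by name and checks directions and widths at
    # elaboration time; wiring.flipped() (used in the hunk above) reverses the
    # point of view when an interface is seen from the other side.
    connect(m, producer.bus, consumer.bus)
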
3 changes: 2 additions & 1 deletion coreblocks/fu/div_unit.py
@@ -3,6 +3,7 @@
from collections.abc import Sequence

from amaranth import *
from amaranth.lib import data

from coreblocks.params.fu_params import FunctionalComponentParams
from coreblocks.params import Funct3, GenParams, FuncUnitLayouts, OpType
@@ -33,7 +34,7 @@ def get_instructions(self) -> Sequence[tuple]:
]


def get_input(arg: Record) -> tuple[Value, Value]:
def get_input(arg: data.View) -> tuple[Value, Value]:
return arg.s1_val, Mux(arg.imm, arg.imm, arg.s2_val)


2 changes: 1 addition & 1 deletion coreblocks/fu/fu_decoder.py
@@ -15,7 +15,7 @@ class Decoder(Elaboratable):
Attributes
----------
decode_fn: Signal
exec_fn: Record
exec_fn: View
"""

def __init__(self, gen_params: GenParams, decode_fn: Type[IntFlag], ops: Sequence[tuple], check_optype: bool):
(Diff truncated: the remaining 21 changed files are not shown here.)
