From 49488d19afa5429c4f03f4854542210f37c691d8 Mon Sep 17 00:00:00 2001
From: Dolu1990
Date: Mon, 7 Sep 2020 12:01:03 +0200
Subject: [PATCH] pipeline data cache unaligned access check

---
 src/main/scala/vexriscv/demo/smp/VexRiscvSmpCluster.scala | 5 +++--
 src/main/scala/vexriscv/ip/DataCache.scala                | 3 ++-
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/src/main/scala/vexriscv/demo/smp/VexRiscvSmpCluster.scala b/src/main/scala/vexriscv/demo/smp/VexRiscvSmpCluster.scala
index 343d271..2ca7a3e 100644
--- a/src/main/scala/vexriscv/demo/smp/VexRiscvSmpCluster.scala
+++ b/src/main/scala/vexriscv/demo/smp/VexRiscvSmpCluster.scala
@@ -157,7 +157,8 @@ object VexRiscvSmpClusterGen {
                      iCacheSize : Int = 8192,
                      dCacheSize : Int = 8192,
                      iCacheWays : Int = 2,
-                     dCacheWays : Int = 2) = {
+                     dCacheWays : Int = 2,
+                     iBusRelax : Boolean = false) = {
     assert(iCacheSize/iCacheWays <= 4096, "Instruction cache ways can't be bigger than 4096 bytes")
     assert(dCacheSize/dCacheWays <= 4096, "Data cache ways can't be bigger than 4096 bytes")
     val config = VexRiscvConfig(
@@ -173,7 +174,7 @@ object VexRiscvSmpClusterGen {
         historyRamSizeLog2 = 9,
         relaxPredictorAddress = true,
         injectorStage = false,
-        relaxedPcCalculation = false,
+        relaxedPcCalculation = iBusRelax,
         config = InstructionCacheConfig(
           cacheSize = iCacheSize,
           bytePerLine = 64,
diff --git a/src/main/scala/vexriscv/ip/DataCache.scala b/src/main/scala/vexriscv/ip/DataCache.scala
index 7922db8..2d247e2 100644
--- a/src/main/scala/vexriscv/ip/DataCache.scala
+++ b/src/main/scala/vexriscv/ip/DataCache.scala
@@ -791,6 +791,7 @@ class DataCache(val p : DataCacheConfig, mmuParameter : MemoryTranslatorBusParam
     val wayInvalidate = stagePipe(stageA. wayInvalidate)
     val consistancyHazard = if(stageA.consistancyCheck != null) stagePipe(stageA.consistancyCheck.hazard) else False
     val dataColisions = stagePipe(stageA.dataColisions)
+    val unaligned = if(!catchUnaligned) False else stagePipe((stageA.request.size === 2 && io.cpu.memory.address(1 downto 0) =/= 0) || (stageA.request.size === 1 && io.cpu.memory.address(0 downto 0) =/= 0))
     val waysHitsBeforeInvalidate = if(earlyWaysHits) stagePipe(B(stageA.wayHits)) else B(tagsReadRsp.map(tag => mmuRsp.physicalAddress(tagRange) === tag.address && tag.valid).asBits())
     val waysHits = waysHitsBeforeInvalidate & ~wayInvalidate
     val waysHit = waysHits.orR
@@ -891,7 +892,7 @@ class DataCache(val p : DataCacheConfig, mmuParameter : MemoryTranslatorBusParam
     io.cpu.redo := False
     io.cpu.writeBack.accessError := False
     io.cpu.writeBack.mmuException := io.cpu.writeBack.isValid && (if(catchIllegal) mmuRsp.exception || (!mmuRsp.allowWrite && request.wr) || (!mmuRsp.allowRead && (!request.wr || isAmo)) else False)
-    io.cpu.writeBack.unalignedAccess := io.cpu.writeBack.isValid && (if(catchUnaligned) ((request.size === 2 && mmuRsp.physicalAddress(1 downto 0) =/= 0) || (request.size === 1 && mmuRsp.physicalAddress(0 downto 0) =/= 0)) else False)
+    io.cpu.writeBack.unalignedAccess := io.cpu.writeBack.isValid && unaligned
     io.cpu.writeBack.isWrite := request.wr
     io.mem.cmd.valid := False
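
Note (not part of the patch): the hunk at DataCache.scala:791 moves the unaligned-access check one pipeline stage earlier, computing it in stage A from io.cpu.memory.address and registering it through stagePipe, so the write-back stage only ANDs the registered flag with isValid, which presumably shortens the combinational path behind unalignedAccess. Below is a minimal plain-Scala sketch of the alignment rule itself; it is not SpinalHDL and not from the repository, and UnalignedCheckSketch / isUnaligned are illustrative names only.

// Plain-Scala sketch (not SpinalHDL, not part of this patch) of the alignment
// rule used in the added stage-A check: `size` is the log2 of the access width
// in bytes, as in the cache request (1 = half-word, 2 = word); an access is
// unaligned when the low address bits covering that width are non-zero.
object UnalignedCheckSketch {
  def isUnaligned(address: Long, size: Int): Boolean = size match {
    case 2 => (address & 0x3L) != 0 // word access: low two address bits must be zero
    case 1 => (address & 0x1L) != 0 // half-word access: low address bit must be zero
    case _ => false                 // byte accesses are always aligned
  }

  def main(args: Array[String]): Unit = {
    assert(!isUnaligned(0x1000L, 2)) // word at a 4-byte boundary: aligned
    assert( isUnaligned(0x1002L, 2)) // word at offset 2: unaligned
    assert(!isUnaligned(0x1002L, 1)) // half-word at offset 2: aligned
    assert( isUnaligned(0x1003L, 1)) // half-word at offset 3: unaligned
  }
}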