fix(MemBlock): only non-AMO stds are written into SQ
linjuanZ committed Dec 10, 2024
1 parent ccdd6d2 commit 287a3ff
Showing 1 changed file with 7 additions and 9 deletions.
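For context, here is a minimal, self-contained sketch of the pattern this commit applies; it is illustration only, not the XiangShan code itself. Each store-data (std) port's write into the store queue is gated on its uop not being an AMO, so atomic store data is left to the atomics unit rather than the SQ. The diff below also moves the st_data_atomics definition earlier in the file, ahead of the storeDataIn connections that now reference it. StdGateExample and its port names are hypothetical stand-ins; the real signals are stData, FuType.storeIsAMO, and lsq.io.std.storeDataIn, as shown in the diff.

    // Hypothetical illustration only; assumes a plain chisel3 dependency.
    import chisel3._

    class StdGateExample(numStdPorts: Int, dataWidth: Int = 64) extends Module {
      val io = IO(new Bundle {
        // Per-port store-data (std) writeback: valid, payload, and an "is AMO" flag
        // (in XiangShan this flag would come from FuType.storeIsAMO(uop.fuType)).
        val stdValid = Input(Vec(numStdPorts, Bool()))
        val stdIsAmo = Input(Vec(numStdPorts, Bool()))
        val stdData  = Input(Vec(numStdPorts, UInt(dataWidth.W)))
        // Store-queue data-write ports (stand-in for lsq.io.std.storeDataIn).
        val sqWriteValid = Output(Vec(numStdPorts, Bool()))
        val sqWriteData  = Output(Vec(numStdPorts, UInt(dataWidth.W)))
      })

      for (i <- 0 until numStdPorts) {
        // Mirrors the commit's change: only non-AMO store data is written into
        // the store queue; AMO data is consumed by the atomics unit instead.
        io.sqWriteValid(i) := io.stdValid(i) && !io.stdIsAmo(i)
        io.sqWriteData(i)  := io.stdData(i)
      }
    }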
16 changes: 7 additions & 9 deletions src/main/scala/xiangshan/backend/MemBlock.scala
@@ -363,9 +363,6 @@ class MemBlockInlinedImp(outer: MemBlockInlined) extends LazyModuleImp(outer)
 val hybridUnits = Seq.fill(HyuCnt)(Module(new HybridUnit)) // Todo: replace it with HybridUnit
 val stData = stdExeUnits.map(_.io.out)
 val exeUnits = loadUnits ++ storeUnits
-// val vlWrapper = Module(new VectorLoadWrapper)
-// val vsUopQueue = Module(new VsUopQueue)
-// val vsFlowQueue = Module(new VsFlowQueue)
 
 // The number of vector load/store units is decoupled with the number of load/store units
 val vlSplit = Seq.fill(VlduCnt)(Module(new VLSplitImp))
@@ -455,6 +452,10 @@ class MemBlockInlinedImp(outer: MemBlockInlined) extends LazyModuleImp(outer)
 atomicsUnit.io.out.ready := ldaExeWbReqs(AtomicWBPort).ready
 loadUnits(AtomicWBPort).io.ldout.ready := ldaExeWbReqs(AtomicWBPort).ready
 
+val st_data_atomics = Seq.tabulate(StdCnt)(i =>
+  stData(i).valid && FuType.storeIsAMO(stData(i).bits.uop.fuType)
+)
+
 // misalignBuffer will overwrite the source from ldu if it is about to writeback
 val misalignWritebackOverride = Mux(
 loadMisalignBuffer.io.writeBack.valid,
@@ -1048,6 +1049,7 @@ class MemBlockInlinedImp(outer: MemBlockInlined) extends LazyModuleImp(outer)
 
 // Lsq to std unit's rs
 lsq.io.std.storeDataIn(StaCnt + i) := stData(StaCnt + i)
+lsq.io.std.storeDataIn(StaCnt + i).valid := stData(StaCnt + i).valid && !st_data_atomics(StaCnt + i)
 // prefetch
 hybridUnits(i).io.stu_io.prefetch_req <> sbuffer.io.store_prefetch(StaCnt + i)
 
@@ -1186,7 +1188,7 @@ class MemBlockInlinedImp(outer: MemBlockInlined) extends LazyModuleImp(outer)
 lsq.io.std.storeDataIn(i).bits := vsSplit(i).io.vstd.get.bits
 stData(i).ready := false.B
 }.otherwise {
-lsq.io.std.storeDataIn(i).valid := stData(i).valid
+lsq.io.std.storeDataIn(i).valid := stData(i).valid && !st_data_atomics(i)
 lsq.io.std.storeDataIn(i).bits.uop := stData(i).bits.uop
 lsq.io.std.storeDataIn(i).bits.data := stData(i).bits.data
 lsq.io.std.storeDataIn(i).bits.mask.map(_ := 0.U)
@@ -1195,7 +1197,7 @@ class MemBlockInlinedImp(outer: MemBlockInlined) extends LazyModuleImp(outer)
 stData(i).ready := true.B
 }
 } else {
-lsq.io.std.storeDataIn(i).valid := stData(i).valid
+lsq.io.std.storeDataIn(i).valid := stData(i).valid && !st_data_atomics(i)
 lsq.io.std.storeDataIn(i).bits.uop := stData(i).bits.uop
 lsq.io.std.storeDataIn(i).bits.data := stData(i).bits.data
 lsq.io.std.storeDataIn(i).bits.mask.map(_ := 0.U)
@@ -1591,10 +1593,6 @@ class MemBlockInlinedImp(outer: MemBlockInlined) extends LazyModuleImp(outer)
 io.ooo_to_mem.issueHya(i).valid && FuType.storeIsAMO((io.ooo_to_mem.issueHya(i).bits.uop.fuType))
 )
 
-val st_data_atomics = Seq.tabulate(StdCnt)(i =>
-  stData(i).valid && FuType.storeIsAMO(stData(i).bits.uop.fuType)
-)
-
 for (i <- 0 until StaCnt) when(st_atomics(i)) {
 io.ooo_to_mem.issueSta(i).ready := atomicsUnit.io.in.ready
 storeUnits(i).io.stin.valid := false.B
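To inspect the gating that the sketch above generates, it can be elaborated with the CIRCT-backed ChiselStage; this assumes a recent Chisel (5+) toolchain and is, again, illustration only:

    import circt.stage.ChiselStage

    object StdGateExampleMain extends App {
      // Emits SystemVerilog for a 2-port instance; each sqWriteValid(i) should
      // reduce to stdValid(i) && !stdIsAmo(i).
      println(ChiselStage.emitSystemVerilog(new StdGateExample(numStdPorts = 2)))
    }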
