[SelectionDAG] Remove pointer from MMO for VP strided load/store. (llvm#82667)

MachineIR alias analysis assumes that only bytes after the pointer will
be accessed. This is incorrect if the stride is negative.

This is causing miscompiles in our downstream compiler after SLP started
generating strided loads.

Fixes llvm#82657
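
As an illustration (not part of the original commit message), a strided load with a negative stride reads bytes below the base pointer, so an MMO built from MachinePointerInfo(PtrOperand) understates the range of memory the access can touch. A minimal, hypothetical LLVM IR sketch (the function name is made up for this example):

declare <vscale x 1 x i8> @llvm.experimental.vp.strided.load.nxv1i8.p0.i8(ptr, i8, <vscale x 1 x i1>, i32)

; Hypothetical example: with a stride of -1 and %evl >= 2, this load reads
; %p[0], %p[-1], ..., i.e. bytes *before* the base pointer %p.
define <vscale x 1 x i8> @negative_stride_example(ptr %p, <vscale x 1 x i1> %m, i32 zeroext %evl) {
  %v = call <vscale x 1 x i8> @llvm.experimental.vp.strided.load.nxv1i8.p0.i8(ptr %p, i8 -1, <vscale x 1 x i1> %m, i32 %evl)
  ret <vscale x 1 x i8> %v
}

Building the MMO from only the address space (MachinePointerInfo(AS)), as the diff below does, drops the base-object information, so MachineIR alias analysis no longer assumes the access stays at or after PtrOperand.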

commit-id:dbe83cb3
topperc authored and vzakhari committed Mar 14, 2024
1 parent 4a51fa8 commit 497f17d
Showing 2 changed files with 50 additions and 2 deletions.
6 changes: 4 additions & 2 deletions llvm/lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp
@@ -8030,8 +8030,9 @@ void SelectionDAGBuilder::visitVPStridedLoad(
   MemoryLocation ML = MemoryLocation::getAfter(PtrOperand, AAInfo);
   bool AddToChain = !AA || !AA->pointsToConstantMemory(ML);
   SDValue InChain = AddToChain ? DAG.getRoot() : DAG.getEntryNode();
+  unsigned AS = PtrOperand->getType()->getPointerAddressSpace();
   MachineMemOperand *MMO = DAG.getMachineFunction().getMachineMemOperand(
-      MachinePointerInfo(PtrOperand), MachineMemOperand::MOLoad,
+      MachinePointerInfo(AS), MachineMemOperand::MOLoad,
       MemoryLocation::UnknownSize, *Alignment, AAInfo, Ranges);
 
   SDValue LD = DAG.getStridedLoadVP(VT, DL, InChain, OpValues[0], OpValues[1],
@@ -8052,8 +8053,9 @@ void SelectionDAGBuilder::visitVPStridedStore(
   if (!Alignment)
     Alignment = DAG.getEVTAlign(VT.getScalarType());
   AAMDNodes AAInfo = VPIntrin.getAAMetadata();
+  unsigned AS = PtrOperand->getType()->getPointerAddressSpace();
   MachineMemOperand *MMO = DAG.getMachineFunction().getMachineMemOperand(
-      MachinePointerInfo(PtrOperand), MachineMemOperand::MOStore,
+      MachinePointerInfo(AS), MachineMemOperand::MOStore,
       MemoryLocation::UnknownSize, *Alignment, AAInfo);
 
   SDValue ST = DAG.getStridedStoreVP(
46 changes: 46 additions & 0 deletions llvm/test/CodeGen/RISCV/rvv/strided-vpload-vpstore-output.ll
@@ -0,0 +1,46 @@
; NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py UTC_ARGS: --version 4
; RUN: llc -mtriple=riscv64 -mattr=+v -stop-after=finalize-isel %s -o - | FileCheck %s

; Test makes sure we don't store a pointer in the MachineMemOperand created for
; these instructions. MachineMemOperand handling can't currently deal with a
; negative stride that would allow memory before the pointer to be read.

declare <vscale x 1 x i8> @llvm.experimental.vp.strided.load.nxv1i8.p0.i8(ptr, i8, <vscale x 1 x i1>, i32)

define <vscale x 1 x i8> @strided_vpload_nxv1i8_i8(ptr %ptr, i8 signext %stride, <vscale x 1 x i1> %m, i32 zeroext %evl) {
; CHECK-LABEL: name: strided_vpload_nxv1i8_i8
; CHECK: bb.0 (%ir-block.0):
; CHECK-NEXT: liveins: $x10, $x11, $v0, $x12
; CHECK-NEXT: {{ $}}
; CHECK-NEXT: [[COPY:%[0-9]+]]:gprnox0 = COPY $x12
; CHECK-NEXT: [[COPY1:%[0-9]+]]:vr = COPY $v0
; CHECK-NEXT: [[COPY2:%[0-9]+]]:gpr = COPY $x11
; CHECK-NEXT: [[COPY3:%[0-9]+]]:gpr = COPY $x10
; CHECK-NEXT: $v0 = COPY [[COPY1]]
; CHECK-NEXT: [[PseudoVLSE8_V_MF8_MASK:%[0-9]+]]:vrnov0 = PseudoVLSE8_V_MF8_MASK $noreg, [[COPY3]], [[COPY2]], $v0, [[COPY]], 3 /* e8 */, 1 /* ta, mu */ :: (load unknown-size, align 1)
; CHECK-NEXT: $v8 = COPY [[PseudoVLSE8_V_MF8_MASK]]
; CHECK-NEXT: PseudoRET implicit $v8
%load = call <vscale x 1 x i8> @llvm.experimental.vp.strided.load.nxv1i8.p0.i8(ptr %ptr, i8 %stride, <vscale x 1 x i1> %m, i32 %evl)
ret <vscale x 1 x i8> %load
}

declare void @llvm.experimental.vp.strided.store.nxv1i8.p0.i8(<vscale x 1 x i8>, ptr, i8, <vscale x 1 x i1>, i32)

define void @strided_vpstore_nxv1i8_i8(<vscale x 1 x i8> %val, ptr %ptr, i8 signext %stride, <vscale x 1 x i1> %m, i32 zeroext %evl) {
; CHECK-LABEL: name: strided_vpstore_nxv1i8_i8
; CHECK: bb.0 (%ir-block.0):
; CHECK-NEXT: liveins: $v8, $x10, $x11, $v0, $x12
; CHECK-NEXT: {{ $}}
; CHECK-NEXT: [[COPY:%[0-9]+]]:gprnox0 = COPY $x12
; CHECK-NEXT: [[COPY1:%[0-9]+]]:vr = COPY $v0
; CHECK-NEXT: [[COPY2:%[0-9]+]]:gpr = COPY $x11
; CHECK-NEXT: [[COPY3:%[0-9]+]]:gpr = COPY $x10
; CHECK-NEXT: [[COPY4:%[0-9]+]]:vr = COPY $v8
; CHECK-NEXT: $v0 = COPY [[COPY1]]
; CHECK-NEXT: PseudoVSSE8_V_MF8_MASK [[COPY4]], [[COPY3]], [[COPY2]], $v0, [[COPY]], 3 /* e8 */ :: (store unknown-size, align 1)
; CHECK-NEXT: PseudoRET
call void @llvm.experimental.vp.strided.store.nxv1i8.p0.i8(<vscale x 1 x i8> %val, ptr %ptr, i8 %stride, <vscale x 1 x i1> %m, i32 %evl)
ret void
}
;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
; CHECK: {{.*}}
