Auto merge of #138759 - scottmcm:operand-builder, r= · rust-lang/rust@875f416 (original) (raw)

1

1

`use std::fmt;

`

2

2

``

3

``

`-

use arrayvec::ArrayVec;

`

4

``

`-

use either::Either;

`

5

3

`use rustc_abi as abi;

`

6

``

`-

use rustc_abi::{Align, BackendRepr, FIRST_VARIANT, Primitive, Size, TagEncoding, Variants};

`

``

4

`+

use rustc_abi::{

`

``

5

`+

Align, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, TagEncoding, VariantIdx, Variants,

`

``

6

`+

};

`

7

7

`use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range};

`

8

8

`use rustc_middle::mir::{self, ConstValue};

`

9

9

`use rustc_middle::ty::Ty;

`

10

10

`use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};

`

11

11

`use rustc_middle::{bug, span_bug};

`

``

12

`+

use rustc_session::config::OptLevel;

`

12

13

`use tracing::{debug, instrument};

`

13

14

``

14

15

`use super::place::{PlaceRef, PlaceValue};

`

`@@ -62,31 +63,6 @@ pub enum OperandValue {

`

62

63

`}

`

63

64

``

64

65

`impl<V: CodegenObject> OperandValue {

`

65

``

`-

/// If this is ZeroSized/Immediate/Pair, return an array of the 0/1/2 values.

`

66

``

`-

/// If this is Ref, return the place.

`

67

``

`-

#[inline]

`

68

``

`-

pub(crate) fn immediates_or_place(self) -> Either<ArrayVec<V, 2>, PlaceValue> {

`

69

``

`-

match self {

`

70

``

`-

OperandValue::ZeroSized => Either::Left(ArrayVec::new()),

`

71

``

`-

OperandValue::Immediate(a) => Either::Left(ArrayVec::from_iter([a])),

`

72

``

`-

OperandValue::Pair(a, b) => Either::Left([a, b].into()),

`

73

``

`-

OperandValue::Ref(p) => Either::Right(p),

`

74

``

`-

}

`

75

``

`-

}

`

76

``

-

77

``

`-

/// Given an array of 0/1/2 immediate values, return ZeroSized/Immediate/Pair.

`

78

``

`-

#[inline]

`

79

``

`-

pub(crate) fn from_immediates(immediates: ArrayVec<V, 2>) -> Self {

`

80

``

`-

let mut it = immediates.into_iter();

`

81

``

`-

let Some(a) = it.next() else {

`

82

``

`-

return OperandValue::ZeroSized;

`

83

``

`-

};

`

84

``

`-

let Some(b) = it.next() else {

`

85

``

`-

return OperandValue::Immediate(a);

`

86

``

`-

};

`

87

``

`-

OperandValue::Pair(a, b)

`

88

``

`-

}

`

89

``

-

90

66

`/// Treat this value as a pointer and return the data pointer and

`

91

67

`/// optional metadata as backend values.

`

92

68

`///

`

`@@ -559,6 +535,123 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {

`

559

535

`}

`

560

536

`}

`

561

537

`}

`

``

538

+

``

539

`+

pub(crate) fn builder(layout: TyAndLayout<'tcx>) -> OperandRef<'tcx, Result<V, abi::Scalar>> {

`

``

540

`+

let val = match layout.backend_repr {

`

``

541

`+

BackendRepr::Memory { .. } if layout.is_zst() => OperandValue::ZeroSized,

`

``

542

`+

BackendRepr::Scalar(s) => OperandValue::Immediate(Err(s)),

`

``

543

`+

BackendRepr::ScalarPair(a, b) => OperandValue::Pair(Err(a), Err(b)),

`

``

544

`+

_ => bug!("Cannot use type in operand builder: {layout:?}"),

`

``

545

`+

};

`

``

546

`+

OperandRef { val, layout }

`

``

547

`+

}

`

``

548

+

``

549

`+

pub(crate) fn supports_builder(layout: TyAndLayout<'tcx>) -> bool {

`

``

550

`+

match layout.backend_repr {

`

``

551

`+

BackendRepr::Memory { .. } if layout.is_zst() => true,

`

``

552

`+

BackendRepr::Scalar(_) | BackendRepr::ScalarPair(_, _) => true,

`

``

553

`+

_ => false,

`

``

554

`+

}

`

``

555

`+

}

`

``

556

`+

}

`

``

557

+

``

558

`+

impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, Result<V, abi::Scalar>> {

`

``

559

`+

pub(crate) fn insert_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(

`

``

560

`+

&mut self,

`

``

561

`+

bx: &mut Bx,

`

``

562

`+

v: VariantIdx,

`

``

563

`+

f: FieldIdx,

`

``

564

`+

operand: OperandRef<'tcx, V>,

`

``

565

`+

) {

`

``

566

`+

let (expect_zst, is_zero_offset) = if let abi::FieldsShape::Primitive = self.layout.fields {

`

``

567

`+

// Don't ask for field layout for primitives, because that will panic.

`

``

568

`+

if !self.layout.uninhabited {

`

``

569

`+

// Real primitives only have one variant, but weird types like

`

``

570

`` +

// Result<!, !> turn out to also be "Primitive", and dead code

``

``

571

`` +

// like Err(never) needs to not ICE.

``

``

572

`+

assert_eq!(v, FIRST_VARIANT);

`

``

573

`+

}

`

``

574

`+

let first_field = f == FieldIdx::ZERO;

`

``

575

`+

(self.layout.is_zst() || !first_field, first_field)

`

``

576

`+

} else {

`

``

577

`+

let variant_layout = self.layout.for_variant(bx.cx(), v);

`

``

578

`+

let field_layout = variant_layout.field(bx.cx(), f.as_usize());

`

``

579

`+

let field_offset = variant_layout.fields.offset(f.as_usize());

`

``

580

`+

(field_layout.is_zst(), field_offset == Size::ZERO)

`

``

581

`+

};

`

``

582

+

``

583

`+

let mut update = |tgt: &mut Result<V, abi::Scalar>, src, from_scalar| {

`

``

584

`+

let from_bty = bx.cx().type_from_scalar(from_scalar);

`

``

585

`+

let to_scalar = tgt.unwrap_err();

`

``

586

`+

let to_bty = bx.cx().type_from_scalar(to_scalar);

`

``

587

`+

let imm = transmute_immediate(bx, src, from_scalar, from_bty, to_scalar, to_bty);

`

``

588

`+

*tgt = Ok(imm);

`

``

589

`+

};

`

``

590

+

``

591

`+

match (operand.val, operand.layout.backend_repr) {

`

``

592

`+

(OperandValue::ZeroSized, _) if expect_zst => {}

`

``

593

`+

(OperandValue::Immediate(v), BackendRepr::Scalar(from_scalar)) => match &mut self.val {

`

``

594

`+

OperandValue::Immediate(val @ Err(_)) if is_zero_offset => {

`

``

595

`+

update(val, v, from_scalar);

`

``

596

`+

}

`

``

597

`+

OperandValue::Pair(fst @ Err(_), _) if is_zero_offset => {

`

``

598

`+

update(fst, v, from_scalar);

`

``

599

`+

}

`

``

600

`+

OperandValue::Pair(_, snd @ Err(_)) if !is_zero_offset => {

`

``

601

`+

update(snd, v, from_scalar);

`

``

602

`+

}

`

``

603

`+

_ => bug!("Tried to insert {operand:?} into {v:?}.{f:?} of {self:?}"),

`

``

604

`+

},

`

``

605

`+

(OperandValue::Pair(a, b), BackendRepr::ScalarPair(from_sa, from_sb)) => {

`

``

606

`+

match &mut self.val {

`

``

607

`+

OperandValue::Pair(fst @ Err(_), snd @ Err(_)) => {

`

``

608

`+

update(fst, a, from_sa);

`

``

609

`+

update(snd, b, from_sb);

`

``

610

`+

}

`

``

611

`+

_ => bug!("Tried to insert {operand:?} into {v:?}.{f:?} of {self:?}"),

`

``

612

`+

}

`

``

613

`+

}

`

``

614

`+

_ => bug!("Unsupported operand {operand:?} inserting into {v:?}.{f:?} of {self:?}"),

`

``

615

`+

}

`

``

616

`+

}

`

``

617

+

``

618

`+

pub(super) fn insert_imm(&mut self, f: FieldIdx, imm: V) {

`

``

619

`+

let field_offset = self.layout.fields.offset(f.as_usize());

`

``

620

`+

let is_zero_offset = field_offset == Size::ZERO;

`

``

621

`+

match &mut self.val {

`

``

622

`+

OperandValue::Immediate(val @ Err(_)) if is_zero_offset => {

`

``

623

`+

*val = Ok(imm);

`

``

624

`+

}

`

``

625

`+

OperandValue::Pair(fst @ Err(_), _) if is_zero_offset => {

`

``

626

`+

*fst = Ok(imm);

`

``

627

`+

}

`

``

628

`+

OperandValue::Pair(_, snd @ Err(_)) if !is_zero_offset => {

`

``

629

`+

*snd = Ok(imm);

`

``

630

`+

}

`

``

631

`+

_ => bug!("Tried to insert {imm:?} into field {f:?} of {self:?}"),

`

``

632

`+

}

`

``

633

`+

}

`

``

634

+

``

635

`+

pub fn finalize(&self, cx: &impl CodegenMethods<'tcx, Value = V>) -> OperandRef<'tcx, V> {

`

``

636

`+

let OperandRef { val, layout } = *self;

`

``

637

+

``

638

`+

let unwrap = |r: Result<V, abi::Scalar>| match r {

`

``

639

`+

Ok(v) => v,

`

``

640

`+

Err(s) if s.is_uninit_valid() => {

`

``

641

`+

let bty = cx.type_from_scalar(s);

`

``

642

`+

cx.const_undef(bty)

`

``

643

`+

}

`

``

644

`+

Err(_) => bug!("OperandRef::finalize called while fields are missing {self:?}"),

`

``

645

`+

};

`

``

646

+

``

647

`+

let val = match val {

`

``

648

`+

OperandValue::ZeroSized => OperandValue::ZeroSized,

`

``

649

`+

OperandValue::Immediate(v) => OperandValue::Immediate(unwrap(v)),

`

``

650

`+

OperandValue::Pair(a, b) => OperandValue::Pair(unwrap(a), unwrap(b)),

`

``

651

`+

OperandValue::Ref(_) => bug!(),

`

``

652

`+

};

`

``

653

`+

OperandRef { val, layout }

`

``

654

`+

}

`

562

655

`}

`

563

656

``

564

657

`impl<'a, 'tcx, V: CodegenObject> OperandValue {

`

`@@ -808,3 +901,93 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {

`

808

901

`}

`

809

902

`}

`

810

903

`}

`

``

904

+

``

905

`` +

/// Transmutes one of the immediates from an [`OperandValue::Immediate`]

``

``

906

`` +

/// or an [`OperandValue::Pair`] to an immediate of the target type.

``

``

907

`+

///

`

``

908

`` +

/// `to_backend_ty` must be the *non*-immediate backend type (so it will be

``

``

909

`` +

/// `i8`, not `i1`, for `bool`-like types.)

``

``

910

`+

pub(super) fn transmute_immediate<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(

`

``

911

`+

bx: &mut Bx,

`

``

912

`+

mut imm: Bx::Value,

`

``

913

`+

from_scalar: abi::Scalar,

`

``

914

`+

from_backend_ty: Bx::Type,

`

``

915

`+

to_scalar: abi::Scalar,

`

``

916

`+

to_backend_ty: Bx::Type,

`

``

917

`+

) -> Bx::Value {

`

``

918

`+

assert_eq!(from_scalar.size(bx.cx()), to_scalar.size(bx.cx()));

`

``

919

+

``

920

`+

// While optimizations will remove no-op transmutes, they might still be

`

``

921

`+

// there in debug or things that aren't no-op in MIR because they change

`

``

922

`+

// the Rust type but not the underlying layout/niche.

`

``

923

`+

if from_scalar == to_scalar && from_backend_ty == to_backend_ty {

`

``

924

`+

return imm;

`

``

925

`+

}

`

``

926

+

``

927

`+

use abi::Primitive::*;

`

``

928

`+

imm = bx.from_immediate(imm);

`

``

929

+

``

930

`+

// If we have a scalar, we must already know its range. Either

`

``

931

`+

//

`

``

932

`` +

// 1) It's a parameter with range parameter metadata,

``

``

933

`` +

// 2) It's something we loaded with !range metadata, or

``

``

934

`` +

// 3) After a transmute we assumed the range (see below).

``

``

935

`+

//

`

``

936

`+

// That said, last time we tried removing this, it didn't actually help

`

``

937

`+

// the rustc-perf results, so might as well keep doing it

`

``

938

`+

// https://github.com/rust-lang/rust/pull/135610#issuecomment-2599275182

`

``

939

`+

assume_scalar_range(bx, imm, from_scalar, from_backend_ty);

`

``

940

+

``

941

`+

imm = match (from_scalar.primitive(), to_scalar.primitive()) {

`

``

942

`+

(Int(..) | Float(_), Int(..) | Float(_)) => bx.bitcast(imm, to_backend_ty),

`

``

943

`+

(Pointer(..), Pointer(..)) => bx.pointercast(imm, to_backend_ty),

`

``

944

`+

(Int(..), Pointer(..)) => bx.ptradd(bx.const_null(bx.type_ptr()), imm),

`

``

945

`+

(Pointer(..), Int(..)) => {

`

``

946

`+

// FIXME: this exposes the provenance, which shouldn't be necessary.

`

``

947

`+

bx.ptrtoint(imm, to_backend_ty)

`

``

948

`+

}

`

``

949

`+

(Float(_), Pointer(..)) => {

`

``

950

`+

let int_imm = bx.bitcast(imm, bx.cx().type_isize());

`

``

951

`+

bx.ptradd(bx.const_null(bx.type_ptr()), int_imm)

`

``

952

`+

}

`

``

953

`+

(Pointer(..), Float(_)) => {

`

``

954

`+

// FIXME: this exposes the provenance, which shouldn't be necessary.

`

``

955

`+

let int_imm = bx.ptrtoint(imm, bx.cx().type_isize());

`

``

956

`+

bx.bitcast(int_imm, to_backend_ty)

`

``

957

`+

}

`

``

958

`+

};

`

``

959

+

``

960

`` +

// This assume remains important for cases like (a conceptual)

``

``

961

`+

// transmute::<u32, NonZeroU32>(x) == 0

`

``

962

`+

// since it's never passed to something with parameter metadata (especially

`

``

963

`+

// after MIR inlining) so the only way to tell the backend about the

`

``

964

`` +

// constraint that the transmute introduced is to assume it.

``

``

965

`+

assume_scalar_range(bx, imm, to_scalar, to_backend_ty);

`

``

966

+

``

967

`+

imm = bx.to_immediate_scalar(imm, to_scalar);

`

``

968

`+

imm

`

``

969

`+

}

`

``

970

+

``

971

`+

pub(super) fn assume_scalar_range<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(

`

``

972

`+

bx: &mut Bx,

`

``

973

`+

imm: Bx::Value,

`

``

974

`+

scalar: abi::Scalar,

`

``

975

`+

backend_ty: Bx::Type,

`

``

976

`+

) {

`

``

977

`+

if matches!(bx.cx().sess().opts.optimize, OptLevel::No) || scalar.is_always_valid(bx.cx()) {

`

``

978

`+

return;

`

``

979

`+

}

`

``

980

+

``

981

`+

match scalar.primitive() {

`

``

982

`+

abi::Primitive::Int(..) => {

`

``

983

`+

let range = scalar.valid_range(bx.cx());

`

``

984

`+

bx.assume_integer_range(imm, backend_ty, range);

`

``

985

`+

}

`

``

986

`+

abi::Primitive::Pointer(abi::AddressSpace::DATA)

`

``

987

`+

if !scalar.valid_range(bx.cx()).contains(0) =>

`

``

988

`+

{

`

``

989

`+

bx.assume_nonnull(imm);

`

``

990

`+

}

`

``

991

`+

abi::Primitive::Pointer(..) | abi::Primitive::Float(..) => {}

`

``

992

`+

}

`

``

993

`+

}

`