;; Constant propagation.
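;;
;; For example, an integer op on two constants folds to a single constant,
;; wrapping and then masking the result to the width of `ty`:
;;
;;   (iadd ty (iconst ty 3) (iconst ty 4)) --> (iconst ty 7)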

(rule (simplify
       (iadd (fits_in_64 ty)
             (iconst ty (u64_from_imm64 k1))
             (iconst ty (u64_from_imm64 k2))))
      (subsume (iconst ty (imm64_masked ty (u64_wrapping_add k1 k2)))))

(rule (simplify
       (isub (fits_in_64 ty)
             (iconst ty (u64_from_imm64 k1))
             (iconst ty (u64_from_imm64 k2))))
      (subsume (iconst ty (imm64_masked ty (u64_wrapping_sub k1 k2)))))

(rule (simplify
       (imul (fits_in_64 ty)
             (iconst ty (u64_from_imm64 k1))
             (iconst ty (u64_from_imm64 k2))))
      (subsume (iconst ty (imm64_masked ty (u64_wrapping_mul k1 k2)))))

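;; Division can trap, so `sdiv`/`udiv` are matched in the instruction skeleton
;; (`simplify_skeleton`) rather than as pure values. The `if-let`s are fallible,
;; so divisions that cannot be safely evaluated (e.g. division by zero) are
;; simply left alone.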
(rule (simplify_skeleton
       (sdiv (iconst ty k1)
             (iconst _ k2)))
      (if-let d (imm64_sdiv ty k1 k2))
      (iconst ty d))

(rule (simplify_skeleton
       (udiv (iconst_u ty k1)
             (iconst_u ty k2)))
      (if-let d (u64_checked_div k1 k2))
      (iconst ty (imm64_masked ty d)))

(rule (simplify
       (bor (fits_in_64 ty)
            (iconst ty (u64_from_imm64 k1))
            (iconst ty (u64_from_imm64 k2))))
      (subsume (iconst ty (imm64_masked ty (u64_or k1 k2)))))

(rule (simplify
       (band (fits_in_64 ty)
            (iconst ty (u64_from_imm64 k1))
            (iconst ty (u64_from_imm64 k2))))
      (subsume (iconst ty (imm64_masked ty (u64_and k1 k2)))))

(rule (simplify
       (bxor (fits_in_64 ty)
            (iconst ty (u64_from_imm64 k1))
            (iconst ty (u64_from_imm64 k2))))
      (subsume (iconst ty (imm64_masked ty (u64_xor k1 k2)))))

(rule (simplify
       (bnot (fits_in_64 ty)
            (iconst ty (u64_from_imm64 k))))
      (subsume (iconst ty (imm64_masked ty (u64_not k)))))

(rule (simplify (ishl (fits_in_64 ty)
                      (iconst ty k1)
                      (iconst _ k2)))
      (subsume (iconst ty (imm64_shl ty k1 k2))))

(rule (simplify (ushr (fits_in_64 ty)
                      (iconst ty k1)
                      (iconst _ k2)))
      (subsume (iconst ty (imm64_ushr ty k1 k2))))

(rule (simplify (sshr (fits_in_64 ty)
                      (iconst ty k1)
                      (iconst _ k2)))
      (subsume (iconst ty (imm64_sshr ty k1 k2))))

(rule (simplify (ireduce narrow (iconst (fits_in_64 _) (u64_from_imm64 imm))))
      (subsume (iconst narrow (imm64_masked narrow imm))))

;; iconst_[su] support $I128, but they do so by extending, so restricting to
;; 64-bit or smaller keeps these rules from just remaking essentially the same
;; value.
(rule (simplify (uextend (fits_in_64 wide) (iconst_u narrow k)))
      (subsume (iconst_u wide k)))
(rule (simplify (sextend (fits_in_64 wide) (iconst_s narrow k)))
      (subsume (iconst_s wide k)))

(rule (simplify
       (icmp result_ty
            cc
            (iconst ty k1)
            (iconst ty k2)))
      (subsume (iconst result_ty (imm64_icmp ty cc k1 k2))))


;; Canonicalize via commutativity: push immediates to the right.
;;
;;   (op k x) --> (op x k)

(rule (simplify
       (iadd ty k @ (iconst ty _) x))
      (iadd ty x k))
;; sub is not commutative, but we can flip the args and negate the
;; whole thing.
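;;
;;   (isub ty k x) --> (ineg ty (isub ty x k))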
(rule (simplify
       (isub ty k @ (iconst ty _) x))
      (ineg ty (isub ty x k)))
(rule (simplify
       (imul ty k @ (iconst ty _) x))
      (imul ty x k))

(rule (simplify
       (bor ty k @ (iconst ty _) x))
      (bor ty x k))
(rule (simplify
       (band ty k @ (iconst ty _) x))
      (band ty x k))
(rule (simplify
       (bxor ty k @ (iconst ty _) x))
      (bxor ty x k))

(rule (simplify
       (icmp ty cc k @ (iconst _ _) x))
      (icmp ty (intcc_swap_args cc) x k))

;; Canonicalize via associativity: reassociate to a right-heavy tree
;; for constants.
;;
;;   (op (op x k) k) --> (op x (op k k))

(rule (simplify
       (iadd ty (iadd ty x k1 @ (iconst ty _)) k2 @ (iconst ty _)))
      (iadd ty x (iadd ty k1 k2)))
;; sub is not directly associative, but we can flip a sub to an add to
;; make it work:
;; - (sub (sub x k1) k2) -> (sub x (add k1 k2))
;; - (sub (sub k1 x) k2) -> (sub (sub k1 k2) x)
;; - (sub (add x k1) k2) -> (sub x (sub k2 k1))
;; - (add (sub x k1) k2) -> (add x (sub k2 k1))
;; - (add (sub k1 x) k2) -> (sub (add k1 k2) x)
(rule (simplify (isub ty
                      (isub ty x (iconst ty (u64_from_imm64 k1)))
                      (iconst ty (u64_from_imm64 k2))))
      (isub ty x (iconst ty (imm64_masked ty (u64_wrapping_add k1 k2)))))
(rule (simplify (isub ty
                      (isub ty (iconst ty (u64_from_imm64 k1)) x)
                      (iconst ty (u64_from_imm64 k2))))
      (isub ty (iconst ty (imm64_masked ty (u64_wrapping_sub k1 k2))) x))
(rule (simplify (isub ty
                      (iadd ty x (iconst ty (u64_from_imm64 k1)))
                      (iconst ty (u64_from_imm64 k2))))
      (isub ty x (iconst ty (imm64_masked ty (u64_wrapping_sub k2 k1)))))
(rule (simplify (iadd ty
                      (isub ty x (iconst ty (u64_from_imm64 k1)))
                      (iconst ty (u64_from_imm64 k2))))
      (iadd ty x (iconst ty (imm64_masked ty (u64_wrapping_sub k2 k1)))))
(rule (simplify (iadd ty
                      (isub ty (iconst ty (u64_from_imm64 k1)) x)
                      (iconst ty (u64_from_imm64 k2))))
      (isub ty (iconst ty (imm64_masked ty (u64_wrapping_add k1 k2))) x))

(rule (simplify
       (imul ty (imul ty x k1 @ (iconst ty _)) k2 @ (iconst ty _)))
      (imul ty x (imul ty k1 k2)))
(rule (simplify
       (bor ty (bor ty x k1 @ (iconst ty _)) k2 @ (iconst ty _)))
      (bor ty x (bor ty k1 k2)))
(rule (simplify
       (band ty (band ty x k1 @ (iconst ty _)) k2 @ (iconst ty _)))
      (band ty x (band ty k1 k2)))
(rule (simplify
       (bxor ty (bxor ty x k1 @ (iconst ty _)) k2 @ (iconst ty _)))
      (bxor ty x (bxor ty k1 k2)))

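;; A `select` whose condition is a known constant is just the chosen arm:
;; a non-zero condition selects the first value, zero selects the second.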
(rule (simplify (select ty (iconst_u _ (u64_when_non_zero)) x _))
      (subsume x))
(rule (simplify (select ty (iconst_u _ 0) _ y))
      (subsume y))

;; Reassociate across `==`/`!=` when we can simplify a constant
;; `x + K1 == K2` --> `x == K2 - K1`
(rule (simplify (eq ty1 (iadd ty2 x k1@(iconst _ _)) k2@(iconst _ _)))
      (eq ty1 x (isub ty2 k2 k1)))
(rule (simplify (ne ty1 (iadd ty2 x k1@(iconst _ _)) k2@(iconst _ _)))
      (ne ty1 x (isub ty2 k2 k1)))
;; `x - K1 == K2` --> `x == K2 + K1`
(rule (simplify (eq ty1 (isub ty2 x k1@(iconst _ _)) k2@(iconst _ _)))
      (eq ty1 x (iadd ty2 k2 k1)))
(rule (simplify (ne ty1 (isub ty2 x k1@(iconst _ _)) k2@(iconst _ _)))
      (ne ty1 x (iadd ty2 k2 k1)))
;; `x + K1 == y + K2` --> `x == y + (K2 - K1)`
(rule (simplify (eq ty1 (iadd ty2 x k1@(iconst _ _)) (iadd ty3 y k2@(iconst _ _))))
      (eq ty1 x (iadd ty2 y (isub ty3 k2 k1))))
(rule (simplify (ne ty1 (iadd ty2 x k1@(iconst _ _)) (iadd ty3 y k2@(iconst _ _))))
      (ne ty1 x (iadd ty2 y (isub ty3 k2 k1))))
;; An icmp rule normalizes (eq sub sub), so we don't need to handle it here.

;; Replace subtraction by a "negative" constant with addition.
;; Notably, this gives `x - (-1) == x + 1`, so other patterns don't have to
;; match the subtract-negative-one version too.
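;;
;;   (isub ty x (iconst K)) --> (iadd ty x (iconst (-K)))
;;     when -K is smaller than K with both viewed as unsigned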
;; TODO: it would be nice to do this for `x + (-1) == x - 1` as well, but
;; that needs work in lowering first to avoid regressing addressing modes.

(rule (simplify (isub ty x (iconst_s ty k)))
      (if-let true (u64_lt (i64_cast_unsigned (i64_wrapping_neg k))
                           (i64_cast_unsigned k)))
      (iadd ty x (iconst ty (imm64_masked ty (i64_cast_unsigned (i64_wrapping_neg k))))))

;; A splat of a constant can become a direct `vconst` with the appropriate bit
;; pattern.
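;; For example (illustrative), splatting the 8-bit constant 0xAB into an
;; i8x16 vector yields a vconst whose 128-bit payload has 0xAB in every byte.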
(rule (simplify (splat (ty_vec128 dst) (iconst $I8 n)))
      (vconst dst (splat8 (u64_uextend_imm64 $I8 n))))
(rule (simplify (splat (ty_vec128 dst) (iconst $I16 n)))
      (vconst dst (splat16 (u64_uextend_imm64 $I16 n))))
(rule (simplify (splat (ty_vec128 dst) (iconst $I32 n)))
      (vconst dst (splat32 (u64_uextend_imm64 $I32 n))))
(rule (simplify (splat (ty_vec128 dst) (iconst $I64 n)))
      (vconst dst (splat64 (u64_uextend_imm64 $I64 n))))
(rule (simplify (splat (ty_vec128 dst) (f32const _ (u32_from_ieee32 n))))
      (vconst dst (splat32 n)))
(rule (simplify (splat (ty_vec128 dst) (f64const _ (u64_from_ieee64 n))))
      (vconst dst (splat64 n)))

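;; Helpers that replicate the low n bits of a constant across a 64-bit pattern
;; by OR-ing in a shifted copy at each step; the external `splat64` constructor
;; then builds the final 128-bit `vconst` payload from that 64-bit pattern.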
(decl splat8 (u64) Constant)
(rule (splat8 n) (splat16 (u64_or n (u64_shl n 8))))
(decl splat16 (u64) Constant)
(rule (splat16 n) (splat32 (u64_or n (u64_shl n 16))))
(decl splat32 (u64) Constant)
(rule (splat32 n) (splat64 (u64_or n (u64_shl n 32))))
(decl splat64 (u64) Constant)
(extern constructor splat64 splat64)

;; Reassociate nested shifts of constants to put constants together for cprop.
;;
;; ((A shift b) shift C) ==> ((A shift C) shift b)
(rule (simplify (ishl ty (ishl ty a@(iconst _ _) b) c@(iconst _ _)))
      (ishl ty (ishl ty a c) b))
(rule (simplify (ushr ty (ushr ty a@(iconst _ _) b) c@(iconst _ _)))
      (ushr ty (ushr ty a c) b))
(rule (simplify (sshr ty (sshr ty a@(iconst _ _) b) c@(iconst _ _)))
      (sshr ty (sshr ty a c) b))

;; For operations that are both commutative and associative, reassociate
;; constants together for cprop:
;;
;; ((a op B) op (c op D)) ==> ((a op c) op (B op D))
;;
;; Where `op` is one of: `iadd`, `imul`, `band`, `bor`, or `bxor`.
(rule (simplify (iadd ty
                      (iadd ty a b@(iconst _ _))
                      (iadd ty c d@(iconst _ _))))
      (iadd ty (iadd ty a c) (iadd ty b d)))
(rule (simplify (imul ty
                      (imul ty a b@(iconst _ _))
                      (imul ty c d@(iconst _ _))))
      (imul ty (imul ty a c) (imul ty b d)))
(rule (simplify (band ty
                      (band ty a b@(iconst _ _))
                      (band ty c d@(iconst _ _))))
      (band ty (band ty a c) (band ty b d)))
(rule (simplify (bor ty
                      (bor ty a b@(iconst _ _))
                      (bor ty c d@(iconst _ _))))
      (bor ty (bor ty a c) (bor ty b d)))
(rule (simplify (bxor ty
                      (bxor ty a b@(iconst _ _))
                      (bxor ty c d@(iconst _ _))))
      (bxor ty (bxor ty a c) (bxor ty b d)))


;; Constant fold int-to-float conversions.
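;; For example, (fcvt_from_uint.f64 (iconst 3)) --> (f64const 3.0).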
(rule (simplify (fcvt_from_uint $F32 (iconst_u _ n)))
      (f32const $F32 (f32_from_uint n)))
(rule (simplify (fcvt_from_uint $F64 (iconst_u _ n)))
      (f64const $F64 (f64_from_uint n)))
(rule (simplify (fcvt_from_sint $F32 (iconst_s _ n)))
      (f32const $F32 (f32_from_sint n)))
(rule (simplify (fcvt_from_sint $F64 (iconst_s _ n)))
      (f64const $F64 (f64_from_sint n)))

(decl f32_from_uint (u64) Ieee32)
(extern constructor f32_from_uint f32_from_uint)
(decl f64_from_uint (u64) Ieee64)
(extern constructor f64_from_uint f64_from_uint)
(decl f32_from_sint (i64) Ieee32)
(extern constructor f32_from_sint f32_from_sint)
(decl f64_from_sint (i64) Ieee64)
(extern constructor f64_from_sint f64_from_sint)

;; Constant fold bswap of a constant.
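;; For example, (bswap.i32 (iconst 0x11223344)) --> (iconst 0x44332211).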
(rule (simplify (bswap $I16 (iconst ty (u64_from_imm64 n))))
      (subsume (iconst $I16 (imm64 (u64_bswap16 n)))))
(rule (simplify (bswap $I32 (iconst ty (u64_from_imm64 n))))
      (subsume (iconst $I32 (imm64 (u64_bswap32 n)))))
(rule (simplify (bswap $I64 (iconst ty (u64_from_imm64 n))))
      (subsume (iconst $I64 (imm64 (u64_bswap64 n)))))

(decl pure u64_bswap16 (u64) u64)
(extern constructor u64_bswap16 u64_bswap16)
(decl pure u64_bswap32 (u64) u64)
(extern constructor u64_bswap32 u64_bswap32)
(decl pure u64_bswap64 (u64) u64)
(extern constructor u64_bswap64 u64_bswap64)

;; Constant fold float operations
;; Note: With the exception of fabs, fneg and copysign,
;; constant folding is only performed when the result of
;; an instruction isn't NaN. We want the NaN bit patterns
;; produced by an instruction to be consistent, and
;; compile-time evaluation in a cross-compilation scenario
;; risks producing different NaN bit patterns than the target
;; would have at run-time.
;; TODO: fcmp, fma, demote, promote, to-int ops
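;; For example, 0.0 / 0.0 produces NaN, so an `fdiv` of those two constants is
;; deliberately left unfolded.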
(rule (simplify (fadd $F32 (f32const $F32 lhs) (f32const $F32 rhs)))
      (if-let r (f32_add lhs rhs))
      (subsume (f32const $F32 r)))
(rule (simplify (fadd $F64 (f64const $F64 lhs) (f64const $F64 rhs)))
      (if-let r (f64_add lhs rhs))
      (subsume (f64const $F64 r)))

(rule (simplify (fsub $F32 (f32const $F32 lhs) (f32const $F32 rhs)))
      (if-let r (f32_sub lhs rhs))
      (subsume (f32const $F32 r)))
(rule (simplify (fsub $F64 (f64const $F64 lhs) (f64const $F64 rhs)))
      (if-let r (f64_sub lhs rhs))
      (subsume (f64const $F64 r)))

(rule (simplify (fmul $F32 (f32const $F32 lhs) (f32const $F32 rhs)))
      (if-let r (f32_mul lhs rhs))
      (subsume (f32const $F32 r)))
(rule (simplify (fmul $F64 (f64const $F64 lhs) (f64const $F64 rhs)))
      (if-let r (f64_mul lhs rhs))
      (subsume (f64const $F64 r)))

(rule (simplify (fdiv $F32 (f32const $F32 lhs) (f32const $F32 rhs)))
      (if-let r (f32_div lhs rhs))
      (subsume (f32const $F32 r)))
(rule (simplify (fdiv $F64 (f64const $F64 lhs) (f64const $F64 rhs)))
      (if-let r (f64_div lhs rhs))
      (subsume (f64const $F64 r)))

(rule (simplify (sqrt $F32 (f32const $F32 n)))
      (if-let r (f32_sqrt n))
      (subsume (f32const $F32 r)))
(rule (simplify (sqrt $F64 (f64const $F64 n)))
      (if-let r (f64_sqrt n))
      (subsume (f64const $F64 r)))

(rule (simplify (ceil $F32 (f32const $F32 n)))
      (if-let r (f32_ceil n))
      (subsume (f32const $F32 r)))
(rule (simplify (ceil $F64 (f64const $F64 n)))
      (if-let r (f64_ceil n))
      (subsume (f64const $F64 r)))

(rule (simplify (floor $F32 (f32const $F32 n)))
      (if-let r (f32_floor n))
      (subsume (f32const $F32 r)))
(rule (simplify (floor $F64 (f64const $F64 n)))
      (if-let r (f64_floor n))
      (subsume (f64const $F64 r)))

(rule (simplify (trunc $F32 (f32const $F32 n)))
      (if-let r (f32_trunc n))
      (subsume (f32const $F32 r)))
(rule (simplify (trunc $F64 (f64const $F64 n)))
      (if-let r (f64_trunc n))
      (subsume (f64const $F64 r)))

(rule (simplify (nearest $F32 (f32const $F32 n)))
      (if-let r (f32_nearest n))
      (subsume (f32const $F32 r)))
(rule (simplify (nearest $F64 (f64const $F64 n)))
      (if-let r (f64_nearest n))
      (subsume (f64const $F64 r)))

(rule (simplify (fmin $F16 (f16const $F16 n) (f16const $F16 m)))
      (if-let r (f16_min n m))
      (subsume (f16const $F16 r)))
(rule (simplify (fmin $F32 (f32const $F32 n) (f32const $F32 m)))
      (if-let r (f32_min n m))
      (subsume (f32const $F32 r)))
(rule (simplify (fmin $F64 (f64const $F64 n) (f64const $F64 m)))
      (if-let r (f64_min n m))
      (subsume (f64const $F64 r)))
(rule (simplify (fmin $F128 (f128const $F128 (ieee128_constant n)) (f128const $F128 (ieee128_constant m))))
      (if-let r (f128_min n m))
      (subsume (f128const $F128 (ieee128_constant r))))

(rule (simplify (fmax $F16 (f16const $F16 n) (f16const $F16 m)))
      (if-let r (f16_max n m))
      (subsume (f16const $F16 r)))
(rule (simplify (fmax $F32 (f32const $F32 n) (f32const $F32 m)))
      (if-let r (f32_max n m))
      (subsume (f32const $F32 r)))
(rule (simplify (fmax $F64 (f64const $F64 n) (f64const $F64 m)))
      (if-let r (f64_max n m))
      (subsume (f64const $F64 r)))
(rule (simplify (fmax $F128 (f128const $F128 (ieee128_constant n)) (f128const $F128 (ieee128_constant m))))
      (if-let r (f128_max n m))
      (subsume (f128const $F128 (ieee128_constant r))))

(rule (simplify (fneg $F16 (f16const $F16 n)))
      (subsume (f16const $F16 (f16_neg n))))
(rule (simplify (fneg $F32 (f32const $F32 n)))
      (subsume (f32const $F32 (f32_neg n))))
(rule (simplify (fneg $F64 (f64const $F64 n)))
      (subsume (f64const $F64 (f64_neg n))))
(rule (simplify (fneg $F128 (f128const $F128 (ieee128_constant n))))
      (subsume (f128const $F128 (ieee128_constant (f128_neg n)))))

(rule (simplify (fabs $F16 (f16const $F16 n)))
      (subsume (f16const $F16 (f16_abs n))))
(rule (simplify (fabs $F32 (f32const $F32 n)))
      (subsume (f32const $F32 (f32_abs n))))
(rule (simplify (fabs $F64 (f64const $F64 n)))
      (subsume (f64const $F64 (f64_abs n))))
(rule (simplify (fabs $F128 (f128const $F128 (ieee128_constant n))))
      (subsume (f128const $F128 (ieee128_constant (f128_abs n)))))

(rule (simplify (fcopysign $F16 (f16const $F16 n) (f16const $F16 m)))
      (subsume (f16const $F16 (f16_copysign n m))))
(rule (simplify (fcopysign $F32 (f32const $F32 n) (f32const $F32 m)))
      (subsume (f32const $F32 (f32_copysign n m))))
(rule (simplify (fcopysign $F64 (f64const $F64 n) (f64const $F64 m)))
      (subsume (f64const $F64 (f64_copysign n m))))
(rule (simplify (fcopysign $F128 (f128const $F128 (ieee128_constant n)) (f128const $F128 (ieee128_constant m))))
      (subsume (f128const $F128 (ieee128_constant (f128_copysign n m)))))

(decl ieee128_constant (Ieee128) Constant)
(extern constructor ieee128_constant ieee128_constant)
(extern extractor ieee128_constant ieee128_constant_extractor)