@@ -385,46 +385,110 @@ let ignore_low_bit_int = function
| Cop (Cor, [c; Cconst_int (1, _)], _) -> c
| c -> c

+ let is_defined_shift n = 0 <= n && n < arch_bits
+
+ let[@inline] get_const = function
+   | Cconst_int (i, _) -> Some (Nativeint.of_int i)
+   | Cconst_natint (i, _) -> Some i
+   | _ -> None
+
+ let or_int c1 c2 dbg =
+   match get_const c1, get_const c2 with
+   | Some c1, Some c2 -> natint_const_untagged dbg (Nativeint.logor c1 c2)
+   | _, Some 0n | Some (-1n), _ -> c1
+   | Some 0n, _ | _, Some (-1n) -> c2
+   | Some _, None ->
+     (* prefer putting constants on the right *)
+     Cop (Cor, [c2; c1], dbg)
+   | _, _ -> Cop (Cor, [c1; c2], dbg)
+
+ let and_int c1 c2 dbg =
+   match get_const c1, get_const c2 with
+   | Some c1, Some c2 -> natint_const_untagged dbg (Nativeint.logand c1 c2)
+   | _, Some 0n | Some (-1n), _ -> c2
+   | Some 0n, _ | _, Some (-1n) -> c1
+   | Some _, None ->
+     (* prefer putting constants on the right *)
+     Cop (Cand, [c2; c1], dbg)
+   | _, _ -> Cop (Cand, [c1; c2], dbg)
+
+ let xor_int c1 c2 dbg =
+   match get_const c1, get_const c2 with
+   | Some c1, Some c2 -> natint_const_untagged dbg (Nativeint.logxor c1 c2)
+   | _, Some 0n -> c1
+   | Some 0n, _ -> c2
+   | Some _, None ->
+     (* prefer putting constants on the right *)
+     Cop (Cxor, [c2; c1], dbg)
+   | _, _ -> Cop (Cxor, [c1; c2], dbg)
+
+ let replace x ~with_ =
+   match x with
+   | Cconst_int _ | Cconst_natint _ | Cconst_symbol _ | Cvar _ | Ctuple [] ->
+     with_
+   | inner -> Csequence (inner, with_)
+
let rec lsr_int c1 c2 dbg =
  match c1, c2 with
  | c1, Cconst_int (0, _) -> c1
-   | Cop (Clsr, [c; Cconst_int (n1, _)], _), Cconst_int (n2, _)
-     when n1 > 0 && n2 > 0 && n1 + n2 < size_int * 8 ->
-     Cop (Clsr, [c; Cconst_int (n1 + n2, dbg)], dbg)
-   | c1, Cconst_int (n, _) when n > 0 ->
-     Cop (Clsr, [ignore_low_bit_int c1; c2], dbg)
-   | _ -> Cop (Clsr, [c1; c2], dbg)
+   | c1, Cconst_int (n, _) when is_defined_shift n -> (
+     let c1 = ignore_low_bit_int c1 in
+     match get_const c1 with
+     | Some x -> natint_const_untagged dbg (Nativeint.shift_right_logical x n)
+     | None -> (
+       match c1 with
+       | Cop (Clsr, [inner; Cconst_int (n', _)], _) when is_defined_shift n' ->
+         if is_defined_shift (n + n')
+         then lsr_const inner (n + n') dbg
+         else replace inner ~with_:(Cconst_int (0, dbg))
+       | Cop (Cor, [x; ((Cconst_int _ | Cconst_natint _) as y)], _) ->
+         or_int (lsr_int x c2 dbg) (lsr_int y c2 dbg) dbg
+       | Cop (Cand, [x; ((Cconst_int _ | Cconst_natint _) as y)], _) ->
+         and_int (lsr_int x c2 dbg) (lsr_int y c2 dbg) dbg
+       | Cop (Cxor, [x; ((Cconst_int _ | Cconst_natint _) as y)], _) ->
+         xor_int (lsr_int x c2 dbg) (lsr_int y c2 dbg) dbg
+       | c1 -> Cop (Clsr, [c1; c2], dbg)))
+   | Cop (Clsr, [x; (Cconst_int (n', _) as y)], z), c2 when is_defined_shift n' ->
+     (* prefer putting the constant shift on the outside to help enable further
+        peephole optimizations *)
+     Cop (Clsr, [Cop (Clsr, [x; c2], dbg); y], z)
+   | c1, c2 -> Cop (Clsr, [c1; c2], dbg)

and asr_int c1 c2 dbg =
  match c1, c2 with
  | c1, Cconst_int (0, _) -> c1
-   | c1, Cconst_int (n, _) when 0 < n && n < arch_bits -> (
-     match ignore_low_bit_int c1 with
-     | Cop (Casr, [inner; Cconst_int (n', _)], _) when 0 <= n' && n' < arch_bits ->
-       asr_const inner (Int.min (n + n') (arch_bits - 1)) dbg
-     | Cop (Clsr, [inner; Cconst_int (n', _)], _) when 0 < n' && n' < arch_bits ->
-       (* If the inner unsigned shift is guaranteed positive, then we know the
-          sign bit is 0 and we can weaken this operation to a logical shift *)
-       if n + n' < arch_bits
-       then lsr_const inner (n + n') dbg
-       else Csequence (inner, Cconst_int (0, dbg))
-     | Cop (Cor, [inner; Cconst_int (x, _)], _) when n <= Sys.int_size ->
-       let inner = asr_const inner n dbg in
-       let x = x asr n in
-       if x = 0 then inner else Cop (Cor, [inner; Cconst_int (x, dbg)], dbg)
-     | Cop (Clsl, [c; Cconst_int (1, _)], _)
-     (* some operations always return small enough integers that it is safe and
-        correct to optimise [asr (lsl x 1) 1] into [x]. *)
-       when n = 1 && guaranteed_to_be_small_int c ->
-       c
-     | Cop (Cor, [inner; Cconst_int (x, _)], _) when n < Sys.int_size ->
-       let inner = asr_const inner n dbg in
-       let x = x asr n in
-       if x = 0 then inner else Cop (Cor, [inner; Cconst_int (x, dbg)], dbg)
-     | c1' -> Cop (Casr, [c1'; c2], dbg))
-   | Cop (Casr, [x; (Cconst_int _ as y)], z), c2 ->
+   | c1, Cconst_int (n, _) when is_defined_shift n -> (
+     let c1 = ignore_low_bit_int c1 in
+     match get_const c1 with
+     | Some x -> natint_const_untagged dbg (Nativeint.shift_right x n)
+     | None -> (
+       match c1 with
+       | Cconst_int (x, _) -> Cconst_int (x asr n, dbg)
+       | Cconst_natint (x, _) ->
+         natint_const_untagged dbg (Nativeint.shift_right x n)
+       | Cop (Casr, [inner; Cconst_int (n', _)], _) when is_defined_shift n' ->
+         (* saturating add, since the sign bit extends to the left *)
+         asr_const inner (Int.min (n + n') (arch_bits - 1)) dbg
+       | Cop (Clsr, [_; Cconst_int (n', _)], _)
+         when n' > 0 && is_defined_shift n' ->
+         (* If the argument is guaranteed non-negative, then we know the sign
+            bit is 0 and we can weaken this operation to a logical shift *)
+         lsr_const c1 n dbg
+       | Cop (Clsl, [c; Cconst_int (1, _)], _)
+         when n = 1 && guaranteed_to_be_small_int c ->
+         (* some operations always return small enough integers that it is safe
+            and correct to optimise [asr (lsl x 1) 1] into [x]. *)
+         c
+       | Cop (Cor, [x; ((Cconst_int _ | Cconst_natint _) as y)], _) ->
+         or_int (asr_int x c2 dbg) (asr_int y c2 dbg) dbg
+       | Cop (Cand, [x; ((Cconst_int _ | Cconst_natint _) as y)], _) ->
+         and_int (asr_int x c2 dbg) (asr_int y c2 dbg) dbg
+       | Cop (Cxor, [x; ((Cconst_int _ | Cconst_natint _) as y)], _) ->
+         xor_int (asr_int x c2 dbg) (asr_int y c2 dbg) dbg
+       | c1 -> Cop (Casr, [c1; c2], dbg)))
+   | Cop (Casr, [x; (Cconst_int (n', _) as y)], z), c2 when is_defined_shift n' ->
  (* prefer putting the constant shift on the outside to help enable further
     peephole optimizations *)
  Cop (Casr, [Cop (Casr, [x; c2], dbg); y], z)
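The rewritten lsr_int and asr_int share one shape: fold when the whole argument is a known constant (via get_const), merge nested shifts by the same operator when the combined amount is still a defined shift (otherwise the result is statically zero, with replace keeping the inner expression alive for its effects), and push a constant shift through lor/land/lxor with a constant operand so the constant side folds. The asr version additionally weakens asr to lsr when an inner lsr guarantees a zero sign bit, and saturates combined arithmetic shift amounts at arch_bits - 1 since the sign bit extends indefinitely. The toy model below illustrates the shared shape for logical shifts only; the type and names (expr, lsr_) are invented for the sketch and are not the Cmm ones.

(* Toy model of the [lsr_int] rewrites; [expr] and [lsr_] are invented names,
   not Cmm. Effects are ignored here, which is why the overshift case can
   return [Const 0n] directly instead of going through [replace]. *)
type expr =
  | Const of nativeint
  | Var of string
  | Lsr of expr * int      (* logical shift right by a literal amount *)
  | Or of expr * nativeint (* bitwise or with a constant operand *)

let arch_bits = 64
let is_defined_shift n = 0 <= n && n < arch_bits

let rec lsr_ x n =
  if not (is_defined_shift n)
  then Lsr (x, n)
  else
    match x with
    | Const c -> Const (Nativeint.shift_right_logical c n) (* constant fold *)
    | Lsr (inner, n') when is_defined_shift n' ->
      (* merge nested shifts; an overshift is statically zero *)
      if is_defined_shift (n + n') then lsr_ inner (n + n') else Const 0n
    | Or (e, mask) ->
      (* distribute over [or] so the constant side folds *)
      Or (lsr_ e n, Nativeint.shift_right_logical mask n)
    | x -> Lsr (x, n)

(* ((v lsr 3) lor 0xF0) lsr 4 becomes (v lsr 7) lor 0xF *)
let () =
  match lsr_ (Or (Lsr (Var "v", 3), 0xF0n)) 4 with
  | Or (Lsr (Var "v", 7), 0xFn) -> print_endline "combined"
  | _ -> print_endline "unexpected shape"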
@@ -433,16 +497,27 @@ and asr_int c1 c2 dbg =
and lsl_int c1 c2 dbg =
  match c1, c2 with
  | c1, Cconst_int (0, _) -> c1
-   | Cop (Clsl, [c; Cconst_int (n1, _)], _), Cconst_int (n2, _)
-     when n1 > 0 && n2 > 0 && n1 + n2 < size_int * 8 ->
-     Cop (Clsl, [c; Cconst_int (n1 + n2, dbg)], dbg)
-   | Cop (Caddi, [c1; Cconst_int (n1, _)], _), Cconst_int (n2, _)
-     when Misc.no_overflow_lsl n1 n2 ->
-     add_const (lsl_int c1 c2 dbg) (n1 lsl n2) dbg
-   | Cop (Cor, [c1; Cconst_int (n1, _)], _), Cconst_int (n2, _)
-     when Misc.no_overflow_lsl n1 n2 ->
-     Cop (Cor, [lsl_int c1 c2 dbg; Cconst_int (n1 lsl n2, dbg)], dbg)
-   | Cop (Clsl, [x; (Cconst_int _ as y)], z), c2 ->
+   | c1, Cconst_int (n, _) when is_defined_shift n -> (
+     match get_const c1 with
+     | Some c1 -> natint_const_untagged dbg (Nativeint.shift_left c1 n)
+     | None -> (
+       match c1 with
+       | Cop (Clsl, [inner; Cconst_int (n', _)], dbg) when is_defined_shift n' ->
+         if is_defined_shift (n + n')
+         then lsl_const inner (n + n') dbg
+         else replace inner ~with_:(Cconst_int (0, dbg))
+       | Cop (Caddi, [c1; Cconst_int (offset, _)], _)
+         when Misc.no_overflow_lsl offset n ->
+         add_const (lsl_int c1 c2 dbg) (offset lsl n) dbg
+       | Cop (Cor, [x; ((Cconst_int _ | Cconst_natint _) as y)], _) ->
+         or_int (lsl_int x c2 dbg) (lsl_int y c2 dbg) dbg
+       | Cop (Cand, [x; ((Cconst_int _ | Cconst_natint _) as y)], _) ->
+         and_int (lsl_int x c2 dbg) (lsl_int y c2 dbg) dbg
+       | Cop (Cxor, [x; ((Cconst_int _ | Cconst_natint _) as y)], _) ->
+         xor_int (lsl_int x c2 dbg) (lsl_int y c2 dbg) dbg
+       | c1 -> Cop (Clsl, [c1; c2], dbg)))
+   | Cop (Clsl, [x; (Cconst_int (n', _) as y)], z), c2 when is_defined_shift n' ->
  (* prefer putting the constant shift on the outside to help enable further
     peephole optimizations *)
  Cop (Clsl, [Cop (Clsl, [x; c2], dbg); y], z)
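The [Caddi] case of lsl_int rests on left shifts distributing over addition in wraparound two's-complement arithmetic: (x + k) lsl n = (x lsl n) + (k lsl n). The Misc.no_overflow_lsl guard ensures the folded constant k lsl n is computed without overflow. A quick self-contained check of the identity on plain OCaml ints (sample values chosen for the sketch, not taken from the patch):

(* Check that a left shift distributes over addition in OCaml's wraparound
   integer arithmetic; this is the identity behind the [Caddi] case above. *)
let () =
  let check x k n = assert ((x + k) lsl n = (x lsl n) + (k lsl n)) in
  check 5 3 4;
  check (-7) 123 10;
  check max_int 1 1;
  print_endline "shift distributes over addition on these samples"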
@@ -2020,11 +2095,7 @@ let and_int e1 e2 dbg =
  match e1, e2 with
  | e, m when is_mask32 m -> zero_extend ~bits:32 e ~dbg
  | m, e when is_mask32 m -> zero_extend ~bits:32 e ~dbg
-   | e1, e2 -> Cop (Cand, [e1; e2], dbg)
-
- let or_int e1 e2 dbg = Cop (Cor, [e1; e2], dbg)
-
- let xor_int e1 e2 dbg = Cop (Cxor, [e1; e2], dbg)
+   | e1, e2 -> and_int e1 e2 dbg
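
With the constant-aware or_int/and_int/xor_int now defined alongside the shift helpers, the thin wrappers at this point in the file become redundant: the or_int and xor_int aliases are deleted outright, and the 32-bit-mask-aware and_int falls through to the smart constructor, so constant operands fold and 0/-1 operands collapse at every call site. A toy version of that collapse (invented expr type, not Cmm; effects and debug info are ignored in the sketch):

(* Toy version of the constant-aware [and_int] smart constructor. *)
type expr = Const of nativeint | Var of string | And of expr * expr

let and_int a b =
  match a, b with
  | Const x, Const y -> Const (Nativeint.logand x y) (* both sides constant *)
  | _, Const 0n | Const 0n, _ -> Const 0n            (* x land 0 = 0 *)
  | e, Const (-1n) | Const (-1n), e -> e             (* x land -1 = x *)
  | Const _, e -> And (e, a)  (* prefer putting constants on the right *)
  | _, _ -> And (a, b)

let () =
  match and_int (Const (-1n)) (Var "x") with
  | Var "x" -> print_endline "identity mask folded away"
  | _ -> print_endline "unexpected"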
(* Boxed integers *)