GCC with patches for OS216
Revisión | 91d014fffae1fcf1eb750d40e622e5b80cd1a4ec (tree) |
---|---|
Tiempo | 2018-03-21 02:25:09 |
Autor | Peter Bergner <bergner@vnet...> |
Committer | Peter Bergner |
re PR target/83789 (builtin_altivec_lvx fails for powerpc for altivec-4.c)
PR target/83789
* config/rs6000/altivec.md (altivec_lvx_<mode>_2op): Delete define_insn.
(altivec_lvx_<mode>_1op): Likewise.
(altivec_stvx_<mode>_2op): Likewise.
(altivec_stvx_<mode>_1op): Likewise.
(altivec_lvx_<VM2:mode>): New define_expand.
(altivec_stvx_<VM2:mode>): Likewise.
(altivec_lvx_<VM2:mode>_2op_<P:mptrsize>): New define_insn.
(altivec_lvx_<VM2:mode>_1op_<P:mptrsize>): Likewise.
(altivec_stvx_<VM2:mode>_2op_<P:mptrsize>): Likewise.
(altivec_stvx_<VM2:mode>_1op_<P:mptrsize>): Likewise.
* config/rs6000/rs6000-p8swap.c (rs6000_gen_stvx): Use new expanders.
(rs6000_gen_lvx): Likewise.
* config/rs6000/rs6000.c (altivec_expand_lv_builtin): Likewise.
(altivec_expand_stv_builtin): Likewise.
(altivec_expand_builtin): Likewise.
* config/rs6000/vector.md: Likewise.
From-SVN: r258688
@@ -1,3 +1,23 @@ | ||
1 | +2018-03-20 Peter Bergner <bergner@vnet.ibm.com> | |
2 | + | |
3 | + PR target/83789 | |
4 | + * config/rs6000/altivec.md (altivec_lvx_<mode>_2op): Delete define_insn. | |
5 | + (altivec_lvx_<mode>_1op): Likewise. | |
6 | + (altivec_stvx_<mode>_2op): Likewise. | |
7 | + (altivec_stvx_<mode>_1op): Likewise. | |
8 | + (altivec_lvx_<VM2:mode>): New define_expand. | |
9 | + (altivec_stvx_<VM2:mode>): Likewise. | |
10 | + (altivec_lvx_<VM2:mode>_2op_<P:mptrsize>): New define_insn. | |
11 | + (altivec_lvx_<VM2:mode>_1op_<P:mptrsize>): Likewise. | |
12 | + (altivec_stvx_<VM2:mode>_2op_<P:mptrsize>): Likewise. | |
13 | + (altivec_stvx_<VM2:mode>_1op_<P:mptrsize>): Likewise. | |
14 | + * config/rs6000/rs6000-p8swap.c (rs6000_gen_stvx): Use new expanders. | |
15 | + (rs6000_gen_lvx): Likewise. | |
16 | + * config/rs6000/rs6000.c (altivec_expand_lv_builtin): Likewise. | |
17 | + (altivec_expand_stv_builtin): Likewise. | |
18 | + (altivec_expand_builtin): Likewise. | |
19 | + * config/rs6000/vector.md: Likewise. | |
20 | + | |
1 | 21 | 2018-03-20 Richard Biener <rguenther@suse.de> |
2 | 22 | |
3 | 23 | PR target/84986 |
@@ -2747,39 +2747,47 @@ | ||
2747 | 2747 | "lvx %0,%y1" |
2748 | 2748 | [(set_attr "type" "vecload")]) |
2749 | 2749 | |
2750 | -; The next two patterns embody what lvx should usually look like. | |
2751 | -(define_insn "altivec_lvx_<mode>_2op" | |
2752 | - [(set (match_operand:VM2 0 "register_operand" "=v") | |
2753 | - (mem:VM2 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "b") | |
2754 | - (match_operand:DI 2 "register_operand" "r")) | |
2755 | - (const_int -16))))] | |
2756 | - "TARGET_ALTIVEC && TARGET_64BIT" | |
2757 | - "lvx %0,%1,%2" | |
2758 | - [(set_attr "type" "vecload")]) | |
2759 | - | |
2760 | -(define_insn "altivec_lvx_<mode>_1op" | |
2761 | - [(set (match_operand:VM2 0 "register_operand" "=v") | |
2762 | - (mem:VM2 (and:DI (match_operand:DI 1 "register_operand" "r") | |
2763 | - (const_int -16))))] | |
2764 | - "TARGET_ALTIVEC && TARGET_64BIT" | |
2765 | - "lvx %0,0,%1" | |
2766 | - [(set_attr "type" "vecload")]) | |
2750 | +; The following patterns embody what lvx should usually look like. | |
2751 | +(define_expand "altivec_lvx_<VM2:mode>" | |
2752 | + [(set (match_operand:VM2 0 "register_operand") | |
2753 | + (match_operand:VM2 1 "altivec_indexed_or_indirect_operand"))] | |
2754 | + "TARGET_ALTIVEC" | |
2755 | +{ | |
2756 | + rtx addr = XEXP (operand1, 0); | |
2757 | + if (rs6000_sum_of_two_registers_p (addr)) | |
2758 | + { | |
2759 | + rtx op1 = XEXP (addr, 0); | |
2760 | + rtx op2 = XEXP (addr, 1); | |
2761 | + if (TARGET_64BIT) | |
2762 | + emit_insn (gen_altivec_lvx_<VM2:mode>_2op_di (operand0, op1, op2)); | |
2763 | + else | |
2764 | + emit_insn (gen_altivec_lvx_<VM2:mode>_2op_si (operand0, op1, op2)); | |
2765 | + } | |
2766 | + else | |
2767 | + { | |
2768 | + if (TARGET_64BIT) | |
2769 | + emit_insn (gen_altivec_lvx_<VM2:mode>_1op_di (operand0, addr)); | |
2770 | + else | |
2771 | + emit_insn (gen_altivec_lvx_<VM2:mode>_1op_si (operand0, addr)); | |
2772 | + } | |
2773 | + DONE; | |
2774 | +}) | |
2767 | 2775 | |
2768 | -; 32-bit versions of the above. | |
2769 | -(define_insn "altivec_lvx_<mode>_2op_si" | |
2776 | +; The next two patterns embody what lvx should usually look like. | |
2777 | +(define_insn "altivec_lvx_<VM2:mode>_2op_<P:mptrsize>" | |
2770 | 2778 | [(set (match_operand:VM2 0 "register_operand" "=v") |
2771 | - (mem:VM2 (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b") | |
2772 | - (match_operand:SI 2 "register_operand" "r")) | |
2773 | - (const_int -16))))] | |
2774 | - "TARGET_ALTIVEC && TARGET_32BIT" | |
2779 | + (mem:VM2 (and:P (plus:P (match_operand:P 1 "register_operand" "b") | |
2780 | + (match_operand:P 2 "register_operand" "r")) | |
2781 | + (const_int -16))))] | |
2782 | + "TARGET_ALTIVEC" | |
2775 | 2783 | "lvx %0,%1,%2" |
2776 | 2784 | [(set_attr "type" "vecload")]) |
2777 | 2785 | |
2778 | -(define_insn "altivec_lvx_<mode>_1op_si" | |
2786 | +(define_insn "altivec_lvx_<VM2:mode>_1op_<P:mptrsize>" | |
2779 | 2787 | [(set (match_operand:VM2 0 "register_operand" "=v") |
2780 | - (mem:VM2 (and:SI (match_operand:SI 1 "register_operand" "r") | |
2781 | - (const_int -16))))] | |
2782 | - "TARGET_ALTIVEC && TARGET_32BIT" | |
2788 | + (mem:VM2 (and:P (match_operand:P 1 "register_operand" "r") | |
2789 | + (const_int -16))))] | |
2790 | + "TARGET_ALTIVEC" | |
2783 | 2791 | "lvx %0,0,%1" |
2784 | 2792 | [(set_attr "type" "vecload")]) |
2785 | 2793 |
@@ -2795,39 +2803,47 @@ | ||
2795 | 2803 | "stvx %1,%y0" |
2796 | 2804 | [(set_attr "type" "vecstore")]) |
2797 | 2805 | |
2798 | -; The next two patterns embody what stvx should usually look like. | |
2799 | -(define_insn "altivec_stvx_<mode>_2op" | |
2800 | - [(set (mem:VM2 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "b") | |
2801 | - (match_operand:DI 2 "register_operand" "r")) | |
2802 | - (const_int -16))) | |
2803 | - (match_operand:VM2 0 "register_operand" "v"))] | |
2804 | - "TARGET_ALTIVEC && TARGET_64BIT" | |
2805 | - "stvx %0,%1,%2" | |
2806 | - [(set_attr "type" "vecstore")]) | |
2807 | - | |
2808 | -(define_insn "altivec_stvx_<mode>_1op" | |
2809 | - [(set (mem:VM2 (and:DI (match_operand:DI 1 "register_operand" "r") | |
2810 | - (const_int -16))) | |
2811 | - (match_operand:VM2 0 "register_operand" "v"))] | |
2812 | - "TARGET_ALTIVEC && TARGET_64BIT" | |
2813 | - "stvx %0,0,%1" | |
2814 | - [(set_attr "type" "vecstore")]) | |
2806 | +; The following patterns embody what stvx should usually look like. | |
2807 | +(define_expand "altivec_stvx_<VM2:mode>" | |
2808 | + [(set (match_operand:VM2 1 "altivec_indexed_or_indirect_operand") | |
2809 | + (match_operand:VM2 0 "register_operand"))] | |
2810 | + "TARGET_ALTIVEC" | |
2811 | +{ | |
2812 | + rtx addr = XEXP (operand1, 0); | |
2813 | + if (rs6000_sum_of_two_registers_p (addr)) | |
2814 | + { | |
2815 | + rtx op1 = XEXP (addr, 0); | |
2816 | + rtx op2 = XEXP (addr, 1); | |
2817 | + if (TARGET_64BIT) | |
2818 | + emit_insn (gen_altivec_stvx_<VM2:mode>_2op_di (operand0, op1, op2)); | |
2819 | + else | |
2820 | + emit_insn (gen_altivec_stvx_<VM2:mode>_2op_si (operand0, op1, op2)); | |
2821 | + } | |
2822 | + else | |
2823 | + { | |
2824 | + if (TARGET_64BIT) | |
2825 | + emit_insn (gen_altivec_stvx_<VM2:mode>_1op_di (operand0, addr)); | |
2826 | + else | |
2827 | + emit_insn (gen_altivec_stvx_<VM2:mode>_1op_si (operand0, addr)); | |
2828 | + } | |
2829 | + DONE; | |
2830 | +}) | |
2815 | 2831 | |
2816 | -; 32-bit versions of the above. | |
2817 | -(define_insn "altivec_stvx_<mode>_2op_si" | |
2818 | - [(set (mem:VM2 (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b") | |
2819 | - (match_operand:SI 2 "register_operand" "r")) | |
2820 | - (const_int -16))) | |
2821 | - (match_operand:VM2 0 "register_operand" "v"))] | |
2822 | - "TARGET_ALTIVEC && TARGET_32BIT" | |
2832 | +; The next two patterns embody what stvx should usually look like. | |
2833 | +(define_insn "altivec_stvx_<VM2:mode>_2op_<P:mptrsize>" | |
2834 | + [(set (mem:VM2 (and:P (plus:P (match_operand:P 1 "register_operand" "b") | |
2835 | + (match_operand:P 2 "register_operand" "r")) | |
2836 | + (const_int -16))) | |
2837 | + (match_operand:VM2 0 "register_operand" "v"))] | |
2838 | + "TARGET_ALTIVEC" | |
2823 | 2839 | "stvx %0,%1,%2" |
2824 | 2840 | [(set_attr "type" "vecstore")]) |
2825 | 2841 | |
2826 | -(define_insn "altivec_stvx_<mode>_1op_si" | |
2827 | - [(set (mem:VM2 (and:SI (match_operand:SI 1 "register_operand" "r") | |
2828 | - (const_int -16))) | |
2829 | - (match_operand:VM2 0 "register_operand" "v"))] | |
2830 | - "TARGET_ALTIVEC && TARGET_32BIT" | |
2842 | +(define_insn "altivec_stvx_<VM2:mode>_1op_<P:mptrsize>" | |
2843 | + [(set (mem:VM2 (and:P (match_operand:P 1 "register_operand" "r") | |
2844 | + (const_int -16))) | |
2845 | + (match_operand:VM2 0 "register_operand" "v"))] | |
2846 | + "TARGET_ALTIVEC" | |
2831 | 2847 | "stvx %0,0,%1" |
2832 | 2848 | [(set_attr "type" "vecstore")]) |
2833 | 2849 |
@@ -1548,94 +1548,31 @@ mimic_memory_attributes_and_flags (rtx new_mem_exp, const_rtx original_mem_exp) | ||
1548 | 1548 | rtx |
1549 | 1549 | rs6000_gen_stvx (enum machine_mode mode, rtx dest_exp, rtx src_exp) |
1550 | 1550 | { |
1551 | - rtx memory_address = XEXP (dest_exp, 0); | |
1552 | 1551 | rtx stvx; |
1553 | 1552 | |
1554 | - if (rs6000_sum_of_two_registers_p (memory_address)) | |
1555 | - { | |
1556 | - rtx op1, op2; | |
1557 | - op1 = XEXP (memory_address, 0); | |
1558 | - op2 = XEXP (memory_address, 1); | |
1559 | - if (mode == V16QImode) | |
1560 | - stvx = TARGET_64BIT | |
1561 | - ? gen_altivec_stvx_v16qi_2op (src_exp, op1, op2) | |
1562 | - : gen_altivec_stvx_v16qi_2op_si (src_exp, op1, op2); | |
1563 | - else if (mode == V8HImode) | |
1564 | - stvx = TARGET_64BIT | |
1565 | - ? gen_altivec_stvx_v8hi_2op (src_exp, op1, op2) | |
1566 | - : gen_altivec_stvx_v8hi_2op_si (src_exp, op1, op2); | |
1567 | -#ifdef HAVE_V8HFmode | |
1568 | - else if (mode == V8HFmode) | |
1569 | - stvx = TARGET_64BIT | |
1570 | - ? gen_altivec_stvx_v8hf_2op (src_exp, op1, op2) | |
1571 | - : gen_altivec_stvx_v8hf_2op_si (src_exp, op1, op2); | |
1572 | -#endif | |
1573 | - else if (mode == V4SImode) | |
1574 | - stvx = TARGET_64BIT | |
1575 | - ? gen_altivec_stvx_v4si_2op (src_exp, op1, op2) | |
1576 | - : gen_altivec_stvx_v4si_2op_si (src_exp, op1, op2); | |
1577 | - else if (mode == V4SFmode) | |
1578 | - stvx = TARGET_64BIT | |
1579 | - ? gen_altivec_stvx_v4sf_2op (src_exp, op1, op2) | |
1580 | - : gen_altivec_stvx_v4sf_2op_si (src_exp, op1, op2); | |
1581 | - else if (mode == V2DImode) | |
1582 | - stvx = TARGET_64BIT | |
1583 | - ? gen_altivec_stvx_v2di_2op (src_exp, op1, op2) | |
1584 | - : gen_altivec_stvx_v2di_2op_si (src_exp, op1, op2); | |
1585 | - else if (mode == V2DFmode) | |
1586 | - stvx = TARGET_64BIT | |
1587 | - ? gen_altivec_stvx_v2df_2op (src_exp, op1, op2) | |
1588 | - : gen_altivec_stvx_v2df_2op_si (src_exp, op1, op2); | |
1589 | - else if (mode == V1TImode) | |
1590 | - stvx = TARGET_64BIT | |
1591 | - ? gen_altivec_stvx_v1ti_2op (src_exp, op1, op2) | |
1592 | - : gen_altivec_stvx_v1ti_2op_si (src_exp, op1, op2); | |
1593 | - else | |
1594 | - /* KFmode, TFmode, other modes not expected in this context. */ | |
1595 | - gcc_unreachable (); | |
1596 | - } | |
1597 | - else /* REG_P (memory_address) */ | |
1598 | - { | |
1599 | - if (mode == V16QImode) | |
1600 | - stvx = TARGET_64BIT | |
1601 | - ? gen_altivec_stvx_v16qi_1op (src_exp, memory_address) | |
1602 | - : gen_altivec_stvx_v16qi_1op_si (src_exp, memory_address); | |
1603 | - else if (mode == V8HImode) | |
1604 | - stvx = TARGET_64BIT | |
1605 | - ? gen_altivec_stvx_v8hi_1op (src_exp, memory_address) | |
1606 | - : gen_altivec_stvx_v8hi_1op_si (src_exp, memory_address); | |
1553 | + if (mode == V16QImode) | |
1554 | + stvx = gen_altivec_stvx_v16qi (src_exp, dest_exp); | |
1555 | + else if (mode == V8HImode) | |
1556 | + stvx = gen_altivec_stvx_v8hi (src_exp, dest_exp); | |
1607 | 1557 | #ifdef HAVE_V8HFmode |
1608 | - else if (mode == V8HFmode) | |
1609 | - stvx = TARGET_64BIT | |
1610 | - ? gen_altivec_stvx_v8hf_1op (src_exp, memory_address) | |
1611 | - : gen_altivec_stvx_v8hf_1op_si (src_exp, memory_address); | |
1558 | + else if (mode == V8HFmode) | |
1559 | + stvx = gen_altivec_stvx_v8hf (src_exp, dest_exp); | |
1612 | 1560 | #endif |
1613 | - else if (mode == V4SImode) | |
1614 | - stvx =TARGET_64BIT | |
1615 | - ? gen_altivec_stvx_v4si_1op (src_exp, memory_address) | |
1616 | - : gen_altivec_stvx_v4si_1op_si (src_exp, memory_address); | |
1617 | - else if (mode == V4SFmode) | |
1618 | - stvx = TARGET_64BIT | |
1619 | - ? gen_altivec_stvx_v4sf_1op (src_exp, memory_address) | |
1620 | - : gen_altivec_stvx_v4sf_1op_si (src_exp, memory_address); | |
1621 | - else if (mode == V2DImode) | |
1622 | - stvx = TARGET_64BIT | |
1623 | - ? gen_altivec_stvx_v2di_1op (src_exp, memory_address) | |
1624 | - : gen_altivec_stvx_v2di_1op_si (src_exp, memory_address); | |
1625 | - else if (mode == V2DFmode) | |
1626 | - stvx = TARGET_64BIT | |
1627 | - ? gen_altivec_stvx_v2df_1op (src_exp, memory_address) | |
1628 | - : gen_altivec_stvx_v2df_1op_si (src_exp, memory_address); | |
1629 | - else if (mode == V1TImode) | |
1630 | - stvx = TARGET_64BIT | |
1631 | - ? gen_altivec_stvx_v1ti_1op (src_exp, memory_address) | |
1632 | - : gen_altivec_stvx_v1ti_1op_si (src_exp, memory_address); | |
1633 | - else | |
1634 | - /* KFmode, TFmode, other modes not expected in this context. */ | |
1635 | - gcc_unreachable (); | |
1636 | - } | |
1561 | + else if (mode == V4SImode) | |
1562 | + stvx = gen_altivec_stvx_v4si (src_exp, dest_exp); | |
1563 | + else if (mode == V4SFmode) | |
1564 | + stvx = gen_altivec_stvx_v4sf (src_exp, dest_exp); | |
1565 | + else if (mode == V2DImode) | |
1566 | + stvx = gen_altivec_stvx_v2di (src_exp, dest_exp); | |
1567 | + else if (mode == V2DFmode) | |
1568 | + stvx = gen_altivec_stvx_v2df (src_exp, dest_exp); | |
1569 | + else if (mode == V1TImode) | |
1570 | + stvx = gen_altivec_stvx_v1ti (src_exp, dest_exp); | |
1571 | + else | |
1572 | + /* KFmode, TFmode, other modes not expected in this context. */ | |
1573 | + gcc_unreachable (); | |
1637 | 1574 | |
1638 | - rtx new_mem_exp = SET_DEST (stvx); | |
1575 | + rtx new_mem_exp = SET_DEST (PATTERN (stvx)); | |
1639 | 1576 | mimic_memory_attributes_and_flags (new_mem_exp, dest_exp); |
1640 | 1577 | return stvx; |
1641 | 1578 | } |
@@ -1727,95 +1664,31 @@ replace_swapped_aligned_store (swap_web_entry *insn_entry, | ||
1727 | 1664 | rtx |
1728 | 1665 | rs6000_gen_lvx (enum machine_mode mode, rtx dest_exp, rtx src_exp) |
1729 | 1666 | { |
1730 | - rtx memory_address = XEXP (src_exp, 0); | |
1731 | 1667 | rtx lvx; |
1732 | 1668 | |
1733 | - if (rs6000_sum_of_two_registers_p (memory_address)) | |
1734 | - { | |
1735 | - rtx op1, op2; | |
1736 | - op1 = XEXP (memory_address, 0); | |
1737 | - op2 = XEXP (memory_address, 1); | |
1738 | - | |
1739 | - if (mode == V16QImode) | |
1740 | - lvx = TARGET_64BIT | |
1741 | - ? gen_altivec_lvx_v16qi_2op (dest_exp, op1, op2) | |
1742 | - : gen_altivec_lvx_v16qi_2op_si (dest_exp, op1, op2); | |
1743 | - else if (mode == V8HImode) | |
1744 | - lvx = TARGET_64BIT | |
1745 | - ? gen_altivec_lvx_v8hi_2op (dest_exp, op1, op2) | |
1746 | - : gen_altivec_lvx_v8hi_2op_si (dest_exp, op1, op2); | |
1747 | -#ifdef HAVE_V8HFmode | |
1748 | - else if (mode == V8HFmode) | |
1749 | - lvx = TARGET_64BIT | |
1750 | - ? gen_altivec_lvx_v8hf_2op (dest_exp, op1, op2) | |
1751 | - : gen_altivec_lvx_v8hf_2op_si (dest_exp, op1, op2); | |
1752 | -#endif | |
1753 | - else if (mode == V4SImode) | |
1754 | - lvx = TARGET_64BIT | |
1755 | - ? gen_altivec_lvx_v4si_2op (dest_exp, op1, op2) | |
1756 | - : gen_altivec_lvx_v4si_2op_si (dest_exp, op1, op2); | |
1757 | - else if (mode == V4SFmode) | |
1758 | - lvx = TARGET_64BIT | |
1759 | - ? gen_altivec_lvx_v4sf_2op (dest_exp, op1, op2) | |
1760 | - : gen_altivec_lvx_v4sf_2op_si (dest_exp, op1, op2); | |
1761 | - else if (mode == V2DImode) | |
1762 | - lvx = TARGET_64BIT | |
1763 | - ? gen_altivec_lvx_v2di_2op (dest_exp, op1, op2) | |
1764 | - : gen_altivec_lvx_v2di_2op_si (dest_exp, op1, op2); | |
1765 | - else if (mode == V2DFmode) | |
1766 | - lvx = TARGET_64BIT | |
1767 | - ? gen_altivec_lvx_v2df_2op (dest_exp, op1, op2) | |
1768 | - : gen_altivec_lvx_v2df_2op_si (dest_exp, op1, op2); | |
1769 | - else if (mode == V1TImode) | |
1770 | - lvx = TARGET_64BIT | |
1771 | - ? gen_altivec_lvx_v1ti_2op (dest_exp, op1, op2) | |
1772 | - : gen_altivec_lvx_v1ti_2op_si (dest_exp, op1, op2); | |
1773 | - else | |
1774 | - /* KFmode, TFmode, other modes not expected in this context. */ | |
1775 | - gcc_unreachable (); | |
1776 | - } | |
1777 | - else /* REG_P (memory_address) */ | |
1778 | - { | |
1779 | - if (mode == V16QImode) | |
1780 | - lvx = TARGET_64BIT | |
1781 | - ? gen_altivec_lvx_v16qi_1op (dest_exp, memory_address) | |
1782 | - : gen_altivec_lvx_v16qi_1op_si (dest_exp, memory_address); | |
1783 | - else if (mode == V8HImode) | |
1784 | - lvx = TARGET_64BIT | |
1785 | - ? gen_altivec_lvx_v8hi_1op (dest_exp, memory_address) | |
1786 | - : gen_altivec_lvx_v8hi_1op_si (dest_exp, memory_address); | |
1669 | + if (mode == V16QImode) | |
1670 | + lvx = gen_altivec_lvx_v16qi (dest_exp, src_exp); | |
1671 | + else if (mode == V8HImode) | |
1672 | + lvx = gen_altivec_lvx_v8hi (dest_exp, src_exp); | |
1787 | 1673 | #ifdef HAVE_V8HFmode |
1788 | - else if (mode == V8HFmode) | |
1789 | - lvx = TARGET_64BIT | |
1790 | - ? gen_altivec_lvx_v8hf_1op (dest_exp, memory_address) | |
1791 | - : gen_altivec_lvx_v8hf_1op_si (dest_exp, memory_address); | |
1674 | + else if (mode == V8HFmode) | |
1675 | + lvx = gen_altivec_lvx_v8hf (dest_exp, src_exp); | |
1792 | 1676 | #endif |
1793 | - else if (mode == V4SImode) | |
1794 | - lvx = TARGET_64BIT | |
1795 | - ? gen_altivec_lvx_v4si_1op (dest_exp, memory_address) | |
1796 | - : gen_altivec_lvx_v4si_1op_si (dest_exp, memory_address); | |
1797 | - else if (mode == V4SFmode) | |
1798 | - lvx = TARGET_64BIT | |
1799 | - ? gen_altivec_lvx_v4sf_1op (dest_exp, memory_address) | |
1800 | - : gen_altivec_lvx_v4sf_1op_si (dest_exp, memory_address); | |
1801 | - else if (mode == V2DImode) | |
1802 | - lvx = TARGET_64BIT | |
1803 | - ? gen_altivec_lvx_v2di_1op (dest_exp, memory_address) | |
1804 | - : gen_altivec_lvx_v2di_1op_si (dest_exp, memory_address); | |
1805 | - else if (mode == V2DFmode) | |
1806 | - lvx = TARGET_64BIT | |
1807 | - ? gen_altivec_lvx_v2df_1op (dest_exp, memory_address) | |
1808 | - : gen_altivec_lvx_v2df_1op_si (dest_exp, memory_address); | |
1809 | - else if (mode == V1TImode) | |
1810 | - lvx = TARGET_64BIT | |
1811 | - ? gen_altivec_lvx_v1ti_1op (dest_exp, memory_address) | |
1812 | - : gen_altivec_lvx_v1ti_1op_si (dest_exp, memory_address); | |
1813 | - else | |
1814 | - /* KFmode, TFmode, other modes not expected in this context. */ | |
1815 | - gcc_unreachable (); | |
1816 | - } | |
1677 | + else if (mode == V4SImode) | |
1678 | + lvx = gen_altivec_lvx_v4si (dest_exp, src_exp); | |
1679 | + else if (mode == V4SFmode) | |
1680 | + lvx = gen_altivec_lvx_v4sf (dest_exp, src_exp); | |
1681 | + else if (mode == V2DImode) | |
1682 | + lvx = gen_altivec_lvx_v2di (dest_exp, src_exp); | |
1683 | + else if (mode == V2DFmode) | |
1684 | + lvx = gen_altivec_lvx_v2df (dest_exp, src_exp); | |
1685 | + else if (mode == V1TImode) | |
1686 | + lvx = gen_altivec_lvx_v1ti (dest_exp, src_exp); | |
1687 | + else | |
1688 | + /* KFmode, TFmode, other modes not expected in this context. */ | |
1689 | + gcc_unreachable (); | |
1817 | 1690 | |
1818 | - rtx new_mem_exp = SET_SRC (lvx); | |
1691 | + rtx new_mem_exp = SET_SRC (PATTERN (lvx)); | |
1819 | 1692 | mimic_memory_attributes_and_flags (new_mem_exp, src_exp); |
1820 | 1693 | |
1821 | 1694 | return lvx; |
@@ -14451,12 +14451,12 @@ altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target, bool blk) | ||
14451 | 14451 | /* For LVX, express the RTL accurately by ANDing the address with -16. |
14452 | 14452 | LVXL and LVE*X expand to use UNSPECs to hide their special behavior, |
14453 | 14453 | so the raw address is fine. */ |
14454 | - if (icode == CODE_FOR_altivec_lvx_v2df_2op | |
14455 | - || icode == CODE_FOR_altivec_lvx_v2di_2op | |
14456 | - || icode == CODE_FOR_altivec_lvx_v4sf_2op | |
14457 | - || icode == CODE_FOR_altivec_lvx_v4si_2op | |
14458 | - || icode == CODE_FOR_altivec_lvx_v8hi_2op | |
14459 | - || icode == CODE_FOR_altivec_lvx_v16qi_2op) | |
14454 | + if (icode == CODE_FOR_altivec_lvx_v2df | |
14455 | + || icode == CODE_FOR_altivec_lvx_v2di | |
14456 | + || icode == CODE_FOR_altivec_lvx_v4sf | |
14457 | + || icode == CODE_FOR_altivec_lvx_v4si | |
14458 | + || icode == CODE_FOR_altivec_lvx_v8hi | |
14459 | + || icode == CODE_FOR_altivec_lvx_v16qi) | |
14460 | 14460 | { |
14461 | 14461 | rtx rawaddr; |
14462 | 14462 | if (op0 == const0_rtx) |
@@ -14609,12 +14609,12 @@ altivec_expand_stv_builtin (enum insn_code icode, tree exp) | ||
14609 | 14609 | /* For STVX, express the RTL accurately by ANDing the address with -16. |
14610 | 14610 | STVXL and STVE*X expand to use UNSPECs to hide their special behavior, |
14611 | 14611 | so the raw address is fine. */ |
14612 | - if (icode == CODE_FOR_altivec_stvx_v2df_2op | |
14613 | - || icode == CODE_FOR_altivec_stvx_v2di_2op | |
14614 | - || icode == CODE_FOR_altivec_stvx_v4sf_2op | |
14615 | - || icode == CODE_FOR_altivec_stvx_v4si_2op | |
14616 | - || icode == CODE_FOR_altivec_stvx_v8hi_2op | |
14617 | - || icode == CODE_FOR_altivec_stvx_v16qi_2op) | |
14612 | + if (icode == CODE_FOR_altivec_stvx_v2df | |
14613 | + || icode == CODE_FOR_altivec_stvx_v2di | |
14614 | + || icode == CODE_FOR_altivec_stvx_v4sf | |
14615 | + || icode == CODE_FOR_altivec_stvx_v4si | |
14616 | + || icode == CODE_FOR_altivec_stvx_v8hi | |
14617 | + || icode == CODE_FOR_altivec_stvx_v16qi) | |
14618 | 14618 | { |
14619 | 14619 | if (op1 == const0_rtx) |
14620 | 14620 | rawaddr = op2; |
@@ -15524,18 +15524,18 @@ altivec_expand_builtin (tree exp, rtx target, bool *expandedp) | ||
15524 | 15524 | switch (fcode) |
15525 | 15525 | { |
15526 | 15526 | case ALTIVEC_BUILTIN_STVX_V2DF: |
15527 | - return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v2df_2op, exp); | |
15527 | + return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v2df, exp); | |
15528 | 15528 | case ALTIVEC_BUILTIN_STVX_V2DI: |
15529 | - return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v2di_2op, exp); | |
15529 | + return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v2di, exp); | |
15530 | 15530 | case ALTIVEC_BUILTIN_STVX_V4SF: |
15531 | - return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v4sf_2op, exp); | |
15531 | + return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v4sf, exp); | |
15532 | 15532 | case ALTIVEC_BUILTIN_STVX: |
15533 | 15533 | case ALTIVEC_BUILTIN_STVX_V4SI: |
15534 | - return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v4si_2op, exp); | |
15534 | + return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v4si, exp); | |
15535 | 15535 | case ALTIVEC_BUILTIN_STVX_V8HI: |
15536 | - return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v8hi_2op, exp); | |
15536 | + return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v8hi, exp); | |
15537 | 15537 | case ALTIVEC_BUILTIN_STVX_V16QI: |
15538 | - return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v16qi_2op, exp); | |
15538 | + return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v16qi, exp); | |
15539 | 15539 | case ALTIVEC_BUILTIN_STVEBX: |
15540 | 15540 | return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp); |
15541 | 15541 | case ALTIVEC_BUILTIN_STVEHX: |
@@ -15806,23 +15806,23 @@ altivec_expand_builtin (tree exp, rtx target, bool *expandedp) | ||
15806 | 15806 | return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl_v16qi, |
15807 | 15807 | exp, target, false); |
15808 | 15808 | case ALTIVEC_BUILTIN_LVX_V2DF: |
15809 | - return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v2df_2op, | |
15809 | + return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v2df, | |
15810 | 15810 | exp, target, false); |
15811 | 15811 | case ALTIVEC_BUILTIN_LVX_V2DI: |
15812 | - return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v2di_2op, | |
15812 | + return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v2di, | |
15813 | 15813 | exp, target, false); |
15814 | 15814 | case ALTIVEC_BUILTIN_LVX_V4SF: |
15815 | - return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v4sf_2op, | |
15815 | + return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v4sf, | |
15816 | 15816 | exp, target, false); |
15817 | 15817 | case ALTIVEC_BUILTIN_LVX: |
15818 | 15818 | case ALTIVEC_BUILTIN_LVX_V4SI: |
15819 | - return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v4si_2op, | |
15819 | + return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v4si, | |
15820 | 15820 | exp, target, false); |
15821 | 15821 | case ALTIVEC_BUILTIN_LVX_V8HI: |
15822 | - return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v8hi_2op, | |
15822 | + return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v8hi, | |
15823 | 15823 | exp, target, false); |
15824 | 15824 | case ALTIVEC_BUILTIN_LVX_V16QI: |
15825 | - return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v16qi_2op, | |
15825 | + return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v16qi, | |
15826 | 15826 | exp, target, false); |
15827 | 15827 | case ALTIVEC_BUILTIN_LVLX: |
15828 | 15828 | return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlx, |
@@ -196,12 +196,7 @@ | ||
196 | 196 | operands[1] = rs6000_address_for_altivec (operands[1]); |
197 | 197 | rtx and_op = XEXP (operands[1], 0); |
198 | 198 | gcc_assert (GET_CODE (and_op) == AND); |
199 | - rtx addr = XEXP (and_op, 0); | |
200 | - if (GET_CODE (addr) == PLUS) | |
201 | - emit_insn (gen_altivec_lvx_<mode>_2op (operands[0], XEXP (addr, 0), | |
202 | - XEXP (addr, 1))); | |
203 | - else | |
204 | - emit_insn (gen_altivec_lvx_<mode>_1op (operands[0], operands[1])); | |
199 | + emit_insn (gen_altivec_lvx_<mode> (operands[0], operands[1])); | |
205 | 200 | DONE; |
206 | 201 | } |
207 | 202 | }) |
@@ -218,12 +213,7 @@ | ||
218 | 213 | operands[0] = rs6000_address_for_altivec (operands[0]); |
219 | 214 | rtx and_op = XEXP (operands[0], 0); |
220 | 215 | gcc_assert (GET_CODE (and_op) == AND); |
221 | - rtx addr = XEXP (and_op, 0); | |
222 | - if (GET_CODE (addr) == PLUS) | |
223 | - emit_insn (gen_altivec_stvx_<mode>_2op (operands[1], XEXP (addr, 0), | |
224 | - XEXP (addr, 1))); | |
225 | - else | |
226 | - emit_insn (gen_altivec_stvx_<mode>_1op (operands[1], operands[0])); | |
216 | + emit_insn (gen_altivec_stvx_<mode> (operands[1], operands[0])); | |
227 | 217 | DONE; |
228 | 218 | } |
229 | 219 | }) |