GCC with patches for OS216


Commit MetaInfo

Revision: 91d014fffae1fcf1eb750d40e622e5b80cd1a4ec (tree)
Time: 2018-03-21 02:25:09
Author: Peter Bergner <bergner@vnet...>
Committer: Peter Bergner

Log Message

re PR target/83789 (builtin_altivec_lvx fails for powerpc for altivec-4.c)

PR target/83789
* config/rs6000/altivec.md (altivec_lvx_<mode>_2op): Delete define_insn.
(altivec_lvx_<mode>_1op): Likewise.
(altivec_stvx_<mode>_2op): Likewise.
(altivec_stvx_<mode>_1op): Likewise.
(altivec_lvx_<VM2:mode>): New define_expand.
(altivec_stvx_<VM2:mode>): Likewise.
(altivec_lvx_<VM2:mode>_2op_<P:mptrsize>): New define_insn.
(altivec_lvx_<VM2:mode>_1op_<P:mptrsize>): Likewise.
(altivec_stvx_<VM2:mode>_2op_<P:mptrsize>): Likewise.
(altivec_stvx_<VM2:mode>_1op_<P:mptrsize>): Likewise.
* config/rs6000/rs6000-p8swap.c (rs6000_gen_stvx): Use new expanders.
(rs6000_gen_lvx): Likewise.
* config/rs6000/rs6000.c (altivec_expand_lv_builtin): Likewise.
(altivec_expand_stv_builtin): Likewise.
(altivec_expand_builtin): Likewise.
* config/rs6000/vector.md: Likewise.

From-SVN: r258688
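
For context, here is a minimal sketch of the kind of source that exercises the expansion paths rerouted by this patch through the new expanders (the function name and body are illustrative only, not the gcc.target/powerpc/altivec-4.c testcase named in the PR; compile with -maltivec):

/* Illustrative sketch only; not the testcase from PR target/83789.
   vec_ld and vec_st expand through the AltiVec lvx/stvx builtins, whose
   insn codes now refer to the altivec_lvx_<mode>/altivec_stvx_<mode>
   define_expands added in the diff below.  */
#include <altivec.h>

vector int
copy_one_vector (const vector int *src, vector int *dst)
{
  vector int v = vec_ld (0, src);  /* should resolve to the LVX_V4SI path  */
  vec_st (v, 0, dst);              /* should resolve to the STVX_V4SI path */
  return v;
}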

Change Summary

Incremental Difference

--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,23 @@
+2018-03-20 Peter Bergner <bergner@vnet.ibm.com>
+
+ PR target/83789
+ * config/rs6000/altivec.md (altivec_lvx_<mode>_2op): Delete define_insn.
+ (altivec_lvx_<mode>_1op): Likewise.
+ (altivec_stvx_<mode>_2op): Likewise.
+ (altivec_stvx_<mode>_1op): Likewise.
+ (altivec_lvx_<VM2:mode>): New define_expand.
+ (altivec_stvx_<VM2:mode>): Likewise.
+ (altivec_lvx_<VM2:mode>_2op_<P:mptrsize>): New define_insn.
+ (altivec_lvx_<VM2:mode>_1op_<P:mptrsize>): Likewise.
+ (altivec_stvx_<VM2:mode>_2op_<P:mptrsize>): Likewise.
+ (altivec_stvx_<VM2:mode>_1op_<P:mptrsize>): Likewise.
+ * config/rs6000/rs6000-p8swap.c (rs6000_gen_stvx): Use new expanders.
+ (rs6000_gen_lvx): Likewise.
+ * config/rs6000/rs6000.c (altivec_expand_lv_builtin): Likewise.
+ (altivec_expand_stv_builtin): Likewise.
+ (altivec_expand_builtin): Likewise.
+ * config/rs6000/vector.md: Likewise.
+
 2018-03-20 Richard Biener <rguenther@suse.de>

 PR target/84986
--- a/gcc/config/rs6000/altivec.md
+++ b/gcc/config/rs6000/altivec.md
@@ -2747,39 +2747,47 @@
 "lvx %0,%y1"
 [(set_attr "type" "vecload")])

-; The next two patterns embody what lvx should usually look like.
-(define_insn "altivec_lvx_<mode>_2op"
- [(set (match_operand:VM2 0 "register_operand" "=v")
- (mem:VM2 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "b")
- (match_operand:DI 2 "register_operand" "r"))
- (const_int -16))))]
- "TARGET_ALTIVEC && TARGET_64BIT"
- "lvx %0,%1,%2"
- [(set_attr "type" "vecload")])
-
-(define_insn "altivec_lvx_<mode>_1op"
- [(set (match_operand:VM2 0 "register_operand" "=v")
- (mem:VM2 (and:DI (match_operand:DI 1 "register_operand" "r")
- (const_int -16))))]
- "TARGET_ALTIVEC && TARGET_64BIT"
- "lvx %0,0,%1"
- [(set_attr "type" "vecload")])
+; The following patterns embody what lvx should usually look like.
+(define_expand "altivec_lvx_<VM2:mode>"
+ [(set (match_operand:VM2 0 "register_operand")
+ (match_operand:VM2 1 "altivec_indexed_or_indirect_operand"))]
+ "TARGET_ALTIVEC"
+{
+ rtx addr = XEXP (operand1, 0);
+ if (rs6000_sum_of_two_registers_p (addr))
+ {
+ rtx op1 = XEXP (addr, 0);
+ rtx op2 = XEXP (addr, 1);
+ if (TARGET_64BIT)
+ emit_insn (gen_altivec_lvx_<VM2:mode>_2op_di (operand0, op1, op2));
+ else
+ emit_insn (gen_altivec_lvx_<VM2:mode>_2op_si (operand0, op1, op2));
+ }
+ else
+ {
+ if (TARGET_64BIT)
+ emit_insn (gen_altivec_lvx_<VM2:mode>_1op_di (operand0, addr));
+ else
+ emit_insn (gen_altivec_lvx_<VM2:mode>_1op_si (operand0, addr));
+ }
+ DONE;
+})

-; 32-bit versions of the above.
-(define_insn "altivec_lvx_<mode>_2op_si"
+; The next two patterns embody what lvx should usually look like.
+(define_insn "altivec_lvx_<VM2:mode>_2op_<P:mptrsize>"
 [(set (match_operand:VM2 0 "register_operand" "=v")
- (mem:VM2 (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b")
- (match_operand:SI 2 "register_operand" "r"))
- (const_int -16))))]
- "TARGET_ALTIVEC && TARGET_32BIT"
+ (mem:VM2 (and:P (plus:P (match_operand:P 1 "register_operand" "b")
+ (match_operand:P 2 "register_operand" "r"))
+ (const_int -16))))]
+ "TARGET_ALTIVEC"
 "lvx %0,%1,%2"
 [(set_attr "type" "vecload")])

-(define_insn "altivec_lvx_<mode>_1op_si"
+(define_insn "altivec_lvx_<VM2:mode>_1op_<P:mptrsize>"
 [(set (match_operand:VM2 0 "register_operand" "=v")
- (mem:VM2 (and:SI (match_operand:SI 1 "register_operand" "r")
- (const_int -16))))]
- "TARGET_ALTIVEC && TARGET_32BIT"
+ (mem:VM2 (and:P (match_operand:P 1 "register_operand" "r")
+ (const_int -16))))]
+ "TARGET_ALTIVEC"
 "lvx %0,0,%1"
 [(set_attr "type" "vecload")])

@@ -2795,39 +2803,47 @@
 "stvx %1,%y0"
 [(set_attr "type" "vecstore")])

-; The next two patterns embody what stvx should usually look like.
-(define_insn "altivec_stvx_<mode>_2op"
- [(set (mem:VM2 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "b")
- (match_operand:DI 2 "register_operand" "r"))
- (const_int -16)))
- (match_operand:VM2 0 "register_operand" "v"))]
- "TARGET_ALTIVEC && TARGET_64BIT"
- "stvx %0,%1,%2"
- [(set_attr "type" "vecstore")])
-
-(define_insn "altivec_stvx_<mode>_1op"
- [(set (mem:VM2 (and:DI (match_operand:DI 1 "register_operand" "r")
- (const_int -16)))
- (match_operand:VM2 0 "register_operand" "v"))]
- "TARGET_ALTIVEC && TARGET_64BIT"
- "stvx %0,0,%1"
- [(set_attr "type" "vecstore")])
+; The following patterns embody what stvx should usually look like.
+(define_expand "altivec_stvx_<VM2:mode>"
+ [(set (match_operand:VM2 1 "altivec_indexed_or_indirect_operand")
+ (match_operand:VM2 0 "register_operand"))]
+ "TARGET_ALTIVEC"
+{
+ rtx addr = XEXP (operand1, 0);
+ if (rs6000_sum_of_two_registers_p (addr))
+ {
+ rtx op1 = XEXP (addr, 0);
+ rtx op2 = XEXP (addr, 1);
+ if (TARGET_64BIT)
+ emit_insn (gen_altivec_stvx_<VM2:mode>_2op_di (operand0, op1, op2));
+ else
+ emit_insn (gen_altivec_stvx_<VM2:mode>_2op_si (operand0, op1, op2));
+ }
+ else
+ {
+ if (TARGET_64BIT)
+ emit_insn (gen_altivec_stvx_<VM2:mode>_1op_di (operand0, addr));
+ else
+ emit_insn (gen_altivec_stvx_<VM2:mode>_1op_si (operand0, addr));
+ }
+ DONE;
+})

-; 32-bit versions of the above.
-(define_insn "altivec_stvx_<mode>_2op_si"
- [(set (mem:VM2 (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b")
- (match_operand:SI 2 "register_operand" "r"))
- (const_int -16)))
- (match_operand:VM2 0 "register_operand" "v"))]
- "TARGET_ALTIVEC && TARGET_32BIT"
+; The next two patterns embody what stvx should usually look like.
+(define_insn "altivec_stvx_<VM2:mode>_2op_<P:mptrsize>"
+ [(set (mem:VM2 (and:P (plus:P (match_operand:P 1 "register_operand" "b")
+ (match_operand:P 2 "register_operand" "r"))
+ (const_int -16)))
+ (match_operand:VM2 0 "register_operand" "v"))]
+ "TARGET_ALTIVEC"
 "stvx %0,%1,%2"
 [(set_attr "type" "vecstore")])

-(define_insn "altivec_stvx_<mode>_1op_si"
- [(set (mem:VM2 (and:SI (match_operand:SI 1 "register_operand" "r")
- (const_int -16)))
- (match_operand:VM2 0 "register_operand" "v"))]
- "TARGET_ALTIVEC && TARGET_32BIT"
+(define_insn "altivec_stvx_<VM2:mode>_1op_<P:mptrsize>"
+ [(set (mem:VM2 (and:P (match_operand:P 1 "register_operand" "r")
+ (const_int -16)))
+ (match_operand:VM2 0 "register_operand" "v"))]
+ "TARGET_ALTIVEC"
 "stvx %0,0,%1"
 [(set_attr "type" "vecstore")])

--- a/gcc/config/rs6000/rs6000-p8swap.c
+++ b/gcc/config/rs6000/rs6000-p8swap.c
@@ -1548,94 +1548,31 @@ mimic_memory_attributes_and_flags (rtx new_mem_exp, const_rtx original_mem_exp)
 rtx
 rs6000_gen_stvx (enum machine_mode mode, rtx dest_exp, rtx src_exp)
 {
- rtx memory_address = XEXP (dest_exp, 0);
 rtx stvx;

- if (rs6000_sum_of_two_registers_p (memory_address))
- {
- rtx op1, op2;
- op1 = XEXP (memory_address, 0);
- op2 = XEXP (memory_address, 1);
- if (mode == V16QImode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v16qi_2op (src_exp, op1, op2)
- : gen_altivec_stvx_v16qi_2op_si (src_exp, op1, op2);
- else if (mode == V8HImode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v8hi_2op (src_exp, op1, op2)
- : gen_altivec_stvx_v8hi_2op_si (src_exp, op1, op2);
-#ifdef HAVE_V8HFmode
- else if (mode == V8HFmode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v8hf_2op (src_exp, op1, op2)
- : gen_altivec_stvx_v8hf_2op_si (src_exp, op1, op2);
-#endif
- else if (mode == V4SImode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v4si_2op (src_exp, op1, op2)
- : gen_altivec_stvx_v4si_2op_si (src_exp, op1, op2);
- else if (mode == V4SFmode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v4sf_2op (src_exp, op1, op2)
- : gen_altivec_stvx_v4sf_2op_si (src_exp, op1, op2);
- else if (mode == V2DImode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v2di_2op (src_exp, op1, op2)
- : gen_altivec_stvx_v2di_2op_si (src_exp, op1, op2);
- else if (mode == V2DFmode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v2df_2op (src_exp, op1, op2)
- : gen_altivec_stvx_v2df_2op_si (src_exp, op1, op2);
- else if (mode == V1TImode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v1ti_2op (src_exp, op1, op2)
- : gen_altivec_stvx_v1ti_2op_si (src_exp, op1, op2);
- else
- /* KFmode, TFmode, other modes not expected in this context. */
- gcc_unreachable ();
- }
- else /* REG_P (memory_address) */
- {
- if (mode == V16QImode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v16qi_1op (src_exp, memory_address)
- : gen_altivec_stvx_v16qi_1op_si (src_exp, memory_address);
- else if (mode == V8HImode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v8hi_1op (src_exp, memory_address)
- : gen_altivec_stvx_v8hi_1op_si (src_exp, memory_address);
+ if (mode == V16QImode)
+ stvx = gen_altivec_stvx_v16qi (src_exp, dest_exp);
+ else if (mode == V8HImode)
+ stvx = gen_altivec_stvx_v8hi (src_exp, dest_exp);
 #ifdef HAVE_V8HFmode
- else if (mode == V8HFmode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v8hf_1op (src_exp, memory_address)
- : gen_altivec_stvx_v8hf_1op_si (src_exp, memory_address);
+ else if (mode == V8HFmode)
+ stvx = gen_altivec_stvx_v8hf (src_exp, dest_exp);
 #endif
- else if (mode == V4SImode)
- stvx =TARGET_64BIT
- ? gen_altivec_stvx_v4si_1op (src_exp, memory_address)
- : gen_altivec_stvx_v4si_1op_si (src_exp, memory_address);
- else if (mode == V4SFmode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v4sf_1op (src_exp, memory_address)
- : gen_altivec_stvx_v4sf_1op_si (src_exp, memory_address);
- else if (mode == V2DImode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v2di_1op (src_exp, memory_address)
- : gen_altivec_stvx_v2di_1op_si (src_exp, memory_address);
- else if (mode == V2DFmode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v2df_1op (src_exp, memory_address)
- : gen_altivec_stvx_v2df_1op_si (src_exp, memory_address);
- else if (mode == V1TImode)
- stvx = TARGET_64BIT
- ? gen_altivec_stvx_v1ti_1op (src_exp, memory_address)
- : gen_altivec_stvx_v1ti_1op_si (src_exp, memory_address);
- else
- /* KFmode, TFmode, other modes not expected in this context. */
- gcc_unreachable ();
- }
+ else if (mode == V4SImode)
+ stvx = gen_altivec_stvx_v4si (src_exp, dest_exp);
+ else if (mode == V4SFmode)
+ stvx = gen_altivec_stvx_v4sf (src_exp, dest_exp);
+ else if (mode == V2DImode)
+ stvx = gen_altivec_stvx_v2di (src_exp, dest_exp);
+ else if (mode == V2DFmode)
+ stvx = gen_altivec_stvx_v2df (src_exp, dest_exp);
+ else if (mode == V1TImode)
+ stvx = gen_altivec_stvx_v1ti (src_exp, dest_exp);
+ else
+ /* KFmode, TFmode, other modes not expected in this context. */
+ gcc_unreachable ();

- rtx new_mem_exp = SET_DEST (stvx);
+ rtx new_mem_exp = SET_DEST (PATTERN (stvx));
 mimic_memory_attributes_and_flags (new_mem_exp, dest_exp);
 return stvx;
 }
@@ -1727,95 +1664,31 @@ replace_swapped_aligned_store (swap_web_entry *insn_entry,
 rtx
 rs6000_gen_lvx (enum machine_mode mode, rtx dest_exp, rtx src_exp)
 {
- rtx memory_address = XEXP (src_exp, 0);
 rtx lvx;

- if (rs6000_sum_of_two_registers_p (memory_address))
- {
- rtx op1, op2;
- op1 = XEXP (memory_address, 0);
- op2 = XEXP (memory_address, 1);
-
- if (mode == V16QImode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v16qi_2op (dest_exp, op1, op2)
- : gen_altivec_lvx_v16qi_2op_si (dest_exp, op1, op2);
- else if (mode == V8HImode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v8hi_2op (dest_exp, op1, op2)
- : gen_altivec_lvx_v8hi_2op_si (dest_exp, op1, op2);
-#ifdef HAVE_V8HFmode
- else if (mode == V8HFmode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v8hf_2op (dest_exp, op1, op2)
- : gen_altivec_lvx_v8hf_2op_si (dest_exp, op1, op2);
-#endif
- else if (mode == V4SImode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v4si_2op (dest_exp, op1, op2)
- : gen_altivec_lvx_v4si_2op_si (dest_exp, op1, op2);
- else if (mode == V4SFmode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v4sf_2op (dest_exp, op1, op2)
- : gen_altivec_lvx_v4sf_2op_si (dest_exp, op1, op2);
- else if (mode == V2DImode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v2di_2op (dest_exp, op1, op2)
- : gen_altivec_lvx_v2di_2op_si (dest_exp, op1, op2);
- else if (mode == V2DFmode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v2df_2op (dest_exp, op1, op2)
- : gen_altivec_lvx_v2df_2op_si (dest_exp, op1, op2);
- else if (mode == V1TImode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v1ti_2op (dest_exp, op1, op2)
- : gen_altivec_lvx_v1ti_2op_si (dest_exp, op1, op2);
- else
- /* KFmode, TFmode, other modes not expected in this context. */
- gcc_unreachable ();
- }
- else /* REG_P (memory_address) */
- {
- if (mode == V16QImode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v16qi_1op (dest_exp, memory_address)
- : gen_altivec_lvx_v16qi_1op_si (dest_exp, memory_address);
- else if (mode == V8HImode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v8hi_1op (dest_exp, memory_address)
- : gen_altivec_lvx_v8hi_1op_si (dest_exp, memory_address);
+ if (mode == V16QImode)
+ lvx = gen_altivec_lvx_v16qi (dest_exp, src_exp);
+ else if (mode == V8HImode)
+ lvx = gen_altivec_lvx_v8hi (dest_exp, src_exp);
 #ifdef HAVE_V8HFmode
- else if (mode == V8HFmode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v8hf_1op (dest_exp, memory_address)
- : gen_altivec_lvx_v8hf_1op_si (dest_exp, memory_address);
+ else if (mode == V8HFmode)
+ lvx = gen_altivec_lvx_v8hf (dest_exp, src_exp);
 #endif
- else if (mode == V4SImode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v4si_1op (dest_exp, memory_address)
- : gen_altivec_lvx_v4si_1op_si (dest_exp, memory_address);
- else if (mode == V4SFmode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v4sf_1op (dest_exp, memory_address)
- : gen_altivec_lvx_v4sf_1op_si (dest_exp, memory_address);
- else if (mode == V2DImode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v2di_1op (dest_exp, memory_address)
- : gen_altivec_lvx_v2di_1op_si (dest_exp, memory_address);
- else if (mode == V2DFmode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v2df_1op (dest_exp, memory_address)
- : gen_altivec_lvx_v2df_1op_si (dest_exp, memory_address);
- else if (mode == V1TImode)
- lvx = TARGET_64BIT
- ? gen_altivec_lvx_v1ti_1op (dest_exp, memory_address)
- : gen_altivec_lvx_v1ti_1op_si (dest_exp, memory_address);
- else
- /* KFmode, TFmode, other modes not expected in this context. */
- gcc_unreachable ();
- }
+ else if (mode == V4SImode)
+ lvx = gen_altivec_lvx_v4si (dest_exp, src_exp);
+ else if (mode == V4SFmode)
+ lvx = gen_altivec_lvx_v4sf (dest_exp, src_exp);
+ else if (mode == V2DImode)
+ lvx = gen_altivec_lvx_v2di (dest_exp, src_exp);
+ else if (mode == V2DFmode)
+ lvx = gen_altivec_lvx_v2df (dest_exp, src_exp);
+ else if (mode == V1TImode)
+ lvx = gen_altivec_lvx_v1ti (dest_exp, src_exp);
+ else
+ /* KFmode, TFmode, other modes not expected in this context. */
+ gcc_unreachable ();

- rtx new_mem_exp = SET_SRC (lvx);
+ rtx new_mem_exp = SET_SRC (PATTERN (lvx));
 mimic_memory_attributes_and_flags (new_mem_exp, src_exp);

 return lvx;
--- a/gcc/config/rs6000/rs6000.c
+++ b/gcc/config/rs6000/rs6000.c
@@ -14451,12 +14451,12 @@ altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target, bool blk)
 /* For LVX, express the RTL accurately by ANDing the address with -16.
 LVXL and LVE*X expand to use UNSPECs to hide their special behavior,
 so the raw address is fine. */
- if (icode == CODE_FOR_altivec_lvx_v2df_2op
- || icode == CODE_FOR_altivec_lvx_v2di_2op
- || icode == CODE_FOR_altivec_lvx_v4sf_2op
- || icode == CODE_FOR_altivec_lvx_v4si_2op
- || icode == CODE_FOR_altivec_lvx_v8hi_2op
- || icode == CODE_FOR_altivec_lvx_v16qi_2op)
+ if (icode == CODE_FOR_altivec_lvx_v2df
+ || icode == CODE_FOR_altivec_lvx_v2di
+ || icode == CODE_FOR_altivec_lvx_v4sf
+ || icode == CODE_FOR_altivec_lvx_v4si
+ || icode == CODE_FOR_altivec_lvx_v8hi
+ || icode == CODE_FOR_altivec_lvx_v16qi)
 {
 rtx rawaddr;
 if (op0 == const0_rtx)
@@ -14609,12 +14609,12 @@ altivec_expand_stv_builtin (enum insn_code icode, tree exp)
 /* For STVX, express the RTL accurately by ANDing the address with -16.
 STVXL and STVE*X expand to use UNSPECs to hide their special behavior,
 so the raw address is fine. */
- if (icode == CODE_FOR_altivec_stvx_v2df_2op
- || icode == CODE_FOR_altivec_stvx_v2di_2op
- || icode == CODE_FOR_altivec_stvx_v4sf_2op
- || icode == CODE_FOR_altivec_stvx_v4si_2op
- || icode == CODE_FOR_altivec_stvx_v8hi_2op
- || icode == CODE_FOR_altivec_stvx_v16qi_2op)
+ if (icode == CODE_FOR_altivec_stvx_v2df
+ || icode == CODE_FOR_altivec_stvx_v2di
+ || icode == CODE_FOR_altivec_stvx_v4sf
+ || icode == CODE_FOR_altivec_stvx_v4si
+ || icode == CODE_FOR_altivec_stvx_v8hi
+ || icode == CODE_FOR_altivec_stvx_v16qi)
 {
 if (op1 == const0_rtx)
 rawaddr = op2;
@@ -15524,18 +15524,18 @@ altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
 switch (fcode)
 {
 case ALTIVEC_BUILTIN_STVX_V2DF:
- return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v2df_2op, exp);
+ return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v2df, exp);
 case ALTIVEC_BUILTIN_STVX_V2DI:
- return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v2di_2op, exp);
+ return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v2di, exp);
 case ALTIVEC_BUILTIN_STVX_V4SF:
- return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v4sf_2op, exp);
+ return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v4sf, exp);
 case ALTIVEC_BUILTIN_STVX:
 case ALTIVEC_BUILTIN_STVX_V4SI:
- return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v4si_2op, exp);
+ return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v4si, exp);
 case ALTIVEC_BUILTIN_STVX_V8HI:
- return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v8hi_2op, exp);
+ return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v8hi, exp);
 case ALTIVEC_BUILTIN_STVX_V16QI:
- return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v16qi_2op, exp);
+ return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v16qi, exp);
 case ALTIVEC_BUILTIN_STVEBX:
 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
 case ALTIVEC_BUILTIN_STVEHX:
@@ -15806,23 +15806,23 @@ altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl_v16qi,
 exp, target, false);
 case ALTIVEC_BUILTIN_LVX_V2DF:
- return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v2df_2op,
+ return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v2df,
 exp, target, false);
 case ALTIVEC_BUILTIN_LVX_V2DI:
- return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v2di_2op,
+ return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v2di,
 exp, target, false);
 case ALTIVEC_BUILTIN_LVX_V4SF:
- return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v4sf_2op,
+ return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v4sf,
 exp, target, false);
 case ALTIVEC_BUILTIN_LVX:
 case ALTIVEC_BUILTIN_LVX_V4SI:
- return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v4si_2op,
+ return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v4si,
 exp, target, false);
 case ALTIVEC_BUILTIN_LVX_V8HI:
- return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v8hi_2op,
+ return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v8hi,
 exp, target, false);
 case ALTIVEC_BUILTIN_LVX_V16QI:
- return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v16qi_2op,
+ return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v16qi,
 exp, target, false);
 case ALTIVEC_BUILTIN_LVLX:
 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlx,
--- a/gcc/config/rs6000/vector.md
+++ b/gcc/config/rs6000/vector.md
@@ -196,12 +196,7 @@
 operands[1] = rs6000_address_for_altivec (operands[1]);
 rtx and_op = XEXP (operands[1], 0);
 gcc_assert (GET_CODE (and_op) == AND);
- rtx addr = XEXP (and_op, 0);
- if (GET_CODE (addr) == PLUS)
- emit_insn (gen_altivec_lvx_<mode>_2op (operands[0], XEXP (addr, 0),
- XEXP (addr, 1)));
- else
- emit_insn (gen_altivec_lvx_<mode>_1op (operands[0], operands[1]));
+ emit_insn (gen_altivec_lvx_<mode> (operands[0], operands[1]));
 DONE;
 }
 })
@@ -218,12 +213,7 @@
 operands[0] = rs6000_address_for_altivec (operands[0]);
 rtx and_op = XEXP (operands[0], 0);
 gcc_assert (GET_CODE (and_op) == AND);
- rtx addr = XEXP (and_op, 0);
- if (GET_CODE (addr) == PLUS)
- emit_insn (gen_altivec_stvx_<mode>_2op (operands[1], XEXP (addr, 0),
- XEXP (addr, 1)));
- else
- emit_insn (gen_altivec_stvx_<mode>_1op (operands[1], operands[0]));
+ emit_insn (gen_altivec_stvx_<mode> (operands[1], operands[0]));
 DONE;
 }
 })