From 872f6debcae63309eb39bfc2cc9462fb83450ee0 Mon Sep 17 00:00:00 2001
From: Kyle McMartin
Date: Thu, 15 May 2008 10:53:57 -0400
Subject: parisc: use conditional macro for 64-bit wide ops

This work enables us to remove -traditional from $AFLAGS on parisc.

Signed-off-by: Kyle McMartin
---
 arch/parisc/kernel/entry.S   | 46 ++++++++++++-----------------
 arch/parisc/kernel/pacache.S | 70 ++++++++++++++++++++++----------------------
 2 files changed, 54 insertions(+), 62 deletions(-)
(limited to 'arch/parisc')

diff --git a/arch/parisc/kernel/entry.S b/arch/parisc/kernel/entry.S
index 1a3935e61ab7..5d0837458c19 100644
--- a/arch/parisc/kernel/entry.S
+++ b/arch/parisc/kernel/entry.S
@@ -41,16 +41,8 @@
 #include
 
 #ifdef CONFIG_64BIT
-#define CMPIB		cmpib,*
-#define CMPB		cmpb,*
-#define COND(x)		*x
-
 	.level 2.0w
 #else
-#define CMPIB		cmpib,
-#define CMPB		cmpb,
-#define COND(x)		x
-
 	.level 2.0
 #endif
 
@@ -958,9 +950,9 @@ intr_check_sig:
 	 * Only do signals if we are returning to user space
 	 */
 	LDREG	PT_IASQ0(%r16), %r20
-	CMPIB=,n 0,%r20,intr_restore /* backward */
+	cmpib,COND(=),n 0,%r20,intr_restore /* backward */
 	LDREG	PT_IASQ1(%r16), %r20
-	CMPIB=,n 0,%r20,intr_restore /* backward */
+	cmpib,COND(=),n 0,%r20,intr_restore /* backward */
 
 	copy	%r0, %r25			/* long in_syscall = 0 */
 #ifdef CONFIG_64BIT
@@ -1014,10 +1006,10 @@ intr_do_resched:
 	 * we jump back to intr_restore.
 	 */
 	LDREG	PT_IASQ0(%r16), %r20
-	CMPIB=	0, %r20, intr_do_preempt
+	cmpib,COND(=)	0, %r20, intr_do_preempt
 	nop
 	LDREG	PT_IASQ1(%r16), %r20
-	CMPIB=	0, %r20, intr_do_preempt
+	cmpib,COND(=)	0, %r20, intr_do_preempt
 	nop
 
 #ifdef CONFIG_64BIT
@@ -1046,7 +1038,7 @@ intr_do_preempt:
 	/* current_thread_info()->preempt_count */
 	mfctl	%cr30, %r1
 	LDREG	TI_PRE_COUNT(%r1), %r19
-	CMPIB<>	0, %r19, intr_restore	/* if preempt_count > 0 */
+	cmpib,COND(<>)	0, %r19, intr_restore	/* if preempt_count > 0 */
 	nop				/* prev insn branched backwards */
 
 	/* check if we interrupted a critical path */
@@ -1065,7 +1057,7 @@ intr_do_preempt:
 	 */
 
 intr_extint:
-	CMPIB=,n 0,%r16,1f
+	cmpib,COND(=),n 0,%r16,1f
 	get_stack_use_cr30
 	b,n 2f
 
@@ -1100,7 +1092,7 @@ ENDPROC(syscall_exit_rfi)
 
 ENTRY(intr_save)		/* for os_hpmc */
 	mfsp	%sr7,%r16
-	CMPIB=,n 0,%r16,1f
+	cmpib,COND(=),n 0,%r16,1f
 	get_stack_use_cr30
 	b	2f
 	copy	%r8,%r26
@@ -1122,7 +1114,7 @@ ENTRY(intr_save)	/* for os_hpmc */
 	 * adjust isr/ior below.
 	 */
-	CMPIB=,n	6,%r26,skip_save_ior
+	cmpib,COND(=),n	6,%r26,skip_save_ior
 
 	mfctl	%cr20, %r16 /* isr */
@@ -1451,11 +1443,11 @@ nadtlb_emulate:
 	bb,>=,n	%r9,26,nadtlb_nullify	/* m bit not set, just nullify */
 	BL	get_register,%r25
 	extrw,u	%r9,15,5,%r8		/* Get index register # */
-	CMPIB=,n	-1,%r1,nadtlb_fault	/* have to use slow path */
+	cmpib,COND(=),n	-1,%r1,nadtlb_fault	/* have to use slow path */
 	copy	%r1,%r24
 	BL	get_register,%r25
 	extrw,u	%r9,10,5,%r8		/* Get base register # */
-	CMPIB=,n	-1,%r1,nadtlb_fault	/* have to use slow path */
+	cmpib,COND(=),n	-1,%r1,nadtlb_fault	/* have to use slow path */
 	BL	set_register,%r25
 	add,l	%r1,%r24,%r1		/* doesn't affect c/b bits */
 
@@ -1487,7 +1479,7 @@ nadtlb_probe_check:
 	cmpb,<>,n	%r16,%r17,nadtlb_fault /* Must be probe,[rw]*/
 	BL	get_register,%r25	/* Find the target register */
 	extrw,u	%r9,31,5,%r8		/* Get target register */
-	CMPIB=,n	-1,%r1,nadtlb_fault	/* have to use slow path */
+	cmpib,COND(=),n	-1,%r1,nadtlb_fault	/* have to use slow path */
 	BL	set_register,%r25
 	copy	%r0,%r1			/* Write zero to target register */
 	b	nadtlb_nullify		/* Nullify return insn */
@@ -1571,12 +1563,12 @@ dbit_trap_20w:
 	L3_ptep	ptp,pte,t0,va,dbit_fault
 
 #ifdef CONFIG_SMP
-	CMPIB=,n	0,spc,dbit_nolock_20w
+	cmpib,COND(=),n	0,spc,dbit_nolock_20w
 	load32	PA(pa_dbit_lock),t0
 
 dbit_spin_20w:
 	LDCW	0(t0),t1
-	cmpib,=	0,t1,dbit_spin_20w
+	cmpib,COND(=)	0,t1,dbit_spin_20w
 	nop
 
 dbit_nolock_20w:
@@ -1587,7 +1579,7 @@ dbit_nolock_20w:
 	idtlbt	pte,prot
 
 #ifdef CONFIG_SMP
-	CMPIB=,n	0,spc,dbit_nounlock_20w
+	cmpib,COND(=),n	0,spc,dbit_nounlock_20w
 	ldi	1,t1
 	stw	t1,0(t0)
 
@@ -1607,7 +1599,7 @@ dbit_trap_11:
 	L2_ptep	ptp,pte,t0,va,dbit_fault
 
 #ifdef CONFIG_SMP
-	CMPIB=,n	0,spc,dbit_nolock_11
+	cmpib,COND(=),n	0,spc,dbit_nolock_11
 	load32	PA(pa_dbit_lock),t0
 
 dbit_spin_11:
@@ -1629,7 +1621,7 @@ dbit_nolock_11:
 	mtsp	t1, %sr1	/* Restore sr1 */
 
 #ifdef CONFIG_SMP
-	CMPIB=,n	0,spc,dbit_nounlock_11
+	cmpib,COND(=),n	0,spc,dbit_nounlock_11
 	ldi	1,t1
 	stw	t1,0(t0)
 
@@ -1647,7 +1639,7 @@ dbit_trap_20:
 	L2_ptep	ptp,pte,t0,va,dbit_fault
 
 #ifdef CONFIG_SMP
-	CMPIB=,n	0,spc,dbit_nolock_20
+	cmpib,COND(=),n	0,spc,dbit_nolock_20
 	load32	PA(pa_dbit_lock),t0
 
 dbit_spin_20:
@@ -1666,7 +1658,7 @@ dbit_nolock_20:
 	idtlbt	pte,prot
 
 #ifdef CONFIG_SMP
-	CMPIB=,n	0,spc,dbit_nounlock_20
+	cmpib,COND(=),n	0,spc,dbit_nounlock_20
 	ldi	1,t1
 	stw	t1,0(t0)
 
@@ -1995,7 +1987,7 @@ ENTRY(syscall_exit)
 	/* We can't use "CMPIB<> PER_HPUX" since "im5" field is sign extended */
 	ldo	-PER_HPUX(%r19), %r19
-	CMPIB<>,n 0,%r19,1f
+	cmpib,COND(<>),n 0,%r19,1f
 
 	/* Save other hpux returns if personality is PER_HPUX */
 	STREG	%r22,TASK_PT_GR22(%r1)
diff --git a/arch/parisc/kernel/pacache.S b/arch/parisc/kernel/pacache.S
index 7e4a33978907..e3246a5ca74f 100644
--- a/arch/parisc/kernel/pacache.S
+++ b/arch/parisc/kernel/pacache.S
@@ -86,7 +86,7 @@ ENTRY(flush_tlb_all_local)
 	LDREG	ITLB_OFF_COUNT(%r1), %arg2
 	LDREG	ITLB_LOOP(%r1), %arg3
 
-	ADDIB=	-1, %arg3, fitoneloop		/* Preadjust and test */
+	addib,COND(=)	-1, %arg3, fitoneloop	/* Preadjust and test */
 	movb,<,n	%arg3, %r31, fitdone	/* If loop < 0, skip */
 	copy	%arg0, %r28			/* Init base addr */
 
@@ -96,14 +96,14 @@ fitmanyloop:	/* Loop if LOOP >= 2 */
 	copy	%arg2, %r29			/* Init middle loop count */
 
 fitmanymiddle:					/* Loop if LOOP >= 2 */
-	ADDIB>	-1, %r31, fitmanymiddle		/* Adjusted inner loop decr */
+	addib,COND(>)	-1, %r31, fitmanymiddle	/* Adjusted inner loop decr */
 	pitlbe	0(%sr1, %r28)
 	pitlbe,m	%arg1(%sr1, %r28)	/* Last pitlbe and addr adjust */
-	ADDIB>	-1, %r29, fitmanymiddle		/* Middle loop decr */
+	addib,COND(>)	-1, %r29, fitmanymiddle	/* Middle loop decr */
 	copy	%arg3, %r31			/* Re-init inner loop count */
 
 	movb,tr	%arg0, %r28, fitmanyloop	/* Re-init base addr */
-	ADDIB<=,n	-1, %r22, fitdone	/* Outer loop count decr */
+	addib,COND(<=),n	-1, %r22, fitdone	/* Outer loop count decr */
 
 fitoneloop:					/* Loop if LOOP = 1 */
 	mtsp	%r20, %sr1
@@ -111,10 +111,10 @@ fitoneloop:	/* Loop if LOOP = 1 */
 	copy	%arg2, %r29			/* init middle loop count */
 
 fitonemiddle:					/* Loop if LOOP = 1 */
-	ADDIB>	-1, %r29, fitonemiddle		/* Middle loop count decr */
+	addib,COND(>)	-1, %r29, fitonemiddle	/* Middle loop count decr */
 	pitlbe,m	%arg1(%sr1, %r28)	/* pitlbe for one loop */
 
-	ADDIB>	-1, %r22, fitoneloop		/* Outer loop count decr */
+	addib,COND(>)	-1, %r22, fitoneloop	/* Outer loop count decr */
 	add	%r21, %r20, %r20		/* increment space */
 
 fitdone:
@@ -129,7 +129,7 @@ fitdone:
 	LDREG	DTLB_OFF_COUNT(%r1), %arg2
 	LDREG	DTLB_LOOP(%r1), %arg3
 
-	ADDIB=	-1, %arg3, fdtoneloop		/* Preadjust and test */
+	addib,COND(=)	-1, %arg3, fdtoneloop	/* Preadjust and test */
 	movb,<,n	%arg3, %r31, fdtdone	/* If loop < 0, skip */
 	copy	%arg0, %r28			/* Init base addr */
 
@@ -139,14 +139,14 @@ fdtmanyloop:	/* Loop if LOOP >= 2 */
 	copy	%arg2, %r29			/* Init middle loop count */
 
 fdtmanymiddle:					/* Loop if LOOP >= 2 */
-	ADDIB>	-1, %r31, fdtmanymiddle		/* Adjusted inner loop decr */
+	addib,COND(>)	-1, %r31, fdtmanymiddle	/* Adjusted inner loop decr */
 	pdtlbe	0(%sr1, %r28)
 	pdtlbe,m	%arg1(%sr1, %r28)	/* Last pdtlbe and addr adjust */
-	ADDIB>	-1, %r29, fdtmanymiddle		/* Middle loop decr */
+	addib,COND(>)	-1, %r29, fdtmanymiddle	/* Middle loop decr */
 	copy	%arg3, %r31			/* Re-init inner loop count */
 
 	movb,tr	%arg0, %r28, fdtmanyloop	/* Re-init base addr */
-	ADDIB<=,n	-1, %r22,fdtdone	/* Outer loop count decr */
+	addib,COND(<=),n	-1, %r22,fdtdone	/* Outer loop count decr */
 
 fdtoneloop:					/* Loop if LOOP = 1 */
 	mtsp	%r20, %sr1
@@ -154,10 +154,10 @@ fdtoneloop:	/* Loop if LOOP = 1 */
 	copy	%arg2, %r29			/* init middle loop count */
 
 fdtonemiddle:					/* Loop if LOOP = 1 */
-	ADDIB>	-1, %r29, fdtonemiddle		/* Middle loop count decr */
+	addib,COND(>)	-1, %r29, fdtonemiddle	/* Middle loop count decr */
 	pdtlbe,m	%arg1(%sr1, %r28)	/* pdtlbe for one loop */
 
-	ADDIB>	-1, %r22, fdtoneloop		/* Outer loop count decr */
+	addib,COND(>)	-1, %r22, fdtoneloop	/* Outer loop count decr */
 	add	%r21, %r20, %r20		/* increment space */
 
@@ -210,18 +210,18 @@ ENTRY(flush_instruction_cache_local)
 	LDREG	ICACHE_COUNT(%r1), %arg2
 	LDREG	ICACHE_LOOP(%r1), %arg3
 	rsm	PSW_SM_I, %r22			/* No mmgt ops during loop*/
-	ADDIB=	-1, %arg3, fioneloop		/* Preadjust and test */
+	addib,COND(=)	-1, %arg3, fioneloop	/* Preadjust and test */
 	movb,<,n	%arg3, %r31, fisync	/* If loop < 0, do sync */
 
 fimanyloop:					/* Loop if LOOP >= 2 */
-	ADDIB>	-1, %r31, fimanyloop		/* Adjusted inner loop decr */
+	addib,COND(>)	-1, %r31, fimanyloop	/* Adjusted inner loop decr */
 	fice	%r0(%sr1, %arg0)
 	fice,m	%arg1(%sr1, %arg0)		/* Last fice and addr adjust */
 	movb,tr	%arg3, %r31, fimanyloop		/* Re-init inner loop count */
-	ADDIB<=,n	-1, %arg2, fisync	/* Outer loop decr */
+	addib,COND(<=),n	-1, %arg2, fisync	/* Outer loop decr */
 
 fioneloop:					/* Loop if LOOP = 1 */
-	ADDIB>	-1, %arg2, fioneloop		/* Outer loop count decr */
+	addib,COND(>)	-1, %arg2, fioneloop	/* Outer loop count decr */
 	fice,m	%arg1(%sr1, %arg0)		/* Fice for one loop */
 
 fisync:
@@ -251,18 +251,18 @@ ENTRY(flush_data_cache_local)
 	LDREG	DCACHE_COUNT(%r1), %arg2
 	LDREG	DCACHE_LOOP(%r1), %arg3
 	rsm	PSW_SM_I, %r22
-	ADDIB=	-1, %arg3, fdoneloop		/* Preadjust and test */
+	addib,COND(=)	-1, %arg3, fdoneloop	/* Preadjust and test */
 	movb,<,n	%arg3, %r31, fdsync	/* If loop < 0, do sync */
 
 fdmanyloop:					/* Loop if LOOP >= 2 */
-	ADDIB>	-1, %r31, fdmanyloop		/* Adjusted inner loop decr */
+	addib,COND(>)	-1, %r31, fdmanyloop	/* Adjusted inner loop decr */
 	fdce	%r0(%sr1, %arg0)
 	fdce,m	%arg1(%sr1, %arg0)		/* Last fdce and addr adjust */
 	movb,tr	%arg3, %r31, fdmanyloop		/* Re-init inner loop count */
-	ADDIB<=,n	-1, %arg2, fdsync	/* Outer loop decr */
+	addib,COND(<=),n	-1, %arg2, fdsync	/* Outer loop decr */
 
 fdoneloop:					/* Loop if LOOP = 1 */
-	ADDIB>	-1, %arg2, fdoneloop		/* Outer loop count decr */
+	addib,COND(>)	-1, %arg2, fdoneloop	/* Outer loop count decr */
 	fdce,m	%arg1(%sr1, %arg0)		/* Fdce for one loop */
 
 fdsync:
@@ -343,7 +343,7 @@ ENTRY(copy_user_page_asm)
 	 * non-taken backward branch. Note that .+4 is a backwards branch.
 	 * The ldd should only get executed if the branch is taken.
 	 */
-	ADDIB>,n	-1, %r1, 1b		/* bundle 10 */
+	addib,COND(>),n	-1, %r1, 1b		/* bundle 10 */
 	ldd	0(%r25), %r19			/* start next loads */
 
 #else
@@ -392,7 +392,7 @@ ENTRY(copy_user_page_asm)
 	stw	%r21, 56(%r26)
 	stw	%r22, 60(%r26)
 	ldo	64(%r26), %r26
-	ADDIB>,n	-1, %r1, 1b
+	addib,COND(>),n	-1, %r1, 1b
 	ldw	0(%r25), %r19
 #endif
 	bv	%r0(%r2)
@@ -516,7 +516,7 @@ ENTRY(copy_user_page_asm)
 	stw	%r21, 56(%r28)
 	stw	%r22, 60(%r28)
 	ldo	64(%r28), %r28
-	ADDIB>	-1, %r1,1b
+	addib,COND(>)	-1, %r1,1b
 	ldo	64(%r29), %r29
 
 	bv	%r0(%r2)
@@ -575,7 +575,7 @@ ENTRY(__clear_user_page_asm)
 	std	%r0, 104(%r28)
 	std	%r0, 112(%r28)
 	std	%r0, 120(%r28)
-	ADDIB>	-1, %r1, 1b
+	addib,COND(>)	-1, %r1, 1b
 	ldo	128(%r28), %r28
 
 #else	/* ! CONFIG_64BIT */
@@ -598,7 +598,7 @@ ENTRY(__clear_user_page_asm)
 	stw	%r0, 52(%r28)
 	stw	%r0, 56(%r28)
 	stw	%r0, 60(%r28)
-	ADDIB>	-1, %r1, 1b
+	addib,COND(>)	-1, %r1, 1b
 	ldo	64(%r28), %r28
 #endif	/* CONFIG_64BIT */
 
@@ -641,7 +641,7 @@ ENTRY(flush_kernel_dcache_page_asm)
 	fdc,m	%r23(%r26)
 	fdc,m	%r23(%r26)
 	fdc,m	%r23(%r26)
-	CMPB<<	%r26, %r25,1b
+	cmpb,COND(<<)	%r26, %r25,1b
 	fdc,m	%r23(%r26)
 
 	sync
@@ -684,7 +684,7 @@ ENTRY(flush_user_dcache_page)
 	fdc,m	%r23(%sr3, %r26)
 	fdc,m	%r23(%sr3, %r26)
 	fdc,m	%r23(%sr3, %r26)
-	CMPB<<	%r26, %r25,1b
+	cmpb,COND(<<)	%r26, %r25,1b
 	fdc,m	%r23(%sr3, %r26)
 
 	sync
@@ -727,7 +727,7 @@ ENTRY(flush_user_icache_page)
 	fic,m	%r23(%sr3, %r26)
 	fic,m	%r23(%sr3, %r26)
 	fic,m	%r23(%sr3, %r26)
-	CMPB<<	%r26, %r25,1b
+	cmpb,COND(<<)	%r26, %r25,1b
 	fic,m	%r23(%sr3, %r26)
 
 	sync
@@ -770,7 +770,7 @@ ENTRY(purge_kernel_dcache_page)
 	pdc,m	%r23(%r26)
 	pdc,m	%r23(%r26)
 	pdc,m	%r23(%r26)
-	CMPB<<	%r26, %r25, 1b
+	cmpb,COND(<<)	%r26, %r25, 1b
 	pdc,m	%r23(%r26)
 
 	sync
@@ -834,7 +834,7 @@ ENTRY(flush_alias_page)
 	fdc,m	%r23(%r28)
 	fdc,m	%r23(%r28)
 	fdc,m	%r23(%r28)
-	CMPB<<	%r28, %r29, 1b
+	cmpb,COND(<<)	%r28, %r29, 1b
 	fdc,m	%r23(%r28)
 
 	sync
@@ -857,7 +857,7 @@ flush_user_dcache_range_asm:
 	ldo	-1(%r23), %r21
 	ANDCM	%r26, %r21, %r26
 
-1:	CMPB<<,n	%r26, %r25, 1b
+1:	cmpb,COND(<<),n	%r26, %r25, 1b
 	fdc,m	%r23(%sr3, %r26)
 
 	sync
@@ -878,7 +878,7 @@ ENTRY(flush_kernel_dcache_range_asm)
 	ldo	-1(%r23), %r21
 	ANDCM	%r26, %r21, %r26
 
-1:	CMPB<<,n	%r26, %r25,1b
+1:	cmpb,COND(<<),n	%r26, %r25,1b
 	fdc,m	%r23(%r26)
 
 	sync
@@ -900,7 +900,7 @@ ENTRY(flush_user_icache_range_asm)
 	ldo	-1(%r23), %r21
 	ANDCM	%r26, %r21, %r26
 
-1:	CMPB<<,n	%r26, %r25,1b
+1:	cmpb,COND(<<),n	%r26, %r25,1b
 	fic,m	%r23(%sr3, %r26)
 
 	sync
@@ -943,7 +943,7 @@ ENTRY(flush_kernel_icache_page)
 	fic,m	%r23(%sr4, %r26)
 	fic,m	%r23(%sr4, %r26)
 	fic,m	%r23(%sr4, %r26)
-	CMPB<<	%r26, %r25, 1b
+	cmpb,COND(<<)	%r26, %r25, 1b
 	fic,m	%r23(%sr4, %r26)
 
 	sync
@@ -964,7 +964,7 @@ ENTRY(flush_kernel_icache_range_asm)
 	ldo	-1(%r23), %r21
 	ANDCM	%r26, %r21, %r26
 
-1:	CMPB<<,n	%r26, %r25, 1b
+1:	cmpb,COND(<<),n	%r26, %r25, 1b
 	fic,m	%r23(%sr4, %r26)
 
 	sync
--
cgit v1.2.3
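
A note on the macro this patch leans on: the COND() definitions visible in the lines removed from entry.S are what the new cmpib,COND(=), cmpb,COND(<<) and addib,COND(>) spellings expand through; after this change they are expected to come from a shared header rather than being redefined per file (that header is not part of this diff, which is limited to arch/parisc). A minimal sketch of the idea, with the file placement assumed rather than taken from this patch:

	/* Sketch only -- the conditional-completer helper as it appears in the
	 * removed entry.S lines; its eventual home is assumed here.
	 * On 64-bit kernels the completer gets the '*' (wide/doubleword) prefix,
	 * on 32-bit kernels it is used as-is.
	 */
	#ifdef CONFIG_64BIT
	#define COND(x)	*x	/* cmpib,COND(=)  ->  cmpib,*=  (wide compare)   */
	#else
	#define COND(x)	x	/* cmpib,COND(=)  ->  cmpib,=   (narrow compare) */
	#endif

Because COND() is an ordinary function-like cpp macro, the old spellings such as CMPIB=,n and ADDIB>, which pasted the condition straight onto a macro name and so relied on traditional (text-based) preprocessing, are no longer needed; that is what lets -traditional be dropped from $AFLAGS, as the commit message states.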