Searched refs:t2 (Results 1 - 25 of 78) sorted by relevance


/arch/alpha/include/asm/
word-at-a-time.h  41 unsigned long t1, t2, t3;
46 t2 = bits & 0xcc;
49 if (t2) t2 = 2;
51 return t1 + t2 + t3;
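The word-at-a-time.h hit above is the branch-free "index of the first zero byte" computation: t1, t2 and t3 each test one bit of the byte index. A minimal C sketch of the same idea (hypothetical helper name, not the kernel's exact code, assuming a cmpbge-style per-byte zero mask as input):

```c
/* bits has bit n set iff byte n of the original word was zero
 * (e.g. the result of Alpha's cmpbge against zero). */
static unsigned long first_zero_byte_index(unsigned long bits)
{
	unsigned long t1, t2, t3;

	bits &= -bits;          /* keep only the lowest set bit            */
	t1 = bits & 0xf0;       /* nonzero iff the index is >= 4           */
	t2 = bits & 0xcc;       /* nonzero iff bit 1 of the index is set   */
	t3 = bits & 0xaa;       /* nonzero iff bit 0 of the index is set   */
	if (t1) t1 = 4;
	if (t2) t2 = 2;
	if (t3) t3 = 1;
	return t1 + t2 + t3;    /* byte index 0..7 of the first zero byte  */
}
```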
/arch/parisc/lib/
fixup.S  28 .macro get_fault_ip t1 t2
31 /* t2 = smp_processor_id() */
32 mfctl 30,\t2
33 ldw TI_CPU(\t2),\t2
35 extrd,u \t2,63,32,\t2
37 /* t2 = &__per_cpu_offset[smp_processor_id()]; */
38 LDREGX \t2(\t1),\t2
[all...]
/arch/x86/purgatory/
sha256.c  50 u32 a, b, c, d, e, f, g, h, t1, t2; local
68 t2 = e0(a) + Maj(a, b, c); d += t1; h = t1 + t2;
70 t2 = e0(h) + Maj(h, a, b); c += t1; g = t1 + t2;
72 t2 = e0(g) + Maj(g, h, a); b += t1; f = t1 + t2;
74 t2 = e0(f) + Maj(f, g, h); a += t1; e = t1 + t2;
76 t2
[all...]
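The purgatory sha256.c matches are the two standard SHA-256 round temporaries: t1 accumulates h + Σ1(e) + Ch(e,f,g) + K[i] + W[i], t2 accumulates Σ0(a) + Maj(a,b,c), and the unrolled source rotates the variable names each round, which is why every listed line ends in a different "d += t1; h = t1 + t2" pairing. A hedged sketch of one round in rolled form (helper names follow FIPS 180-4, not necessarily the file's own):

```c
#include <stdint.h>

static inline uint32_t ror32(uint32_t v, unsigned n) { return (v >> n) | (v << (32 - n)); }

static inline uint32_t Ch(uint32_t x, uint32_t y, uint32_t z)  { return z ^ (x & (y ^ z)); }
static inline uint32_t Maj(uint32_t x, uint32_t y, uint32_t z) { return (x & y) | (z & (x | y)); }
static inline uint32_t e0(uint32_t x) { return ror32(x, 2) ^ ror32(x, 13) ^ ror32(x, 22); } /* Sigma0 */
static inline uint32_t e1(uint32_t x) { return ror32(x, 6) ^ ror32(x, 11) ^ ror32(x, 25); } /* Sigma1 */

/* One SHA-256 round over the state a..h kept in s[0..7]. */
static void sha256_round(uint32_t s[8], uint32_t k, uint32_t w)
{
	uint32_t t1 = s[7] + e1(s[4]) + Ch(s[4], s[5], s[6]) + k + w;
	uint32_t t2 = e0(s[0]) + Maj(s[0], s[1], s[2]);

	s[7] = s[6]; s[6] = s[5]; s[5] = s[4];
	s[4] = s[3] + t1;              /* e = d + t1 */
	s[3] = s[2]; s[2] = s[1]; s[1] = s[0];
	s[0] = t1 + t2;                /* a = t1 + t2 */
}
```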
/arch/mips/kernel/
octeon_switch.S  37 and t2, t0, t1
38 beqz t2, 1f
60 PTR_L t2, TASK_THREAD_INFO(a0)
61 LONG_L t0, ST_OFF(t2)
67 LONG_S t0, ST_OFF(t2)
97 LONG_ADDI t2, a0, THREAD_CVMSEG /* Where to store CVMSEG to */
105 LONG_S t8, 0(t2) /* Store CVMSEG to thread storage */
106 LONG_ADDU t2, LONGSIZE*2 /* Increment loc in thread storage */
108 LONG_S t9, -LONGSIZE(t2)/* Store CVMSEG to thread storage */
132 set_saved_sp t0, t1, t2
[all...]
cps-vec.S  99 xori t2, t1, 0x7
100 beqz t2, 1f
105 _EXT t2, v0, MIPS_CONF1_IA_SHF, MIPS_CONF1_IA_SZ
106 addi t2, t2, 1
108 mul t1, t1, t2
126 xori t2, t1, 0x7
127 beqz t2, 1f
132 _EXT t2, v0, MIPS_CONF1_DA_SHF, MIPS_CONF1_DA_SZ
133 addi t2, t
[all...]
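The cps-vec.S lines size the L1 caches from CP0 Config1: the `xori t2, t1, 0x7` / `beqz` pair special-cases the sets-per-way code 7, the `_EXT ... MIPS_CONF1_IA_SHF` extract plus `addi t2, t2, 1` turns the associativity field into a way count, and the `mul` combines sets, ways and line size. A hedged C sketch of that arithmetic, assuming the usual MIPS32 Config1 field encodings:

```c
#include <stdint.h>

/* Field layout assumed from the MIPS32 architecture manuals:
 * IS = bits 24:22 (sets per way), IL = 21:19 (line size), IA = 18:16 (ways - 1). */
static uint32_t icache_bytes_from_config1(uint32_t config1)
{
	uint32_t is = (config1 >> 22) & 0x7;
	uint32_t il = (config1 >> 19) & 0x7;
	uint32_t ia = (config1 >> 16) & 0x7;
	uint32_t line, sets, ways;

	if (il == 0)
		return 0;                     /* no I-cache present        */
	line = 2u << il;                      /* 4..256 bytes per line     */
	sets = (is == 7) ? 32 : (64u << is);  /* code 7 is the small case  */
	ways = ia + 1;

	return sets * ways * line;
}
```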
/arch/mips/netlogic/common/
smpboot.S  65 dla t2, nlm_boot_siblings
66 dsubu t2, t1
67 daddu t2, t0
69 jalr t2
109 andi t2, t0, 0x3 /* thread num */
118 bnez t2, 1f /* skip thread programming */
126 li t2, 6 /* XLR thread mode mask */
127 nor t3, t2, zero
128 and t2, t1, t2 /* t
[all...]
reset.S  63 lui t2, 0xc080 /* SUE, Enable Unaligned Access, L2HPE */
64 or t1, t1, t2
104 li t2, 0 /* index */
107 sll v0, t2, 5
124 addi t2, 1
125 bne t3, t2, 11b
189 li t2, 0x40000
190 mul t3, t2, t1 /* t3 = node * 0x40000 */
196 li t2, SYS_CPU_COHERENT_BASE
197 add t2, t
[all...]
/arch/alpha/include/uapi/asm/
swab.h  26 __u64 t0, t1, t2, t3; local
31 t2 = t1 >> 16; /* t2 : 0000000000CCDDAA */
33 t3 = t2 & 0x00FF00FF; /* t3 : 0000000000CC00AA */
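The swab.h hit is the Alpha __arch_swab32 sequence: it shuffles AABBCCDD into DDCCBBAA through 64-bit temporaries, with the intermediate layouts spelled out in the comments. A portable sketch of the same end result with plain shifts and masks (not the Alpha instruction sequence):

```c
#include <stdint.h>

/* AABBCCDD -> DDCCBBAA, using two temporaries for illustration. */
static inline uint32_t swab32_sketch(uint32_t x)
{
	uint32_t t0, t1;

	t0 = (x >> 24) | (x << 24);       /* t0 : DD0000AA */
	t1 = (x & 0x00ff0000u) >> 8;      /* t1 : 0000BB00 */
	t1 |= (x & 0x0000ff00u) << 8;     /* t1 : 00CCBB00 */
	return t0 | t1;                   /*      DDCCBBAA */
}
```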
regdef.h  8 #define t2 $3 macro
/arch/mips/dec/
int-handler.S  134 lw t2,cpu_fpu_mask
142 and t2,t0
143 bnez t2,fpu # handle FPU immediately
150 1: lw t2,(t1)
152 and t2,t0
153 beqz t2,1b
166 lui t2,(KN03_IOASIC_BASE>>16)&0xffff
183 lui t2,(KN02XA_IOASIC_BASE>>16)&0xffff
186 FEXPORT(kn03_io_int) # 3max+ (t2 loaded earlier)
187 lw t0,IO_REG_SIR(t2) # ge
[all...]
/arch/x86/crypto/
glue_helper-asm-avx2.S  61 t1x, t2, t2x, t3, t3x, t4, t5) \
71 vinserti128 $1, t2x, t3, t2; /* ab: le0 ; cd: le1 */ \
72 vpshufb t1, t2, x0; \
75 add2_le128(t2, t0, t4, t3, t5); /* ab: le2 ; cd: le3 */ \
76 vpshufb t1, t2, x1; \
77 add2_le128(t2, t0, t4, t3, t5); \
78 vpshufb t1, t2, x2; \
79 add2_le128(t2, t0, t4, t3, t5); \
80 vpshufb t1, t2, x3; \
81 add2_le128(t2, t
[all...]
glue_helper-asm-avx.S  54 #define load_ctr_8way(iv, bswap, x0, x1, x2, x3, x4, x5, x6, x7, t0, t1, t2) \
64 inc_le128(x7, t0, t2); \
66 inc_le128(x7, t0, t2); \
68 inc_le128(x7, t0, t2); \
70 inc_le128(x7, t0, t2); \
72 inc_le128(x7, t0, t2); \
74 inc_le128(x7, t0, t2); \
76 inc_le128(x7, t0, t2); \
77 vmovdqa x7, t2; \
79 inc_le128(t2, t
[all...]
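Both glue_helper listings are the CTR-mode block loaders: load_ctr_8way materialises eight consecutive counter values by repeatedly applying inc_le128 (increment a 128-bit counter kept in little-endian limb order, with carry) and byte-swapping each result for the cipher. A hedged C sketch of the counter arithmetic only (names invented here, byte-swapping omitted):

```c
#include <stdint.h>

struct le128 { uint64_t lo, hi; };   /* little-endian limb order */

static inline void inc_le128(struct le128 *ctr)
{
	if (++ctr->lo == 0)              /* carry out of the low half */
		ctr->hi++;
}

/* Produce eight consecutive counter blocks, advancing the IV as a side effect. */
static void load_ctr_8way_sketch(struct le128 *iv, struct le128 out[8])
{
	for (int i = 0; i < 8; i++) {
		out[i] = *iv;            /* the real code also shuffles to big-endian */
		inc_le128(iv);
	}
}
```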
/arch/mips/include/asm/mach-malta/
kernel-entry-init.h  35 * The following code uses the t0, t1, t2 and ra registers without
49 move t2, t1 variable
50 ins t2, t1, 16, 3 variable
58 or t0, t2
79 or t0, t2
84 li t2, 0x40000000 /* K bit */ variable
85 or t0, t0, t2
/arch/mips/include/asm/mach-ip27/
kernel-entry-init.h  35 * inputs are the text nasid in t1, data nasid in t2.
48 dsll t2, NASID_SHFT # Same for data nasid variable
50 or t2, t2, t0 # Physical load address of kernel data variable
52 dsrl t2, 12 # 4K pfn variable
54 dsll t2, 6 # Get pfn into place variable
59 or t0, t0, t2
79 move t2, t1 # text and data are here
94 lh t2, KV_RW_NASID_OFFSET(t0) variable
/arch/alpha/lib/
ev6-stxcpy.S  57 lda t2, -1 # E : build a mask against false zero
58 mskqh t2, a1, t2 # U : detection in the src word (stall)
60 ornot t1, t2, t2 # E : (stall)
63 cmpbge zero, t2, t8 # E : bits set iff null found
154 ldq_u t2, 8(a1) # L :
157 extqh t2, a1, t4 # U : (stall on a1)
170 or t6, t2, t2 #
[all...]
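The Alpha stxcpy/stxncpy variants all open with the same idiom flagged in these matches: "lda t2, -1; mskqh t2, a1, t2; ornot t1, t2, t2" forces the bytes loaded from before an unaligned source pointer to 0xff, so the following cmpbge cannot mistake them for the terminating NUL. A hedged C sketch of that masking step (little-endian layout, as on Alpha; helper name invented here):

```c
#include <stdint.h>

/* Force the bytes that precede the real string start (the low-order bytes of
 * the first aligned load on little-endian) to 0xff before zero-byte detection. */
static inline uint64_t mask_false_zeroes(uint64_t first_word, uintptr_t src)
{
	unsigned lead_bits = (unsigned)(src & 7) * 8;
	uint64_t ones_below = lead_bits ? (~0ULL >> (64 - lead_bits)) : 0;

	return first_word | ones_below;
}
```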
stxcpy.S  46 lda t2, -1 # e1 : build a mask against false zero
47 mskqh t2, a1, t2 # e0 : detection in the src word
49 ornot t1, t2, t2 # .. e1 :
51 cmpbge zero, t2, t8 # .. e1 : bits set iff null found
131 ldq_u t2, 8(a1) # e0 :
135 extqh t2, a1, t4 # e0 :
147 or t6, t2, t2 # e
[all...]
strchr.S  28 cmpbge zero, t0, t2 # .. e1 : bits set iff byte == zero
36 or t2, t3, t0 # e1 : bits set iff char match or zero match
44 cmpbge zero, t0, t2 # e0 : bits set iff byte == 0
46 or t2, t3, t0 # e0 :
55 and t0, 0xf0, t2 # e0 : binary search for that set bit
58 cmovne t2, 4, t2 # .. e1 :
61 addq t2, t3, t2 # e0 :
63 addq v0, t2, v
[all...]
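strchr.S shows the other half of the word-at-a-time pattern: one cmpbge mask for bytes equal to the target character, one for NUL bytes, OR them together, then binary-search the first set bit (the `and t0, 0xf0, t2` / `cmovne` lines). A hedged portable sketch of the same scan, assuming a little-endian machine, word-aligned NUL-terminated input, and a GCC/Clang builtin for the bit scan:

```c
#include <stddef.h>
#include <stdint.h>

#define ONES  0x0101010101010101ULL
#define HIGHS 0x8080808080808080ULL

/* High bit set in every byte of x that is zero; the lowest set bit always
 * marks a genuine zero byte (higher bits may be false positives). */
static inline uint64_t zero_bytes(uint64_t x)
{
	return (x - ONES) & ~x & HIGHS;
}

static const char *strchr_word_sketch(const uint64_t *p, unsigned char c)
{
	uint64_t pattern = c * ONES;                 /* broadcast c to all bytes */

	for (;;) {
		uint64_t w = *p;
		uint64_t hit = zero_bytes(w) | zero_bytes(w ^ pattern);

		if (hit) {
			const char *s = (const char *)p + (__builtin_ctzll(hit) >> 3);
			return (*s == (char)c) ? s : NULL;   /* match, or we hit the NUL */
		}
		p++;
	}
}
```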
ev6-stxncpy.S  65 lda t2, -1 # E : build a mask against false zero
66 mskqh t2, a1, t2 # U : detection in the src word (stall)
68 ornot t1, t2, t2 # E : (stall)
71 cmpbge zero, t2, t8 # E : bits set iff null found
159 and a2, 7, t2 # E : (stall)
163 sll t10, t2, t10 # U : t10 = bitmask of last count byte
196 ldq_u t2, 8(a1) # L : Latency=3 load second src word
199 extqh t2, a
[all...]
ev67-strrchr.S  34 and a1, 0xff, t2 # E : 00000000000000ch
40 or t2, t4, a1 # E : 000000000000chch
46 sll a1, 32, t2 # U : 0000chch00000000
50 or t2, t3, t2 # E : 0000chchchch0000
51 or a1, t2, a1 # E : chchchchchchchch
56 xor t0, a1, t2 # E : make bytes == c zero
59 cmpbge zero, t2, t3 # E : bits set iff byte == c
74 xor t0, a1, t2 # E :
77 cmpbge zero, t2, t
[all...]
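The ev67-strrchr.S matches are the byte-broadcast step: the character is doubled up by shift-and-OR until it fills the word (the 00000000000000ch → chchchchchchchch comments), after which XOR turns matching bytes into zero for cmpbge. One way to write the replication in C:

```c
#include <stdint.h>

/* Spread one byte value into all eight bytes of a 64-bit word. */
static inline uint64_t broadcast_byte(uint8_t c)
{
	uint64_t v = c;       /* 00000000000000ch */

	v |= v << 8;          /* 000000000000chch */
	v |= v << 16;         /* 00000000chchchch */
	v |= v << 32;         /* chchchchchchchch */
	return v;             /* word ^ v has zero bytes wherever word == c */
}
```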
stxncpy.S  54 lda t2, -1 # e1 : build a mask against false zero
55 mskqh t2, a1, t2 # e0 : detection in the src word
57 ornot t1, t2, t2 # .. e1 :
59 cmpbge zero, t2, t8 # .. e1 : bits set iff null found
126 and a2, 7, t2 # e1 :
129 sll t10, t2, t10 # e0 : t10 = bitmask of last count byte
157 ldq_u t2, 8(a1) # e0 : load second src word
160 extqh t2, a
[all...]
strrchr.S  35 xor t0, a1, t2 # e0 : make bytes == c zero
37 cmpbge zero, t2, t3 # e0 : bits set iff byte == c
48 xor t0, a1, t2 # e0 :
50 cmpbge zero, t2, t3 # e0 : bits set iff byte == c
70 and t8, 0xf0, t2 # e0 : binary search for the high bit set
71 cmovne t2, t2, t8 # .. e1 (zdb)
72 cmovne t2, 4, t2 # e0 :
78 addq t2, t
[all...]
/arch/unicore32/mm/
proc-macros.S  137 .macro dcacheline_flush, addr, t1, t2
139 ldw \t2, =_stext @ _stext must ALIGN(4096)
140 add \t2, \t2, \t1 >> #20
141 ldw \t1, [\t2+], #0x0000
142 ldw \t1, [\t2+], #0x1000
143 ldw \t1, [\t2+], #0x2000
144 ldw \t1, [\t2+], #0x3000
/arch/arm/crypto/
sha256-armv4.pl  56 $t2="r12";
74 add $a,$a,$t2 @ h+=Maj(a,b,c) from the past
81 add $a,$a,$t2 @ h+=Maj(a,b,c) from the past
82 ldrb $t2,[$inp,#2]
84 orr $t1,$t1,$t2,lsl#8
85 ldrb $t2,[$inp],#4
91 orr $t1,$t1,$t2,lsl#24
96 ldr $t2,[$Ktbl],#4 @ *K256++
102 add $h,$h,$t2 @ h+=K256[i]
107 and $t2,
[all...]
/arch/mips/lib/
csum_partial.S  26 #undef t2
30 #define t2 $10 define
111 move t2, a1
188 andi t2, a1, 0x40
202 beqz t2, 1f
203 andi t2, a1, 0x20
211 beqz t2, .Ldo_end_words
221 andi t2, a1, 0x3
235 move a1, t2
260 lbu t2, (sr
[all...]
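csum_partial.S redefines t2 locally and uses it while accumulating the Internet checksum: a wide running sum of 16-bit words that is later folded back to 16 bits with end-around carry. A simplified, hedged C sketch of that accumulation and fold (the assembly additionally handles alignment, endianness and large unrolled blocks):

```c
#include <stddef.h>
#include <stdint.h>

static uint32_t csum_sketch(const uint8_t *buf, size_t len, uint32_t sum)
{
	while (len >= 2) {
		sum += (uint32_t)buf[0] << 8 | buf[1];   /* one 16-bit word */
		buf += 2;
		len -= 2;
	}
	if (len)
		sum += (uint32_t)buf[0] << 8;            /* trailing odd byte */

	while (sum >> 16)                                /* end-around carry fold */
		sum = (sum & 0xffff) + (sum >> 16);
	return sum;
}
```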
/arch/sparc/lib/
memcpy.S  17 #define MOVE_BIGCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
19 ldd [%src + (offset) + 0x08], %t2; \
24 st %t2, [%dst + (offset) + 0x08]; \
31 #define MOVE_BIGALIGNCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
33 ldd [%src + (offset) + 0x08], %t2; \
37 std %t2, [%dst + (offset) + 0x08]; \
41 #define MOVE_LASTCHUNK(src, dst, offset, t0, t1, t2, t3) \
43 ldd [%src - (offset) - 0x08], %t2; \
46 st %t2, [%dst - (offset) - 0x08]; \
49 #define MOVE_LASTALIGNCHUNK(src, dst, offset, t0, t1, t2, t
[all...]
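The sparc memcpy.S macros are classic unrolled block copies: MOVE_BIGCHUNK loads eight registers' worth of data (t0..t7) before storing any of it, so loads and stores can overlap, and MOVE_LASTCHUNK mops up the remainder. A hedged C sketch of the structure (word-granular only; the real macros use ldd/std double-word pairs at fixed offsets):

```c
#include <stddef.h>
#include <stdint.h>

static void copy_words_unrolled(uint64_t *dst, const uint64_t *src, size_t words)
{
	size_t i;

	for (i = 0; i + 8 <= words; i += 8) {       /* MOVE_BIGCHUNK-style body */
		uint64_t t0 = src[i + 0], t1 = src[i + 1];
		uint64_t t2 = src[i + 2], t3 = src[i + 3];
		uint64_t t4 = src[i + 4], t5 = src[i + 5];
		uint64_t t6 = src[i + 6], t7 = src[i + 7];

		dst[i + 0] = t0; dst[i + 1] = t1;
		dst[i + 2] = t2; dst[i + 3] = t3;
		dst[i + 4] = t4; dst[i + 5] = t5;
		dst[i + 6] = t6; dst[i + 7] = t7;
	}
	for (; i < words; i++)                      /* MOVE_LASTCHUNK-style tail */
		dst[i] = src[i];
}
```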

Completed in 1062 milliseconds
