Searched defs:ctx (Results 1 - 25 of 96) sorted by relevance


/arch/sparc/prom/
mp.c
18 * by 'ctable_reg' in context 'ctx' at program counter 'pc'.
23 prom_startcpu(int cpunode, struct linux_prom_registers *ctable_reg, int ctx, char *pc) argument
36 ret = (*(romvec->v3_cpustart))(cpunode, (int) ctable_reg, ctx, pc);
/arch/metag/include/asm/
ptrace.h
14 TBICTX ctx; member in struct:pt_regs
18 #define user_mode(regs) (((regs)->ctx.SaveMask & TBICTX_PRIV_BIT) > 0)
20 #define instruction_pointer(regs) ((unsigned long)(regs)->ctx.CurrPC)
/arch/powerpc/platforms/cell/spufs/
gang.c
65 void spu_gang_add_ctx(struct spu_gang *gang, struct spu_context *ctx) argument
68 ctx->gang = get_spu_gang(gang);
69 list_add(&ctx->gang_list, &gang->list);
74 void spu_gang_remove_ctx(struct spu_gang *gang, struct spu_context *ctx) argument
77 WARN_ON(ctx->gang != gang);
78 if (!list_empty(&ctx->aff_list)) {
79 list_del_init(&ctx->aff_list);
82 list_del_init(&ctx->gang_list);
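The two gang.c routines above pair reference counting with list membership: adding a context takes a reference on the gang and links the context into the gang's list, and removal unlinks it and drops that reference. A minimal userspace sketch of the same idiom, using a plain counter and a singly linked list instead of the kernel's kref/list_head helpers (all names here are illustrative, not the spufs API):

    #include <assert.h>
    #include <stdlib.h>

    struct gang {
        int refcount;            /* stands in for the kernel's kref        */
        struct ctx *members;     /* head of the gang's member list         */
    };

    struct ctx {
        struct gang *gang;       /* back-pointer; holds one gang reference */
        struct ctx *next;        /* link in gang->members                  */
    };

    /* Take a reference on the gang and link the context into its list. */
    static void gang_add_ctx(struct gang *gang, struct ctx *ctx)
    {
        gang->refcount++;
        ctx->gang = gang;
        ctx->next = gang->members;
        gang->members = ctx;
    }

    /* Unlink the context and drop the reference taken in gang_add_ctx(). */
    static void gang_remove_ctx(struct gang *gang, struct ctx *ctx)
    {
        struct ctx **pp;

        assert(ctx->gang == gang);               /* mirrors the WARN_ON() */
        for (pp = &gang->members; *pp; pp = &(*pp)->next) {
            if (*pp == ctx) {
                *pp = ctx->next;
                break;
            }
        }
        ctx->gang = NULL;
        if (--gang->refcount == 0)
            free(gang);                          /* last put frees the gang */
    }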
context.c
38 struct spu_context *ctx; local
40 ctx = kzalloc(sizeof *ctx, GFP_KERNEL);
41 if (!ctx)
46 if (spu_init_csa(&ctx->csa))
48 spin_lock_init(&ctx->mmio_lock);
49 mutex_init(&ctx->mapping_lock);
50 kref_init(&ctx->kref);
51 mutex_init(&ctx->state_mutex);
52 mutex_init(&ctx
82 struct spu_context *ctx; local
99 get_spu_context(struct spu_context *ctx) argument
105 put_spu_context(struct spu_context *ctx) argument
111 spu_forget(struct spu_context *ctx) argument
130 spu_unmap_mappings(struct spu_context *ctx) argument
154 spu_acquire_saved(struct spu_context *ctx) argument
176 spu_release_saved(struct spu_context *ctx) argument
[all...]
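The alloc_spu_context() hits follow a familiar constructor shape: allocate a zeroed object, set up its saved-state area, initialize each lock and the reference count, and unwind on failure. A condensed, hypothetical userspace sketch of that shape (pthread primitives standing in for the kernel's spinlock/mutex/kref; the real function initializes many more fields):

    #include <pthread.h>
    #include <stdlib.h>

    struct object {
        int refcount;                    /* kref_init() analogue            */
        pthread_mutex_t state_mutex;     /* mutex_init() analogue           */
        pthread_spinlock_t mmio_lock;    /* spin_lock_init() analogue       */
        void *saved_state;               /* stands in for the ctx->csa area */
    };

    /* Allocate a zeroed object and initialize it, unwinding on failure. */
    static struct object *object_create(void)
    {
        struct object *obj = calloc(1, sizeof(*obj));   /* kzalloc() analogue */

        if (!obj)
            return NULL;

        obj->saved_state = malloc(4096);                /* spu_init_csa() analogue */
        if (!obj->saved_state)
            goto out_free;

        pthread_spin_init(&obj->mmio_lock, PTHREAD_PROCESS_PRIVATE);
        pthread_mutex_init(&obj->state_mutex, NULL);
        obj->refcount = 1;
        return obj;

    out_free:
        free(obj);
        return NULL;
    }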
fault.c
36 static void spufs_handle_event(struct spu_context *ctx, argument
41 if (ctx->flags & SPU_CREATE_EVENTS_ENABLED) {
42 ctx->event_return |= type;
43 wake_up_all(&ctx->stop_wq);
58 ctx->ops->restart_dma(ctx);
68 ctx->ops->npc_read(ctx) - 4;
77 int spufs_handle_class0(struct spu_context *ctx) argument
79 unsigned long stat = ctx
110 spufs_handle_class1(struct spu_context *ctx) argument
[all...]
/arch/x86/include/asm/crypto/
serpent-sse2.h
11 asmlinkage void __serpent_enc_blk_4way(struct serpent_ctx *ctx, u8 *dst,
13 asmlinkage void serpent_dec_blk_4way(struct serpent_ctx *ctx, u8 *dst,
16 static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst, argument
19 __serpent_enc_blk_4way(ctx, dst, src, false);
22 static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst, argument
25 __serpent_enc_blk_4way(ctx, dst, src, true);
28 static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst, argument
31 serpent_dec_blk_4way(ctx, dst, src);
38 asmlinkage void __serpent_enc_blk_8way(struct serpent_ctx *ctx, u8 *dst,
40 asmlinkage void serpent_dec_blk_8way(struct serpent_ctx *ctx, u
43 serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst, const u8 *src) argument
49 serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst, const u8 *src) argument
55 serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst, const u8 *src) argument
[all...]
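This header is a thin adapter layer: callers use width-neutral serpent_*_blk_xway() names, and static inline wrappers forward them to whichever N-way assembler routine the build provides, with a final boolean selecting the plain or XOR-ing variant. A stripped-down illustration of that wrapper pattern with a stub backend (hypothetical names, no real cipher):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define BLOCK_BYTES 16
    #define PARALLEL_BLOCKS 4

    struct cipher_ctx { uint32_t round_keys[132]; };   /* illustrative key schedule */

    /* Stub for the 4-way assembler routine: a real backend would encrypt four
     * 16-byte blocks here; this one only copies or XORs the data so the
     * example compiles and links on its own. */
    static void enc_blk_4way(struct cipher_ctx *ctx, uint8_t *dst,
                             const uint8_t *src, bool xor_out)
    {
        (void)ctx;
        if (xor_out)
            for (size_t i = 0; i < PARALLEL_BLOCKS * BLOCK_BYTES; i++)
                dst[i] ^= src[i];
        else
            memcpy(dst, src, PARALLEL_BLOCKS * BLOCK_BYTES);
    }

    /* Width-neutral wrappers, as in the header: callers say "xway" and the
     * glue forwards to the widest implementation available. */
    static inline void enc_blk_xway(struct cipher_ctx *ctx, uint8_t *dst,
                                    const uint8_t *src)
    {
        enc_blk_4way(ctx, dst, src, false);
    }

    static inline void enc_blk_xway_xor(struct cipher_ctx *ctx, uint8_t *dst,
                                        const uint8_t *src)
    {
        enc_blk_4way(ctx, dst, src, true);
    }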
/arch/arm/crypto/
aes_glue.c
18 struct AES_CTX *ctx = crypto_tfm_ctx(tfm); local
19 AES_encrypt(src, dst, &ctx->enc_key);
24 struct AES_CTX *ctx = crypto_tfm_ctx(tfm); local
25 AES_decrypt(src, dst, &ctx->dec_key);
31 struct AES_CTX *ctx = crypto_tfm_ctx(tfm); local
48 if (private_AES_set_encrypt_key(in_key, key_len, &ctx->enc_key) == -1) {
53 ctx->dec_key = ctx->enc_key;
54 if (private_AES_set_decrypt_key(in_key, key_len, &ctx->dec_key) == -1) {
/arch/arm/kernel/
suspend.c
61 u32 *ctx = ptr; local
83 __cpuc_flush_dcache_area(ctx, ptrsz);
/arch/arm64/crypto/
aes-ce-cipher.c
25 static int num_rounds(struct crypto_aes_ctx *ctx) argument
34 return 6 + ctx->key_length / 4;
39 struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); local
75 "1"(ctx->key_enc),
76 "2"(num_rounds(ctx) - 2)
84 struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); local
120 "1"(ctx->key_dec),
121 "2"(num_rounds(ctx) - 2)
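num_rounds() above is just the standard AES round count expressed in terms of the key length in bytes: 6 + 16/4 = 10 rounds for AES-128, 6 + 24/4 = 12 for AES-192, and 6 + 32/4 = 14 for AES-256.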
ghash-ce-glue.c
41 struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); local
43 *ctx = (struct ghash_desc_ctx){};
50 struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); local
51 unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
53 ctx->count += len;
62 memcpy(ctx->buf + partial, src, p);
71 pmull_ghash_update(blocks, ctx->digest, src, key,
72 partial ? ctx->buf : NULL);
78 memcpy(ctx->buf + partial, src, len);
84 struct ghash_desc_ctx *ctx local
[all...]
/arch/ia64/kernel/
perfmon_itanium.h
8 static int pfm_ita_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs);
51 pfm_ita_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs) argument
57 if (ctx == NULL) return -EINVAL;
59 is_loaded = ctx->ctx_state == PFM_CTX_LOADED || ctx->ctx_state == PFM_CTX_MASKED;
65 if (cnum == 13 && is_loaded && ((*val & 0x1) == 0UL) && ctx->ctx_fl_using_dbreg == 0) {
76 ret = pfm_write_ibr_dbr(1, ctx, NULL, 0, regs);
84 if (cnum == 11 && is_loaded && ((*val >> 28)& 0x1) == 0 && ctx->ctx_fl_using_dbreg == 0) {
95 ret = pfm_write_ibr_dbr(0, ctx, NULL, 0, regs);
perfmon_mckinley.h
8 static int pfm_mck_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs);
77 pfm_mck_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs) argument
87 if (ctx == NULL) return -EINVAL;
89 is_loaded = ctx->ctx_state == PFM_CTX_LOADED || ctx->ctx_state == PFM_CTX_MASKED;
101 DPRINT(("cnum=%u val=0x%lx, using_dbreg=%d loaded=%d\n", cnum, *val, ctx->ctx_fl_using_dbreg, is_loaded));
104 && (*val & 0x1e00000000000UL) && (*val & 0x18181818UL) != 0x18181818UL && ctx->ctx_fl_using_dbreg == 0) {
115 ret = pfm_write_ibr_dbr(PFM_DATA_RR, ctx, NULL, 0, regs);
122 if (cnum == 14 && is_loaded && ((*val & 0x2222UL) != 0x2222UL) && ctx->ctx_fl_using_dbreg == 0) {
133 ret = pfm_write_ibr_dbr(PFM_CODE_RR, ctx, NUL
[all...]
perfmon_montecito.h
8 static int pfm_mont_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs);
155 pfm_mont_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs) argument
169 if (ctx == NULL) return -EINVAL;
171 is_loaded = ctx->ctx_state == PFM_CTX_LOADED || ctx->ctx_state == PFM_CTX_MASKED;
185 DPRINT(("cnum=%u val=0x%lx, using_dbreg=%d loaded=%d\n", cnum, tmpval, ctx->ctx_fl_using_dbreg, is_loaded));
188 && (tmpval & 0x1e00000000000UL) && (tmpval & 0x18181818UL) != 0x18181818UL && ctx->ctx_fl_using_dbreg == 0) {
199 ret = pfm_write_ibr_dbr(PFM_DATA_RR, ctx, NULL, 0, regs);
208 if (cnum == 38 && is_loaded && ((tmpval & 0x492UL) != 0x492UL) && ctx->ctx_fl_using_dbreg == 0) {
219 ret = pfm_write_ibr_dbr(PFM_CODE_RR, ctx, NUL
[all...]
/arch/powerpc/mm/
mmu_context_hash32.c
57 * CTX_TO_VSID(ctx, va) (((ctx) * (897 * 16) + ((va) >> 28) * 0x111) \
66 unsigned long ctx = next_mmu_context; local
68 while (test_and_set_bit(ctx, context_map)) {
69 ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx);
70 if (ctx > LAST_CONTEXT)
71 ctx = 0;
73 next_mmu_context = (ctx + 1) & LAST_CONTEXT;
75 return ctx;
92 __destroy_context(unsigned long ctx) argument
[all...]
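The __get_mmu_context() hits show a round-robin bitmap allocator for MMU context numbers: scan forward from the last value handed out, claim the first free bit, wrap after LAST_CONTEXT, and remember where to resume next time. A single-threaded userspace sketch of that search (plain flags instead of test_and_set_bit()/find_next_zero_bit(), an illustrative LAST_CONTEXT, and it assumes at least one context is free):

    #include <stdbool.h>

    #define LAST_CONTEXT 255                     /* illustrative; hardware-dependent */

    static bool context_map[LAST_CONTEXT + 1];   /* stands in for the bitmap   */
    static unsigned long next_mmu_context;       /* round-robin starting point */

    /* Claim a free context number, scanning forward from the last one handed
     * out and wrapping after LAST_CONTEXT.  The kernel version claims the bit
     * atomically; this sketch is single-threaded. */
    static unsigned long get_mmu_context(void)
    {
        unsigned long ctx = next_mmu_context;

        while (context_map[ctx]) {
            ctx++;
            if (ctx > LAST_CONTEXT)
                ctx = 0;
        }
        context_map[ctx] = true;
        next_mmu_context = (ctx + 1) & LAST_CONTEXT;   /* works because
                                                          LAST_CONTEXT + 1 is a
                                                          power of two */
        return ctx;
    }

    /* Release a context number so it can be handed out again. */
    static void destroy_context(unsigned long ctx)
    {
        context_map[ctx] = false;
    }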
/arch/powerpc/platforms/cell/
spu_notify.c
31 void spu_switch_notify(struct spu *spu, struct spu_context *ctx) argument
34 ctx ? ctx->object_id : 0, spu);
54 void spu_set_profile_private_kref(struct spu_context *ctx, argument
58 ctx->prof_priv_kref = prof_info_kref;
59 ctx->prof_priv_release = prof_info_release;
63 void *spu_get_profile_private_kref(struct spu_context *ctx) argument
65 return ctx->prof_priv_kref;
/arch/s390/crypto/
sha512_s390.c
27 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); local
29 *(__u64 *)&ctx->state[0] = 0x6a09e667f3bcc908ULL;
30 *(__u64 *)&ctx->state[2] = 0xbb67ae8584caa73bULL;
31 *(__u64 *)&ctx->state[4] = 0x3c6ef372fe94f82bULL;
32 *(__u64 *)&ctx->state[6] = 0xa54ff53a5f1d36f1ULL;
33 *(__u64 *)&ctx->state[8] = 0x510e527fade682d1ULL;
34 *(__u64 *)&ctx->state[10] = 0x9b05688c2b3e6c1fULL;
35 *(__u64 *)&ctx->state[12] = 0x1f83d9abfb41bd6bULL;
36 *(__u64 *)&ctx->state[14] = 0x5be0cd19137e2179ULL;
37 ctx
93 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); local
[all...]
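The sha512_init hit loads the eight standard SHA-512 initial hash values from FIPS 180-4 into the context before the CPACF instruction processes any data (the s390 code stores them via 64-bit writes into a 32-bit state array, hence the even indices). The same initialization in portable C, assuming a context with a plain uint64_t state[8]:

    #include <stdint.h>
    #include <string.h>

    struct sha512_state {
        uint64_t state[8];     /* H0..H7                 */
        uint64_t count;        /* bytes processed so far */
        uint8_t buf[128];      /* partial-block buffer   */
    };

    /* Load the standard SHA-512 initial hash values (FIPS 180-4). */
    static void sha512_init_state(struct sha512_state *ctx)
    {
        static const uint64_t iv[8] = {
            0x6a09e667f3bcc908ULL, 0xbb67ae8584caa73bULL,
            0x3c6ef372fe94f82bULL, 0xa54ff53a5f1d36f1ULL,
            0x510e527fade682d1ULL, 0x9b05688c2b3e6c1fULL,
            0x1f83d9abfb41bd6bULL, 0x5be0cd19137e2179ULL,
        };

        memcpy(ctx->state, iv, sizeof(iv));
        ctx->count = 0;
        memset(ctx->buf, 0, sizeof(ctx->buf));
    }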
sha_common.c
23 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); local
29 index = ctx->count & (bsize - 1);
30 ctx->count += len;
37 memcpy(ctx->buf + index, data, bsize - index);
38 ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, bsize);
48 ret = crypt_s390_kimd(ctx->func, ctx->state, data,
57 memcpy(ctx
65 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); local
[all...]
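s390_sha_update() here and the arm64 ghash update above share the same streaming skeleton: keep count % block_size bytes in a small buffer, top it up and process it once full, feed whole blocks straight from the caller's data, and stash the tail for the next call. A minimal, hypothetical version of that skeleton around an opaque per-block compress callback (power-of-two block size assumed, as in the originals):

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define BLOCK_SIZE 64                   /* e.g. SHA-256; SHA-512 would use 128 */

    struct hash_ctx {
        uint64_t count;                     /* total bytes fed so far  */
        uint8_t buf[BLOCK_SIZE];            /* holds the partial block */
        void (*compress)(struct hash_ctx *ctx, const uint8_t *data, size_t blocks);
    };

    static void hash_update(struct hash_ctx *ctx, const uint8_t *data, size_t len)
    {
        size_t index = ctx->count & (BLOCK_SIZE - 1);   /* bytes already buffered */

        ctx->count += len;

        /* Top up and flush a previously buffered partial block first. */
        if (index) {
            size_t fill = BLOCK_SIZE - index;
            if (len < fill) {
                memcpy(ctx->buf + index, data, len);
                return;
            }
            memcpy(ctx->buf + index, data, fill);
            ctx->compress(ctx, ctx->buf, 1);
            data += fill;
            len -= fill;
        }

        /* Process as many whole blocks as possible directly from the caller. */
        if (len >= BLOCK_SIZE) {
            size_t blocks = len / BLOCK_SIZE;
            ctx->compress(ctx, data, blocks);
            data += blocks * BLOCK_SIZE;
            len -= blocks * BLOCK_SIZE;
        }

        /* Keep the remainder until the next update or the final call. */
        if (len)
            memcpy(ctx->buf, data, len);
    }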
/arch/x86/crypto/
aes_glue.c
10 asmlinkage void aes_enc_blk(struct crypto_aes_ctx *ctx, u8 *out, const u8 *in);
11 asmlinkage void aes_dec_blk(struct crypto_aes_ctx *ctx, u8 *out, const u8 *in);
13 void crypto_aes_encrypt_x86(struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src) argument
15 aes_enc_blk(ctx, dst, src);
19 void crypto_aes_decrypt_x86(struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src) argument
21 aes_dec_blk(ctx, dst, src);
crct10dif-pclmul_glue.c
50 struct chksum_desc_ctx *ctx = shash_desc_ctx(desc); local
52 ctx->crc = 0;
60 struct chksum_desc_ctx *ctx = shash_desc_ctx(desc); local
64 ctx->crc = crc_t10dif_pcl(ctx->crc, data, length);
67 ctx->crc = crc_t10dif_generic(ctx->crc, data, length);
73 struct chksum_desc_ctx *ctx = shash_desc_ctx(desc); local
75 *(__u16 *)out = ctx->crc;
94 struct chksum_desc_ctx *ctx local
102 struct chksum_desc_ctx *ctx = shash_desc_ctx(desc); local
[all...]
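The update path here switches between the PCLMULQDQ-accelerated crc_t10dif_pcl() and the table-driven crc_t10dif_generic(), with the accelerated route taken only when the FPU/SIMD state may be used in the current context. A self-contained sketch of that dispatch shape: the bitwise CRC16/T10-DIF (polynomial 0x8BB7) stands in for both backends, and the minimum-length check is part of the illustrative policy rather than something visible in the snippet above:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    #define FAST_PATH_MIN_LEN 16        /* illustrative: below this, the fast
                                           path's setup cost tends to dominate */

    /* Generic fallback: bitwise CRC16/T10-DIF (polynomial 0x8BB7, MSB first). */
    static uint16_t crc_t10dif_bitwise(uint16_t crc, const uint8_t *data, size_t len)
    {
        while (len--) {
            crc ^= (uint16_t)(*data++) << 8;
            for (int i = 0; i < 8; i++)
                crc = (crc & 0x8000) ? (uint16_t)((crc << 1) ^ 0x8BB7)
                                     : (uint16_t)(crc << 1);
        }
        return crc;
    }

    /* Stand-in for the PCLMULQDQ routine; it reuses the bitwise code so the
     * sketch stays self-contained. */
    static uint16_t crc_t10dif_fast(uint16_t crc, const uint8_t *data, size_t len)
    {
        return crc_t10dif_bitwise(crc, data, len);
    }

    /* Placeholder for "may we use SIMD right now?" (irq_fpu_usable() in-kernel). */
    static bool simd_usable(void)
    {
        return true;
    }

    /* Dispatch shape of the update: fast path only when SIMD is usable and the
     * buffer is long enough to be worth it, otherwise the generic code. */
    static uint16_t crc_t10dif_update(uint16_t crc, const uint8_t *data, size_t len)
    {
        if (simd_usable() && len >= FAST_PATH_MIN_LEN)
            return crc_t10dif_fast(crc, data, len);
        return crc_t10dif_bitwise(crc, data, len);
    }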
salsa20_glue.c
34 asmlinkage void salsa20_keysetup(struct salsa20_ctx *ctx, const u8 *k,
36 asmlinkage void salsa20_ivsetup(struct salsa20_ctx *ctx, const u8 *iv);
37 asmlinkage void salsa20_encrypt_bytes(struct salsa20_ctx *ctx,
43 struct salsa20_ctx *ctx = crypto_tfm_ctx(tfm); local
44 salsa20_keysetup(ctx, key, keysize*8, SALSA20_IV_SIZE*8);
54 struct salsa20_ctx *ctx = crypto_blkcipher_ctx(tfm); local
60 salsa20_ivsetup(ctx, walk.iv);
64 salsa20_encrypt_bytes(ctx, walk.src.virt.addr,
70 salsa20_encrypt_bytes(ctx, walk.src.virt.addr,
77 salsa20_encrypt_bytes(ctx, wal
[all...]
/arch/ia64/kvm/
misc.h
63 union context *ctx = &vcpu->arch.host; local
64 return to_guest(vcpu->kvm, ctx);
70 union context *ctx = &vcpu->arch.guest; local
71 return to_guest(vcpu->kvm, ctx);
/arch/microblaze/include/asm/
mmu_context_mm.h
26 # define CTX_TO_VSID(ctx, va) (((ctx) * (897 * 16) + ((va) >> 28) * 0x111) \
83 mm_context_t ctx; local
89 ctx = next_mmu_context;
90 while (test_and_set_bit(ctx, context_map)) {
91 ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx);
92 if (ctx > LAST_CONTEXT)
93 ctx = 0;
95 next_mmu_context = (ctx
[all...]
/arch/microblaze/kernel/
process.c
125 struct cpu_context *ctx = local
129 if (in_sched_functions(ctx->r15))
130 return (unsigned long)ctx->r15;
132 return ctx->r14;
/arch/frv/mm/
mmu-context.c
44 static unsigned get_cxn(mm_context_t *ctx) argument
50 if (!list_empty(&ctx->id_link)) {
51 list_move_tail(&ctx->id_link, &cxn_owners_lru);
78 ctx->id = cxn;
79 list_add_tail(&ctx->id_link, &cxn_owners_lru);
82 return ctx->id;
90 void change_mm_context(mm_context_t *old, mm_context_t *ctx, pgd_t *pgd) argument
106 get_cxn(ctx);
107 ctx->id_busy = 1;
110 asm volatile("movgs %0,cxnr" : : "r"(ctx
132 mm_context_t *ctx = &mm->context; local
[all...]
/arch/m68k/include/asm/
tlbflush.h
120 unsigned char ctx, oldctx; local
124 for(ctx = 0; ctx < 8; ctx++) {
125 sun3_put_context(ctx);

Completed in 422 milliseconds
