Searched defs:ctx (Results 1 - 25 of 75) sorted by relevance


/arch/sparc/prom/
segment.c
18 * context 'ctx'.
21 prom_putsegment(int ctx, unsigned long vaddr, int segment) argument
25 (*(romvec->pv_setctxt))(ctx, (char *) vaddr, segment);
mp.c
18 * by 'ctable_reg' in context 'ctx' at program counter 'pc'.
23 prom_startcpu(int cpunode, struct linux_prom_registers *ctable_reg, int ctx, char *pc) argument
36 ret = (*(romvec->v3_cpustart))(cpunode, (int) ctable_reg, ctx, pc);
/arch/powerpc/platforms/cell/spufs/
gang.c
65 void spu_gang_add_ctx(struct spu_gang *gang, struct spu_context *ctx) argument
68 ctx->gang = get_spu_gang(gang);
69 list_add(&ctx->gang_list, &gang->list);
74 void spu_gang_remove_ctx(struct spu_gang *gang, struct spu_context *ctx) argument
77 WARN_ON(ctx->gang != gang);
78 if (!list_empty(&ctx->aff_list)) {
79 list_del_init(&ctx->aff_list);
82 list_del_init(&ctx->gang_list);
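
Editorial aside on the gang.c matches above: the pattern is an intrusive doubly linked list plus a reference the context takes on its gang (get_spu_gang in the listing). A minimal userspace sketch of that add/remove pattern, with a hand-rolled list in place of the kernel's list_head, a plain counter in place of the kref, locking omitted, and all names below purely illustrative:

    #include <stddef.h>

    struct list_node {
        struct list_node *prev, *next;
    };

    static void list_init(struct list_node *head)
    {
        head->prev = head->next = head;
    }

    static void list_add_front(struct list_node *head, struct list_node *n)
    {
        n->next = head->next;
        n->prev = head;
        head->next->prev = n;
        head->next = n;
    }

    static void list_del_init_node(struct list_node *n)
    {
        n->prev->next = n->next;
        n->next->prev = n->prev;
        n->prev = n->next = n;      /* leave the node self-linked, like list_del_init() */
    }

    struct gang {
        int refcount;               /* stands in for the gang kref */
        struct list_node ctx_list;  /* member contexts */
    };

    struct context {
        struct gang *gang;
        struct list_node gang_link; /* linkage into gang->ctx_list */
    };

    static void gang_add_ctx(struct gang *g, struct context *c)
    {
        g->refcount++;              /* the context pins the gang while it is a member */
        c->gang = g;
        list_add_front(&g->ctx_list, &c->gang_link);
    }

    static void gang_remove_ctx(struct gang *g, struct context *c)
    {
        list_del_init_node(&c->gang_link);
        c->gang = NULL;
        g->refcount--;              /* drop the reference taken on add */
    }
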
context.c
38 struct spu_context *ctx; local
41 ctx = kzalloc(sizeof *ctx, GFP_KERNEL);
42 if (!ctx)
47 if (spu_init_csa(&ctx->csa))
49 spin_lock_init(&ctx->mmio_lock);
50 mutex_init(&ctx->mapping_lock);
51 kref_init(&ctx->kref);
52 mutex_init(&ctx->state_mutex);
53 mutex_init(&ctx
84 struct spu_context *ctx; local
101 get_spu_context(struct spu_context *ctx) argument
107 put_spu_context(struct spu_context *ctx) argument
113 spu_forget(struct spu_context *ctx) argument
132 spu_unmap_mappings(struct spu_context *ctx) argument
156 spu_acquire_saved(struct spu_context *ctx) argument
178 spu_release_saved(struct spu_context *ctx) argument
[all...]
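
The context.c matches above show the usual refcounted-context lifetime: kzalloc, initialise the locks, kref_init, then paired get/put calls. A minimal, self-contained userspace sketch of that pattern, assuming a plain integer counter in place of struct kref and hypothetical ctx_alloc/ctx_get/ctx_put names:

    #include <stdlib.h>

    struct ctx {
        int refcount;               /* stands in for the kref in the listing */
        /* per-context locks and state would live here */
    };

    static struct ctx *ctx_alloc(void)
    {
        struct ctx *c = calloc(1, sizeof(*c));  /* kzalloc analogue */
        if (!c)
            return NULL;
        c->refcount = 1;                        /* kref_init analogue */
        return c;
    }

    static struct ctx *ctx_get(struct ctx *c)
    {
        c->refcount++;
        return c;
    }

    static void ctx_put(struct ctx *c)
    {
        if (--c->refcount == 0)
            free(c);                            /* release when the last user is gone */
    }

    int main(void)
    {
        struct ctx *c = ctx_alloc();
        if (!c)
            return 1;
        ctx_get(c);     /* a second user takes a reference */
        ctx_put(c);     /* ...and drops it */
        ctx_put(c);     /* the final put frees the context */
        return 0;
    }
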
fault.c
36 static void spufs_handle_event(struct spu_context *ctx, argument
41 if (ctx->flags & SPU_CREATE_EVENTS_ENABLED) {
42 ctx->event_return |= type;
43 wake_up_all(&ctx->stop_wq);
58 ctx->ops->restart_dma(ctx);
68 ctx->ops->npc_read(ctx) - 4;
77 int spufs_handle_class0(struct spu_context *ctx) argument
79 unsigned long stat = ctx
110 spufs_handle_class1(struct spu_context *ctx) argument
[all...]
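
The fault.c snippet records the event type in ctx->event_return and then wakes everything sleeping on ctx->stop_wq. A rough userspace sketch of that "latch the event, wake all waiters" step, assuming POSIX threads in place of the kernel wait queue and an illustrative EVENTS_ENABLED flag standing in for SPU_CREATE_EVENTS_ENABLED:

    #include <pthread.h>

    #define EVENTS_ENABLED 0x1UL    /* illustrative stand-in flag */

    struct fault_ctx {
        unsigned long flags;
        unsigned long event_return;
        pthread_mutex_t lock;       /* protects event_return */
        pthread_cond_t stop_wq;     /* stands in for the wait queue */
    };

    /* The caller is assumed to have initialised lock and stop_wq. */
    static void handle_event(struct fault_ctx *ctx, unsigned long type)
    {
        pthread_mutex_lock(&ctx->lock);
        if (ctx->flags & EVENTS_ENABLED) {
            ctx->event_return |= type;              /* latch the event for readers */
            pthread_cond_broadcast(&ctx->stop_wq);  /* wake_up_all analogue */
        }
        pthread_mutex_unlock(&ctx->lock);
    }
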
/arch/x86/include/asm/
serpent.h
11 asmlinkage void __serpent_enc_blk_4way(struct serpent_ctx *ctx, u8 *dst,
13 asmlinkage void serpent_dec_blk_4way(struct serpent_ctx *ctx, u8 *dst,
16 static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst, argument
19 __serpent_enc_blk_4way(ctx, dst, src, false);
22 static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst, argument
25 __serpent_enc_blk_4way(ctx, dst, src, true);
28 static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst, argument
31 serpent_dec_blk_4way(ctx, dst, src);
38 asmlinkage void __serpent_enc_blk_8way(struct serpent_ctx *ctx, u8 *dst,
40 asmlinkage void serpent_dec_blk_8way(struct serpent_ctx *ctx, u
43 serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst, const u8 *src) argument
49 serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst, const u8 *src) argument
55 serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst, const u8 *src) argument
[all...]
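
The serpent.h matches are a common wrapper idiom: one n-way routine takes a final xor flag, and trivial static inlines pin it to false or true. A hedged header-style sketch of that idiom; serpent_blk_4way() below is a hypothetical stand-in for the asm entry point, not the kernel symbol:

    #include <stdbool.h>
    #include <stdint.h>

    struct serpent_ctx;     /* opaque expanded key */

    /* Hypothetical 4-way routine: encrypts four blocks, optionally XORing
     * the result into dst instead of overwriting it. */
    void serpent_blk_4way(struct serpent_ctx *ctx, uint8_t *dst,
                          const uint8_t *src, bool xor_into_dst);

    static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, uint8_t *dst,
                                            const uint8_t *src)
    {
        serpent_blk_4way(ctx, dst, src, false);   /* plain encrypt */
    }

    static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, uint8_t *dst,
                                                const uint8_t *src)
    {
        serpent_blk_4way(ctx, dst, src, true);    /* encrypt and XOR into dst */
    }
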
/arch/ia64/kernel/
perfmon_itanium.h
8 static int pfm_ita_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs);
51 pfm_ita_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs) argument
57 if (ctx == NULL) return -EINVAL;
59 is_loaded = ctx->ctx_state == PFM_CTX_LOADED || ctx->ctx_state == PFM_CTX_MASKED;
65 if (cnum == 13 && is_loaded && ((*val & 0x1) == 0UL) && ctx->ctx_fl_using_dbreg == 0) {
76 ret = pfm_write_ibr_dbr(1, ctx, NULL, 0, regs);
84 if (cnum == 11 && is_loaded && ((*val >> 28)& 0x1) == 0 && ctx->ctx_fl_using_dbreg == 0) {
95 ret = pfm_write_ibr_dbr(0, ctx, NULL, 0, regs);
perfmon_mckinley.h
8 static int pfm_mck_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs);
77 pfm_mck_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs) argument
87 if (ctx == NULL) return -EINVAL;
89 is_loaded = ctx->ctx_state == PFM_CTX_LOADED || ctx->ctx_state == PFM_CTX_MASKED;
101 DPRINT(("cnum=%u val=0x%lx, using_dbreg=%d loaded=%d\n", cnum, *val, ctx->ctx_fl_using_dbreg, is_loaded));
104 && (*val & 0x1e00000000000UL) && (*val & 0x18181818UL) != 0x18181818UL && ctx->ctx_fl_using_dbreg == 0) {
115 ret = pfm_write_ibr_dbr(PFM_DATA_RR, ctx, NULL, 0, regs);
122 if (cnum == 14 && is_loaded && ((*val & 0x2222UL) != 0x2222UL) && ctx->ctx_fl_using_dbreg == 0) {
133 ret = pfm_write_ibr_dbr(PFM_CODE_RR, ctx, NUL
[all...]
perfmon_montecito.h
8 static int pfm_mont_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs);
155 pfm_mont_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs) argument
169 if (ctx == NULL) return -EINVAL;
171 is_loaded = ctx->ctx_state == PFM_CTX_LOADED || ctx->ctx_state == PFM_CTX_MASKED;
185 DPRINT(("cnum=%u val=0x%lx, using_dbreg=%d loaded=%d\n", cnum, tmpval, ctx->ctx_fl_using_dbreg, is_loaded));
188 && (tmpval & 0x1e00000000000UL) && (tmpval & 0x18181818UL) != 0x18181818UL && ctx->ctx_fl_using_dbreg == 0) {
199 ret = pfm_write_ibr_dbr(PFM_DATA_RR, ctx, NULL, 0, regs);
208 if (cnum == 38 && is_loaded && ((tmpval & 0x492UL) != 0x492UL) && ctx->ctx_fl_using_dbreg == 0) {
219 ret = pfm_write_ibr_dbr(PFM_CODE_RR, ctx, NUL
[all...]
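
All three perfmon headers above implement the same check: reject a PMC write unless the context is loaded (or masked), and if the new value enables a range-restriction feature while the debug registers are still unclaimed, claim and clear them first via pfm_write_ibr_dbr(). A compressed sketch of that control flow with illustrative names and a single example rule mirrored from the Itanium variant:

    #include <errno.h>

    enum pmu_ctx_state { CTX_UNLOADED, CTX_LOADED, CTX_MASKED };    /* illustrative */

    struct pmu_ctx {
        enum pmu_ctx_state state;
        int using_dbreg;        /* debug registers already claimed? */
    };

    static int claim_debug_registers(struct pmu_ctx *ctx)
    {
        /* the real code zeroes the ibr/dbr pairs via pfm_write_ibr_dbr() */
        ctx->using_dbreg = 1;
        return 0;
    }

    static int pmc_check(struct pmu_ctx *ctx, unsigned int cnum, unsigned long *val)
    {
        int loaded;

        if (ctx == NULL)
            return -EINVAL;

        loaded = ctx->state == CTX_LOADED || ctx->state == CTX_MASKED;

        /* Example rule (mirrors the cnum == 13 case in perfmon_itanium.h):
         * bit 0 clear means the range restriction is in use, which needs
         * the debug registers. */
        if (cnum == 13 && loaded && (*val & 0x1) == 0 && !ctx->using_dbreg)
            return claim_debug_registers(ctx);

        return 0;
    }
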
/arch/powerpc/mm/
mmu_context_hash32.c
57 * CTX_TO_VSID(ctx, va) (((ctx) * (897 * 16) + ((va) >> 28) * 0x111) \
66 unsigned long ctx = next_mmu_context; local
68 while (test_and_set_bit(ctx, context_map)) {
69 ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx);
70 if (ctx > LAST_CONTEXT)
71 ctx = 0;
73 next_mmu_context = (ctx + 1) & LAST_CONTEXT;
75 return ctx;
92 __destroy_context(unsigned long ctx) argument
[all...]
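
The mmu_context_hash32.c match is the round-robin context-ID allocator: start at a moving cursor, scan the bitmap for a free slot (wrapping at LAST_CONTEXT), claim it, and advance the cursor. A self-contained userspace sketch of the same algorithm, using a plain byte array instead of the kernel's atomic test_and_set_bit()/find_next_zero_bit() helpers and assuming, as the original does, that a free context always exists:

    #include <stdio.h>

    #define LAST_CONTEXT 255    /* illustrative; must make LAST_CONTEXT + 1 a power of two */

    static unsigned char context_map[LAST_CONTEXT + 1];  /* 1 = in use */
    static unsigned long next_mmu_context;               /* allocation cursor */

    static unsigned long ctx_alloc(void)
    {
        unsigned long ctx = next_mmu_context;

        /* Scan forward from the cursor, wrapping, until a free slot is found. */
        while (context_map[ctx]) {
            ctx++;
            if (ctx > LAST_CONTEXT)
                ctx = 0;
        }
        context_map[ctx] = 1;
        next_mmu_context = (ctx + 1) & LAST_CONTEXT;
        return ctx;
    }

    static void ctx_free(unsigned long ctx)
    {
        context_map[ctx] = 0;
    }

    int main(void)
    {
        unsigned long a = ctx_alloc(), b = ctx_alloc();
        printf("got contexts %lu and %lu\n", a, b);
        ctx_free(a);
        ctx_free(b);
        return 0;
    }
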
tlb_hash32.c
96 unsigned int ctx = mm->context.id; local
113 flush_hash_pages(ctx, start, pmd_val(*pmd), count);
/arch/powerpc/platforms/cell/
spu_notify.c
31 void spu_switch_notify(struct spu *spu, struct spu_context *ctx) argument
34 ctx ? ctx->object_id : 0, spu);
54 void spu_set_profile_private_kref(struct spu_context *ctx, argument
58 ctx->prof_priv_kref = prof_info_kref;
59 ctx->prof_priv_release = prof_info_release;
63 void *spu_get_profile_private_kref(struct spu_context *ctx) argument
65 return ctx->prof_priv_kref;
/arch/s390/crypto/
sha512_s390.c
27 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); local
29 *(__u64 *)&ctx->state[0] = 0x6a09e667f3bcc908ULL;
30 *(__u64 *)&ctx->state[2] = 0xbb67ae8584caa73bULL;
31 *(__u64 *)&ctx->state[4] = 0x3c6ef372fe94f82bULL;
32 *(__u64 *)&ctx->state[6] = 0xa54ff53a5f1d36f1ULL;
33 *(__u64 *)&ctx->state[8] = 0x510e527fade682d1ULL;
34 *(__u64 *)&ctx->state[10] = 0x9b05688c2b3e6c1fULL;
35 *(__u64 *)&ctx->state[12] = 0x1f83d9abfb41bd6bULL;
36 *(__u64 *)&ctx->state[14] = 0x5be0cd19137e2179ULL;
37 ctx
93 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); local
[all...]
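
The sha512_s390.c init routine above simply loads the standard FIPS 180-4 initial hash values into the state and clears the counters. The same initialisation in portable C, with a hypothetical sha512_state struct standing in for the driver's s390_sha_ctx:

    #include <stdint.h>
    #include <string.h>

    struct sha512_state {
        uint64_t h[8];       /* chaining state */
        uint64_t count;      /* bytes processed so far */
        uint8_t  buf[128];   /* partial-block buffer */
    };

    static void sha512_init(struct sha512_state *st)
    {
        /* FIPS 180-4 initial hash values, as quoted in the listing above. */
        static const uint64_t iv[8] = {
            0x6a09e667f3bcc908ULL, 0xbb67ae8584caa73bULL,
            0x3c6ef372fe94f82bULL, 0xa54ff53a5f1d36f1ULL,
            0x510e527fade682d1ULL, 0x9b05688c2b3e6c1fULL,
            0x1f83d9abfb41bd6bULL, 0x5be0cd19137e2179ULL,
        };

        memcpy(st->h, iv, sizeof(iv));
        st->count = 0;
        memset(st->buf, 0, sizeof(st->buf));
    }
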
sha_common.c
23 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); local
29 index = ctx->count & (bsize - 1);
30 ctx->count += len;
37 memcpy(ctx->buf + index, data, bsize - index);
38 ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, bsize);
47 ret = crypt_s390_kimd(ctx->func, ctx->state, data,
55 memcpy(ctx
63 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); local
[all...]
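
sha_common.c above is the generic block-buffering update: top up a partially filled buffer, process it once full, hash whole blocks straight from the caller's data, and stash the tail for next time. A userspace sketch of that logic; process_blocks() is a stub standing in for the crypt_s390_kimd() call, and BSIZE is fixed at the SHA-512 block size for illustration:

    #include <stddef.h>
    #include <string.h>

    #define BSIZE 128   /* SHA-512 block size, used here for illustration */

    struct hash_ctx {
        unsigned char buf[BSIZE];   /* holds a partial block between calls */
        unsigned long long count;   /* total bytes fed in so far */
    };

    /* Stub for the real compression step; 'n' is always a multiple of BSIZE. */
    static void process_blocks(struct hash_ctx *ctx, const unsigned char *p, size_t n)
    {
        (void)ctx; (void)p; (void)n;
    }

    static void hash_update(struct hash_ctx *ctx, const unsigned char *data, size_t len)
    {
        size_t index = ctx->count & (BSIZE - 1);

        ctx->count += len;

        /* Top up a partially filled buffer first. */
        if (index) {
            size_t fill = BSIZE - index;
            if (len < fill) {
                memcpy(ctx->buf + index, data, len);
                return;
            }
            memcpy(ctx->buf + index, data, fill);
            process_blocks(ctx, ctx->buf, BSIZE);
            data += fill;
            len -= fill;
        }

        /* Hash as many whole blocks as possible straight from 'data'. */
        if (len >= BSIZE) {
            size_t n = len & ~((size_t)BSIZE - 1);
            process_blocks(ctx, data, n);
            data += n;
            len -= n;
        }

        /* Stash the remainder for the next call. */
        if (len)
            memcpy(ctx->buf, data, len);
    }
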
ghash_s390.c
40 struct ghash_ctx *ctx = crypto_shash_ctx(tfm); local
47 memcpy(ctx->key, key, GHASH_BLOCK_SIZE);
48 memset(ctx->icv, 0, GHASH_BLOCK_SIZE);
57 struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm); local
73 ret = crypt_s390_kimd(KIMD_GHASH, ctx, buf,
81 ret = crypt_s390_kimd(KIMD_GHASH, ctx, src, n);
95 static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx) argument
105 ret = crypt_s390_kimd(KIMD_GHASH, ctx, buf, GHASH_BLOCK_SIZE);
115 struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm); local
117 ghash_flush(ctx, dct
[all...]
/arch/x86/crypto/
aes_glue.c
10 asmlinkage void aes_enc_blk(struct crypto_aes_ctx *ctx, u8 *out, const u8 *in);
11 asmlinkage void aes_dec_blk(struct crypto_aes_ctx *ctx, u8 *out, const u8 *in);
13 void crypto_aes_encrypt_x86(struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src) argument
15 aes_enc_blk(ctx, dst, src);
19 void crypto_aes_decrypt_x86(struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src) argument
21 aes_dec_blk(ctx, dst, src);
fpu.c
29 struct crypto_fpu_ctx *ctx = crypto_tfm_ctx(parent); local
30 struct crypto_blkcipher *child = ctx->child;
47 struct crypto_fpu_ctx *ctx = crypto_blkcipher_ctx(desc_in->tfm); local
48 struct crypto_blkcipher *child = ctx->child;
66 struct crypto_fpu_ctx *ctx = crypto_blkcipher_ctx(desc_in->tfm); local
67 struct crypto_blkcipher *child = ctx->child;
84 struct crypto_fpu_ctx *ctx = crypto_tfm_ctx(tfm); local
91 ctx->child = cipher;
97 struct crypto_fpu_ctx *ctx = crypto_tfm_ctx(tfm); local
98 crypto_free_blkcipher(ctx
[all...]
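
fpu.c above is a wrapping template: it stores a child cipher in its context and brackets every call into the child with kernel_fpu_begin()/kernel_fpu_end(). A stripped-down sketch of that shape using hypothetical names; the begin/end stubs only mark where the FPU state save/restore would go:

    #include <stddef.h>

    struct child_cipher {
        int (*encrypt)(void *priv, unsigned char *dst,
                       const unsigned char *src, size_t len);
        void *priv;
    };

    struct fpu_wrap_ctx {
        struct child_cipher *child;     /* the wrapped implementation */
    };

    static void fpu_region_begin(void) { /* save FPU state / disable preemption */ }
    static void fpu_region_end(void)   { /* restore FPU state */ }

    static int fpu_wrap_encrypt(struct fpu_wrap_ctx *ctx, unsigned char *dst,
                                const unsigned char *src, size_t len)
    {
        int err;

        fpu_region_begin();
        err = ctx->child->encrypt(ctx->child->priv, dst, src, len);
        fpu_region_end();
        return err;
    }
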
salsa20_glue.c
39 asmlinkage void salsa20_keysetup(struct salsa20_ctx *ctx, const u8 *k,
41 asmlinkage void salsa20_ivsetup(struct salsa20_ctx *ctx, const u8 *iv);
42 asmlinkage void salsa20_encrypt_bytes(struct salsa20_ctx *ctx,
48 struct salsa20_ctx *ctx = crypto_tfm_ctx(tfm); local
49 salsa20_keysetup(ctx, key, keysize*8, SALSA20_IV_SIZE*8);
59 struct salsa20_ctx *ctx = crypto_blkcipher_ctx(tfm); local
65 salsa20_ivsetup(ctx, walk.iv);
69 salsa20_encrypt_bytes(ctx, walk.src.virt.addr,
75 salsa20_encrypt_bytes(ctx, walk.src.virt.addr,
82 salsa20_encrypt_bytes(ctx, wal
[all...]
/arch/ia64/kvm/
misc.h
63 union context *ctx = &vcpu->arch.host; local
64 return to_guest(vcpu->kvm, ctx);
70 union context *ctx = &vcpu->arch.guest; local
71 return to_guest(vcpu->kvm, ctx);
/arch/microblaze/include/asm/
mmu_context_mm.h
26 # define CTX_TO_VSID(ctx, va) (((ctx) * (897 * 16) + ((va) >> 28) * 0x111) \
83 mm_context_t ctx; local
89 ctx = next_mmu_context;
90 while (test_and_set_bit(ctx, context_map)) {
91 ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx);
92 if (ctx > LAST_CONTEXT)
93 ctx = 0;
95 next_mmu_context = (ctx
[all...]
/arch/arm/mach-omap2/
sdrc.c
164 void omap2_sms_write_rot_control(u32 val, unsigned ctx) argument
166 sms_write_reg(val, SMS_ROT_CONTROL(ctx));
169 void omap2_sms_write_rot_size(u32 val, unsigned ctx) argument
171 sms_write_reg(val, SMS_ROT_SIZE(ctx));
174 void omap2_sms_write_rot_physical_ba(u32 val, unsigned ctx) argument
176 sms_write_reg(val, SMS_ROT_PHYSICAL_BA(ctx));
/arch/frv/mm/
mmu-context.c
44 static unsigned get_cxn(mm_context_t *ctx) argument
50 if (!list_empty(&ctx->id_link)) {
51 list_move_tail(&ctx->id_link, &cxn_owners_lru);
78 ctx->id = cxn;
79 list_add_tail(&ctx->id_link, &cxn_owners_lru);
82 return ctx->id;
90 void change_mm_context(mm_context_t *old, mm_context_t *ctx, pgd_t *pgd) argument
106 get_cxn(ctx);
107 ctx->id_busy = 1;
110 asm volatile("movgs %0,cxnr" : : "r"(ctx
132 mm_context_t *ctx = &mm->context; local
[all...]
/arch/m68k/include/asm/
tlbflush.h
120 unsigned char ctx, oldctx; local
124 for(ctx = 0; ctx < 8; ctx++) {
125 sun3_put_context(ctx);
mmu_context.h
32 mm_context_t ctx; local
40 ctx = next_mmu_context;
41 while (test_and_set_bit(ctx, context_map)) {
42 ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx);
43 if (ctx > LAST_CONTEXT)
44 ctx = 0;
46 next_mmu_context = (ctx + 1) & LAST_CONTEXT;
47 mm->context = ctx;
48 context_mm[ctx]
[all...]
/arch/microblaze/kernel/
process.c
197 struct cpu_context *ctx = local
201 if (in_sched_functions(ctx->r15))
202 return (unsigned long)ctx->r15;
204 return ctx->r14;

Completed in 1933 milliseconds
