X-Git-Url: https://git.wh0rd.org/?p=ICEs.git;a=blobdiff_plain;f=388835%2Faesni-intel_glue.i.1;fp=388835%2Faesni-intel_glue.i.1;h=a130b6c925b51c2d8f67e54b7df4cd53fdae755b;hp=0000000000000000000000000000000000000000;hb=bd3239d2bbe0de3a200b266503e3330b1e391489;hpb=dbff64cb4b7530861c2309c794efdd4e0cf47a23

diff --git a/388835/aesni-intel_glue.i.1 b/388835/aesni-intel_glue.i.1
new file mode 100644
index 0000000..a130b6c
--- /dev/null
+++ b/388835/aesni-intel_glue.i.1
@@ -0,0 +1,394 @@
+typedef unsigned int __u32;
+typedef unsigned char u8;
+typedef unsigned short u16;
+typedef unsigned int u32;
+typedef unsigned long long u64;
+struct ftrace_branch_data {
+};
+enum {
+    false = 0, true = 1
+};
+typedef _Bool bool;
+struct list_head {
+};
+static inline __attribute__ ((always_inline)) __attribute__ ((always_inline))
+    __attribute__ ((pure))
+bool __static_cpu_has(u16 bit)
+{
+    asm goto ("1: jmp %l[t_no]\n" "2:\n" ".section .altinstructions,\"a\"\n"
+              " " ".balign 4" " " "\n" " " ".long" " " "1b\n" " " ".long"
+              " " "0\n" " .word %P0\n" " .byte 2b - 1b\n" " .byte 0\n"
+              ".previous\n"::"i" (bit)::t_no);
+    return true;
+  t_no:return false;
+}
+
+struct cpuinfo_x86 {
+    __u32 x86_capability[10];
+} __attribute__ ((__aligned__((1 << (6)))));
+extern struct cpuinfo_x86 boot_cpu_data;
+struct i387_fxsave_struct {
+    u16 swd;
+} __attribute__ ((aligned(16)));
+struct xsave_hdr_struct {
+    u64 xstate_bv;
+} __attribute__ ((packed));
+struct xsave_struct {
+    struct xsave_hdr_struct xsave_hdr;
+} __attribute__ ((packed, aligned(64)));
+union thread_xstate {
+    struct i387_fxsave_struct fxsave;
+    struct xsave_struct xsave;
+};
+struct fpu {
+    union thread_xstate *state;
+};
+struct thread_struct {
+    struct fpu fpu;
+} mm_segment_t;
+struct thread_info {
+    struct task_struct *task;
+    __u32 status;
+};
+extern struct module __this_module;
+struct cipher_alg {
+    unsigned int cia_min_keysize;
+    unsigned int cia_max_keysize;
+    int (*cia_setkey) (struct crypto_tfm * tfm, const u8 * key,
+                       unsigned int keylen);
+    void (*cia_encrypt) (struct crypto_tfm * tfm, u8 * dst, const u8 * src);
+    void (*cia_decrypt) (struct crypto_tfm * tfm, u8 * dst, const u8 * src);
+};
+struct crypto_alg {
+    struct list_head cra_list;
+    u32 cra_flags;
+    unsigned int cra_blocksize;
+    unsigned int cra_ctxsize;
+    unsigned int cra_alignmask;
+    int cra_priority;
+    char cra_name[64];
+    char cra_driver_name[64];
+    const struct crypto_type *cra_type;
+    union {
+        struct cipher_alg cipher;
+    } cra_u;
+    struct module *cra_module;
+};
+extern const struct crypto_type crypto_ablkcipher_type;
+extern const struct crypto_type crypto_blkcipher_type;
+struct crypto_aes_ctx {
+};
+struct task_struct {
+    struct thread_struct thread;
+};
+static inline __attribute__ ((always_inline)) __attribute__ ((always_inline))
+    __attribute__ ((pure))
+bool use_xsave(void)
+{
+    return (__builtin_constant_p
+            ((__builtin_constant_p((4 * 32 + 26))
+              && (((((4 * 32 + 26)) >> 5) == 0
+                   && (1UL << (((4 * 32 + 26)) & 31) &
+                       ((1 << ((0 * 32 + 0) & 31)) | 0 | 0 | (1 << ((0 * 32 + 6) & 31)) | (1 << ((0 * 32 + 8) & 31)) | 0 | 0 | (1 << ((0 * 32 + 15) & 31)) | 0 | 0)))
+                  || ((((4 * 32 + 26)) >> 5) == 1 && (1UL << (((4 * 32 + 26)) & 31) & (0 | 0)))
+                  || ((((4 * 32 + 26)) >> 5) == 2 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                  || ((((4 * 32 + 26)) >> 5) == 3 && (1UL << (((4 * 32 + 26)) & 31) & (0)))
+                  || ((((4 * 32 + 26)) >> 5) == 4 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                  || ((((4 * 32 + 26)) >> 5) == 5 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                  || ((((4 * 32 + 26)) >> 5) == 6 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                  || ((((4 * 32 + 26)) >> 5) == 7 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                  || ((((4 * 32 + 26)) >> 5) == 8 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                  || ((((4 * 32 + 26)) >> 5) == 9 && (1UL << (((4 * 32 + 26)) & 31) & 0))) ? 1
+              : (__builtin_constant_p(((4 * 32 + 26))) ?
+                 constant_test_bit(((4 * 32 + 26)), ((unsigned long *)((&boot_cpu_data)->x86_capability))) :
+                 variable_test_bit(((4 * 32 + 26)), ((unsigned long *)((&boot_cpu_data)->x86_capability))))))
+            ? (__builtin_constant_p((4 * 32 + 26))
+               && (((((4 * 32 + 26)) >> 5) == 0
+                    && (1UL << (((4 * 32 + 26)) & 31) &
+                        ((1 << ((0 * 32 + 0) & 31)) | 0 | 0 | (1 << ((0 * 32 + 6) & 31)) | (1 << ((0 * 32 + 8) & 31)) | 0 | 0 | (1 << ((0 * 32 + 15) & 31)) | 0 | 0)))
+                   || ((((4 * 32 + 26)) >> 5) == 1 && (1UL << (((4 * 32 + 26)) & 31) & (0 | 0)))
+                   || ((((4 * 32 + 26)) >> 5) == 2 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                   || ((((4 * 32 + 26)) >> 5) == 3 && (1UL << (((4 * 32 + 26)) & 31) & (0)))
+                   || ((((4 * 32 + 26)) >> 5) == 4 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                   || ((((4 * 32 + 26)) >> 5) == 5 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                   || ((((4 * 32 + 26)) >> 5) == 6 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                   || ((((4 * 32 + 26)) >> 5) == 7 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                   || ((((4 * 32 + 26)) >> 5) == 8 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                   || ((((4 * 32 + 26)) >> 5) == 9 && (1UL << (((4 * 32 + 26)) & 31) & 0))) ? 1
+               : (__builtin_constant_p(((4 * 32 + 26))) ?
+                  constant_test_bit(((4 * 32 + 26)), ((unsigned long *)((&boot_cpu_data)->x86_capability))) :
+                  variable_test_bit(((4 * 32 + 26)), ((unsigned long *)((&boot_cpu_data)->x86_capability)))))
+            : __builtin_constant_p((4 * 32 + 26))
+            ? __static_cpu_has((4 * 32 + 26))
+            : (__builtin_constant_p((4 * 32 + 26))
+               && (((((4 * 32 + 26)) >> 5) == 0
+                    && (1UL << (((4 * 32 + 26)) & 31) &
+                        ((1 << ((0 * 32 + 0) & 31)) | 0 | 0 | (1 << ((0 * 32 + 6) & 31)) | (1 << ((0 * 32 + 8) & 31)) | 0 | 0 | (1 << ((0 * 32 + 15) & 31)) | 0 | 0)))
+                   || ((((4 * 32 + 26)) >> 5) == 1 && (1UL << (((4 * 32 + 26)) & 31) & (0 | 0)))
+                   || ((((4 * 32 + 26)) >> 5) == 2 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                   || ((((4 * 32 + 26)) >> 5) == 3 && (1UL << (((4 * 32 + 26)) & 31) & (0)))
+                   || ((((4 * 32 + 26)) >> 5) == 4 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                   || ((((4 * 32 + 26)) >> 5) == 5 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                   || ((((4 * 32 + 26)) >> 5) == 6 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                   || ((((4 * 32 + 26)) >> 5) == 7 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                   || ((((4 * 32 + 26)) >> 5) == 8 && (1UL << (((4 * 32 + 26)) & 31) & 0))
+                   || ((((4 * 32 + 26)) >> 5) == 9 && (1UL << (((4 * 32 + 26)) & 31) & 0))) ? 1
+               : (__builtin_constant_p(((4 * 32 + 26))) ?
+                  constant_test_bit(((4 * 32 + 26)), ((unsigned long *)((&boot_cpu_data)->x86_capability))) :
+                  variable_test_bit(((4 * 32 + 26)), ((unsigned long *)((&boot_cpu_data)->x86_capability))))));
+}
+
+static inline __attribute__ ((always_inline)) __attribute__ ((always_inline))
+    __attribute__ ((pure))
+bool use_fxsr(void)
+{
+    return (__builtin_constant_p
+            ((__builtin_constant_p((0 * 32 + 24))
+              && (((((0 * 32 + 24)) >> 5) == 0
+                   && (1UL << (((0 * 32 + 24)) & 31) &
+                       ((1 << ((0 * 32 + 0) & 31)) | 0 | 0 | (1 << ((0 * 32 + 6) & 31)) | (1 << ((0 * 32 + 8) & 31)) | 0 | 0 | (1 << ((0 * 32 + 15) & 31)) | 0 | 0)))
+                  || ((((0 * 32 + 24)) >> 5) == 1 && (1UL << (((0 * 32 + 24)) & 31) & (0 | 0)))
+                  || ((((0 * 32 + 24)) >> 5) == 2 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                  || ((((0 * 32 + 24)) >> 5) == 3 && (1UL << (((0 * 32 + 24)) & 31) & (0)))
+                  || ((((0 * 32 + 24)) >> 5) == 4 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                  || ((((0 * 32 + 24)) >> 5) == 5 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                  || ((((0 * 32 + 24)) >> 5) == 6 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                  || ((((0 * 32 + 24)) >> 5) == 7 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                  || ((((0 * 32 + 24)) >> 5) == 8 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                  || ((((0 * 32 + 24)) >> 5) == 9 && (1UL << (((0 * 32 + 24)) & 31) & 0))) ? 1
+              : (__builtin_constant_p(((0 * 32 + 24))) ?
+                 constant_test_bit(((0 * 32 + 24)), ((unsigned long *)((&boot_cpu_data)->x86_capability))) :
+                 variable_test_bit(((0 * 32 + 24)), ((unsigned long *)((&boot_cpu_data)->x86_capability))))))
+            ? (__builtin_constant_p((0 * 32 + 24))
+               && (((((0 * 32 + 24)) >> 5) == 0
+                    && (1UL << (((0 * 32 + 24)) & 31) &
+                        ((1 << ((0 * 32 + 0) & 31)) | 0 | 0 | (1 << ((0 * 32 + 6) & 31)) | (1 << ((0 * 32 + 8) & 31)) | 0 | 0 | (1 << ((0 * 32 + 15) & 31)) | 0 | 0)))
+                   || ((((0 * 32 + 24)) >> 5) == 1 && (1UL << (((0 * 32 + 24)) & 31) & (0 | 0)))
+                   || ((((0 * 32 + 24)) >> 5) == 2 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                   || ((((0 * 32 + 24)) >> 5) == 3 && (1UL << (((0 * 32 + 24)) & 31) & (0)))
+                   || ((((0 * 32 + 24)) >> 5) == 4 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                   || ((((0 * 32 + 24)) >> 5) == 5 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                   || ((((0 * 32 + 24)) >> 5) == 6 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                   || ((((0 * 32 + 24)) >> 5) == 7 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                   || ((((0 * 32 + 24)) >> 5) == 8 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                   || ((((0 * 32 + 24)) >> 5) == 9 && (1UL << (((0 * 32 + 24)) & 31) & 0))) ? 1
+               : (__builtin_constant_p(((0 * 32 + 24))) ?
+                  constant_test_bit(((0 * 32 + 24)), ((unsigned long *)((&boot_cpu_data)->x86_capability))) :
+                  variable_test_bit(((0 * 32 + 24)), ((unsigned long *)((&boot_cpu_data)->x86_capability)))))
+            : __builtin_constant_p((0 * 32 + 24))
+            ? __static_cpu_has((0 * 32 + 24))
+            : (__builtin_constant_p((0 * 32 + 24))
+               && (((((0 * 32 + 24)) >> 5) == 0
+                    && (1UL << (((0 * 32 + 24)) & 31) &
+                        ((1 << ((0 * 32 + 0) & 31)) | 0 | 0 | (1 << ((0 * 32 + 6) & 31)) | (1 << ((0 * 32 + 8) & 31)) | 0 | 0 | (1 << ((0 * 32 + 15) & 31)) | 0 | 0)))
+                   || ((((0 * 32 + 24)) >> 5) == 1 && (1UL << (((0 * 32 + 24)) & 31) & (0 | 0)))
+                   || ((((0 * 32 + 24)) >> 5) == 2 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                   || ((((0 * 32 + 24)) >> 5) == 3 && (1UL << (((0 * 32 + 24)) & 31) & (0)))
+                   || ((((0 * 32 + 24)) >> 5) == 4 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                   || ((((0 * 32 + 24)) >> 5) == 5 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                   || ((((0 * 32 + 24)) >> 5) == 6 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                   || ((((0 * 32 + 24)) >> 5) == 7 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                   || ((((0 * 32 + 24)) >> 5) == 8 && (1UL << (((0 * 32 + 24)) & 31) & 0))
+                   || ((((0 * 32 + 24)) >> 5) == 9 && (1UL << (((0 * 32 + 24)) & 31) & 0))) ? 1
+               : (__builtin_constant_p(((0 * 32 + 24))) ?
+                  constant_test_bit(((0 * 32 + 24)), ((unsigned long *)((&boot_cpu_data)->x86_capability))) :
+                  variable_test_bit(((0 * 32 + 24)), ((unsigned long *)((&boot_cpu_data)->x86_capability))))));
+}
+
+static inline __attribute__ ((always_inline))
+void fpu_save_init(struct fpu *fpu)
+{
+    if (__builtin_constant_p(((use_xsave()))) ? ! !((use_xsave())) : ( {
+        int ______r;
+        ______r = ! !((use_xsave()));}
+    )) {
+        if (__builtin_constant_p
+            (((!(fpu->state->xsave.xsave_hdr.xstate_bv & 0x1)))) ?
+            ! !((!(fpu->state->xsave.xsave_hdr.xstate_bv & 0x1))) : ( {
+            int ______r;
+            ______r = ! !((!(fpu->state->xsave.xsave_hdr.xstate_bv & 0x1)));}
+        ))
+            return;
+    } else if (__builtin_constant_p(((use_fxsr()))) ? ! !((use_fxsr())) : ( {
+        int ______r;
+        ______r = ! !((use_fxsr()));}
+    )) {
+    }
+    if (__builtin_constant_p
+        ((((__builtin_constant_p(fpu->state->fxsave.swd & (1 << 7)) ?
+            ! !(fpu->state->fxsave.swd & (1 << 7)) : ( {
+            int ______r;
+            ______r = __builtin_expect(! !(fpu->state->fxsave.swd & (1 << 7)), 1);}
+        ))))) ?
+        ! !(((__builtin_constant_p(fpu->state->fxsave.swd & (1 << 7)) ?
+              ! !(fpu->state->fxsave.swd & (1 << 7)) : ( {
+            int ______r;
+            ______r = __builtin_expect(! !(fpu->state->fxsave.swd & (1 << 7)), 1);}
+        )))) : ( {
+            int ______r;
+            ______r = ! !(((__builtin_constant_p(fpu->state->fxsave.swd & (1 << 7))
+                            ? ! !(fpu->state->fxsave.swd & (1 << 7)) : ( {
+                ______r = __builtin_expect(! !(fpu->state->fxsave.swd & (1 << 7)), 1);}
+            ))));}
+    ))
+        asm volatile ("fnclex");
+}
+
+static inline __attribute__ ((always_inline))
+void __save_init_fpu(struct task_struct *tsk)
+{
+    fpu_save_init(&tsk->thread.fpu);
+}
+
+static inline __attribute__ ((always_inline))
+void kernel_fpu_begin(void)
+{
+    struct thread_info *me = current_thread_info();
+    if (__builtin_constant_p(((me->status & 0x0001))) ?
+        ! !((me->status & 0x0001)) : ( {
+        int ______r;
+        ______r = ! !((me->status & 0x0001));}
+    ))
+        __save_init_fpu(me->task);
+}
+
+struct async_aes_ctx {
+};
+static int aes_set_key(struct crypto_tfm *tfm, const u8 * in_key,
+                       unsigned int key_len)
+{
+}
+
+static void aes_encrypt(struct crypto_tfm *tfm, u8 * dst, const u8 * src)
+{
+}
+
+static void aes_decrypt(struct crypto_tfm *tfm, u8 * dst, const u8 * src)
+{
+    struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
+    if (__builtin_constant_p(((!irq_fpu_usable()))) ?
+        ! !((!irq_fpu_usable())) : ( {
+        int ______r;
+        ______r = ! !((!irq_fpu_usable()));}
+    ))
+        crypto_aes_decrypt_x86(ctx, dst, src);
+    else {
+        kernel_fpu_begin();
+    }
+}
+
+static struct crypto_alg aesni_alg = {
+    .cra_name = "aes",
+    .cra_driver_name = "aes-aesni",
+    .cra_priority = 300,
+    .cra_flags = 0x00000001,
+    .cra_blocksize = 16,
+    .cra_ctxsize = sizeof(struct crypto_aes_ctx) + (16) - 1,
+    .cra_alignmask = 0,
+    .cra_module = (&__this_module),
+    .cra_list = {
+        &(aesni_alg.cra_list),
+        &(aesni_alg.cra_list)},
+    .cra_u = {
+        .cipher = {
+            .cia_min_keysize = 16,
+            .cia_max_keysize = 32,
+            .cia_setkey = aes_set_key,
+            .cia_encrypt = aes_encrypt,
+            .cia_decrypt = aes_decrypt}
+    }
+};
+
+static struct crypto_alg blk_ecb_alg = {
+    .cra_name = "__ecb-aes-aesni",
+    .cra_driver_name = "__driver-ecb-aes-aesni",
+    .cra_priority = 0,
+    .cra_flags = 0x00000004,
+    .cra_blocksize = 16,
+    .cra_ctxsize = sizeof(struct crypto_aes_ctx) + (16) - 1,
+    .cra_alignmask = 0,
+    .cra_type = &crypto_blkcipher_type,
+    .cra_module = (&__this_module),
+    .cra_list = {
+        &(blk_ecb_alg.cra_list),
+        &(blk_ecb_alg.cra_list)},
+};
+
+static struct crypto_alg ablk_ecb_alg = {
+    .cra_name = "ecb(aes)",
+    .cra_driver_name = "ecb-aes-aesni",
+    .cra_priority = 400,
+    .cra_flags = 0x00000005 | 0x00000080,
+    .cra_blocksize = 16,
+    .cra_ctxsize = sizeof(struct async_aes_ctx),
+    .cra_alignmask = 0,
+    .cra_type = &crypto_ablkcipher_type,
+    .cra_module = (&__this_module),
+    .cra_list = {
+        &(ablk_ecb_alg.cra_list),
+        &(ablk_ecb_alg.cra_list)},
+};
+
+static struct crypto_alg ablk_cbc_alg = {
+    .cra_name = "cbc(aes)",
+    .cra_driver_name = "cbc-aes-aesni",
+    .cra_priority = 400,
+    .cra_flags = 0x00000005 | 0x00000080,
+    .cra_blocksize = 16,
+    .cra_ctxsize = sizeof(struct async_aes_ctx),
+    .cra_alignmask = 0,
+    .cra_type = &crypto_ablkcipher_type,
+    .cra_module = (&__this_module),
+    .cra_list = {
+        &(ablk_cbc_alg.cra_list),
+        &(ablk_cbc_alg.cra_list)},
+};
+
+static struct crypto_alg ablk_pcbc_alg = {
+    .cra_name = "pcbc(aes)",
+    .cra_driver_name = "pcbc-aes-aesni",
+    .cra_priority = 400,
+    .cra_flags = 0x00000005 | 0x00000080,
+    .cra_blocksize = 16,
+    .cra_ctxsize = sizeof(struct async_aes_ctx),
+    .cra_alignmask = 0,
+    .cra_type = &crypto_ablkcipher_type,
+    .cra_module = (&__this_module),
+    .cra_list = {
+        &(ablk_pcbc_alg.cra_list),
+        &(ablk_pcbc_alg.cra_list)},
+};
+
+static int __attribute__ ((__section__(".init.text")))
+    __attribute__ ((__cold__))
+    __attribute__ ((no_instrument_function)) aesni_init(void)
+{
+    int err;
+    if (__builtin_constant_p((((err = crypto_register_alg(&aesni_alg))))) ?
+        ! !(((err = crypto_register_alg(&aesni_alg)))) : ( {
+        int ______r;
+        ______r = ! !(((err = crypto_register_alg(&aesni_alg))));}
+    ))
+        goto ablk_cbc_err;
+  ablk_cbc_err:crypto_unregister_alg(&ablk_ecb_alg);
+}
+
+int init_module(void) __attribute__ ((alias("aesni_init")));