+/* Placeholder left by test-case reduction; all fields were stripped.
+ * An empty struct is a GNU C extension (sizeof == 0). */
+struct ftrace_branch_data {
+};
+/* Reduced stand-in for the kernel's circular doubly linked list head;
+ * the next/prev pointers were stripped by reduction, but the
+ * self-referential initializers below still take its address. */
+struct list_head {
+};
+/*
+ * __static_cpu_has() - self-patching CPU feature test (reduced form).
+ *
+ * The asm-goto emits "1: jmp %l[t_no]" plus a record in the
+ * .altinstructions section keyed on `bit`, so a boot-time alternatives
+ * mechanism (outside this file -- confirm against the original tree) can
+ * patch the jump away once the feature is known present.  The
+ * fall-through path returns 1 (feature present); the t_no label, the
+ * asm's only goto target, returns 0.
+ */
+int __static_cpu_has(unsigned short bit)
+{
+ asm goto ("1: jmp %l[t_no]\n" "2:\n" ".section .altinstructions,\"a\"\n"
+ " " ".balign 4" " " "\n" " " ".long" " " "1b\n" " " ".long"
+ " " "0\n" " .word %P0\n" " .byte 2b - 1b\n" " .byte 0\n"
+ ".previous\n"::"i" (bit)::t_no);
+ return 1;
+ t_no:
+return 0;
+}
+
+/* Per-CPU capability data: x86_capability holds 10 x 32 feature bits,
+ * indexed as (word * 32 + bit) by the feature tests below.  Aligned to a
+ * 64-byte boundary (1 << 6 -- cache line sized). */
+struct cpuinfo_x86 {
+ unsigned int x86_capability[10];
+} __attribute__ ((__aligned__((1 << (6)))));
+/* Boot processor's capability words; defined elsewhere in the tree. */
+extern struct cpuinfo_x86 boot_cpu_data;
+/* Reduced FXSAVE area: only the x87 status word (swd) survives reduction;
+ * the 16-byte alignment of the real 512-byte layout is kept. */
+struct i387_fxsave_struct {
+ unsigned short swd;
+} __attribute__ ((aligned(16)));
+/* XSAVE header: xstate_bv is the bitmap of state components actually
+ * saved (bit 0 is tested by fpu_save_init() below; per the XSAVE spec
+ * that is x87 FP state -- confirm against the Intel SDM). */
+struct xsave_hdr_struct {
+ unsigned long long xstate_bv;
+} __attribute__ ((packed));
+/* Reduced XSAVE area: only the header remains; 64-byte alignment of the
+ * original hardware layout is preserved. */
+struct xsave_struct {
+ struct xsave_hdr_struct xsave_hdr;
+} __attribute__ ((packed, aligned(64)));
+/* Per-thread FPU state: one storage area viewed either as a legacy
+ * FXSAVE image or as an XSAVE image, depending on CPU features. */
+union thread_xstate {
+ struct i387_fxsave_struct fxsave;
+ struct xsave_struct xsave;
+};
+/* FPU context handle: points at the thread's xstate buffer.  Nothing in
+ * this reduced file ever allocates `state`; fpu_save_init() dereferences
+ * it unconditionally. */
+struct fpu {
+ union thread_xstate *state;
+};
+/* NOTE(review): `mm_segment_t` here declares a GLOBAL VARIABLE of type
+ * struct thread_struct.  In the original kernel headers mm_segment_t is a
+ * typedef; the `typedef` keyword was most likely dropped by test-case
+ * reduction.  Left as-is to preserve the reduced test case. */
+struct thread_struct {
+ struct fpu fpu;
+} mm_segment_t;
+/* Reduced thread_info: back-pointer to the owning task (struct
+ * task_struct is defined later in this file) and a status word.
+ * Nothing in this file reads either field. */
+struct thread_info {
+ struct task_struct *task;
+ unsigned int status;
+};
+/* Single-block cipher operations: key-size bounds plus setkey / encrypt /
+ * decrypt hooks.  Note that cia_decrypt's type takes (tfm, dst, src);
+ * the aes_decrypt() defined later in this file takes only (tfm), so the
+ * initializer that wires it up is type-mismatched (reduction artifact). */
+struct cipher_alg {
+ unsigned int cia_min_keysize;
+ unsigned int cia_max_keysize;
+ int (*cia_setkey) (struct crypto_tfm * tfm, const unsigned char * key,
+ unsigned int keylen);
+ void (*cia_encrypt) (struct crypto_tfm * tfm, unsigned char * dst, const unsigned char * src);
+ void (*cia_decrypt) (struct crypto_tfm * tfm, unsigned char * dst, const unsigned char * src);
+};
+/* Generic crypto algorithm descriptor (reduced): registration list link,
+ * flags and sizing, identification strings, and a union of per-type ops
+ * of which only the plain cipher variant survives reduction.
+ * struct crypto_type and struct module are referenced by pointer only
+ * and never defined in this file. */
+struct crypto_alg {
+ struct list_head cra_list;
+ unsigned int cra_flags;
+ unsigned int cra_blocksize;
+ unsigned int cra_ctxsize;
+ unsigned int cra_alignmask;
+ int cra_priority;
+ char cra_name[64];
+ char cra_driver_name[64];
+ const struct crypto_type *cra_type;
+ union {
+ struct cipher_alg cipher;
+ } cra_u;
+ struct module *cra_module;
+};
+/* Frontend type descriptors; defined elsewhere, unused in this file. */
+extern const struct crypto_type crypto_ablkcipher_type;
+extern const struct crypto_type crypto_blkcipher_type;
+/* AES key-schedule context, emptied by reduction (GNU empty struct). */
+struct crypto_aes_ctx {
+};
+/* Reduced task_struct: only the per-thread state (and through it the
+ * FPU context handle) remains. */
+struct task_struct {
+ struct thread_struct thread;
+};
+/*
+ * use_xsave() - preprocessor-expanded CPU feature test for feature bit
+ * 4*32+26 in boot_cpu_data.x86_capability (in the kernel this is
+ * X86_FEATURE_XSAVE -- presumably; confirm against the original source).
+ *
+ * The nested __builtin_constant_p() ternaries select, at compile time,
+ * one of three strategies: a constant-folded answer from the build-time
+ * required-feature mask words (the long (1 << ...) | 0 | ... chains), the
+ * asm-goto self-patching helper __static_cpu_has(), or a runtime bit test
+ * via constant_test_bit()/variable_test_bit() (both declared elsewhere in
+ * the original tree -- implicit here).
+ */
+static __attribute__ ((always_inline))
+ __attribute__ ((pure))
+int use_xsave(void)
+{
+ /* One single expression below, left byte-for-byte as the preprocessor
+  * emitted it -- restyling risks disturbing the operator grouping. */
+ return (
+__builtin_constant_p
+ ((__builtin_constant_p((4 * 32 + 26))
+ &&
+ (((((4 * 32 + 26)) >> 5) == 0
+ && (1UL << (((4 * 32 + 26)) & 31) &
+ ((1 << ((0 * 32 + 0) & 31)) | 0 | 0 |
+ (1 << ((0 * 32 + 6) & 31)) | (1 << ((0 * 32 + 8) & 31))
+ | 0 | 0 | (1 << ((0 * 32 + 15) & 31)) | 0 | 0)))
+ || ((((4 * 32 + 26)) >> 5) == 1
+ && (1UL << (((4 * 32 + 26)) & 31) & (0 | 0)))
+ || ((((4 * 32 + 26)) >> 5) == 2
+ && (1UL << (((4 * 32 + 26)) & 31) & 0))
+ || ((((4 * 32 + 26)) >> 5) == 3
+ && (1UL << (((4 * 32 + 26)) & 31) & (0)))
+ || ((((4 * 32 + 26)) >> 5) == 4
+ && (1UL << (((4 * 32 + 26)) & 31) & 0))
+ || ((((4 * 32 + 26)) >> 5) == 5
+ && (1UL << (((4 * 32 + 26)) & 31) & 0))
+ || ((((4 * 32 + 26)) >> 5) == 6
+ && (1UL << (((4 * 32 + 26)) & 31) & 0))
+ || ((((4 * 32 + 26)) >> 5) == 7
+ && (1UL << (((4 * 32 + 26)) & 31) & 0))
+ || ((((4 * 32 + 26)) >> 5) == 8
+ && (1UL << (((4 * 32 + 26)) & 31) & 0))
+ || ((((4 * 32 + 26)) >> 5) == 9
+ && (1UL << (((4 * 32 + 26)) & 31) & 0))) ? 1
+ :
+(__builtin_constant_p(4 * 32 + 26) ?
+ constant_test_bit(4 * 32 + 26,
+ ((unsigned long *)((&boot_cpu_data)->
+ x86_capability))) :
+ variable_test_bit(((4 * 32 + 26))
+,
+ ((unsigned long *)((&boot_cpu_data)->
+ x86_capability))))))
+ ? (__builtin_constant_p((4 * 32 + 26))
+ &&
+ (((((4 * 32 + 26)) >> 5) == 0
+ && (1UL << (((4 * 32 + 26)) & 31) &
+ ((1 << ((0 * 32 + 0) & 31)) | 0 | 0 |
+ (1 << ((0 * 32 + 6) & 31)) | (1 <<
+ ((0 * 32 +
+ 8) & 31)) | 0 | 0 | (1
+ <<
+ ((0 * 32 + 15) & 31)) | 0 | 0))) || ((((4 * 32 + 26)) >> 5) == 1 && (1UL << (((4 * 32 + 26)) & 31) & (0 | 0))) || ((((4 * 32 + 26)) >> 5) == 2 && (1UL << (((4 * 32 + 26)) & 31) & 0)) || ((((4 * 32 + 26)) >> 5) == 3 && (1UL << (((4 * 32 + 26)) & 31) & (0))) || ((((4 * 32 + 26)) >> 5) == 4 && (1UL << (((4 * 32 + 26)) & 31) & 0)) || ((((4 * 32 + 26)) >> 5) == 5 && (1UL << (((4 * 32 + 26)) & 31) & 0)) || ((((4 * 32 + 26)) >> 5) == 6 && (1UL << (((4 * 32 + 26)) & 31) & 0)) || ((((4 * 32 + 26)) >> 5) == 7 && (1UL << (((4 * 32 + 26)) & 31) & 0)) || ((((4 * 32 + 26)) >> 5) == 8 && (1UL << (((4 * 32 + 26)) & 31) & 0)) || ((((4 * 32 + 26)) >> 5) == 9 && (1UL << (((4 * 32 + 26)) & 31) & 0))) ? 1 : (__builtin_constant_p(((4 * 32 + 26))) ? constant_test_bit(((4 * 32 + 26)), ((unsigned long *)((&boot_cpu_data)->x86_capability))) : variable_test_bit(((4 * 32 + 26)), ((unsigned long *)((&boot_cpu_data)->x86_capability))))) : __builtin_constant_p((4 * 32 + 26)) ? __static_cpu_has((4 * 32 + 26)) : (__builtin_constant_p((4 * 32 + 26)) && (((((4 * 32 + 26)) >> 5) == 0 && (1UL << (((4 * 32 + 26)) & 31) & ((1 << ((0 * 32 + 0) & 31)) | 0 | 0 | (1 << ((0 * 32 + 6) & 31)) | (1 << ((0 * 32 + 8) & 31)) | 0 | 0 | (1 << ((0 * 32 + 15) & 31)) | 0 | 0))) || ((((4 * 32 + 26)) >> 5) == 1 && (1UL << (((4 * 32 + 26)) & 31) & (0 | 0))) || ((((4 * 32 + 26)) >> 5) == 2 && (1UL << (((4 * 32 + 26)) & 31) & 0)) || ((((4 * 32 + 26)) >> 5) == 3 && (1UL << (((4 * 32 + 26)) & 31) & (0))) || ((((4 * 32 + 26)) >> 5) == 4 && (1UL << (((4 * 32 + 26)) & 31) & 0)) || ((((4 * 32 + 26)) >> 5) == 5 && (1UL << (((4 * 32 + 26)) & 31) & 0)) || ((((4 * 32 + 26)) >> 5) == 6 && (1UL << (((4 * 32 + 26)) & 31) & 0)) || ((((4 * 32 + 26)) >> 5) == 7 && (1UL << (((4 * 32 + 26)) & 31) & 0)) || ((((4 * 32 + 26)) >> 5) == 8 && (1UL << (((4 * 32 + 26)) & 31) & 0)) || ((((4 * 32 + 26)) >> 5) == 9 && (1UL << (((4 * 32 + 26)) & 31) & 0))) ? 1 : (__builtin_constant_p(((4 * 32 + 26))) ? 
constant_test_bit(((4 * 32 + 26)), ((unsigned long *)((&boot_cpu_data)->x86_capability))) : variable_test_bit(((4 * 32 + 26)), ((unsigned long *)((&boot_cpu_data)->x86_capability))))));
+}
+
+/*
+ * use_fxsr() - preprocessor-expanded CPU feature test for feature bit
+ * 0*32+24 (X86_FEATURE_FXSR in the kernel -- presumably; confirm against
+ * the original source).  Same three-way compile-time dispatch as
+ * use_xsave(): constant-folded mask test, __static_cpu_has() patching
+ * helper, or runtime constant_test_bit()/variable_test_bit().
+ *
+ * NOTE(review): __attribute__ ((always_inline)) appears twice on the
+ * declaration below -- redundant but harmless; left untouched in this
+ * documentation-only pass.
+ */
+static inline __attribute__ ((always_inline)) __attribute__ ((always_inline))
+ __attribute__ ((pure))
+int use_fxsr(void)
+{
+ /* Single machine-generated expression; kept byte-for-byte. */
+ return (__builtin_constant_p
+ ((__builtin_constant_p((0 * 32 + 24))
+ &&
+ (((((0 * 32 + 24)) >> 5) == 0
+ && (1UL << (((0 * 32 + 24)) & 31) &
+ ((1 << ((0 * 32 + 0) & 31)) | 0 | 0 |
+ (1 << ((0 * 32 + 6) & 31)) | (1 << ((0 * 32 + 8) & 31))
+ | 0 | 0 | (1 << ((0 * 32 + 15) & 31)) | 0 | 0)))
+ || ((((0 * 32 + 24)) >> 5) == 1
+ && (1UL << (((0 * 32 + 24)) & 31) & (0 | 0)))
+ || ((((0 * 32 + 24)) >> 5) == 2
+ && (1UL << (((0 * 32 + 24)) & 31) & 0))
+ || ((((0 * 32 + 24)) >> 5) == 3
+ && (1UL << (((0 * 32 + 24)) & 31) & (0)))
+ || ((((0 * 32 + 24)) >> 5) == 4
+ && (1UL << (((0 * 32 + 24)) & 31) & 0))
+ || ((((0 * 32 + 24)) >> 5) == 5
+ && (1UL << (((0 * 32 + 24)) & 31) & 0))
+ || ((((0 * 32 + 24)) >> 5) == 6
+ && (1UL << (((0 * 32 + 24)) & 31) & 0))
+ || ((((0 * 32 + 24)) >> 5) == 7
+ && (1UL << (((0 * 32 + 24)) & 31) & 0))
+ || ((((0 * 32 + 24)) >> 5) == 8
+ && (1UL << (((0 * 32 + 24)) & 31) & 0))
+ || ((((0 * 32 + 24)) >> 5) == 9
+ && (1UL << (((0 * 32 + 24)) & 31) & 0))) ? 1
+ : (__builtin_constant_p(((0 * 32 + 24))) ?
+ constant_test_bit(((0 * 32 + 24)),
+ ((unsigned long *)((&boot_cpu_data)->
+ x86_capability))) :
+ variable_test_bit(((0 * 32 + 24)),
+ ((unsigned long *)((&boot_cpu_data)->
+ x86_capability))))))
+ ? (__builtin_constant_p((0 * 32 + 24))
+ &&
+ (((((0 * 32 + 24)) >> 5) == 0
+ && (1UL << (((0 * 32 + 24)) & 31) &
+ ((1 << ((0 * 32 + 0) & 31)) | 0 | 0 |
+ (1 << ((0 * 32 + 6) & 31)) | (1 <<
+ ((0 * 32 +
+ 8) & 31)) | 0 | 0 | (1
+ <<
+ ((0 * 32 + 15) & 31)) | 0 | 0))) || ((((0 * 32 + 24)) >> 5) == 1 && (1UL << (((0 * 32 + 24)) & 31) & (0 | 0))) || ((((0 * 32 + 24)) >> 5) == 2 && (1UL << (((0 * 32 + 24)) & 31) & 0)) || ((((0 * 32 + 24)) >> 5) == 3 && (1UL << (((0 * 32 + 24)) & 31) & (0))) || ((((0 * 32 + 24)) >> 5) == 4 && (1UL << (((0 * 32 + 24)) & 31) & 0)) || ((((0 * 32 + 24)) >> 5) == 5 && (1UL << (((0 * 32 + 24)) & 31) & 0)) || ((((0 * 32 + 24)) >> 5) == 6 && (1UL << (((0 * 32 + 24)) & 31) & 0)) || ((((0 * 32 + 24)) >> 5) == 7 && (1UL << (((0 * 32 + 24)) & 31) & 0)) || ((((0 * 32 + 24)) >> 5) == 8 && (1UL << (((0 * 32 + 24)) & 31) & 0)) || ((((0 * 32 + 24)) >> 5) == 9 && (1UL << (((0 * 32 + 24)) & 31) & 0))) ? 1 : (__builtin_constant_p(((0 * 32 + 24))) ? constant_test_bit(((0 * 32 + 24)), ((unsigned long *)((&boot_cpu_data)->x86_capability))) : variable_test_bit(((0 * 32 + 24)), ((unsigned long *)((&boot_cpu_data)->x86_capability))))) : __builtin_constant_p((0 * 32 + 24)) ? __static_cpu_has((0 * 32 + 24)) : (__builtin_constant_p((0 * 32 + 24)) && (((((0 * 32 + 24)) >> 5) == 0 && (1UL << (((0 * 32 + 24)) & 31) & ((1 << ((0 * 32 + 0) & 31)) | 0 | 0 | (1 << ((0 * 32 + 6) & 31)) | (1 << ((0 * 32 + 8) & 31)) | 0 | 0 | (1 << ((0 * 32 + 15) & 31)) | 0 | 0))) || ((((0 * 32 + 24)) >> 5) == 1 && (1UL << (((0 * 32 + 24)) & 31) & (0 | 0))) || ((((0 * 32 + 24)) >> 5) == 2 && (1UL << (((0 * 32 + 24)) & 31) & 0)) || ((((0 * 32 + 24)) >> 5) == 3 && (1UL << (((0 * 32 + 24)) & 31) & (0))) || ((((0 * 32 + 24)) >> 5) == 4 && (1UL << (((0 * 32 + 24)) & 31) & 0)) || ((((0 * 32 + 24)) >> 5) == 5 && (1UL << (((0 * 32 + 24)) & 31) & 0)) || ((((0 * 32 + 24)) >> 5) == 6 && (1UL << (((0 * 32 + 24)) & 31) & 0)) || ((((0 * 32 + 24)) >> 5) == 7 && (1UL << (((0 * 32 + 24)) & 31) & 0)) || ((((0 * 32 + 24)) >> 5) == 8 && (1UL << (((0 * 32 + 24)) & 31) & 0)) || ((((0 * 32 + 24)) >> 5) == 9 && (1UL << (((0 * 32 + 24)) & 31) & 0))) ? 1 : (__builtin_constant_p(((0 * 32 + 24))) ? 
constant_test_bit(((0 * 32 + 24)), ((unsigned long *)((&boot_cpu_data)->x86_capability))) : variable_test_bit(((0 * 32 + 24)), ((unsigned long *)((&boot_cpu_data)->x86_capability))))));
+}
+
+/*
+ * fpu_save_init() - reduced FPU state-save path.
+ *
+ * On XSAVE-capable CPUs: return early when bit 0 of xsave_hdr.xstate_bv
+ * is clear (per the XSAVE spec bit 0 is x87 FP state -- confirm against
+ * the Intel SDM).  Otherwise fall through to testing bit 7 of the x87
+ * status word (presumably the exception-summary/ES flag) and, when set,
+ * execute FNCLEX to clear pending x87 exceptions.
+ *
+ * The grotesque __builtin_constant_p()/statement-expression nesting is
+ * the expansion of the kernel's unlikely()/branch-profiling macros;
+ * kept byte-for-byte -- it is far too order-sensitive to restyle.
+ *
+ * NOTE(review): `fpu` is dereferenced with no NULL check, and the only
+ * caller in this file (__save_init_fpu) passes 0.
+ */
+static inline __attribute__ ((always_inline))
+void fpu_save_init(struct fpu *fpu)
+{
+ /* XSAVE path: skip the FNCLEX dance when x87 state was not saved. */
+ if (__builtin_constant_p(use_xsave())? !!use_xsave() : !!use_xsave()) {
+ if (__builtin_constant_p
+ (((!(fpu->state->xsave.xsave_hdr.
+ xstate_bv & 0x1)))) ? ! !((!(fpu->state->xsave.
+ xsave_hdr.
+ xstate_bv & 0x1))) : ( {
+ ! !((!(fpu->state->xsave.xsave_hdr.xstate_bv & 0x1)));}
+ ))
+ return;
+ } else if (!!use_fxsr()) {
+ /* FXSR branch emptied by reduction. */
+ }
+ /* Expanded unlikely(fxsave.swd & (1 << 7)): status-word bit 7 set. */
+ if (__builtin_constant_p
+ ((((__builtin_constant_p(fpu->state->fxsave.swd & (1 << 7)) ?
+ ! !(fpu->state->fxsave.swd & (1 << 7)) : ( {
+ __builtin_expect(! !
+ (fpu->
+ state->
+ fxsave.
+ swd
+ & (1
+ <<
+ 7)),
+ 1);}
+ )))))?
+ ! !(((__builtin_constant_p(fpu->state->fxsave.swd & (1 << 7)) ?
+ ! !(fpu->state->fxsave.swd & (1 << 7)) : ( {
+ __builtin_expect(! !
+ (fpu->
+ state->
+ fxsave.
+ swd
+ &
+ (1
+ <<
+ 7)),
+ 1);}
+ )))): ( {
+ int ______r;
+ ______r =
+ ! !(((__builtin_constant_p(fpu->state->fxsave.swd & (1 << 7))
+ ? ! !(fpu->state->fxsave.swd & (1 << 7)) : ( {
+ ______r =
+ __builtin_expect
+ (! !
+ (fpu->
+ state->
+ fxsave.
+ swd & (1
+ <<
+ 7)),
+ 1);}
+ ))));}
+ ))
+ /* Clear pending x87 exceptions without raising them. */
+ asm volatile ("fnclex");
+}
+
+/* Reduced __save_init_fpu(): in the original this saves the CURRENT
+ * task's FPU state.
+ *
+ * NOTE(review): it passes a NULL `struct fpu *`; fpu_save_init() reads
+ * fpu->state->... unconditionally, so this dereferences NULL at runtime.
+ * Almost certainly the defect this reduced test case exists to exhibit;
+ * left as-is. */
+static
+__attribute__ ((always_inline))
+void __save_init_fpu(void)
+{
+ fpu_save_init(0);
+}
+
+/* Reduced kernel_fpu_begin(): only the state-save call survives; the
+ * preemption-disable and TS-flag handling of the original were stripped
+ * (presumably -- confirm against the original source). */
+static __attribute__ ((always_inline))
+void kernel_fpu_begin(void)
+{
+ __save_init_fpu();
+}
+
+/*
+ * aes_decrypt() - reduced cia_decrypt stub.
+ *
+ * When the FPU is not usable in the current context, fall back to the
+ * software implementation; otherwise enter kernel-FPU mode (with no
+ * matching end and no actual AES-NI work -- reduction artifact).
+ *
+ * Fix: the condition was `!!!irq_fpu_usable()`; a single logical-not is
+ * exactly equivalent (!!!x == !x) and readable.
+ *
+ * NOTE(review): this signature does not match cipher_alg.cia_decrypt,
+ * which expects (tfm, dst, src); also preserved from the reduced case.
+ */
+static void aes_decrypt(struct crypto_tfm *tfm)
+{
+ struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
+ if (!irq_fpu_usable())
+ crypto_aes_decrypt_x86(ctx);
+ else
+ kernel_fpu_begin();
+}
+
+/* AES-NI cipher descriptor (reduced): the list head is initialized to
+ * point at itself (an empty circular list), and only the decrypt hook is
+ * wired up -- to aes_decrypt(), whose signature mismatch with
+ * cia_decrypt is inherited from the reduced test case. */
+static struct crypto_alg aesni_alg = {
+	.cra_list = { &aesni_alg.cra_list, &aesni_alg.cra_list },
+	.cra_u = {
+		.cipher = {
+			.cia_decrypt = aes_decrypt,
+		},
+	},
+};
+
+/* ECB blkcipher descriptor (reduced): self-linked empty list, no ops. */
+static struct crypto_alg blk_ecb_alg = {
+	.cra_list = { &blk_ecb_alg.cra_list, &blk_ecb_alg.cra_list },
+};
+
+/* Async ECB descriptor (reduced): self-linked empty list, no ops.
+ * Referenced by aesni_init() on the registration-failure path. */
+static struct crypto_alg ablk_ecb_alg = {
+	.cra_list = { &ablk_ecb_alg.cra_list, &ablk_ecb_alg.cra_list },
+};
+
+/* Async CBC descriptor (reduced): self-linked empty list, no ops. */
+static struct crypto_alg ablk_cbc_alg = {
+	.cra_list = { &ablk_cbc_alg.cra_list, &ablk_cbc_alg.cra_list },
+};
+
+/* Async PCBC descriptor (reduced): self-linked empty list, no ops. */
+static struct crypto_alg ablk_pcbc_alg = {
+	.cra_list = { &ablk_pcbc_alg.cra_list, &ablk_pcbc_alg.cra_list },
+};
+
+/*
+ * aesni_init() - reduced module init: register aesni_alg and, if
+ * registration fails (non-zero return), unregister ablk_ecb_alg.
+ * (Registering one algorithm and unregistering a DIFFERENT one looks
+ * wrong but is preserved from the reduced test case.)
+ *
+ * Fixes: the function is declared int but fell off the end without a
+ * return statement -- undefined behavior if the caller reads the value;
+ * it now propagates the registration result.  The redundant !! in the
+ * if-condition is dropped (truth value is unchanged inside `if`).
+ */
+int
+aesni_init(void)
+{
+ int err = crypto_register_alg(&aesni_alg);
+ if (err)
+ crypto_unregister_alg(&ablk_ecb_alg);
+ return err;
+}