crypto: vmx - disable preemption to enable vsx in aes_ctr.c
[ Upstream commit 7dede913fc2ab9c0d3bff3a49e26fa9e858b0c13 ]

Some preemptible check warnings were reported from enable_kernel_vsx(). This
patch disables preemption in aes_ctr.c before enabling vsx, and they are now
consistent with other files in the same directory.

Signed-off-by: Li Zhong <zhong@linux.vnet.ibm.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: Sasha Levin <alexander.levin@verizon.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
parent 30019ca7dc
commit 26fa336d69
drivers/crypto/vmx/aes_ctr.c

@@ -80,11 +80,13 @@ static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
 	int ret;
 	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
+	preempt_disable();
 	pagefault_disable();
 	enable_kernel_altivec();
 	enable_kernel_vsx();
 	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
 	pagefault_enable();
+	preempt_enable();
 
 	ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
 	return ret;
@@ -99,11 +101,13 @@ static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
 	u8 *dst = walk->dst.virt.addr;
 	unsigned int nbytes = walk->nbytes;
 
+	preempt_disable();
 	pagefault_disable();
 	enable_kernel_altivec();
 	enable_kernel_vsx();
 	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
 	pagefault_enable();
+	preempt_enable();
 
 	crypto_xor(keystream, src, nbytes);
 	memcpy(dst, keystream, nbytes);
@@ -132,6 +136,7 @@ static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
 		blkcipher_walk_init(&walk, dst, src, nbytes);
 		ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
 		while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
+			preempt_disable();
 			pagefault_disable();
 			enable_kernel_altivec();
 			enable_kernel_vsx();
@@ -143,6 +148,7 @@ static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
 						    &ctx->enc_key,
 						    walk.iv);
 			pagefault_enable();
+			preempt_enable();
 
 			/* We need to update IV mostly for last bytes/round */
 			inc = (nbytes & AES_BLOCK_MASK) / AES_BLOCK_SIZE;
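All four hunks above apply the same pattern: every region that calls
enable_kernel_vsx() is now bracketed by preempt_disable()/preempt_enable() in
addition to the existing pagefault_disable()/pagefault_enable() pair, because
enable_kernel_vsx() checks for preemptible context and warns otherwise. The
sketch below condenses that pattern into a pair of helpers for illustration
only; vmx_begin() and vmx_end() are hypothetical names (the driver open-codes
the sequence at each call site, as the diff shows), and the header choices
assume a powerpc kernel build.

/*
 * Illustrative sketch only, not part of the patch. vmx_begin()/vmx_end()
 * are hypothetical helpers that condense the open-coded sequence used at
 * each call site in aes_ctr.c. Assumes a powerpc kernel build.
 */
#include <linux/preempt.h>	/* preempt_disable()/preempt_enable() */
#include <linux/uaccess.h>	/* pagefault_disable()/pagefault_enable() */
#include <asm/switch_to.h>	/* enable_kernel_altivec()/enable_kernel_vsx() */

static inline void vmx_begin(void)
{
	/* New in this patch: the VMX/VSX unit must not be enabled preemptibly. */
	preempt_disable();
	pagefault_disable();
	enable_kernel_altivec();
	enable_kernel_vsx();
}

static inline void vmx_end(void)
{
	pagefault_enable();
	/* New in this patch: matches the preempt_disable() in vmx_begin(). */
	preempt_enable();
}

The ordering matters: preemption is disabled first and re-enabled last, so the
task cannot be scheduled out or migrated while the AltiVec/VSX state is enabled
for kernel use. On the kernels this backport targets, pagefault_disable() by
itself no longer disables preemption, which is what produced the preemptible
check warnings mentioned in the commit message.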