Diffstat (limited to 'arch/s390/crypto')
-rw-r--r--  arch/s390/crypto/Makefile         |   2
-rw-r--r--  arch/s390/crypto/aes_s390.c       |   6
-rw-r--r--  arch/s390/crypto/paes_s390.c      | 184
-rw-r--r--  arch/s390/crypto/sha.h            |  12
-rw-r--r--  arch/s390/crypto/sha3_256_s390.c  | 147
-rw-r--r--  arch/s390/crypto/sha3_512_s390.c  | 155
-rw-r--r--  arch/s390/crypto/sha_common.c     |  75
7 files changed, 529 insertions(+), 52 deletions(-)
diff --git a/arch/s390/crypto/Makefile b/arch/s390/crypto/Makefile
index a51010ea62fa..12889d4652cc 100644
--- a/arch/s390/crypto/Makefile
+++ b/arch/s390/crypto/Makefile
@@ -6,6 +6,8 @@
obj-$(CONFIG_CRYPTO_SHA1_S390) += sha1_s390.o sha_common.o
obj-$(CONFIG_CRYPTO_SHA256_S390) += sha256_s390.o sha_common.o
obj-$(CONFIG_CRYPTO_SHA512_S390) += sha512_s390.o sha_common.o
+obj-$(CONFIG_CRYPTO_SHA3_256_S390) += sha3_256_s390.o sha_common.o
+obj-$(CONFIG_CRYPTO_SHA3_512_S390) += sha3_512_s390.o sha_common.o
obj-$(CONFIG_CRYPTO_DES_S390) += des_s390.o
obj-$(CONFIG_CRYPTO_AES_S390) += aes_s390.o
obj-$(CONFIG_CRYPTO_PAES_S390) += paes_s390.o
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index d4f6fd42a105..9803e96d2924 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -586,6 +586,9 @@ static int xts_aes_encrypt(struct blkcipher_desc *desc,
struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
struct blkcipher_walk walk;
+ if (!nbytes)
+ return -EINVAL;
+
if (unlikely(!xts_ctx->fc || (nbytes % XTS_BLOCK_SIZE) != 0))
return xts_fallback_encrypt(desc, dst, src, nbytes);
@@ -600,6 +603,9 @@ static int xts_aes_decrypt(struct blkcipher_desc *desc,
struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
struct blkcipher_walk walk;
+ if (!nbytes)
+ return -EINVAL;
+
if (unlikely(!xts_ctx->fc || (nbytes % XTS_BLOCK_SIZE) != 0))
return xts_fallback_decrypt(desc, dst, src, nbytes);
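Why the new guard matters: with nbytes == 0 the (nbytes % XTS_BLOCK_SIZE) != 0 test is false, so an empty request would previously have fallen through to the CPACF path instead of being rejected. A minimal userspace sketch of the resulting dispatch order (xts_dispatch, have_fc and the PATH_* names are illustrative, not from the driver):

#include <stdio.h>

#define XTS_BLOCK_SIZE 16

enum path { PATH_EINVAL, PATH_FALLBACK, PATH_CPACF };

/* sketch: mirrors the checks at the top of xts_aes_encrypt/decrypt */
static enum path xts_dispatch(unsigned int nbytes, int have_fc)
{
	if (!nbytes)
		return PATH_EINVAL;      /* new: empty requests are rejected */
	if (!have_fc || (nbytes % XTS_BLOCK_SIZE) != 0)
		return PATH_FALLBACK;    /* software fallback cipher */
	return PATH_CPACF;               /* hardware (CPACF) path */
}

int main(void)
{
	/* 0 % 16 == 0, so without the new check an empty request
	 * would have taken the CPACF path */
	printf("%d %d %d\n",
	       xts_dispatch(0, 1),      /* PATH_EINVAL */
	       xts_dispatch(17, 1),     /* PATH_FALLBACK */
	       xts_dispatch(32, 1));    /* PATH_CPACF */
	return 0;
}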
diff --git a/arch/s390/crypto/paes_s390.c b/arch/s390/crypto/paes_s390.c
index e8d9fa54569c..6184dceed340 100644
--- a/arch/s390/crypto/paes_s390.c
+++ b/arch/s390/crypto/paes_s390.c
@@ -5,7 +5,7 @@
* s390 implementation of the AES Cipher Algorithm with protected keys.
*
* s390 Version:
- * Copyright IBM Corp. 2017
+ * Copyright IBM Corp. 2017,2019
* Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
* Harald Freudenberger <freude@de.ibm.com>
*/
@@ -25,16 +25,59 @@
#include <asm/cpacf.h>
#include <asm/pkey.h>
+/*
+ * Key blobs smaller or bigger than these defines are rejected
+ * by the common code even before the individual setkey function
+ * is called. As paes can handle different kinds of key blobs
+ * and padding is also possible, the limits need to be generous.
+ */
+#define PAES_MIN_KEYSIZE 64
+#define PAES_MAX_KEYSIZE 256
+
static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);
static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;
struct key_blob {
- __u8 key[MAXKEYBLOBSIZE];
+ /*
+ * Small keys will be stored in the keybuf. Larger keys are
+ * stored in extra allocated memory. In both cases, key
+ * points to the memory where the key is stored.
+ * The code distinguishes by checking keylen against
+ * sizeof(keybuf). See the two following helper functions.
+ */
+ u8 *key;
+ u8 keybuf[128];
unsigned int keylen;
};
+static inline int _copy_key_to_kb(struct key_blob *kb,
+ const u8 *key,
+ unsigned int keylen)
+{
+ if (keylen <= sizeof(kb->keybuf))
+ kb->key = kb->keybuf;
+ else {
+ kb->key = kmalloc(keylen, GFP_KERNEL);
+ if (!kb->key)
+ return -ENOMEM;
+ }
+ memcpy(kb->key, key, keylen);
+ kb->keylen = keylen;
+
+ return 0;
+}
+
+static inline void _free_kb_keybuf(struct key_blob *kb)
+{
+ if (kb->key && kb->key != kb->keybuf
+ && kb->keylen > sizeof(kb->keybuf)) {
+ kfree(kb->key);
+ kb->key = NULL;
+ }
+}
+
struct s390_paes_ctx {
struct key_blob kb;
struct pkey_protkey pk;
@@ -80,13 +123,33 @@ static int __paes_set_key(struct s390_paes_ctx *ctx)
return ctx->fc ? 0 : -EINVAL;
}
+static int ecb_paes_init(struct crypto_tfm *tfm)
+{
+ struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ ctx->kb.key = NULL;
+
+ return 0;
+}
+
+static void ecb_paes_exit(struct crypto_tfm *tfm)
+{
+ struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ _free_kb_keybuf(&ctx->kb);
+}
+
static int ecb_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
unsigned int key_len)
{
+ int rc;
struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
- memcpy(ctx->kb.key, in_key, key_len);
- ctx->kb.keylen = key_len;
+ _free_kb_keybuf(&ctx->kb);
+ rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
+ if (rc)
+ return rc;
+
if (__paes_set_key(ctx)) {
tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
return -EINVAL;
@@ -148,10 +211,12 @@ static struct crypto_alg ecb_paes_alg = {
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(ecb_paes_alg.cra_list),
+ .cra_init = ecb_paes_init,
+ .cra_exit = ecb_paes_exit,
.cra_u = {
.blkcipher = {
- .min_keysize = MINKEYBLOBSIZE,
- .max_keysize = MAXKEYBLOBSIZE,
+ .min_keysize = PAES_MIN_KEYSIZE,
+ .max_keysize = PAES_MAX_KEYSIZE,
.setkey = ecb_paes_set_key,
.encrypt = ecb_paes_encrypt,
.decrypt = ecb_paes_decrypt,
@@ -159,6 +224,22 @@ static struct crypto_alg ecb_paes_alg = {
}
};
+static int cbc_paes_init(struct crypto_tfm *tfm)
+{
+ struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ ctx->kb.key = NULL;
+
+ return 0;
+}
+
+static void cbc_paes_exit(struct crypto_tfm *tfm)
+{
+ struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ _free_kb_keybuf(&ctx->kb);
+}
+
static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
{
unsigned long fc;
@@ -180,10 +261,14 @@ static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
static int cbc_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
unsigned int key_len)
{
+ int rc;
struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
- memcpy(ctx->kb.key, in_key, key_len);
- ctx->kb.keylen = key_len;
+ _free_kb_keybuf(&ctx->kb);
+ rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
+ if (rc)
+ return rc;
+
if (__cbc_paes_set_key(ctx)) {
tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
return -EINVAL;
@@ -252,10 +337,12 @@ static struct crypto_alg cbc_paes_alg = {
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(cbc_paes_alg.cra_list),
+ .cra_init = cbc_paes_init,
+ .cra_exit = cbc_paes_exit,
.cra_u = {
.blkcipher = {
- .min_keysize = MINKEYBLOBSIZE,
- .max_keysize = MAXKEYBLOBSIZE,
+ .min_keysize = PAES_MIN_KEYSIZE,
+ .max_keysize = PAES_MAX_KEYSIZE,
.ivsize = AES_BLOCK_SIZE,
.setkey = cbc_paes_set_key,
.encrypt = cbc_paes_encrypt,
@@ -264,6 +351,24 @@ static struct crypto_alg cbc_paes_alg = {
}
};
+static int xts_paes_init(struct crypto_tfm *tfm)
+{
+ struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ ctx->kb[0].key = NULL;
+ ctx->kb[1].key = NULL;
+
+ return 0;
+}
+
+static void xts_paes_exit(struct crypto_tfm *tfm)
+{
+ struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ _free_kb_keybuf(&ctx->kb[0]);
+ _free_kb_keybuf(&ctx->kb[1]);
+}
+
static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
{
unsigned long fc;
@@ -287,20 +392,27 @@ static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
}
static int xts_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
- unsigned int key_len)
+ unsigned int xts_key_len)
{
+ int rc;
struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
u8 ckey[2 * AES_MAX_KEY_SIZE];
- unsigned int ckey_len, keytok_len;
+ unsigned int ckey_len, key_len;
- if (key_len % 2)
+ if (xts_key_len % 2)
return -EINVAL;
- keytok_len = key_len / 2;
- memcpy(ctx->kb[0].key, in_key, keytok_len);
- ctx->kb[0].keylen = keytok_len;
- memcpy(ctx->kb[1].key, in_key + keytok_len, keytok_len);
- ctx->kb[1].keylen = keytok_len;
+ key_len = xts_key_len / 2;
+
+ _free_kb_keybuf(&ctx->kb[0]);
+ _free_kb_keybuf(&ctx->kb[1]);
+ rc = _copy_key_to_kb(&ctx->kb[0], in_key, key_len);
+ if (rc)
+ return rc;
+ rc = _copy_key_to_kb(&ctx->kb[1], in_key + key_len, key_len);
+ if (rc)
+ return rc;
+
if (__xts_paes_set_key(ctx)) {
tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
return -EINVAL;
@@ -394,10 +506,12 @@ static struct crypto_alg xts_paes_alg = {
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(xts_paes_alg.cra_list),
+ .cra_init = xts_paes_init,
+ .cra_exit = xts_paes_exit,
.cra_u = {
.blkcipher = {
- .min_keysize = 2 * MINKEYBLOBSIZE,
- .max_keysize = 2 * MAXKEYBLOBSIZE,
+ .min_keysize = 2 * PAES_MIN_KEYSIZE,
+ .max_keysize = 2 * PAES_MAX_KEYSIZE,
.ivsize = AES_BLOCK_SIZE,
.setkey = xts_paes_set_key,
.encrypt = xts_paes_encrypt,
@@ -406,6 +520,22 @@ static struct crypto_alg xts_paes_alg = {
}
};
+static int ctr_paes_init(struct crypto_tfm *tfm)
+{
+ struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ ctx->kb.key = NULL;
+
+ return 0;
+}
+
+static void ctr_paes_exit(struct crypto_tfm *tfm)
+{
+ struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ _free_kb_keybuf(&ctx->kb);
+}
+
static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
{
unsigned long fc;
@@ -428,10 +558,14 @@ static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
static int ctr_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
unsigned int key_len)
{
+ int rc;
struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
- memcpy(ctx->kb.key, in_key, key_len);
- ctx->kb.keylen = key_len;
+ _free_kb_keybuf(&ctx->kb);
+ rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
+ if (rc)
+ return rc;
+
if (__ctr_paes_set_key(ctx)) {
tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
return -EINVAL;
@@ -541,10 +675,12 @@ static struct crypto_alg ctr_paes_alg = {
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(ctr_paes_alg.cra_list),
+ .cra_init = ctr_paes_init,
+ .cra_exit = ctr_paes_exit,
.cra_u = {
.blkcipher = {
- .min_keysize = MINKEYBLOBSIZE,
- .max_keysize = MAXKEYBLOBSIZE,
+ .min_keysize = PAES_MIN_KEYSIZE,
+ .max_keysize = PAES_MAX_KEYSIZE,
.ivsize = AES_BLOCK_SIZE,
.setkey = ctr_paes_set_key,
.encrypt = ctr_paes_encrypt,
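The keybuf-or-kmalloc pattern above is the core of the paes change. A self-contained userspace sketch of the same idea, with malloc/free standing in for kmalloc/kfree (the struct and field names are taken from the patch; everything else is illustrative):

#include <stdlib.h>
#include <string.h>

struct key_blob {
	unsigned char *key;        /* always points at the key material */
	unsigned char keybuf[128]; /* inline storage for small blobs */
	unsigned int keylen;
};

/* small blobs live in keybuf; larger ones get their own allocation */
static int copy_key_to_kb(struct key_blob *kb,
			  const unsigned char *key, unsigned int keylen)
{
	if (keylen <= sizeof(kb->keybuf)) {
		kb->key = kb->keybuf;
	} else {
		kb->key = malloc(keylen); /* think kmalloc(..., GFP_KERNEL) */
		if (!kb->key)
			return -1;        /* think -ENOMEM */
	}
	memcpy(kb->key, key, keylen);
	kb->keylen = keylen;
	return 0;
}

/* free only what was heap-allocated; keybuf-backed blobs need no free */
static void free_kb_keybuf(struct key_blob *kb)
{
	if (kb->key && kb->key != kb->keybuf)
		free(kb->key);
	kb->key = NULL;
}

Because setkey may be called more than once on the same tfm, each setkey in the patch first calls _free_kb_keybuf(), and the new cra_init hooks zero kb.key so the first setkey starts from a defined state.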
diff --git a/arch/s390/crypto/sha.h b/arch/s390/crypto/sha.h
index d6f8258b44df..ada2f98c27b7 100644
--- a/arch/s390/crypto/sha.h
+++ b/arch/s390/crypto/sha.h
@@ -12,15 +12,17 @@
#include <linux/crypto.h>
#include <crypto/sha.h>
+#include <crypto/sha3.h>
/* must be big enough for the largest SHA variant */
-#define SHA_MAX_STATE_SIZE (SHA512_DIGEST_SIZE / 4)
-#define SHA_MAX_BLOCK_SIZE SHA512_BLOCK_SIZE
+#define SHA3_STATE_SIZE 200
+#define CPACF_MAX_PARMBLOCK_SIZE SHA3_STATE_SIZE
+#define SHA_MAX_BLOCK_SIZE SHA3_224_BLOCK_SIZE
struct s390_sha_ctx {
- u64 count; /* message length in bytes */
- u32 state[SHA_MAX_STATE_SIZE];
- u8 buf[2 * SHA_MAX_BLOCK_SIZE];
+ u64 count; /* message length in bytes */
+ u32 state[CPACF_MAX_PARMBLOCK_SIZE / sizeof(u32)];
+ u8 buf[SHA_MAX_BLOCK_SIZE];
int func; /* KIMD function to use */
};
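The new sizes follow from the Keccak construction: the CPACF parameter block for SHA3 holds the full 1600-bit (200-byte) state, and each variant's block size (rate) is 200 minus twice its digest size, which makes SHA3-224's 144-byte block the largest the driver must buffer. buf also shrinks to a single block because s390_sha_final no longer constructs padding there (see sha_common.c below). A compile-time sanity check of those relations (a standalone C11 sketch; the digest sizes match <crypto/sha3.h>):

#include <assert.h>

#define SHA3_STATE_SIZE      200   /* 25 Keccak lanes x 8 bytes */

#define SHA3_224_DIGEST_SIZE 28
#define SHA3_256_DIGEST_SIZE 32
#define SHA3_384_DIGEST_SIZE 48
#define SHA3_512_DIGEST_SIZE 64

/* rate (block size) = state size - 2 * digest size */
#define SHA3_BLOCK(d)        (SHA3_STATE_SIZE - 2 * (d))

static_assert(SHA3_BLOCK(SHA3_224_DIGEST_SIZE) == 144, "sha3-224 rate");
static_assert(SHA3_BLOCK(SHA3_256_DIGEST_SIZE) == 136, "sha3-256 rate");
static_assert(SHA3_BLOCK(SHA3_384_DIGEST_SIZE) == 104, "sha3-384 rate");
static_assert(SHA3_BLOCK(SHA3_512_DIGEST_SIZE) ==  72, "sha3-512 rate");

/* SHA3-224 has the largest rate, hence SHA_MAX_BLOCK_SIZE above */
int main(void) { return 0; }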
diff --git a/arch/s390/crypto/sha3_256_s390.c b/arch/s390/crypto/sha3_256_s390.c
new file mode 100644
index 000000000000..460cbbbaa44a
--- /dev/null
+++ b/arch/s390/crypto/sha3_256_s390.c
@@ -0,0 +1,147 @@
+// SPDX-License-Identifier: GPL-2.0+
+/*
+ * Cryptographic API.
+ *
+ * s390 implementation of the SHA3-256 and SHA3-224 Secure Hash Algorithms.
+ *
+ * s390 Version:
+ * Copyright IBM Corp. 2019
+ * Author(s): Joerg Schmidbauer (jschmidb@de.ibm.com)
+ */
+#include <crypto/internal/hash.h>
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/cpufeature.h>
+#include <crypto/sha.h>
+#include <crypto/sha3.h>
+#include <asm/cpacf.h>
+
+#include "sha.h"
+
+static int sha3_256_init(struct shash_desc *desc)
+{
+ struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+
+ memset(sctx->state, 0, sizeof(sctx->state));
+ sctx->count = 0;
+ sctx->func = CPACF_KIMD_SHA3_256;
+
+ return 0;
+}
+
+static int sha3_256_export(struct shash_desc *desc, void *out)
+{
+ struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+ struct sha3_state *octx = out;
+
+ octx->rsiz = sctx->count;
+ memcpy(octx->st, sctx->state, sizeof(octx->st));
+ memcpy(octx->buf, sctx->buf, sizeof(octx->buf));
+
+ return 0;
+}
+
+static int sha3_256_import(struct shash_desc *desc, const void *in)
+{
+ struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+ const struct sha3_state *ictx = in;
+
+ sctx->count = ictx->rsiz;
+ memcpy(sctx->state, ictx->st, sizeof(ictx->st));
+ memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf));
+ sctx->func = CPACF_KIMD_SHA3_256;
+
+ return 0;
+}
+
+static int sha3_224_import(struct shash_desc *desc, const void *in)
+{
+ struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+ const struct sha3_state *ictx = in;
+
+ sctx->count = ictx->rsiz;
+ memcpy(sctx->state, ictx->st, sizeof(ictx->st));
+ memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf));
+ sctx->func = CPACF_KIMD_SHA3_224;
+
+ return 0;
+}
+
+static struct shash_alg sha3_256_alg = {
+ .digestsize = SHA3_256_DIGEST_SIZE, /* = 32 */
+ .init = sha3_256_init,
+ .update = s390_sha_update,
+ .final = s390_sha_final,
+ .export = sha3_256_export,
+ .import = sha3_256_import,
+ .descsize = sizeof(struct s390_sha_ctx),
+ .statesize = sizeof(struct sha3_state),
+ .base = {
+ .cra_name = "sha3-256",
+ .cra_driver_name = "sha3-256-s390",
+ .cra_priority = 300,
+ .cra_blocksize = SHA3_256_BLOCK_SIZE,
+ .cra_module = THIS_MODULE,
+ }
+};
+
+static int sha3_224_init(struct shash_desc *desc)
+{
+ struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+
+ memset(sctx->state, 0, sizeof(sctx->state));
+ sctx->count = 0;
+ sctx->func = CPACF_KIMD_SHA3_224;
+
+ return 0;
+}
+
+static struct shash_alg sha3_224_alg = {
+ .digestsize = SHA3_224_DIGEST_SIZE,
+ .init = sha3_224_init,
+ .update = s390_sha_update,
+ .final = s390_sha_final,
+ .export = sha3_256_export, /* same as for 256 */
+ .import = sha3_224_import, /* function code different! */
+ .descsize = sizeof(struct s390_sha_ctx),
+ .statesize = sizeof(struct sha3_state),
+ .base = {
+ .cra_name = "sha3-224",
+ .cra_driver_name = "sha3-224-s390",
+ .cra_priority = 300,
+ .cra_blocksize = SHA3_224_BLOCK_SIZE,
+ .cra_module = THIS_MODULE,
+ }
+};
+
+static int __init sha3_256_s390_init(void)
+{
+ int ret;
+
+ if (!cpacf_query_func(CPACF_KIMD, CPACF_KIMD_SHA3_256))
+ return -ENODEV;
+
+ ret = crypto_register_shash(&sha3_256_alg);
+ if (ret < 0)
+ goto out;
+
+ ret = crypto_register_shash(&sha3_224_alg);
+ if (ret < 0)
+ crypto_unregister_shash(&sha3_256_alg);
+out:
+ return ret;
+}
+
+static void __exit sha3_256_s390_fini(void)
+{
+ crypto_unregister_shash(&sha3_224_alg);
+ crypto_unregister_shash(&sha3_256_alg);
+}
+
+module_cpu_feature_match(MSA, sha3_256_s390_init);
+module_exit(sha3_256_s390_fini);
+
+MODULE_ALIAS_CRYPTO("sha3-256");
+MODULE_ALIAS_CRYPTO("sha3-224");
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("SHA3-256 and SHA3-224 Secure Hash Algorithm");
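Once this module is registered, other kernel code reaches it through the generic shash API; the higher cra_priority (300) makes it preferred over the lower-priority generic C implementation when both are present. A sketch of a one-shot digest (the function name is illustrative, the API calls are the standard <crypto/hash.h> ones):

#include <crypto/hash.h>
#include <crypto/sha3.h>
#include <linux/err.h>

/* sketch: one-shot SHA3-256 digest via the shash API */
static int sha3_256_digest_example(const u8 *data, unsigned int len,
				   u8 out[SHA3_256_DIGEST_SIZE])
{
	struct crypto_shash *tfm;
	int rc;

	tfm = crypto_alloc_shash("sha3-256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);    /* no provider available */

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		rc = crypto_shash_digest(desc, data, len, out);
		shash_desc_zero(desc);
	}

	crypto_free_shash(tfm);
	return rc;
}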
diff --git a/arch/s390/crypto/sha3_512_s390.c b/arch/s390/crypto/sha3_512_s390.c
new file mode 100644
index 000000000000..72cf460a53e5
--- /dev/null
+++ b/arch/s390/crypto/sha3_512_s390.c
@@ -0,0 +1,155 @@
+// SPDX-License-Identifier: GPL-2.0+
+/*
+ * Cryptographic API.
+ *
+ * s390 implementation of the SHA3-512 and SHA3-384 Secure Hash Algorithms.
+ *
+ * Copyright IBM Corp. 2019
+ * Author(s): Joerg Schmidbauer (jschmidb@de.ibm.com)
+ */
+#include <crypto/internal/hash.h>
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/cpufeature.h>
+#include <crypto/sha.h>
+#include <crypto/sha3.h>
+#include <asm/cpacf.h>
+
+#include "sha.h"
+
+static int sha3_512_init(struct shash_desc *desc)
+{
+ struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+
+ memset(sctx->state, 0, sizeof(sctx->state));
+ sctx->count = 0;
+ sctx->func = CPACF_KIMD_SHA3_512;
+
+ return 0;
+}
+
+static int sha3_512_export(struct shash_desc *desc, void *out)
+{
+ struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+ struct sha3_state *octx = out;
+
+ octx->rsiz = sctx->count;
+ octx->rsizw = sctx->count >> 32;
+
+ memcpy(octx->st, sctx->state, sizeof(octx->st));
+ memcpy(octx->buf, sctx->buf, sizeof(octx->buf));
+
+ return 0;
+}
+
+static int sha3_512_import(struct shash_desc *desc, const void *in)
+{
+ struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+ const struct sha3_state *ictx = in;
+
+ if (unlikely(ictx->rsizw))
+ return -ERANGE;
+ sctx->count = ictx->rsiz;
+
+ memcpy(sctx->state, ictx->st, sizeof(ictx->st));
+ memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf));
+ sctx->func = CPACF_KIMD_SHA3_512;
+
+ return 0;
+}
+
+static int sha3_384_import(struct shash_desc *desc, const void *in)
+{
+ struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+ const struct sha3_state *ictx = in;
+
+ if (unlikely(ictx->rsizw))
+ return -ERANGE;
+ sctx->count = ictx->rsiz;
+
+ memcpy(sctx->state, ictx->st, sizeof(ictx->st));
+ memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf));
+ sctx->func = CPACF_KIMD_SHA3_384;
+
+ return 0;
+}
+
+static struct shash_alg sha3_512_alg = {
+ .digestsize = SHA3_512_DIGEST_SIZE,
+ .init = sha3_512_init,
+ .update = s390_sha_update,
+ .final = s390_sha_final,
+ .export = sha3_512_export,
+ .import = sha3_512_import,
+ .descsize = sizeof(struct s390_sha_ctx),
+ .statesize = sizeof(struct sha3_state),
+ .base = {
+ .cra_name = "sha3-512",
+ .cra_driver_name = "sha3-512-s390",
+ .cra_priority = 300,
+ .cra_blocksize = SHA3_512_BLOCK_SIZE,
+ .cra_module = THIS_MODULE,
+ }
+};
+
+MODULE_ALIAS_CRYPTO("sha3-512");
+
+static int sha3_384_init(struct shash_desc *desc)
+{
+ struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+
+ memset(sctx->state, 0, sizeof(sctx->state));
+ sctx->count = 0;
+ sctx->func = CPACF_KIMD_SHA3_384;
+
+ return 0;
+}
+
+static struct shash_alg sha3_384_alg = {
+ .digestsize = SHA3_384_DIGEST_SIZE,
+ .init = sha3_384_init,
+ .update = s390_sha_update,
+ .final = s390_sha_final,
+ .export = sha3_512_export, /* same as for 512 */
+ .import = sha3_384_import, /* function code different! */
+ .descsize = sizeof(struct s390_sha_ctx),
+ .statesize = sizeof(struct sha3_state),
+ .base = {
+ .cra_name = "sha3-384",
+ .cra_driver_name = "sha3-384-s390",
+ .cra_priority = 300,
+ .cra_blocksize = SHA3_384_BLOCK_SIZE,
+ .cra_module = THIS_MODULE,
+ }
+};
+
+MODULE_ALIAS_CRYPTO("sha3-384");
+
+static int __init sha3_512_s390_init(void)
+{
+ int ret;
+
+ if (!cpacf_query_func(CPACF_KIMD, CPACF_KIMD_SHA3_512))
+ return -ENODEV;
+ ret = crypto_register_shash(&sha3_512_alg);
+ if (ret < 0)
+ goto out;
+ ret = crypto_register_shash(&sha3_384_alg);
+ if (ret < 0)
+ crypto_unregister_shash(&sha3_512_alg);
+out:
+ return ret;
+}
+
+static void __exit sha3_512_s390_fini(void)
+{
+ crypto_unregister_shash(&sha3_512_alg);
+ crypto_unregister_shash(&sha3_384_alg);
+}
+
+module_cpu_feature_match(MSA, sha3_512_s390_init);
+module_exit(sha3_512_s390_fini);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("SHA3-512 and SHA3-384 Secure Hash Algorithm");
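The export/import pair above round-trips the 64-bit byte count through sha3_state's 32-bit rsiz/rsizw fields, and import refuses any state whose count no longer fits in rsiz alone. A userspace sketch of that round trip (the rsiz/rsizw names come from struct sha3_state; the rest is illustrative):

#include <stdint.h>
#include <stdio.h>

/* sketch: the count <-> rsiz/rsizw round trip used by export/import */
struct split_count {
	uint32_t rsiz;   /* low 32 bits of the byte count */
	uint32_t rsizw;  /* high 32 bits of the byte count */
};

static void export_count(struct split_count *s, uint64_t count)
{
	s->rsiz  = (uint32_t)count;
	s->rsizw = (uint32_t)(count >> 32);
}

/* returns -1 (think -ERANGE) when the count cannot be represented */
static int import_count(const struct split_count *s, uint64_t *count)
{
	if (s->rsizw)
		return -1;   /* more than 4 GiB hashed: import must fail */
	*count = s->rsiz;
	return 0;
}

int main(void)
{
	struct split_count s;
	uint64_t c;

	export_count(&s, 1234);
	printf("%d %llu\n", import_count(&s, &c), (unsigned long long)c);

	export_count(&s, 5ULL << 32);   /* count too large for rsiz */
	printf("%d\n", import_count(&s, &c));
	return 0;
}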
diff --git a/arch/s390/crypto/sha_common.c b/arch/s390/crypto/sha_common.c
index cf0718d121bc..d39e0f079217 100644
--- a/arch/s390/crypto/sha_common.c
+++ b/arch/s390/crypto/sha_common.c
@@ -20,7 +20,7 @@ int s390_sha_update(struct shash_desc *desc, const u8 *data, unsigned int len)
unsigned int index, n;
/* how much is already in the buffer? */
- index = ctx->count & (bsize - 1);
+ index = ctx->count % bsize;
ctx->count += len;
if ((index + len) < bsize)
@@ -37,7 +37,7 @@ int s390_sha_update(struct shash_desc *desc, const u8 *data, unsigned int len)
/* process as many blocks as possible */
if (len >= bsize) {
- n = len & ~(bsize - 1);
+ n = (len / bsize) * bsize;
cpacf_kimd(ctx->func, ctx->state, data, n);
data += n;
len -= n;
@@ -50,34 +50,63 @@ store:
}
EXPORT_SYMBOL_GPL(s390_sha_update);
+static int s390_crypto_shash_parmsize(int func)
+{
+ switch (func) {
+ case CPACF_KLMD_SHA_1:
+ return 20;
+ case CPACF_KLMD_SHA_256:
+ return 32;
+ case CPACF_KLMD_SHA_512:
+ return 64;
+ case CPACF_KLMD_SHA3_224:
+ case CPACF_KLMD_SHA3_256:
+ case CPACF_KLMD_SHA3_384:
+ case CPACF_KLMD_SHA3_512:
+ return 200;
+ default:
+ return -EINVAL;
+ }
+}
+
int s390_sha_final(struct shash_desc *desc, u8 *out)
{
struct s390_sha_ctx *ctx = shash_desc_ctx(desc);
unsigned int bsize = crypto_shash_blocksize(desc->tfm);
u64 bits;
- unsigned int index, end, plen;
-
- /* SHA-512 uses 128 bit padding length */
- plen = (bsize > SHA256_BLOCK_SIZE) ? 16 : 8;
+ unsigned int n;
+ int mbl_offset;
- /* must perform manual padding */
- index = ctx->count & (bsize - 1);
- end = (index < bsize - plen) ? bsize : (2 * bsize);
-
- /* start pad with 1 */
- ctx->buf[index] = 0x80;
- index++;
-
- /* pad with zeros */
- memset(ctx->buf + index, 0x00, end - index - 8);
-
- /*
- * Append message length. Well, SHA-512 wants a 128 bit length value,
- * nevertheless we use u64, should be enough for now...
- */
+ n = ctx->count % bsize;
bits = ctx->count * 8;
- memcpy(ctx->buf + end - 8, &bits, sizeof(bits));
- cpacf_kimd(ctx->func, ctx->state, ctx->buf, end);
+ mbl_offset = s390_crypto_shash_parmsize(ctx->func);
+ if (mbl_offset < 0)
+ return -EINVAL;
+ mbl_offset /= sizeof(u32);
+
+ /* set total msg bit length (mbl) in CPACF parmblock */
+ switch (ctx->func) {
+ case CPACF_KLMD_SHA_1:
+ case CPACF_KLMD_SHA_256:
+ memcpy(ctx->state + mbl_offset, &bits, sizeof(bits));
+ break;
+ case CPACF_KLMD_SHA_512:
+ /*
+ * the SHA512 parmblock has a 128-bit mbl field, clear
+ * high-order u64 field, copy bits to low-order u64 field
+ */
+ memset(ctx->state + mbl_offset, 0x00, sizeof(bits));
+ mbl_offset += sizeof(u64) / sizeof(u32);
+ memcpy(ctx->state + mbl_offset, &bits, sizeof(bits));
+ break;
+ case CPACF_KLMD_SHA3_224:
+ case CPACF_KLMD_SHA3_256:
+ case CPACF_KLMD_SHA3_384:
+ case CPACF_KLMD_SHA3_512:
+ break;
+ default:
+ return -EINVAL;
+ }
+
+ cpacf_klmd(ctx->func, ctx->state, ctx->buf, n);
/* copy digest to out */
memcpy(out, ctx->state, crypto_shash_digestsize(desc->tfm));
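The rewritten s390_sha_final no longer pads by hand: it stores the message bit length into the parameter block where the SHA-1/2 function codes expect it (SHA3 needs no length field), then lets cpacf_klmd() pad and finalize in hardware. The arithmetic changes in s390_sha_update are equally essential: count & (bsize - 1) and len & ~(bsize - 1) only compute remainder and round-down when bsize is a power of two, and the SHA3 rates (144, 136, 104, 72 bytes) are not. A quick standalone C demonstration:

#include <stdio.h>

int main(void)
{
	unsigned int len = 300;
	unsigned int bsize = 136;   /* SHA3-256 rate: not a power of two */

	/* old, power-of-two-only arithmetic: prints rem=4 round=296 */
	printf("mask: rem=%u round=%u\n",
	       len & (bsize - 1), len & ~(bsize - 1));

	/* new arithmetic, correct for any bsize: prints rem=28 round=272 */
	printf("div:  rem=%u round=%u\n",
	       len % bsize, (len / bsize) * bsize);
	return 0;
}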