| author | James Morris <jmorris@intercode.com.au> | 2002-10-28 02:36:29 -0800 |
|---|---|---|
| committer | James Morris <jmorris@intercode.com.au> | 2002-10-28 02:36:29 -0800 |
| commit | 03882e044f1bbf3603a856652100652aeabe465a | |
| tree | 56c11d0d5e6169dd9dd9772d7ac6d871ffa801a1 | /crypto |
| parent | 3ba6853fe600793fe4b732ddb3eb98acc072de70 | |
[CRYPTO]: Cleanups and more consistency checks.
- Removed local_bh_disable() from the kmap wrapper; it is no longer needed
  now that there are two atomic kmaps.
- Nuked the atomic flag; use in_softirq() instead.
- Converted crypto_kmap() and crypto_yield() to check in_softirq()
  (see the sketch after this list).
- Check CRYPTO_MAX_CIPHER_BLOCK_SIZE during alg init.
- Try to initialize as much as possible at compile time
  (feedback from Christoph Hellwig).
- Clean up list handling a bit (feedback from Christoph Hellwig).
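
The kmap and yield bullets boil down to replacing the per-tfm CRYPTO_TFM_REQ_ATOMIC flag with a runtime in_softirq() check. The snippet below is a minimal sketch of the reworked crypto/internal.h helpers as they appear in this patch; the include list is an assumption about what the header needs, and it targets the 2.5-era interfaces used here (kmap_atomic() with an explicit KM_* slot, in_softirq(), cond_resched()), so treat it as illustrative rather than a drop-in header.

```c
/*
 * Sketch of the reworked crypto/internal.h helpers from this patch.
 * Assumes 2.5-era APIs: kmap_atomic()/kunmap_atomic() take a KM_* slot,
 * and in_softirq()/cond_resched() are available.  The includes below are
 * an assumption, not taken verbatim from the header.
 */
#include <linux/mm.h>
#include <linux/sched.h>
#include <linux/highmem.h>
#include <linux/crypto.h>
#include <asm/hardirq.h>
#include <asm/softirq.h>

/* Pick the atomic kmap slot from the current execution context
 * instead of consulting a per-tfm "atomic" flag. */
static inline void *crypto_kmap(struct page *page)
{
	return kmap_atomic(page, in_softirq() ?
			   KM_CRYPTO_SOFTIRQ : KM_CRYPTO_USER);
}

static inline void crypto_kunmap(void *vaddr)
{
	kunmap_atomic(vaddr, in_softirq() ?
		      KM_CRYPTO_SOFTIRQ : KM_CRYPTO_USER);
}

/* Only reschedule when we are not running in softirq context. */
static inline void crypto_yield(struct crypto_tfm *tfm)
{
	if (!in_softirq())
		cond_resched();
}
```

Because the mapping slot is chosen from the execution context at call time, callers no longer have to tag a transform as atomic up front; that is why the tcrypt.c hunks below simply drop CRYPTO_TFM_REQ_ATOMIC.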
Diffstat (limited to 'crypto')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | crypto/api.c | 49 |
| -rw-r--r-- | crypto/cipher.c | 20 |
| -rw-r--r-- | crypto/des.c | 5 |
| -rw-r--r-- | crypto/digest.c | 8 |
| -rw-r--r-- | crypto/internal.h | 24 |
| -rw-r--r-- | crypto/md4.c | 2 |
| -rw-r--r-- | crypto/md5.c | 2 |
| -rw-r--r-- | crypto/sha1.c | 2 |
| -rw-r--r-- | crypto/tcrypt.c | 3 |
9 files changed, 53 insertions, 62 deletions
diff --git a/crypto/api.c b/crypto/api.c
index 5d23c2c868c0..b2548a46459e 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -23,7 +23,7 @@
 #include "internal.h"
 
 static LIST_HEAD(crypto_alg_list);
-static struct rw_semaphore crypto_alg_sem;
+static DECLARE_RWSEM(crypto_alg_sem);
 
 static inline int crypto_alg_get(struct crypto_alg *alg)
 {
@@ -38,23 +38,18 @@ static inline void crypto_alg_put(struct crypto_alg *alg)
 
 struct crypto_alg *crypto_alg_lookup(char *name)
 {
-        struct list_head *p;
-        struct crypto_alg *alg = NULL;
+        struct crypto_alg *q, *alg = NULL;
 
         down_read(&crypto_alg_sem);
-        list_for_each(p, &crypto_alg_list) {
-                struct crypto_alg *q =
-                        list_entry(p, struct crypto_alg, cra_list);
-
+        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                 if (!(strcmp(q->cra_name, name))) {
-
                         if (crypto_alg_get(q))
                                 alg = q;
                         break;
                 }
         }
-
+
         up_read(&crypto_alg_sem);
         return alg;
 }
@@ -63,9 +58,6 @@ static int crypto_init_flags(struct crypto_tfm *tfm, u32 flags)
 {
         tfm->crt_flags = 0;
 
-        if (flags & CRYPTO_TFM_REQ_ATOMIC)
-                tfm->crt_flags |= CRYPTO_TFM_REQ_ATOMIC;
-
         switch (crypto_tfm_alg_type(tfm)) {
         case CRYPTO_ALG_TYPE_CIPHER:
                 return crypto_init_cipher_flags(tfm, flags);
@@ -164,23 +156,35 @@ void crypto_free_tfm(struct crypto_tfm *tfm)
         kfree(tfm);
 }
 
+static inline int crypto_alg_blocksize_check(struct crypto_alg *alg)
+{
+        return ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK)
+                == CRYPTO_ALG_TYPE_CIPHER &&
+                alg->cra_blocksize > CRYPTO_MAX_CIPHER_BLOCK_SIZE);
+}
+
 int crypto_register_alg(struct crypto_alg *alg)
 {
         int ret = 0;
-        struct list_head *p;
+        struct crypto_alg *q;
 
         down_write(&crypto_alg_sem);
-        list_for_each(p, &crypto_alg_list) {
-                struct crypto_alg *q =
-                        list_entry(p, struct crypto_alg, cra_list);
-
+        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                 if (!(strcmp(q->cra_name, alg->cra_name))) {
                         ret = -EEXIST;
                         goto out;
                 }
         }
-        list_add_tail(&alg->cra_list, &crypto_alg_list);
+
+        if (crypto_alg_blocksize_check(alg)) {
+                printk(KERN_WARNING "%s: blocksize %Zd exceeds max. "
+                       "size %Zd\n", __FUNCTION__, alg->cra_blocksize,
+                       CRYPTO_MAX_CIPHER_BLOCK_SIZE);
+                ret = -EINVAL;
+        }
+        else
+                list_add_tail(&alg->cra_list, &crypto_alg_list);
 
 out:
         up_write(&crypto_alg_sem);
         return ret;
@@ -189,14 +193,14 @@ out:
 int crypto_unregister_alg(struct crypto_alg *alg)
 {
         int ret = -ENOENT;
-        struct list_head *p;
+        struct crypto_alg *q;
 
         BUG_ON(!alg->cra_module);
         down_write(&crypto_alg_sem);
-        list_for_each(p, &crypto_alg_list) {
-                if (alg == (void *)p) {
-                        list_del(p);
+        list_for_each_entry(q, &crypto_alg_list, cra_list) {
+                if (alg == q) {
+                        list_del(&alg->cra_list);
                         ret = 0;
                         goto out;
                 }
         }
@@ -282,7 +286,6 @@ static int __init init_crypto(void)
         struct proc_dir_entry *proc;
 
         printk(KERN_INFO "Initializing Cryptographic API\n");
-        init_rwsem(&crypto_alg_sem);
         proc = create_proc_entry("crypto", 0, NULL);
         if (proc)
                 proc->proc_fops = &proc_crypto_ops;
diff --git a/crypto/cipher.c b/crypto/cipher.c
index 7262ed9b8084..aaa48422011f 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -71,14 +71,14 @@ static int copy_chunks(struct crypto_tfm *tfm, u8 *buf,
                         clen = rlen;
                 }
 
-                p = crypto_kmap(tfm, sg[i].page) + sg[i].offset + coff;
+                p = crypto_kmap(sg[i].page) + sg[i].offset + coff;
 
                 if (in)
                         memcpy(&buf[copied], p, clen);
                 else
                         memcpy(p, &buf[copied], clen);
 
-                crypto_kunmap(tfm, p);
+                crypto_kunmap(p);
                 *last = aligned ? 0 : clen;
                 copied += clen;
         }
@@ -118,7 +118,7 @@ static int crypt(struct crypto_tfm *tfm, struct scatterlist *sg,
 {
         int i, coff;
         size_t bsize = crypto_tfm_alg_blocksize(tfm);
-        u8 tmp[CRYPTO_MAX_BLOCK_SIZE];
+        u8 tmp[CRYPTO_MAX_CIPHER_BLOCK_SIZE];
 
         if (sglen(sg, nsg) % bsize) {
                 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
@@ -128,11 +128,11 @@ static int crypt(struct crypto_tfm *tfm, struct scatterlist *sg,
         for (i = 0, coff = 0; i < nsg; i++) {
                 int n = 0, boff = 0;
                 int len = sg[i].length - coff;
-                char *p = crypto_kmap(tfm, sg[i].page) + sg[i].offset + coff;
+                char *p = crypto_kmap(sg[i].page) + sg[i].offset + coff;
 
                 while (len) {
                         if (len < bsize) {
-                                crypto_kunmap(tfm, p);
+                                crypto_kunmap(p);
                                 n = gather_chunks(tfm, tmp, sg, i, len, &coff);
                                 prfn(tfm, tmp, crfn, enc);
                                 scatter_chunks(tfm, tmp, sg, i, len, &coff);
@@ -140,13 +140,13 @@ static int crypt(struct crypto_tfm *tfm, struct scatterlist *sg,
                                 goto unmapped;
                         } else {
                                 prfn(tfm, p, crfn, enc);
-                                crypto_kunmap(tfm, p);
+                                crypto_kunmap(p);
                                 crypto_yield(tfm);
 
                                 /* remap and point to recalculated offset */
                                 boff += bsize;
-                                p = crypto_kmap(tfm, sg[i].page)
-                                    + sg[i].offset + coff + boff;
+                                p = crypto_kmap(sg[i].page)
+                                    + sg[i].offset + coff + boff;
 
                                 len -= bsize;
@@ -155,7 +155,7 @@ static int crypt(struct crypto_tfm *tfm, struct scatterlist *sg,
                                 coff = 0;
                         }
                 }
-                crypto_kunmap(tfm, p);
+                crypto_kunmap(p);
 
 unmapped:
                 i += n;
@@ -172,7 +172,7 @@ static void cbc_process(struct crypto_tfm *tfm,
                 memcpy(tfm->crt_cipher.cit_iv, block,
                        crypto_tfm_alg_blocksize(tfm));
         } else {
-                u8 buf[CRYPTO_MAX_BLOCK_SIZE];
+                u8 buf[CRYPTO_MAX_CIPHER_BLOCK_SIZE];
 
                 fn(tfm->crt_ctx, buf, block);
                 xor_64(buf, tfm->crt_cipher.cit_iv);
diff --git a/crypto/des.c b/crypto/des.c
index dd2a2cf681c9..9972c622bbc1 100644
--- a/crypto/des.c
+++ b/crypto/des.c
@@ -1260,6 +1260,7 @@ static struct crypto_alg des_alg = {
         .cra_blocksize = DES_BLOCK_SIZE,
         .cra_ctxsize = sizeof(struct des_ctx),
         .cra_module = THIS_MODULE,
+        .cra_list = LIST_HEAD_INIT(des_alg.cra_list),
         .cra_u = { .cipher = {
         .cia_keysize = DES_KEY_SIZE,
         .cia_ivsize = DES_BLOCK_SIZE,
@@ -1274,6 +1275,7 @@ static struct crypto_alg des3_ede_alg = {
         .cra_blocksize = DES3_EDE_BLOCK_SIZE,
         .cra_ctxsize = sizeof(struct des3_ede_ctx),
         .cra_module = THIS_MODULE,
+        .cra_list = LIST_HEAD_INIT(des3_ede_alg.cra_list),
         .cra_u = { .cipher = {
         .cia_keysize = DES3_EDE_KEY_SIZE,
         .cia_ivsize = DES3_EDE_BLOCK_SIZE,
@@ -1286,9 +1288,6 @@ static int __init init(void)
 {
         int ret = 0;
 
-        INIT_LIST_HEAD(&des_alg.cra_list);
-        INIT_LIST_HEAD(&des3_ede_alg.cra_list);
-
         ret = crypto_register_alg(&des_alg);
         if (ret < 0)
                 goto out;
diff --git a/crypto/digest.c b/crypto/digest.c
index a5a406b5f308..978dd67e7fc1 100644
--- a/crypto/digest.c
+++ b/crypto/digest.c
@@ -34,10 +34,10 @@ static void update(struct crypto_tfm *tfm, struct scatterlist *sg, size_t nsg)
         int i;
 
         for (i = 0; i < nsg; i++) {
-                char *p = crypto_kmap(tfm, sg[i].page) + sg[i].offset;
+                char *p = crypto_kmap(sg[i].page) + sg[i].offset;
                 tfm->__crt_alg->cra_digest.dia_update(tfm->crt_ctx, p,
                                                       sg[i].length);
-                crypto_kunmap(tfm, p);
+                crypto_kunmap(p);
                 crypto_yield(tfm);
         }
         return;
@@ -57,10 +57,10 @@ static void digest(struct crypto_tfm *tfm,
         tfm->crt_digest.dit_init(tfm);
 
         for (i = 0; i < nsg; i++) {
-                char *p = crypto_kmap(tfm, sg[i].page) + sg[i].offset;
+                char *p = crypto_kmap(sg[i].page) + sg[i].offset;
                 tfm->__crt_alg->cra_digest.dia_update(tfm->crt_ctx, p,
                                                       sg[i].length);
-                crypto_kunmap(tfm, p);
+                crypto_kunmap(p);
                 crypto_yield(tfm);
         }
         crypto_digest_final(tfm, out);
diff --git a/crypto/internal.h b/crypto/internal.h
index 56de56c1f6f4..ca75ab3ca188 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -17,31 +17,21 @@
 #include <asm/hardirq.h>
 #include <asm/softirq.h>
 
-static inline void *crypto_kmap(struct crypto_tfm *tfm, struct page *page)
+static inline void *crypto_kmap(struct page *page)
 {
-        if (tfm->crt_flags & CRYPTO_TFM_REQ_ATOMIC) {
-#ifdef CONFIG_HIGHMEM /* XXX: remove this after the api change */
-                local_bh_disable();
-#endif
-                return kmap_atomic(page, KM_CRYPTO_SOFTIRQ);
-        } else
-                return kmap_atomic(page, KM_CRYPTO_USER);
+        return kmap_atomic(page, in_softirq() ?
+                           KM_CRYPTO_SOFTIRQ : KM_CRYPTO_USER);
 }
 
-static inline void crypto_kunmap(struct crypto_tfm *tfm, void *vaddr)
+static inline void crypto_kunmap(void *vaddr)
 {
-        if (tfm->crt_flags & CRYPTO_TFM_REQ_ATOMIC) {
-                kunmap_atomic(vaddr, KM_CRYPTO_SOFTIRQ);
-#ifdef CONFIG_HIGHMEM /* XXX: remove this after the api change */
-                local_bh_enable();
-#endif
-        } else
-                kunmap_atomic(vaddr, KM_CRYPTO_USER);
+        return kunmap_atomic(vaddr, in_softirq() ?
+                             KM_CRYPTO_SOFTIRQ : KM_CRYPTO_USER);
 }
 
 static inline void crypto_yield(struct crypto_tfm *tfm)
 {
-        if (!(tfm->crt_flags & CRYPTO_TFM_REQ_ATOMIC))
+        if (!in_softirq())
                 cond_resched();
 }
diff --git a/crypto/md4.c b/crypto/md4.c
index 84d6c41829cf..dc866e6cf369 100644
--- a/crypto/md4.c
+++ b/crypto/md4.c
@@ -226,6 +226,7 @@ static struct crypto_alg alg = {
         .cra_blocksize = MD4_HMAC_BLOCK_SIZE,
         .cra_ctxsize = sizeof(struct md4_ctx),
         .cra_module = THIS_MODULE,
+        .cra_list = LIST_HEAD_INIT(alg.cra_list),
         .cra_u = { .digest = {
         .dia_digestsize = MD4_DIGEST_SIZE,
         .dia_init = md4_init,
@@ -235,7 +236,6 @@ static struct crypto_alg alg = {
 
 static int __init init(void)
 {
-        INIT_LIST_HEAD(&alg.cra_list);
         return crypto_register_alg(&alg);
 }
diff --git a/crypto/md5.c b/crypto/md5.c
index 71ba809b0a32..c219d5983750 100644
--- a/crypto/md5.c
+++ b/crypto/md5.c
@@ -219,6 +219,7 @@ static struct crypto_alg alg = {
         .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
         .cra_ctxsize = sizeof(struct md5_ctx),
         .cra_module = THIS_MODULE,
+        .cra_list = LIST_HEAD_INIT(alg.cra_list),
         .cra_u = { .digest = {
         .dia_digestsize = MD5_DIGEST_SIZE,
         .dia_init = md5_init,
@@ -228,7 +229,6 @@ static struct crypto_alg alg = {
 
 static int __init init(void)
 {
-        INIT_LIST_HEAD(&alg.cra_list);
         return crypto_register_alg(&alg);
 }
diff --git a/crypto/sha1.c b/crypto/sha1.c
index 7514735c2637..874a05dade98 100644
--- a/crypto/sha1.c
+++ b/crypto/sha1.c
@@ -183,6 +183,7 @@ static struct crypto_alg alg = {
         .cra_blocksize = SHA1_HMAC_BLOCK_SIZE,
         .cra_ctxsize = sizeof(struct sha1_ctx),
         .cra_module = THIS_MODULE,
+        .cra_list = LIST_HEAD_INIT(alg.cra_list),
         .cra_u = { .digest = {
         .dia_digestsize = SHA1_DIGEST_SIZE,
         .dia_init = sha1_init,
@@ -192,7 +193,6 @@ static struct crypto_alg alg = {
 
 static int __init init(void)
 {
-        INIT_LIST_HEAD(&alg.cra_list);
         return crypto_register_alg(&alg);
 }
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index 493c57b8eaec..a940f877b223 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -739,7 +739,7 @@ test_des(void)
          */
         i = 7;
         key = des_tv[i].key;
-        tfm->crt_flags = CRYPTO_TFM_REQ_ATOMIC;
+        tfm->crt_flags = 0;
 
         ret = crypto_cipher_setkey(tfm, key, 8);
         if (ret) {
@@ -985,7 +985,6 @@ test_des(void)
         for (i = 0; i < DES_CBC_ENC_TEST_VECTORS; i++) {
                 printk("test %d:\n", i + 1);
 
-                tfm->crt_flags |= CRYPTO_TFM_REQ_ATOMIC;
                 key = des_tv[i].key;
                 ret = crypto_cipher_setkey(tfm, key, 8);
