/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * sm3_base.h - core logic for SM3 implementations
 *
 * Copyright (C) 2017 ARM Limited or its affiliates.
 * Written by Gilad Ben-Yossef <gilad@benyossef.com>
 */

#ifndef _CRYPTO_SM3_BASE_H
#define _CRYPTO_SM3_BASE_H

#include <crypto/internal/hash.h>
#include <crypto/sm3.h>
#include <linux/crypto.h>
#include <linux/module.h>
#include <linux/string.h>
#include <asm/unaligned.h>

/*
 * Architecture-specific SM3 implementations supply a block function with
 * this signature: it runs the SM3 compression function over 'blocks'
 * consecutive SM3_BLOCK_SIZE-byte blocks at 'src', updating 'sst'.
 */
typedef void (sm3_block_fn)(struct sm3_state *sst, u8 const *src, int blocks);

/* Load the SM3 initial vector and reset the byte counter. */
static inline int sm3_base_init(struct shash_desc *desc)
{
	struct sm3_state *sctx = shash_desc_ctx(desc);

	sctx->state[0] = SM3_IVA;
	sctx->state[1] = SM3_IVB;
	sctx->state[2] = SM3_IVC;
	sctx->state[3] = SM3_IVD;
	sctx->state[4] = SM3_IVE;
	sctx->state[5] = SM3_IVF;
	sctx->state[6] = SM3_IVG;
	sctx->state[7] = SM3_IVH;
	sctx->count = 0;

	return 0;
}

/*
 * Buffer partial input and hand complete blocks to the block function:
 * first top up any previously buffered partial block, then process the
 * remaining full blocks directly from 'data', and finally stash whatever
 * is left over in sctx->buffer.
 */
static inline int sm3_base_do_update(struct shash_desc *desc,
				     const u8 *data, unsigned int len,
				     sm3_block_fn *block_fn)
{
	struct sm3_state *sctx = shash_desc_ctx(desc);
	unsigned int partial = sctx->count % SM3_BLOCK_SIZE;

	sctx->count += len;

	if (unlikely((partial + len) >= SM3_BLOCK_SIZE)) {
		int blocks;

		if (partial) {
			int p = SM3_BLOCK_SIZE - partial;

			memcpy(sctx->buffer + partial, data, p);
			data += p;
			len -= p;

			block_fn(sctx, sctx->buffer, 1);
		}

		blocks = len / SM3_BLOCK_SIZE;
		len %= SM3_BLOCK_SIZE;

		if (blocks) {
			block_fn(sctx, data, blocks);
			data += blocks * SM3_BLOCK_SIZE;
		}
		partial = 0;
	}
	if (len)
		memcpy(sctx->buffer + partial, data, len);

	return 0;
}

/*
 * Apply the SM3 padding: append the 0x80 terminator, zero-fill up to the
 * last 8 bytes of the block (spilling into an extra block if necessary),
 * then store the message length in bits as a big-endian 64-bit value and
 * process the final block.
 */
static inline int sm3_base_do_finalize(struct shash_desc *desc,
				       sm3_block_fn *block_fn)
{
	const int bit_offset = SM3_BLOCK_SIZE - sizeof(__be64);
	struct sm3_state *sctx = shash_desc_ctx(desc);
	__be64 *bits = (__be64 *)(sctx->buffer + bit_offset);
	unsigned int partial = sctx->count % SM3_BLOCK_SIZE;

	sctx->buffer[partial++] = 0x80;
	if (partial > bit_offset) {
		memset(sctx->buffer + partial, 0x0, SM3_BLOCK_SIZE - partial);
		partial = 0;

		block_fn(sctx, sctx->buffer, 1);
	}

	memset(sctx->buffer + partial, 0x0, bit_offset - partial);
	*bits = cpu_to_be64(sctx->count << 3);
	block_fn(sctx, sctx->buffer, 1);

	return 0;
}

/* Emit the digest as eight big-endian 32-bit words and wipe the state. */
static inline int sm3_base_finish(struct shash_desc *desc, u8 *out)
{
	struct sm3_state *sctx = shash_desc_ctx(desc);
	__be32 *digest = (__be32 *)out;
	int i;

	for (i = 0; i < SM3_DIGEST_SIZE / sizeof(__be32); i++)
		put_unaligned_be32(sctx->state[i], digest++);

	memzero_explicit(sctx, sizeof(*sctx));
	return 0;
}

#endif /* _CRYPTO_SM3_BASE_H */
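
/*
 * Illustrative sketch only, not part of the upstream header: how a glue
 * driver typically combines the helpers above into a struct shash_alg.
 * The block function sm3_block_sketch, the wrapper function names and the
 * "sm3-sketch" driver name are placeholders assumed for this example; a
 * real driver supplies its own SM3 compression routine (C or SIMD) and
 * registers the algorithm with crypto_register_shash() from module_init.
 */
static void sm3_block_sketch(struct sm3_state *sst, u8 const *src, int blocks)
{
	/* A real implementation runs the SM3 compression function here. */
}

static int sm3_sketch_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sm3_base_do_update(desc, data, len, sm3_block_sketch);
}

static int sm3_sketch_final(struct shash_desc *desc, u8 *out)
{
	sm3_base_do_finalize(desc, sm3_block_sketch);
	return sm3_base_finish(desc, out);
}

static struct shash_alg sm3_sketch_alg = {
	.digestsize	= SM3_DIGEST_SIZE,
	.init		= sm3_base_init,
	.update		= sm3_sketch_update,
	.final		= sm3_sketch_final,
	.descsize	= sizeof(struct sm3_state),
	.base		= {
		.cra_name	 = "sm3",
		.cra_driver_name = "sm3-sketch",
		.cra_blocksize	 = SM3_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	},
};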