// SPDX-License-Identifier: GPL-2.0

/*
 * SM4 Cipher Algorithm.
 *
 * Copyright (C) 2018 ARM Limited or its affiliates.
 * All rights reserved.
 */

#include <crypto/sm4.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/crypto.h>
#include <asm/byteorder.h>
#include <asm/unaligned.h>

/* System parameter FK, as specified in GB/T 32907-2016 */
static const u32 fk[4] = {
	0xa3b1bac6, 0x56aa3350, 0x677d9197, 0xb27022dc
};

/* Non-linear substitution box (Sbox), as specified in GB/T 32907-2016 */
static const u8 sbox[256] = {
	0xd6, 0x90, 0xe9, 0xfe, 0xcc, 0xe1, 0x3d, 0xb7,
	0x16, 0xb6, 0x14, 0xc2, 0x28, 0xfb, 0x2c, 0x05,
	0x2b, 0x67, 0x9a, 0x76, 0x2a, 0xbe, 0x04, 0xc3,
	0xaa, 0x44, 0x13, 0x26, 0x49, 0x86, 0x06, 0x99,
	0x9c, 0x42, 0x50, 0xf4, 0x91, 0xef, 0x98, 0x7a,
	0x33, 0x54, 0x0b, 0x43, 0xed, 0xcf, 0xac, 0x62,
	0xe4, 0xb3, 0x1c, 0xa9, 0xc9, 0x08, 0xe8, 0x95,
	0x80, 0xdf, 0x94, 0xfa, 0x75, 0x8f, 0x3f, 0xa6,
	0x47, 0x07, 0xa7, 0xfc, 0xf3, 0x73, 0x17, 0xba,
	0x83, 0x59, 0x3c, 0x19, 0xe6, 0x85, 0x4f, 0xa8,
	0x68, 0x6b, 0x81, 0xb2, 0x71, 0x64, 0xda, 0x8b,
	0xf8, 0xeb, 0x0f, 0x4b, 0x70, 0x56, 0x9d, 0x35,
	0x1e, 0x24, 0x0e, 0x5e, 0x63, 0x58, 0xd1, 0xa2,
	0x25, 0x22, 0x7c, 0x3b, 0x01, 0x21, 0x78, 0x87,
	0xd4, 0x00, 0x46, 0x57, 0x9f, 0xd3, 0x27, 0x52,
	0x4c, 0x36, 0x02, 0xe7, 0xa0, 0xc4, 0xc8, 0x9e,
	0xea, 0xbf, 0x8a, 0xd2, 0x40, 0xc7, 0x38, 0xb5,
	0xa3, 0xf7, 0xf2, 0xce, 0xf9, 0x61, 0x15, 0xa1,
	0xe0, 0xae, 0x5d, 0xa4, 0x9b, 0x34, 0x1a, 0x55,
	0xad, 0x93, 0x32, 0x30, 0xf5, 0x8c, 0xb1, 0xe3,
	0x1d, 0xf6, 0xe2, 0x2e, 0x82, 0x66, 0xca, 0x60,
	0xc0, 0x29, 0x23, 0xab, 0x0d, 0x53, 0x4e, 0x6f,
	0xd5, 0xdb, 0x37, 0x45, 0xde, 0xfd, 0x8e, 0x2f,
	0x03, 0xff, 0x6a, 0x72, 0x6d, 0x6c, 0x5b, 0x51,
	0x8d, 0x1b, 0xaf, 0x92, 0xbb, 0xdd, 0xbc, 0x7f,
	0x11, 0xd9, 0x5c, 0x41, 0x1f, 0x10, 0x5a, 0xd8,
	0x0a, 0xc1, 0x31, 0x88, 0xa5, 0xcd, 0x7b, 0xbd,
	0x2d, 0x74, 0xd0, 0x12, 0xb8, 0xe5, 0xb4, 0xb0,
	0x89, 0x69, 0x97, 0x4a, 0x0c, 0x96, 0x77, 0x7e,
	0x65, 0xb9, 0xf1, 0x09, 0xc5, 0x6e, 0xc6, 0x84,
	0x18, 0xf0, 0x7d, 0xec, 0x3a, 0xdc, 0x4d, 0x20,
	0x79, 0xee, 0x5f, 0x3e, 0xd7, 0xcb, 0x39, 0x48
};

static const u32 ck[] = {
	0x00070e15, 0x1c232a31, 0x383f464d, 0x545b6269,
	0x70777e85, 0x8c939aa1, 0xa8afb6bd, 0xc4cbd2d9,
	0xe0e7eef5, 0xfc030a11, 0x181f262d, 0x343b4249,
	0x50575e65, 0x6c737a81, 0x888f969d, 0xa4abb2b9,
	0xc0c7ced5, 0xdce3eaf1, 0xf8ff060d, 0x141b2229,
	0x30373e45, 0x4c535a61, 0x686f767d, 0x848b9299,
	0xa0a7aeb5, 0xbcc3cad1, 0xd8dfe6ed, 0xf4fb0209,
	0x10171e25, 0x2c333a41, 0x484f565d, 0x646b7279
};
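
/*
 * The ck[] words above are the fixed constants CK_0..CK_31 from
 * GB/T 32907-2016, where byte j of CK_i is (4 * i + j) * 7 mod 256.
 * A minimal sketch of a helper that regenerates the table (hypothetical,
 * not used by this module):
 *
 *	static u32 sm4_gen_ck(int i)
 *	{
 *		u32 w = 0;
 *		int j;
 *
 *		for (j = 0; j < 4; j++)
 *			w = (w << 8) | (u8)((4 * i + j) * 7);
 *
 *		return w;
 *	}
 */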

/* tau: apply the non-linear S-box substitution to each byte of a 32-bit word */
static u32 sm4_t_non_lin_sub(u32 x)
{
	int i;
	u8 *b = (u8 *)&x;

	for (i = 0; i < 4; ++i)
		b[i] = sbox[b[i]];

	return x;
}

/* L': the linear transform used in the key schedule */
static u32 sm4_key_lin_sub(u32 x)
{
	return x ^ rol32(x, 13) ^ rol32(x, 23);
}

/* L: the linear transform used in encryption and decryption */
static u32 sm4_enc_lin_sub(u32 x)
{
	return x ^ rol32(x, 2) ^ rol32(x, 10) ^ rol32(x, 18) ^ rol32(x, 24);
}

/* T' = L'(tau(x)): the composite transform for key expansion */
static u32 sm4_key_sub(u32 x)
{
	return sm4_key_lin_sub(sm4_t_non_lin_sub(x));
}

/* T = L(tau(x)): the composite transform for the round function */
static u32 sm4_enc_sub(u32 x)
{
	return sm4_enc_lin_sub(sm4_t_non_lin_sub(x));
}

/* The round function F: x[0] ^ T(x[1] ^ x[2] ^ x[3] ^ rk) */
static u32 sm4_round(const u32 *x, const u32 rk)
{
	return x[0] ^ sm4_enc_sub(x[1] ^ x[2] ^ x[3] ^ rk);
}

/**
 * crypto_sm4_expand_key - Expands the SM4 key as described in GB/T 32907-2016
 * @ctx:	The location where the computed key will be stored.
 * @in_key:	The supplied key.
 * @key_len:	The length of the supplied key.
 *
 * Returns 0 on success. The function fails only if an invalid key size is
 * supplied.
 */
int crypto_sm4_expand_key(struct crypto_sm4_ctx *ctx, const u8 *in_key,
			  unsigned int key_len)
{
	u32 rk[4], t;
	const u32 *key = (const u32 *)in_key;
	int i;

	if (key_len != SM4_KEY_SIZE)
		return -EINVAL;

	for (i = 0; i < 4; ++i)
		rk[i] = get_unaligned_be32(&key[i]) ^ fk[i];

	for (i = 0; i < 32; ++i) {
		t = rk[0] ^ sm4_key_sub(rk[1] ^ rk[2] ^ rk[3] ^ ck[i]);
		ctx->rkey_enc[i] = t;
		rk[0] = rk[1];
		rk[1] = rk[2];
		rk[2] = rk[3];
		rk[3] = t;
	}

	/* Decryption uses the same round keys in reverse order. */
	for (i = 0; i < 32; ++i)
		ctx->rkey_dec[i] = ctx->rkey_enc[31 - i];

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_sm4_expand_key);
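
/*
 * crypto_sm4_expand_key() is exported so that other SM4 implementations
 * (e.g. SIMD or hardware drivers) can reuse the generic key schedule.
 * A minimal sketch of such a setkey callback (the sm4_foo_* name is
 * hypothetical, not part of this module):
 *
 *	static int sm4_foo_setkey(struct crypto_tfm *tfm, const u8 *key,
 *				  unsigned int key_len)
 *	{
 *		struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		return crypto_sm4_expand_key(ctx, key, key_len);
 *	}
 */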

/**
 * crypto_sm4_set_key - Set the SM4 key.
 * @tfm:	The %crypto_tfm that is used in the context.
 * @in_key:	The input key.
 * @key_len:	The size of the key.
 *
 * Returns 0 on success, on failure the %CRYPTO_TFM_RES_BAD_KEY_LEN flag in tfm
 * is set. The function uses crypto_sm4_expand_key() to expand the key.
 * &crypto_sm4_ctx _must_ be the private data embedded in @tfm which is
 * retrieved with crypto_tfm_ctx().
 */
int crypto_sm4_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	ret = crypto_sm4_expand_key(ctx, in_key, key_len);
	if (!ret)
		return 0;

	*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}
EXPORT_SYMBOL_GPL(crypto_sm4_set_key);

/* One pass of the 32-round SM4 transform over a single 16-byte block. */
static void sm4_do_crypt(const u32 *rk, u32 *out, const u32 *in)
{
	u32 x[4], i, t;

	for (i = 0; i < 4; ++i)
		x[i] = get_unaligned_be32(&in[i]);

	for (i = 0; i < 32; ++i) {
		t = sm4_round(x, rk[i]);
		x[0] = x[1];
		x[1] = x[2];
		x[2] = x[3];
		x[3] = t;
	}

	/* Reverse transformation R: emit the state words in reverse order. */
	for (i = 0; i < 4; ++i)
		put_unaligned_be32(x[3 - i], &out[i]);
}

/* Encrypt a single SM4 block (16 bytes). */
void crypto_sm4_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);

	sm4_do_crypt(ctx->rkey_enc, (u32 *)out, (const u32 *)in);
}
EXPORT_SYMBOL_GPL(crypto_sm4_encrypt);

/* Decrypt a single SM4 block (16 bytes). */
void crypto_sm4_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);

	sm4_do_crypt(ctx->rkey_dec, (u32 *)out, (const u32 *)in);
}
EXPORT_SYMBOL_GPL(crypto_sm4_decrypt);

static struct crypto_alg sm4_alg = {
	.cra_name		=	"sm4",
	.cra_driver_name	=	"sm4-generic",
	.cra_priority		=	100,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	SM4_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct crypto_sm4_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	SM4_KEY_SIZE,
			.cia_max_keysize	=	SM4_KEY_SIZE,
			.cia_setkey		=	crypto_sm4_set_key,
			.cia_encrypt		=	crypto_sm4_encrypt,
			.cia_decrypt		=	crypto_sm4_decrypt
		}
	}
};
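
/*
 * Once registered, this implementation is reachable under the "sm4" name
 * through the kernel's single-block cipher interface (crypto_cipher, which
 * is crypto-internal in newer kernels). A minimal caller sketch, with error
 * handling trimmed and the key/dst/src buffers hypothetical:
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("sm4", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_cipher_setkey(tfm, key, SM4_KEY_SIZE);
 *		crypto_cipher_encrypt_one(tfm, dst, src);
 *		crypto_free_cipher(tfm);
 *	}
 */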

static int __init sm4_init(void)
{
	return crypto_register_alg(&sm4_alg);
}

static void __exit sm4_fini(void)
{
	crypto_unregister_alg(&sm4_alg);
}

module_init(sm4_init);
module_exit(sm4_fini);

MODULE_DESCRIPTION("SM4 Cipher Algorithm");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sm4");
MODULE_ALIAS_CRYPTO("sm4-generic");