xref: /illumos-gate/usr/src/common/crypto/modes/ctr.c (revision 4b56a003)
/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#ifndef _KERNEL
#include <strings.h>
#include <limits.h>
#include <assert.h>
#include <security/cryptoki.h>
#endif

#include <sys/types.h>
#include <modes/modes.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>

#ifdef _LITTLE_ENDIAN
#include <sys/byteorder.h>
#endif

/*
 * Encrypt and decrypt multiple blocks of data in counter mode.
 */
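/*
 * For each block, the counter block (ctr_cb) is encrypted with the
 * caller-supplied cipher to produce a keystream block (ctr_tmp), the
 * counter is incremented, and the keystream is XORed with the current
 * clear or cipher text block.  Encryption and decryption are the same
 * operation in CTR mode, so this routine serves both directions.
 * Input that does not fill a whole block is buffered in ctr_remainder
 * until more data arrives or ctr_mode_final() flushes it.
 */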
int
ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out, size_t block_size,
    int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct),
    void (*xor_block)(uint8_t *, uint8_t *))
{
	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint8_t *lastp;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;
	uint64_t counter;
#ifdef _LITTLE_ENDIAN
	uint8_t *p;
#endif

	if (length + ctx->ctr_remainder_len < block_size) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->ctr_remainder + ctx->ctr_remainder_len,
		    length);
		ctx->ctr_remainder_len += length;
		ctx->ctr_copy_to = datap;
		return (CRYPTO_SUCCESS);
	}

	lastp = (uint8_t *)ctx->ctr_cb;
	if (out != NULL)
		crypto_init_ptrs(out, &iov_or_mp, &offset);

	do {
		/* Unprocessed data from last call. */
		if (ctx->ctr_remainder_len > 0) {
			need = block_size - ctx->ctr_remainder_len;

			if (need > remainder)
				return (CRYPTO_DATA_LEN_RANGE);

			bcopy(datap, &((uint8_t *)ctx->ctr_remainder)
			    [ctx->ctr_remainder_len], need);

			blockp = (uint8_t *)ctx->ctr_remainder;
		} else {
			blockp = datap;
		}

		/* ctr_cb is the counter block */
		cipher(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
		    (uint8_t *)ctx->ctr_tmp);

		lastp = (uint8_t *)ctx->ctr_tmp;

		/*
		 * Increment counter. Counter bits are confined
		 * to the bottom 64 bits of the counter block.
		 */
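		/*
		 * The counter block and the counter mask are kept in
		 * big-endian byte order in memory; on little-endian
		 * hosts the value is converted to host order for the
		 * increment and converted back before being merged
		 * into ctr_cb[1].
		 */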
#ifdef _LITTLE_ENDIAN
		counter = ntohll(ctx->ctr_cb[1] & ctx->ctr_counter_mask);
		counter = htonll(counter + 1);
#else
		counter = ctx->ctr_cb[1] & ctx->ctr_counter_mask;
		counter++;
#endif	/* _LITTLE_ENDIAN */
		counter &= ctx->ctr_counter_mask;
		ctx->ctr_cb[1] =
		    (ctx->ctr_cb[1] & ~(ctx->ctr_counter_mask)) | counter;

		/*
		 * XOR the current clear or cipher text block into the
		 * encrypted counter block (the keystream); the result
		 * is left in ctr_tmp.
		 */
		xor_block(blockp, lastp);

		if (out == NULL) {
			if (ctx->ctr_remainder_len > 0) {
				bcopy(lastp, ctx->ctr_copy_to,
				    ctx->ctr_remainder_len);
				bcopy(lastp + ctx->ctr_remainder_len, datap,
				    need);
			}
		} else {
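			/*
			 * crypto_get_ptrs() locates the next block_size
			 * bytes of the output buffer, which may span two
			 * discontiguous segments (out_data_1/out_data_2)
			 * when the output is a uio or mblk chain.
			 */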
			crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
			    &out_data_1_len, &out_data_2, block_size);

			/* copy block to where it belongs */
			bcopy(lastp, out_data_1, out_data_1_len);
			if (out_data_2 != NULL) {
				bcopy(lastp + out_data_1_len, out_data_2,
				    block_size - out_data_1_len);
			}
			/* update offset */
			out->cd_offset += block_size;
		}

		/* Update pointer to next block of data to be processed. */
		if (ctx->ctr_remainder_len != 0) {
			datap += need;
			ctx->ctr_remainder_len = 0;
		} else {
			datap += block_size;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block. */
		if (remainder > 0 && remainder < block_size) {
			bcopy(datap, ctx->ctr_remainder, remainder);
			ctx->ctr_remainder_len = remainder;
			ctx->ctr_copy_to = datap;
			goto out;
		}
		ctx->ctr_copy_to = NULL;

	} while (remainder > 0);

out:
	return (CRYPTO_SUCCESS);
}

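/*
 * Flush any partial block left in ctr_remainder: generate one more
 * keystream block from the current counter block and XOR only the
 * remaining bytes into the output.
 */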
int
ctr_mode_final(ctr_ctx_t *ctx, crypto_data_t *out,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *))
{
	uint8_t *lastp;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;
	uint8_t *p;
	int i;

	if (out->cd_length < ctx->ctr_remainder_len)
		return (CRYPTO_DATA_LEN_RANGE);

	/* Generate the keystream for the final partial block. */
	encrypt_block(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
	    (uint8_t *)ctx->ctr_tmp);

	/* XOR the buffered remainder with the keystream. */
	lastp = (uint8_t *)ctx->ctr_tmp;
	p = (uint8_t *)ctx->ctr_remainder;
	for (i = 0; i < ctx->ctr_remainder_len; i++) {
		p[i] ^= lastp[i];
	}

	crypto_init_ptrs(out, &iov_or_mp, &offset);
	crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
	    &out_data_1_len, &out_data_2, ctx->ctr_remainder_len);

	bcopy(p, out_data_1, out_data_1_len);
	if (out_data_2 != NULL) {
		bcopy((uint8_t *)p + out_data_1_len,
		    out_data_2, ctx->ctr_remainder_len - out_data_1_len);
	}
	out->cd_offset += ctx->ctr_remainder_len;
	ctx->ctr_remainder_len = 0;
	return (CRYPTO_SUCCESS);
}

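/*
 * Initialize a CTR context.  'count' is the number of low-order bits
 * of the counter block that are used as the counter (1 to 64); the
 * remaining bits are a fixed nonce.  A mask with 'count' bits set is
 * stored (byte-swapped on little-endian hosts to match the big-endian
 * layout of the counter block), and the caller's initial counter
 * block 'cb' is copied into the context.
 */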
int
ctr_init_ctx(ctr_ctx_t *ctr_ctx, ulong_t count, uint8_t *cb,
    void (*copy_block)(uint8_t *, uint8_t *))
{
	uint64_t mask = 0;

	if (count == 0 || count > 64) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}
	/* Build a mask with the low-order 'count' bits set. */
	while (count-- > 0)
		mask |= (1ULL << count);

#ifdef _LITTLE_ENDIAN
	mask = htonll(mask);
#endif
	ctr_ctx->ctr_counter_mask = mask;
	copy_block(cb, (uchar_t *)ctr_ctx->ctr_cb);
	ctr_ctx->ctr_lastp = (uint8_t *)&ctr_ctx->ctr_cb[0];
	ctr_ctx->ctr_flags |= CTR_MODE;
	return (CRYPTO_SUCCESS);
}

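/*
 * Allocate and zero a CTR context.  The kmflag argument is only used
 * by the kernel allocator; the userland build uses calloc().
 */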
/* ARGSUSED */
void *
ctr_alloc_ctx(int kmflag)
{
	ctr_ctx_t *ctr_ctx;

#ifdef _KERNEL
	if ((ctr_ctx = kmem_zalloc(sizeof (ctr_ctx_t), kmflag)) == NULL)
#else
	if ((ctr_ctx = calloc(1, sizeof (ctr_ctx_t))) == NULL)
#endif
		return (NULL);

	ctr_ctx->ctr_flags = CTR_MODE;
	return (ctr_ctx);
}
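
/*
 * A minimal usage sketch (illustrative only, not part of this file),
 * assuming an AES key schedule 'ks' and the AES block helpers
 * aes_encrypt_block(), aes_copy_block() and aes_xor_block() from the
 * AES provider; the names and setup shown here are assumptions, not
 * verbatim consumer code:
 *
 *	ctr_ctx_t *ctx = ctr_alloc_ctx(KM_SLEEP);
 *	uint8_t cb[AES_BLOCK_LEN] = { 0 };	// nonce || initial counter
 *
 *	(void) ctr_init_ctx(ctx, 32, cb, aes_copy_block);
 *	ctx->ctr_keysched = ks;
 *	(void) ctr_mode_contiguous_blocks(ctx, (char *)plaintext, ptlen,
 *	    output, AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
 *	(void) ctr_mode_final(ctx, output, aes_encrypt_block);
 */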