Lines Matching refs:ctx

131 gcm_mode_encrypt_contiguous_blocks(gcm_ctx_t *ctx, char *data, size_t length,
150 if (length + ctx->gcm_remainder_len < block_size) {
153 (uint8_t *)ctx->gcm_remainder + ctx->gcm_remainder_len,
155 ctx->gcm_remainder_len += length;
156 ctx->gcm_copy_to = datap;
160 lastp = (uint8_t *)ctx->gcm_cb;
166 if (ctx->gcm_remainder_len > 0) {
167 need = block_size - ctx->gcm_remainder_len;
172 bcopy(datap, &((uint8_t *)ctx->gcm_remainder)
173 [ctx->gcm_remainder_len], need);
175 blockp = (uint8_t *)ctx->gcm_remainder;
184 counter = ntohll(ctx->gcm_cb[1] & counter_mask);
187 ctx->gcm_cb[1] = (ctx->gcm_cb[1] & ~counter_mask) | counter;
189 encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_cb,
190 (uint8_t *)ctx->gcm_tmp);
191 xor_block(blockp, (uint8_t *)ctx->gcm_tmp);
193 lastp = (uint8_t *)ctx->gcm_tmp;
195 ctx->gcm_processed_data_len += block_size;
198 if (ctx->gcm_remainder_len > 0) {
199 bcopy(blockp, ctx->gcm_copy_to,
200 ctx->gcm_remainder_len);
201 bcopy(blockp + ctx->gcm_remainder_len, datap,
224 GHASH(ctx, ctx->gcm_tmp, ctx->gcm_ghash);
227 if (ctx->gcm_remainder_len != 0) {
229 ctx->gcm_remainder_len = 0;
238 bcopy(datap, ctx->gcm_remainder, remainder);
239 ctx->gcm_remainder_len = remainder;
240 ctx->gcm_copy_to = datap;
243 ctx->gcm_copy_to = NULL;
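
In the encrypt path above, input is buffered in gcm_remainder until a full block is available; each full block is then XORed with the encrypted counter block gcm_cb, and only the low 32 bits of the counter are bumped between blocks (the counter_mask / ntohll / htonll lines). A minimal standalone sketch of that increment, with a hypothetical gcm_incr32() standing in for the 64-bit ntohll/htonll arithmetic in the listing:

    #include <stdint.h>

    /* Hypothetical helper: bump only the low 32 bits of the counter block. */
    static void
    gcm_incr32(uint8_t cb[16])
    {
            uint32_t ctr;

            /* Load the last four bytes of the block as a big-endian counter. */
            ctr = ((uint32_t)cb[12] << 24) | ((uint32_t)cb[13] << 16) |
                ((uint32_t)cb[14] << 8) | (uint32_t)cb[15];
            ctr++;                  /* wraps modulo 2^32 */
            cb[12] = (uint8_t)(ctr >> 24);
            cb[13] = (uint8_t)(ctr >> 16);
            cb[14] = (uint8_t)(ctr >> 8);
            cb[15] = (uint8_t)ctr;
    }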
252 gcm_encrypt_final(gcm_ctx_t *ctx, crypto_data_t *out, size_t block_size,
262 (ctx->gcm_remainder_len + ctx->gcm_tag_len)) {
266 ghash = (uint8_t *)ctx->gcm_ghash;
268 if (ctx->gcm_remainder_len > 0) {
270 uint8_t *tmpp = (uint8_t *)ctx->gcm_tmp;
280 counter = ntohll(ctx->gcm_cb[1] & counter_mask);
283 ctx->gcm_cb[1] = (ctx->gcm_cb[1] & ~counter_mask) | counter;
285 encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_cb,
286 (uint8_t *)ctx->gcm_tmp);
288 macp = (uint8_t *)ctx->gcm_remainder;
289 bzero(macp + ctx->gcm_remainder_len,
290 block_size - ctx->gcm_remainder_len);
293 for (i = 0; i < ctx->gcm_remainder_len; i++) {
298 GHASH(ctx, macp, ghash);
300 ctx->gcm_processed_data_len += ctx->gcm_remainder_len;
303 ctx->gcm_len_a_len_c[1] =
304 htonll(CRYPTO_BYTES2BITS(ctx->gcm_processed_data_len));
305 GHASH(ctx, ctx->gcm_len_a_len_c, ghash);
306 encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_J0,
307 (uint8_t *)ctx->gcm_J0);
308 xor_block((uint8_t *)ctx->gcm_J0, ghash);
310 if (ctx->gcm_remainder_len > 0) {
311 rv = crypto_put_output_data(macp, out, ctx->gcm_remainder_len);
315 out->cd_offset += ctx->gcm_remainder_len;
316 ctx->gcm_remainder_len = 0;
317 rv = crypto_put_output_data(ghash, out, ctx->gcm_tag_len);
320 out->cd_offset += ctx->gcm_tag_len;
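
gcm_encrypt_final() above pushes any buffered remainder through the same counter/GHASH steps, absorbs len(A) || len(C) (the htonll(CRYPTO_BYTES2BITS(...)) assignment), and then forms the tag by encrypting J0 and XORing it with the GHASH state; in the listing the result lands in gcm_ghash and is copied out with crypto_put_output_data(). A sketch of that last step under assumed names (gcm_final_tag() and the simplified encrypt_block callback are not the module's API):

    #include <stddef.h>
    #include <stdint.h>

    /* T = MSB_tag_len(E_K(J0) XOR S), where S is the final GHASH value. */
    static void
    gcm_final_tag(void (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
        const void *keysched, const uint8_t j0[16], const uint8_t ghash[16],
        uint8_t *tag, size_t tag_len)
    {
            uint8_t ekj0[16];
            size_t i;

            encrypt_block(keysched, j0, ekj0);      /* E_K(J0) */
            for (i = 0; i < tag_len && i < 16; i++)
                    tag[i] = ekj0[i] ^ ghash[i];
    }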
330 gcm_decrypt_incomplete_block(gcm_ctx_t *ctx, size_t block_size, size_t index,
343 counter = ntohll(ctx->gcm_cb[1] & counter_mask);
346 ctx->gcm_cb[1] = (ctx->gcm_cb[1] & ~counter_mask) | counter;
348 datap = (uint8_t *)ctx->gcm_remainder;
349 outp = &((ctx->gcm_pt_buf)[index]);
350 counterp = (uint8_t *)ctx->gcm_tmp;
353 bzero((uint8_t *)ctx->gcm_tmp, block_size);
354 bcopy(datap, (uint8_t *)ctx->gcm_tmp, ctx->gcm_remainder_len);
357 GHASH(ctx, ctx->gcm_tmp, ctx->gcm_ghash);
360 encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_cb, counterp);
363 for (i = 0; i < ctx->gcm_remainder_len; i++) {
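
gcm_decrypt_incomplete_block() above handles a trailing partial block: the leftover ciphertext bytes are zero-padded into gcm_tmp so GHASH still absorbs a full block, but only gcm_remainder_len keystream bytes are XORed into the plaintext buffer. A trivial sketch of that tail XOR (xor_partial() is a made-up name, not a function in the listing):

    #include <stddef.h>
    #include <stdint.h>

    /* XOR only the remaining bytes of keystream into the output. */
    static void
    xor_partial(const uint8_t *keystream, const uint8_t *ct_tail,
        uint8_t *pt_out, size_t remainder_len)
    {
            size_t i;

            for (i = 0; i < remainder_len; i++)
                    pt_out[i] = ct_tail[i] ^ keystream[i];
    }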
370 gcm_mode_decrypt_contiguous_blocks(gcm_ctx_t *ctx, char *data, size_t length,
384 new_len = ctx->gcm_pt_buf_len + length;
386 new = kmem_alloc(new_len, ctx->gcm_kmflag);
387 bcopy(ctx->gcm_pt_buf, new, ctx->gcm_pt_buf_len);
388 kmem_free(ctx->gcm_pt_buf, ctx->gcm_pt_buf_len);
391 bcopy(ctx->gcm_pt_buf, new, ctx->gcm_pt_buf_len);
392 free(ctx->gcm_pt_buf);
397 ctx->gcm_pt_buf = new;
398 ctx->gcm_pt_buf_len = new_len;
399 bcopy(data, &ctx->gcm_pt_buf[ctx->gcm_processed_data_len],
401 ctx->gcm_processed_data_len += length;
404 ctx->gcm_remainder_len = 0;
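
gcm_mode_decrypt_contiguous_blocks() above performs no decryption at all: it grows gcm_pt_buf (kmem_alloc in kernel context, malloc/free otherwise, per the two branches in the listing) and appends the incoming ciphertext, which still carries the trailing tag, so the tag can be located and checked in the final call. A portable sketch of that accumulate-only behaviour, using plain malloc()/free() and made-up names:

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    struct ct_buf {
            uint8_t *data;          /* mirrors gcm_pt_buf */
            size_t  alloc_len;      /* mirrors gcm_pt_buf_len */
            size_t  used;           /* mirrors gcm_processed_data_len */
    };

    static int
    append_ciphertext(struct ct_buf *b, const uint8_t *ct, size_t ct_len)
    {
            size_t new_len = b->alloc_len + ct_len;
            uint8_t *new_buf = malloc(new_len);

            if (new_buf == NULL)
                    return (-1);
            if (b->data != NULL) {
                    memcpy(new_buf, b->data, b->used);
                    free(b->data);
            }
            b->data = new_buf;
            b->alloc_len = new_len;
            memcpy(&b->data[b->used], ct, ct_len);
            b->used += ct_len;
            return (0);
    }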
409 gcm_decrypt_final(gcm_ctx_t *ctx, crypto_data_t *out, size_t block_size,
422 ASSERT(ctx->gcm_processed_data_len == ctx->gcm_pt_buf_len);
424 pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len;
425 ghash = (uint8_t *)ctx->gcm_ghash;
426 blockp = ctx->gcm_pt_buf;
431 bcopy(blockp, ctx->gcm_remainder, remainder);
432 ctx->gcm_remainder_len = remainder;
437 gcm_decrypt_incomplete_block(ctx, block_size,
439 ctx->gcm_remainder_len = 0;
443 GHASH(ctx, blockp, ghash);
449 counter = ntohll(ctx->gcm_cb[1] & counter_mask);
452 ctx->gcm_cb[1] = (ctx->gcm_cb[1] & ~counter_mask) | counter;
454 cbp = (uint8_t *)ctx->gcm_tmp;
455 encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_cb, cbp);
465 ctx->gcm_len_a_len_c[1] = htonll(CRYPTO_BYTES2BITS(pt_len));
466 GHASH(ctx, ctx->gcm_len_a_len_c, ghash);
467 encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_J0,
468 (uint8_t *)ctx->gcm_J0);
469 xor_block((uint8_t *)ctx->gcm_J0, ghash);
472 if (bcmp(&ctx->gcm_pt_buf[pt_len], ghash, ctx->gcm_tag_len)) {
476 rv = crypto_put_output_data(ctx->gcm_pt_buf, out, pt_len);
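
gcm_decrypt_final() above recomputes the tag exactly as the encrypt side does, then compares it (bcmp) with the last gcm_tag_len bytes of the buffered ciphertext; only on a match is the plaintext handed to crypto_put_output_data(). A sketch of the check with memcmp in place of bcmp (gcm_check_tag() is a made-up name; a constant-time comparison is generally preferable for tags):

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Compare the recomputed tag against the tag stored after the ciphertext. */
    static int
    gcm_check_tag(const uint8_t *pt_buf, size_t pt_len,
        const uint8_t *ghash, size_t tag_len)
    {
            return (memcmp(&pt_buf[pt_len], ghash, tag_len) == 0 ? 0 : -1);
    }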
514 gcm_ctx_t *ctx, size_t block_size,
524 ghash = (uint8_t *)ctx->gcm_ghash;
525 cb = (uint8_t *)ctx->gcm_cb;
533 copy_block(cb, (uint8_t *)ctx->gcm_J0);
547 GHASH(ctx, datap, ghash);
552 GHASH(ctx, len_a_len_c, ctx->gcm_J0);
555 copy_block((uint8_t *)ctx->gcm_J0, (uint8_t *)cb);
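
gcm_format_initial_blocks() above builds the pre-counter block J0 from the IV: for the standard 96-bit IV, J0 is simply IV || 0^31 || 1 (the copy_block into gcm_J0), while other IV lengths are zero-padded and run through GHASH together with the encoded IV length (the two GHASH calls). A sketch of the 96-bit case only, under an assumed helper name:

    #include <stdint.h>
    #include <string.h>

    /* SP 800-38D, 96-bit IV: J0 = IV || 0x00000001. */
    static void
    format_j0_96bit_iv(const uint8_t iv[12], uint8_t j0[16])
    {
            memcpy(j0, iv, 12);
            j0[12] = 0;
            j0[13] = 0;
            j0[14] = 0;
            j0[15] = 1;
    }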
564 gcm_init(gcm_ctx_t *ctx, unsigned char *iv, size_t iv_len,
574 bzero(ctx->gcm_H, sizeof (ctx->gcm_H));
575 encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_H,
576 (uint8_t *)ctx->gcm_H);
578 gcm_format_initial_blocks(iv, iv_len, ctx, block_size,
581 authp = (uint8_t *)ctx->gcm_tmp;
582 ghash = (uint8_t *)ctx->gcm_ghash;
605 GHASH(ctx, datap, ghash);
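
gcm_init() above derives the GHASH subkey before anything else: gcm_H is zeroed and then encrypted in place, i.e. H = E_K(0^128); the remaining lines format J0 and the counter block and absorb the AAD block by block into gcm_ghash. A sketch of just the subkey step, with the block-cipher callback abstracted (assumed signature, not the module's typedef):

    #include <stdint.h>
    #include <string.h>

    /* H = E_K(0^128): encrypt an all-zero block with the session key schedule. */
    static void
    derive_hash_subkey(void (*encrypt_block)(const void *, const uint8_t *,
        uint8_t *), const void *keysched, uint8_t H[16])
    {
            memset(H, 0, 16);
            encrypt_block(keysched, H, H);
    }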
720 gcm_set_kmflag(gcm_ctx_t *ctx, int kmflag)
722 ctx->gcm_kmflag = kmflag;