1/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0-only) */
2/* Copyright(c) 2014 - 2020 Intel Corporation */
3#ifndef _QAT_CRYPTO_INSTANCE_H_
4#define _QAT_CRYPTO_INSTANCE_H_
5
6#include <crypto/aes.h>
7#include <linux/list.h>
8#include <linux/slab.h>
9#include "adf_accel_devices.h"
10#include "icp_qat_fw_la.h"
11#include "qat_algs_send.h"
12#include "qat_bl.h"
13
/**
 * struct qat_crypto_instance - per-device QAT crypto service instance
 * @sym_tx: transport ring used to submit symmetric crypto requests
 * @sym_rx: transport ring used to receive symmetric crypto responses
 * @pke_tx: transport ring used to submit asymmetric (PKE) requests
 * @pke_rx: transport ring used to receive asymmetric (PKE) responses
 * @accel_dev: accelerator device this instance belongs to
 * @list: node in the owning device's list of crypto instances
 * @state: instance state flags (bit field; managed by the instance code)
 * @id: numeric identifier of this instance within the device
 * @refctr: number of active users holding a reference to this instance
 * @backlog: queue of requests deferred while the TX ring was full
 */
struct qat_crypto_instance {
	struct adf_etr_ring_data *sym_tx;
	struct adf_etr_ring_data *sym_rx;
	struct adf_etr_ring_data *pke_tx;
	struct adf_etr_ring_data *pke_rx;
	struct adf_accel_dev *accel_dev;
	struct list_head list;
	unsigned long state;
	int id;
	atomic_t refctr;
	struct qat_instance_backlog backlog;
};
26
/* Forward declaration so the callback prototype below can name the type. */
struct qat_crypto_request;

/**
 * struct qat_crypto_request - in-flight QAT crypto request context
 * @req: firmware bulk request descriptor sent to the device
 * @aead_ctx: transform context when this is an AEAD request
 * @skcipher_ctx: transform context when this is an skcipher request
 * @aead_req: originating crypto API request (AEAD variant)
 * @skcipher_req: originating crypto API request (skcipher variant)
 * @buf: DMA buffer descriptors for the request's scatterlists
 * @cb: completion callback invoked with the firmware response
 * @iv_hi: upper 64 bits of the IV, big endian (overlays @iv)
 * @iv_lo: lower 64 bits of the IV, big endian (overlays @iv)
 * @iv: initialization vector as a byte array (one AES block)
 * @encryption: true for an encrypt operation, false for decrypt
 * @alg_req: submission descriptor used by the send/backlog path
 *
 * The two anonymous unions pair the AEAD and skcipher views of the same
 * request; which member is valid depends on the algorithm type that
 * created the request.
 */
struct qat_crypto_request {
	struct icp_qat_fw_la_bulk_req req;
	union {
		struct qat_alg_aead_ctx *aead_ctx;
		struct qat_alg_skcipher_ctx *skcipher_ctx;
	};
	union {
		struct aead_request *aead_req;
		struct skcipher_request *skcipher_req;
	};
	struct qat_request_buffs buf;
	void (*cb)(struct icp_qat_fw_la_resp *resp,
		   struct qat_crypto_request *req);
	union {
		struct {
			__be64 iv_hi;
			__be64 iv_lo;
		};
		u8 iv[AES_BLOCK_SIZE];
	};
	bool encryption;
	struct qat_alg_req alg_req;
};
52
53static inline bool adf_hw_dev_has_crypto(struct adf_accel_dev *accel_dev)
54{
55 struct adf_hw_device_data *hw_device = accel_dev->hw_device;
56 u32 mask = ~hw_device->accel_capabilities_mask;
57
58 if (mask & ADF_ACCEL_CAPABILITIES_CRYPTO_SYMMETRIC)
59 return false;
60 if (mask & ADF_ACCEL_CAPABILITIES_CRYPTO_ASYMMETRIC)
61 return false;
62 if (mask & ADF_ACCEL_CAPABILITIES_AUTHENTICATION)
63 return false;
64
65 return true;
66}
67
68#endif
69
/* source: linux/drivers/crypto/intel/qat/qat_common/qat_crypto.h */