#include <sys/types.h>
#include <sys/sunddi.h>
#include <sys/disp.h>
#include <sys/modctl.h>
#include <sys/sysmacros.h>
#include <sys/crypto/common.h>
#include <sys/crypto/api.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/sched_impl.h>
#define isspace(ch) (((ch) == ' ') || ((ch) == '\r') || ((ch) == '\n') || \
((ch) == '\t') || ((ch) == '\f'))
#define CRYPTO_OPS_OFFSET(f) offsetof(crypto_ops_t, co_##f)
#define CRYPTO_KEY_OFFSET(f) offsetof(crypto_key_ops_t, f)
#define CRYPTO_PROVIDER_OFFSET(f) \
offsetof(crypto_provider_management_ops_t, f)
kmutex_t ntfy_list_lock;
kcondvar_t ntfy_list_cv;
static kcf_ntfy_elem_t *ntfy_list_head;
static kcf_ntfy_elem_t *ntfy_list_tail;
#define PROV_COUNT(me) \
(((me)->me_sw_prov != NULL ? 1 : 0) + (me)->me_num_hwprov)
/*
 * Map a mechanism name to its framework-wide mechanism type, loading
 * providers if needed (the B_TRUE argument to the common routine).
 */
crypto_mech_type_t
crypto_mech2id(const char *mechname)
{
	crypto_mech_type_t mt;

	mt = crypto_mech2id_common((char *)mechname, B_TRUE);
	return (mt);
}
/*
 * Returns a kmem-allocated array of the names of all mechanisms that are
 * currently available, and stores the number of entries in *countp.  The
 * caller frees the array with crypto_free_mech_list().  Returns NULL (with
 * *countp == 0) only if allocation fails, which is possible only when
 * kmflag is a no-sleep flag.
 *
 * The result is the union of:
 *   - mechanisms that have at least one hardware provider, and
 *   - mechanisms hinted at by soft_config_list entries that are not
 *     administratively disabled and not already in the table.
 */
crypto_mech_name_t *
crypto_get_mech_list(uint_t *countp, int kmflag)
{
	uint_t count = 0, me_tab_size, i, j;
	kcf_ops_class_t cl;
	kcf_mech_entry_t *me, *me_tab;
	crypto_mech_name_t *mech_name_tab, *tmp_mech_name_tab;
	char *mech_name, *hint_mech, *end;
	kcf_soft_conf_entry_t *p;
	size_t n;
	kcf_lock_withpad_t *mp;

	/*
	 * First pass: estimate an upper bound for the table size.
	 * Start with the total number of soft-config hint mechanisms.
	 */
	mutex_enter(&soft_config_mutex);
	p = soft_config_list;
	while (p != NULL) {
		count += p->ce_count;
		p = p->ce_next;
	}
	mutex_exit(&soft_config_mutex);

	/* Add one slot for each mechanism with a hardware provider. */
	for (cl = KCF_FIRST_OPSCLASS; cl <= KCF_LAST_OPSCLASS; cl++) {
		me_tab_size = kcf_mech_tabs_tab[cl].met_size;
		me_tab = kcf_mech_tabs_tab[cl].met_tab;
		for (i = 0; i < me_tab_size; i++) {
			me = &me_tab[i];
			/* per-CPU striped lock for the mechanism entry */
			mp = &me_mutexes[CPU_SEQID];
			mutex_enter(&mp->kl_lock);
			if ((me->me_name[0] != 0) &&
			    (me->me_num_hwprov >= 1)) {
				ASSERT(me->me_hw_prov_chain != NULL);
				count++;
			}
			mutex_exit(&mp->kl_lock);
		}
	}

	/*
	 * The counts above can change once the locks are dropped, so the
	 * size computed here is only a guess; if the buffer turns out to
	 * be too small the scan doubles it and restarts at "again".
	 */
	n = count * CRYPTO_MAX_MECH_NAME;
again:
	count = 0;
	tmp_mech_name_tab = kmem_zalloc(n, kmflag);
	if (tmp_mech_name_tab == NULL) {
		*countp = 0;
		return (NULL);
	}

	/* mech_name walks the buffer; end marks one past its last byte */
	mech_name = (char *)tmp_mech_name_tab;
	end = mech_name + n;

	/* Second pass: copy in the names of hardware-backed mechanisms. */
	for (cl = KCF_FIRST_OPSCLASS; cl <= KCF_LAST_OPSCLASS; cl++) {
		me_tab_size = kcf_mech_tabs_tab[cl].met_size;
		me_tab = kcf_mech_tabs_tab[cl].met_tab;
		for (i = 0; i < me_tab_size; i++) {
			me = &me_tab[i];
			mp = &me_mutexes[CPU_SEQID];
			mutex_enter(&mp->kl_lock);
			if ((me->me_name[0] != 0) &&
			    (me->me_num_hwprov >= 1)) {
				ASSERT(me->me_hw_prov_chain != NULL);
				/* out of room: double the buffer and retry */
				if ((mech_name + CRYPTO_MAX_MECH_NAME) > end) {
					mutex_exit(&mp->kl_lock);
					kmem_free(tmp_mech_name_tab, n);
					n = n << 1;
					goto again;
				}
				(void) strncpy(mech_name, me->me_name,
				    CRYPTO_MAX_MECH_NAME);
				mech_name += CRYPTO_MAX_MECH_NAME;
				count++;
			}
			mutex_exit(&mp->kl_lock);
		}
	}

	/*
	 * Third pass: append soft-config hint mechanisms that are neither
	 * disabled nor already present in the table.
	 */
	mutex_enter(&soft_config_mutex);
	p = soft_config_list;
	while (p != NULL) {
		for (i = 0; i < p->ce_count; i++) {
			hint_mech = p->ce_mechs[i];

			/* skip administratively disabled mechanisms */
			if (is_mech_disabled_byname(CRYPTO_SW_PROVIDER,
			    p->ce_name, 0, hint_mech))
				continue;

			/* linear duplicate search over what we copied */
			for (j = 0; j < count; j++) {
				if (strcmp(hint_mech,
				    tmp_mech_name_tab[j]) == 0)
					break;
			}

			if (j == count) {	/* not a duplicate */
				ASSERT((char *)&tmp_mech_name_tab[count] ==
				    mech_name);
				/* out of room: double the buffer and retry */
				if ((mech_name + CRYPTO_MAX_MECH_NAME) > end) {
					mutex_exit(&soft_config_mutex);
					kmem_free(tmp_mech_name_tab, n);
					n = n << 1;
					goto again;
				}
				(void) strncpy(tmp_mech_name_tab[count],
				    hint_mech, CRYPTO_MAX_MECH_NAME);
				mech_name += CRYPTO_MAX_MECH_NAME;
				count++;
			}
		}
		p = p->ce_next;
	}
	mutex_exit(&soft_config_mutex);

	ASSERT(mech_name <= end);

	/* buffer exactly full: hand it back as-is */
	if (mech_name == end) {
		mech_name_tab = tmp_mech_name_tab;
		goto done;
	}

	/* otherwise shrink-copy to the exact size actually used */
	mech_name_tab = kmem_zalloc(count * CRYPTO_MAX_MECH_NAME, kmflag);
	if (mech_name_tab == NULL) {
		kmem_free(tmp_mech_name_tab, n);
		*countp = 0;
		return (NULL);
	}
	bcopy(tmp_mech_name_tab, mech_name_tab, count * CRYPTO_MAX_MECH_NAME);
	kmem_free(tmp_mech_name_tab, n);

done:
	*countp = count;
	return (mech_name_tab);
}
/*
 * Free an array previously returned by crypto_get_mech_list().
 */
void
crypto_free_mech_list(crypto_mech_name_t *mech_names, uint_t count)
{
	if (mech_names == NULL || count == 0)
		return;

	kmem_free(mech_names, count * CRYPTO_MAX_MECH_NAME);
}
/*
 * Register a callback to be invoked on the given framework events.
 * Returns an opaque handle for crypto_unnotify_events(), or NULL if
 * the callback is missing or the mask names no supported event.
 */
crypto_notify_handle_t
crypto_notify_events(crypto_notify_callback_t nf, uint32_t event_mask)
{
	kcf_ntfy_elem_t *elem;
	uint32_t known_events = CRYPTO_EVENT_MECHS_CHANGED |
	    CRYPTO_EVENT_PROVIDER_REGISTERED |
	    CRYPTO_EVENT_PROVIDER_UNREGISTERED;

	if (nf == NULL || (event_mask & known_events) == 0)
		return (NULL);

	elem = kmem_zalloc(sizeof (kcf_ntfy_elem_t), KM_SLEEP);
	mutex_init(&elem->kn_lock, NULL, MUTEX_DEFAULT, NULL);
	cv_init(&elem->kn_cv, NULL, CV_DEFAULT, NULL);
	elem->kn_state = NTFY_WAITING;
	elem->kn_func = nf;
	elem->kn_event_mask = event_mask;

	/* append to the tail of the doubly-linked notification list */
	mutex_enter(&ntfy_list_lock);
	if (ntfy_list_head == NULL) {
		ntfy_list_head = ntfy_list_tail = elem;
	} else {
		elem->kn_prev = ntfy_list_tail;
		ntfy_list_tail->kn_next = elem;
		ntfy_list_tail = elem;
	}
	mutex_exit(&ntfy_list_lock);

	return ((crypto_notify_handle_t)elem);
}
/*
 * Deregister a callback registered with crypto_notify_events() and free
 * its list element.  If the callback is currently being invoked by
 * kcf_walk_ntfylist() (state NTFY_RUNNING), wait for it to finish first.
 */
void
crypto_unnotify_events(crypto_notify_handle_t hndl)
{
	kcf_ntfy_elem_t *nep = (kcf_ntfy_elem_t *)hndl;

	if (hndl == NULL)
		return;

retry:
	/* lock order: list lock, then element lock */
	mutex_enter(&ntfy_list_lock);
	mutex_enter(&nep->kn_lock);

	if (nep->kn_state == NTFY_WAITING) {
		kcf_ntfy_elem_t *nextp = nep->kn_next;
		kcf_ntfy_elem_t *prevp = nep->kn_prev;

		/* unlink from the doubly-linked list */
		if (nextp != NULL)
			nextp->kn_prev = prevp;
		else
			ntfy_list_tail = prevp;
		if (prevp != NULL)
			prevp->kn_next = nextp;
		else
			ntfy_list_head = nextp;
	} else {
		ASSERT(nep->kn_state == NTFY_RUNNING);
		/*
		 * The callback is running; the walker dropped the list
		 * lock around the call.  Release the list lock so the
		 * walker can make progress, wait for the state to go
		 * back to NTFY_WAITING, then start over from scratch.
		 */
		mutex_exit(&ntfy_list_lock);
		while (nep->kn_state == NTFY_RUNNING)
			cv_wait(&nep->kn_cv, &nep->kn_lock);
		mutex_exit(&nep->kn_lock);
		goto retry;
	}

	/* element is unlinked; tear it down and free it */
	mutex_exit(&nep->kn_lock);
	mutex_destroy(&nep->kn_lock);
	cv_destroy(&nep->kn_cv);
	kmem_free(nep, sizeof (kcf_ntfy_elem_t));
	mutex_exit(&ntfy_list_lock);
}
/*
 * Walk the notification list and invoke every registered callback whose
 * event mask includes `event', passing it `event_arg'.
 */
void
kcf_walk_ntfylist(uint32_t event, void *event_arg)
{
	kcf_ntfy_elem_t *nep;
	int nelem = 0;

	mutex_enter(&ntfy_list_lock);

	/*
	 * Count the entries present now.  The walk below drops
	 * ntfy_list_lock around each callback, so entries may be
	 * appended meanwhile; bounding the walk by `nelem' keeps
	 * entries added during the walk from being invoked and
	 * guarantees termination.
	 */
	for (nep = ntfy_list_head; nep != NULL; nep = nep->kn_next)
		nelem++;

	for (nep = ntfy_list_head; (nep != NULL && nelem); nep = nep->kn_next) {
		nelem--;

		if (!(nep->kn_event_mask & event))
			continue;

		/*
		 * Mark the element running before dropping the list
		 * lock: crypto_unnotify_events() will not free an
		 * element in NTFY_RUNNING state, so `nep' stays valid
		 * across the callback.
		 */
		mutex_enter(&nep->kn_lock);
		nep->kn_state = NTFY_RUNNING;
		mutex_exit(&nep->kn_lock);
		mutex_exit(&ntfy_list_lock);

		/* invoke the callback without holding any lock */
		(*nep->kn_func)(event, event_arg);

		mutex_enter(&nep->kn_lock);
		nep->kn_state = NTFY_WAITING;
		/* wake any crypto_unnotify_events() waiter */
		cv_broadcast(&nep->kn_cv);
		mutex_exit(&nep->kn_lock);

		mutex_enter(&ntfy_list_lock);
	}

	mutex_exit(&ntfy_list_lock);
}
/*
 * Verify `key' against mechanism `mech' by asking every provider of the
 * mechanism that implements a key_check entry point.  Returns
 * CRYPTO_SUCCESS if no provider objects, otherwise the first failing
 * provider's error code.  Keys by reference cannot be checked here and
 * are rejected as CRYPTO_ARGUMENTS_BAD.
 */
int
crypto_key_check(crypto_mechanism_t *mech, crypto_key_t *key)
{
	int error;
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd;
	kcf_prov_mech_desc_t *prov_chain;
	kcf_lock_withpad_t *mp;

	/* CRYPTO_KEY_REFERENCE format is not supported */
	if ((mech == NULL) || (key == NULL) ||
	    (key->ck_format == CRYPTO_KEY_REFERENCE))
		return (CRYPTO_ARGUMENTS_BAD);

	if ((error = kcf_get_mech_entry(mech->cm_type, &me)) != KCF_SUCCESS) {
		return (CRYPTO_MECHANISM_INVALID);
	}

	/* per-CPU striped lock guarding the mechanism entry */
	mp = &me_mutexes[CPU_SEQID];
	mutex_enter(&mp->kl_lock);

	/* Ask the software provider first, if there is one. */
	if (me->me_sw_prov != NULL) {
		pd = me->me_sw_prov->pm_prov_desc;
		KCF_PROV_REFHOLD(pd);

		if ((KCF_PROV_KEY_OPS(pd) != NULL) &&
		    (KCF_PROV_KEY_OPS(pd)->key_check != NULL)) {
			crypto_mechanism_t lmech;

			/*
			 * Drop the stripe lock around the provider call;
			 * the REFHOLD keeps pd alive meanwhile.
			 */
			mutex_exit(&mp->kl_lock);
			lmech = *mech;
			/* translate to the provider's mechanism number */
			KCF_SET_PROVIDER_MECHNUM(mech->cm_type, pd, &lmech);
			error = KCF_PROV_KEY_CHECK(pd, &lmech, key);

			if (error != CRYPTO_SUCCESS) {
				/* stripe lock is not held on this path */
				KCF_PROV_REFRELE(pd);
				return (error);
			}

			mutex_enter(&mp->kl_lock);
		}
		KCF_PROV_REFRELE(pd);
	}

	/* Then ask every hardware provider in the chain. */
	prov_chain = me->me_hw_prov_chain;
	while (prov_chain != NULL) {
		pd = prov_chain->pm_prov_desc;
		KCF_PROV_REFHOLD(pd);

		if ((KCF_PROV_KEY_OPS(pd) != NULL) &&
		    (KCF_PROV_KEY_OPS(pd)->key_check != NULL)) {
			crypto_mechanism_t lmech;

			/* same drop-lock-around-call pattern as above */
			mutex_exit(&mp->kl_lock);
			lmech = *mech;
			KCF_SET_PROVIDER_MECHNUM(mech->cm_type, pd,
			    &lmech);
			error = KCF_PROV_KEY_CHECK(pd, &lmech, key);

			if (error != CRYPTO_SUCCESS) {
				KCF_PROV_REFRELE(pd);
				return (error);
			}

			mutex_enter(&mp->kl_lock);
		}
		KCF_PROV_REFRELE(pd);
		prov_chain = prov_chain->pm_next;
	}

	mutex_exit(&mp->kl_lock);
	return (CRYPTO_SUCCESS);
}
/*
 * Verify `key' against mechanism `mech' using one specific provider,
 * on which the caller must already hold a reference.  Logical providers
 * have no mechanisms of their own, so they are rejected with
 * CRYPTO_NOT_SUPPORTED.  Keys by reference cannot be checked and are
 * rejected as CRYPTO_ARGUMENTS_BAD.
 *
 * Cleanup: the original code kept a `real_provider' alias of pd and a
 * trailing KCF_PROV_REFRELE guarded by a logical-provider test; that
 * branch was unreachable because logical providers return early above
 * it.  The dead code and the alias have been removed — behavior is
 * unchanged.
 */
int
crypto_key_check_prov(crypto_provider_t provider, crypto_mechanism_t *mech,
    crypto_key_t *key)
{
	kcf_provider_desc_t *pd = provider;
	crypto_mechanism_t lmech;
	int rv;

	ASSERT(KCF_PROV_REFHELD(pd));

	/* CRYPTO_KEY_REFERENCE format is not supported */
	if ((mech == NULL) || (key == NULL) ||
	    (key->ck_format == CRYPTO_KEY_REFERENCE))
		return (CRYPTO_ARGUMENTS_BAD);

	/* logical providers cannot service key_check directly */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
		return (CRYPTO_NOT_SUPPORTED);
	}

	/* translate to the provider's mechanism number, then check */
	lmech = *mech;
	KCF_SET_PROVIDER_MECHNUM(mech->cm_type, pd, &lmech);
	rv = KCF_PROV_KEY_CHECK(pd, &lmech, key);

	return (rv);
}
/*
 * Fill in a crypto_mechanism_info_t from a provider's mechanism
 * descriptor: key-size unit and bounds, plus a usage mask derived
 * from the descriptor's function-group mask.
 */
static void
init_mechanism_info(crypto_mechanism_info_t *mech_info,
    kcf_prov_mech_desc_t *pmd)
{
	crypto_func_group_t fg = pmd->pm_mech_info.cm_func_group_mask;
	uint32_t usage = 0;

	/* whether key sizes below are expressed in bits or bytes */
	mech_info->mi_keysize_unit = pmd->pm_mech_info.cm_mech_flags &
	    (CRYPTO_KEYSIZE_UNIT_IN_BITS | CRYPTO_KEYSIZE_UNIT_IN_BYTES);
	mech_info->mi_min_key_size =
	    (size_t)pmd->pm_mech_info.cm_min_key_length;
	mech_info->mi_max_key_size =
	    (size_t)pmd->pm_mech_info.cm_max_key_length;

	/* collapse single-part and atomic groups into usage bits */
	if (fg & (CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC))
		usage |= CRYPTO_MECH_USAGE_ENCRYPT;
	if (fg & (CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC))
		usage |= CRYPTO_MECH_USAGE_DECRYPT;
	if (fg & (CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC))
		usage |= CRYPTO_MECH_USAGE_MAC;

	mech_info->mi_usage = usage;
}
/*
 * Return an array with one crypto_mechanism_info_t per provider of
 * `mech_type' (software provider first, then each hardware provider).
 * On success *mech_infos/*num_mech_infos describe the array, which the
 * caller frees with crypto_free_all_mech_info().  A mechanism with no
 * providers yields (NULL, 0) and CRYPTO_SUCCESS.
 *
 * Fix: on kmem_alloc() failure the original stored the stale non-zero
 * provider count in *num_mech_infos while *mech_infos was NULL; the
 * outputs are now the consistent pair (NULL, 0) on every error path.
 */
int
crypto_get_all_mech_info(crypto_mech_type_t mech_type,
    crypto_mechanism_info_t **mech_infos, uint_t *num_mech_infos,
    int km_flag)
{
	uint_t ninfos, cur_info;
	kcf_mech_entry_t *me;
	int rv;
	kcf_prov_mech_desc_t *hwp;
	crypto_mechanism_info_t *infos;
	size_t infos_size;
	kcf_lock_withpad_t *mp;

	if ((rv = kcf_get_mech_entry(mech_type, &me)) != CRYPTO_SUCCESS) {
		*mech_infos = NULL;
		*num_mech_infos = 0;
		return (rv);
	}

	/* per-CPU striped lock guarding the mechanism entry */
	mp = &me_mutexes[CPU_SEQID];
	mutex_enter(&mp->kl_lock);
again:
	/* the stripe lock is held on entry to this label */
	ninfos = PROV_COUNT(me);
	mutex_exit(&mp->kl_lock);
	if (ninfos == 0) {
		infos = NULL;
		rv = CRYPTO_SUCCESS;
		goto bail;
	}

	/* allocate with the lock dropped; km_flag may allow sleeping */
	infos_size = ninfos * sizeof (crypto_mechanism_info_t);
	infos = kmem_alloc(infos_size, km_flag);
	if (infos == NULL) {
		/* report the consistent pair (NULL, 0) on failure */
		ninfos = 0;
		rv = CRYPTO_HOST_MEMORY;
		goto bail;
	}

	/* if providers came or went while unlocked, resize and retry */
	mutex_enter(&mp->kl_lock);
	if (ninfos != PROV_COUNT(me)) {
		kmem_free(infos, infos_size);
		goto again;
	}

	cur_info = 0;
	if (me->me_sw_prov != NULL)
		init_mechanism_info(&infos[cur_info++], me->me_sw_prov);
	for (hwp = me->me_hw_prov_chain; hwp != NULL; hwp = hwp->pm_next)
		init_mechanism_info(&infos[cur_info++], hwp);
	mutex_exit(&mp->kl_lock);
	ASSERT(cur_info == ninfos);
bail:
	*mech_infos = infos;
	*num_mech_infos = ninfos;
	return (rv);
}
/*
 * Free an array previously returned by crypto_get_all_mech_info().
 */
void
crypto_free_all_mech_info(crypto_mechanism_info_t *mech_infos, uint_t count)
{
	if (mech_infos == NULL || count == 0)
		return;

	kmem_free(mech_infos, count * sizeof (crypto_mechanism_info_t));
}
/*
 * Compare two byte regions, treating trailing whitespace in the longer
 * region as padding: the regions match if their common prefix is equal
 * and every byte beyond it in the longer region is whitespace.  Both
 * lengths are clamped to max_sz first.  Returns 0 on a match, non-zero
 * otherwise (matching the memcmp convention).
 */
static int
memcmp_pad_max(void *d1, uint_t d1_len, void *d2, uint_t d2_len, uint_t max_sz)
{
	uint_t common, total, i;
	char *tail;

	/* clamp both lengths to the caller-supplied maximum */
	if (d1_len > max_sz)
		d1_len = max_sz;
	if (d2_len > max_sz)
		d2_len = max_sz;

	/* tail points into the longer region; its excess must be blank */
	if (d1_len <= d2_len) {
		common = d1_len;
		total = d2_len;
		tail = d2;
	} else {
		common = d2_len;
		total = d1_len;
		tail = d1;
	}

	if (memcmp(d1, d2, common) != 0)
		return (1);

	for (i = common; i < total; i++) {
		if (!isspace(tail[i]))
			return (1);
	}

	return (0);
}
/*
 * Return B_TRUE when a provider's extended info matches the requested
 * label (mandatory) and, when supplied, the manufacturer id and serial
 * number.  Fields are compared blank-padded via memcmp_pad_max().
 * ext_info is caller-supplied scratch space filled in here.
 */
static boolean_t
match_ext_info(kcf_provider_desc_t *pd, char *label, char *manuf, char *serial,
    crypto_provider_ext_info_t *ext_info)
{
	int rv;

	rv = crypto_get_provinfo(pd, ext_info);
	ASSERT(rv != CRYPTO_NOT_SUPPORTED);
	if (rv != CRYPTO_SUCCESS)
		return (B_FALSE);

	/* the label must always match */
	if (memcmp_pad_max(ext_info->ei_label, CRYPTO_EXT_SIZE_LABEL,
	    label, strlen(label), CRYPTO_EXT_SIZE_LABEL) != 0)
		return (B_FALSE);

	/* manufacturer id is compared only when the caller supplied one */
	if (manuf != NULL && memcmp_pad_max(ext_info->ei_manufacturerID,
	    CRYPTO_EXT_SIZE_MANUF, manuf, strlen(manuf),
	    CRYPTO_EXT_SIZE_MANUF) != 0)
		return (B_FALSE);

	/* likewise for the serial number */
	if (serial != NULL && memcmp_pad_max(ext_info->ei_serial_number,
	    CRYPTO_EXT_SIZE_SERIAL, serial, strlen(serial),
	    CRYPTO_EXT_SIZE_SERIAL) != 0)
		return (B_FALSE);

	return (B_TRUE);
}
/*
 * Find a provider by label and optionally manufacturer id and serial
 * number.  On success a reference is held on the returned provider;
 * the caller releases it with crypto_release_provider().  Returns NULL
 * when no provider matches or label is NULL.
 */
crypto_provider_t
crypto_get_provider(char *label, char *manuf, char *serial)
{
	kcf_provider_desc_t **provs, *pd;
	crypto_provider_ext_info_t *ext;
	uint_t count;
	int i;

	if (label == NULL)
		return (NULL);

	if (kcf_get_slot_list(&count, &provs, B_FALSE) != CRYPTO_SUCCESS)
		return (NULL);

	if (count == 0) {
		/*
		 * NOTE(review): provs is not freed on this path;
		 * presumably kcf_get_slot_list() allocates nothing when
		 * the count is zero -- confirm against its source.
		 */
		return (NULL);
	}

	ext = kmem_zalloc(sizeof (crypto_provider_ext_info_t), KM_SLEEP);

	/* first matching provider wins; hold it before the tab is freed */
	pd = NULL;
	for (i = 0; i < count; i++) {
		if (match_ext_info(provs[i], label, manuf, serial, ext)) {
			pd = provs[i];
			KCF_PROV_REFHOLD(pd);
			break;
		}
	}

	kcf_free_provider_tab(count, provs);
	kmem_free(ext, sizeof (crypto_provider_ext_info_t));
	return (pd);
}
/*
 * Fetch a provider's extended info (label, manufacturer id, serial
 * number, ...) by submitting a KCF_OP_MGMT_EXTINFO request to the
 * hardware provider backing `hndl' (for a logical provider, a member
 * that supports the ext_info entry point).
 *
 * Fix: the two occurrences of "&params" had been corrupted into the
 * mis-encoded sequence "¶ms" (an HTML "&para;" entity leaking into
 * the source), which does not compile; restored the address-of
 * expressions.
 */
int
crypto_get_provinfo(crypto_provider_t hndl, crypto_provider_ext_info_t *info)
{
	int rv;
	kcf_req_params_t params;
	kcf_provider_desc_t *pd;
	kcf_provider_desc_t *real_provider;

	pd = (kcf_provider_desc_t *)hndl;
	rv = kcf_get_hardware_provider_nomech(
	    CRYPTO_OPS_OFFSET(provider_ops), CRYPTO_PROVIDER_OFFSET(ext_info),
	    pd, &real_provider);

	if (rv == CRYPTO_SUCCESS && real_provider != NULL) {
		/* a differing real provider implies pd is logical */
		ASSERT(real_provider == pd ||
		    pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER);
		KCF_WRAP_PROVMGMT_OPS_PARAMS(&params, KCF_OP_MGMT_EXTINFO,
		    0, NULL, 0, NULL, 0, NULL, info, pd);
		/* synchronous request (B_FALSE = not restricted) */
		rv = kcf_submit_request(real_provider, NULL, NULL, &params,
		    B_FALSE);
		/* drop the hold taken by kcf_get_hardware_provider_nomech */
		KCF_PROV_REFRELE(real_provider);
	}

	return (rv);
}
/*
 * Release the hold placed on a provider by crypto_get_provider().
 */
void
crypto_release_provider(crypto_provider_t provider)
{
	kcf_provider_desc_t *pd = (kcf_provider_desc_t *)provider;

	KCF_PROV_REFRELE(pd);
}