#include <linux/array_size.h>
#include <linux/bsearch.h>
#include <linux/consolemap.h>
#include <linux/minmax.h>
/*
 * Inclusive code-point interval [first, last] for Basic Multilingual Plane
 * entries; 16 bits suffice because all BMP code points are <= 0xffff.
 */
struct ucs_interval16 {
u16 first;
u16 last;
};
/*
 * Inclusive code-point interval [first, last] for code points above the
 * BMP (> 0xffff), which need the full 32-bit representation.
 */
struct ucs_interval32 {
u32 first;
u32 last;
};
#include "ucs_width_table.h"
/*
 * bsearch() comparator: order a 16-bit code point against an inclusive
 * [first, last] interval.  Returns 0 when the code point lies inside it.
 */
static int interval16_cmp(const void *key, const void *element)
{
	const struct ucs_interval16 *range = element;
	u16 code = *(const u16 *)key;

	return code < range->first ? -1 : (code > range->last ? 1 : 0);
}
/*
 * bsearch() comparator: order a 32-bit code point against an inclusive
 * [first, last] interval.  Returns 0 when the code point lies inside it.
 */
static int interval32_cmp(const void *key, const void *element)
{
	const struct ucs_interval32 *range = element;
	u32 code = *(const u32 *)key;

	return code < range->first ? -1 : (code > range->last ? 1 : 0);
}
/*
 * Test whether @cp falls inside any interval of a sorted 16-bit range
 * table.  A cheap bounds check against the table's overall span avoids
 * the binary search for clearly out-of-range code points.
 */
static bool cp_in_range16(u16 cp, const struct ucs_interval16 *ranges, size_t size)
{
	if (cp >= ranges[0].first && cp <= ranges[size - 1].last)
		return __inline_bsearch(&cp, ranges, size, sizeof(*ranges),
					interval16_cmp) != NULL;
	return false;
}
/*
 * Test whether @cp falls inside any interval of a sorted 32-bit range
 * table.  A cheap bounds check against the table's overall span avoids
 * the binary search for clearly out-of-range code points.
 */
static bool cp_in_range32(u32 cp, const struct ucs_interval32 *ranges, size_t size)
{
	if (cp >= ranges[0].first && cp <= ranges[size - 1].last)
		return __inline_bsearch(&cp, ranges, size, sizeof(*ranges),
					interval32_cmp) != NULL;
	return false;
}
/* True when @cp belongs to the Basic Multilingual Plane (fits in 16 bits). */
#define UCS_IS_BMP(cp) ((cp) <= 0xffff)
/*
 * ucs_is_zero_width() - true if @cp occupies no display column
 * (e.g. combining marks, per the generated zero-width tables).
 *
 * Non-BMP code points are handled first so the common BMP case falls
 * through to the 16-bit table.
 */
bool ucs_is_zero_width(u32 cp)
{
	if (!UCS_IS_BMP(cp))
		return cp_in_range32(cp, ucs_zero_width_non_bmp_ranges,
				     ARRAY_SIZE(ucs_zero_width_non_bmp_ranges));
	return cp_in_range16(cp, ucs_zero_width_bmp_ranges,
			     ARRAY_SIZE(ucs_zero_width_bmp_ranges));
}
/*
 * ucs_is_double_width() - true if @cp occupies two display columns
 * (e.g. wide CJK characters, per the generated double-width tables).
 *
 * Non-BMP code points are handled first so the common BMP case falls
 * through to the 16-bit table.
 */
bool ucs_is_double_width(u32 cp)
{
	if (!UCS_IS_BMP(cp))
		return cp_in_range32(cp, ucs_double_width_non_bmp_ranges,
				     ARRAY_SIZE(ucs_double_width_non_bmp_ranges));
	return cp_in_range16(cp, ucs_double_width_bmp_ranges,
			     ARRAY_SIZE(ucs_double_width_bmp_ranges));
}
/*
 * One canonical composition: base + combining mark -> precomposed
 * character.  All three fit in 16 bits (table covers BMP entries only —
 * see the MIN/MAX bounds in ucs_recompose_table.h).
 */
struct ucs_recomposition {
u16 base;
u16 mark;
u16 recomposed;
};
#include "ucs_recompose_table.h"
/*
 * Search key for the recomposition table: must mirror the (base, mark)
 * ordering used by recomposition_cmp() below.
 */
struct compare_key {
u16 base;
u16 mark;
};
/*
 * bsearch() comparator for the recomposition table: lexicographic order
 * on (base, mark), matching the table's sort order.
 */
static int recomposition_cmp(const void *key, const void *element)
{
	const struct compare_key *wanted = key;
	const struct ucs_recomposition *entry = element;

	if (wanted->base != entry->base)
		return wanted->base < entry->base ? -1 : 1;
	if (wanted->mark != entry->mark)
		return wanted->mark < entry->mark ? -1 : 1;
	return 0;
}
/*
 * ucs_recompose() - combine a base character with a combining mark
 * @base: base character code point
 * @mark: combining mark code point
 *
 * Returns the precomposed code point, or 0 if the pair has no canonical
 * composition in the generated table.
 */
u32 ucs_recompose(u32 base, u32 mark)
{
	const struct ucs_recomposition *found;
	struct compare_key key;

	/* Cheap rejection: outside the table's bounding box. */
	if (base < UCS_RECOMPOSE_MIN_BASE || base > UCS_RECOMPOSE_MAX_BASE)
		return 0;
	if (mark < UCS_RECOMPOSE_MIN_MARK || mark > UCS_RECOMPOSE_MAX_MARK)
		return 0;

	/* Both values are in range, so the u16 narrowing is lossless. */
	key.base = base;
	key.mark = mark;
	found = __inline_bsearch(&key, ucs_recomposition_table,
				 ARRAY_SIZE(ucs_recomposition_table),
				 sizeof(*ucs_recomposition_table),
				 recomposition_cmp);
	return found ? found->recomposed : 0;
}
/*
 * Descriptor for one 256-code-point "page" (high byte of a BMP code
 * point) that has fallback entries: @count entries starting at index
 * @start in ucs_fallback_entries[].
 */
struct ucs_page_desc {
u8 page;
u8 count;
u16 start;
};
/*
 * One fallback entry within a page: @offset is the low byte of the code
 * point, @fallback the replacement character.  A @fallback equal to
 * UCS_PAGE_ENTRY_RANGE_MARKER means this entry starts a range whose end
 * and actual fallback are stored in the next entry (see
 * ucs_page_entry_cmp()).
 */
struct ucs_page_entry {
u8 offset;
u8 fallback;
};
#include "ucs_fallback_table.h"
/*
 * bsearch() comparator locating the page descriptor whose ->page equals
 * the high byte being searched for.
 */
static int ucs_page_desc_cmp(const void *key, const void *element)
{
	const struct ucs_page_desc *desc = element;
	u8 wanted = *(const u8 *)key;

	if (wanted != desc->page)
		return wanted < desc->page ? -1 : 1;
	return 0;
}
/*
 * bsearch() comparator for fallback entries within one page.
 *
 * An entry whose ->fallback is UCS_PAGE_ENTRY_RANGE_MARKER is the start
 * of a range; its end offset lives in the *next* table slot (entry[1]).
 * A key anywhere inside [entry->offset, entry[1].offset] compares equal
 * to the marker entry, so bsearch() returns the marker itself and the
 * caller steps forward one slot to read the real fallback.
 */
static int ucs_page_entry_cmp(const void *key, const void *element)
{
u8 offset = *(u8 *)key;
const struct ucs_page_entry *entry = element;
if (offset < entry->offset)
return -1;
/* Range start: upper bound is held by the following entry. */
if (entry->fallback == UCS_PAGE_ENTRY_RANGE_MARKER) {
if (offset > entry[1].offset)
return 1;
} else {
if (offset > entry->offset)
return 1;
}
return 0;
}
/*
 * ucs_get_fallback() - get a substitution character for @cp
 * @cp: code point with no direct glyph available
 *
 * Returns a (typically ASCII) replacement code point, or 0 if the table
 * has no fallback for @cp.  Only BMP code points are covered.
 */
u32 ucs_get_fallback(u32 cp)
{
const struct ucs_page_desc *page;
const struct ucs_page_entry *entry;
/* page_idx = high byte, offset = low byte of the BMP code point. */
u8 page_idx = cp >> 8, offset = cp;
if (!UCS_IS_BMP(cp))
return 0;
/*
 * Fast path: U+FF01..U+FF5E map directly onto ASCII '!'..'~'
 * (fullwidth forms), no table lookup needed.
 */
if (cp >= 0xFF01 && cp <= 0xFF5E)
return cp - 0xFF01 + 33;
/* Find the descriptor for this 256-code-point page, if any. */
page = __inline_bsearch(&page_idx, ucs_fallback_pages,
ARRAY_SIZE(ucs_fallback_pages),
sizeof(*ucs_fallback_pages),
ucs_page_desc_cmp);
if (!page)
return 0;
/* Search only this page's slice of the entry table. */
entry = __inline_bsearch(&offset, ucs_fallback_entries + page->start,
page->count, sizeof(*ucs_fallback_entries),
ucs_page_entry_cmp);
if (!entry)
return 0;
/* A range match lands on the marker; the fallback is in the next slot. */
if (entry->fallback == UCS_PAGE_ENTRY_RANGE_MARKER)
entry++;
return entry->fallback;
}