BHDR
hash = le32_to_cpu(BHDR(bh)->h_hash);
ref = le32_to_cpu(BHDR(bh)->h_refcount);
BHDR(bh)->h_refcount = cpu_to_le32(ref);
le32_to_cpu(BHDR(bh)->h_refcount));
struct ext4_xattr_header *hdr = BHDR(bh);
BHDR(bh)->h_checksum = ext4_xattr_block_csum(inode,
bh->b_blocknr, BHDR(bh));
le32_to_cpu(BHDR(bs->bh)->h_refcount));
bs->s.base = BHDR(bs->bh);
__u32 hash = le32_to_cpu(BHDR(bs->bh)->h_hash);
s->base = kmemdup(BHDR(bs->bh), bs->bh->b_size, GFP_NOFS);
if (BHDR(bh)->h_magic != cpu_to_le32(EXT4_XATTR_MAGIC) ||
BHDR(bh)->h_blocks != cpu_to_le32(1)) {
ref = le32_to_cpu(BHDR(new_bh)->h_refcount) + 1;
BHDR(new_bh)->h_refcount = cpu_to_le32(ref);
base = BHDR(bh);
struct ext4_xattr_header *header = BHDR(bh);
} else if (ext4_xattr_cmp(header, BHDR(bh)) == 0) {
atomic_read(&(bh->b_count)), le32_to_cpu(BHDR(bh)->h_refcount));
atomic_read(&(bh->b_count)), le32_to_cpu(BHDR(bh)->h_refcount));
/*
 * BFIRST(bh): first xattr entry in the block held by buffer head @bh.
 * Skips past the struct ext4_xattr_header at the start of the block
 * (BHDR(bh) + 1 advances by one header) and casts the following bytes
 * to an entry via ENTRY().  NOTE(review): ENTRY/BHDR are defined
 * elsewhere in this file — presumably plain pointer casts; verify there.
 */
#define BFIRST(bh) ENTRY(BHDR(bh)+1)