Lines matching references to asize
2693 u32 asize = le32_to_cpu(attr->size); in check_attr() local
2699 if (asize >= sbi->record_size || in check_attr()
2700 asize + PtrOffset(rec, attr) >= sbi->record_size || in check_attr()
2703 asize)) { in check_attr()
2711 if (rsize >= asize || in check_attr()
2712 le16_to_cpu(attr->res.data_off) + rsize > asize) { in check_attr()
2723 if (svcn > evcn + 1 || run_off >= asize || in check_attr()
2729 if (run_off > asize) in check_attr()
2733 Add2Ptr(attr, run_off), asize - run_off) < 0) { in check_attr()
2746 attr, le16_to_cpu(attr->res.data_off))) > asize) { in check_attr()
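
The check_attr() fragments above (they appear to come from the NTFS3 log-replay code, fs/ntfs3/fslog.c) all bound an attribute by its own declared length: asize must keep the attribute inside the MFT record, a resident value must fit within asize, and a non-resident run list must start inside asize and occupy at most asize - run_off bytes. A minimal, self-contained sketch of that pattern, using simplified stand-in types instead of the kernel's struct ATTRIB, struct MFT_REC and the le32_to_cpu()/PtrOffset() helpers:

#include <stdbool.h>
#include <stdint.h>

/* Simplified stand-in for the attribute header; field names are
 * illustrative, not the kernel's layout.
 */
struct fake_attr {
	uint32_t size;      /* asize: total length of this attribute record */
	uint8_t  non_res;   /* 0 = resident value, 1 = non-resident */
	uint16_t data_off;  /* resident: offset of the value inside the attribute */
	uint32_t data_size; /* resident: length of the value (rsize) */
	uint16_t run_off;   /* non-resident: offset of the packed run list */
};

/* rec_off is the attribute's byte offset inside the MFT record,
 * record_size the total size of that record.
 */
static bool attr_fits(const struct fake_attr *a, uint32_t rec_off,
		      uint32_t record_size)
{
	uint32_t asize = a->size;

	/* The attribute itself must lie inside the MFT record. */
	if (asize >= record_size || asize + rec_off >= record_size)
		return false;

	if (!a->non_res) {
		/* A resident value must fit inside the attribute. */
		uint32_t rsize = a->data_size;

		if (rsize >= asize || (uint32_t)a->data_off + rsize > asize)
			return false;
	} else {
		/* The packed run list must start inside the attribute;
		 * at most asize - run_off bytes are available for it.
		 */
		if (a->run_off >= asize)
			return false;
	}
	return true;
}
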
2840 u32 asize; in check_if_attr() local
2845 asize = le32_to_cpu(attr->size); in check_if_attr()
2846 if (!asize) in check_if_attr()
2849 o += asize; in check_if_attr()
2850 attr = Add2Ptr(attr, asize); in check_if_attr()
2864 u32 asize; in check_if_index_root() local
2869 asize = le32_to_cpu(attr->size); in check_if_index_root()
2870 if (!asize) in check_if_index_root()
2873 o += asize; in check_if_index_root()
2874 attr = Add2Ptr(attr, asize); in check_if_index_root()
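
check_if_attr() and check_if_index_root() use asize the same way: the attributes packed into a record are walked by adding each attribute's size to a running offset, and a zero size ends the walk. A minimal sketch of that walk, with a simplified stand-in for struct ATTRIB and Add2Ptr():

#include <stdint.h>

struct fake_attr {
	uint32_t size;          /* asize: total length of this attribute record */
	/* ... type, name, value ... */
};

/* Walk the packed attribute list starting at 'first_off' and return the
 * offset where the walk stops. The kernel helpers additionally compare
 * that offset against an expected position taken from the log record,
 * which is omitted here.
 */
static uint32_t walk_attrs(const uint8_t *rec, uint32_t first_off,
			   uint32_t end_off)
{
	uint32_t o = first_off;

	while (o < end_off) {
		const struct fake_attr *attr =
			(const struct fake_attr *)(rec + o);
		uint32_t asize = attr->size;

		if (!asize)
			break;          /* a zero size ends the list */

		o += asize;             /* step over this attribute */
	}
	return o;
}
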
2888 u32 asize = le32_to_cpu(attr->size); in check_if_root_index() local
2893 if (o >= asize) in check_if_root_index()
2936 u32 asize = le32_to_cpu(attr->size); in change_attr_size() local
2937 int dsize = nsize - asize; in change_attr_size()
2938 u8 *next = Add2Ptr(attr, asize); in change_attr_size()
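
change_attr_size() resizes an attribute in place: the bytes that follow it are slid by the difference dsize = nsize - asize, and the record's used count is adjusted by the same amount. A minimal sketch of that arithmetic, with simplified types and no endianness conversions or bounds checks:

#include <stdint.h>
#include <string.h>

struct fake_rec {
	uint32_t used;          /* bytes of data[] currently in use */
	uint8_t  data[1024];    /* attribute area (illustrative size) */
};

static void resize_attr(struct fake_rec *rec, uint32_t attr_off,
			uint32_t asize, uint32_t nsize)
{
	uint8_t *attr = rec->data + attr_off;
	uint8_t *next = attr + asize;                    /* first byte after the attribute */
	uint32_t tail = rec->used - (attr_off + asize);  /* bytes following it */
	int32_t dsize = (int32_t)nsize - (int32_t)asize; /* may be negative */

	memmove(attr + nsize, next, tail);               /* slide the tail up or down */
	rec->used += dsize;                              /* grow or shrink the record */
}
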
2994 u32 asize = name_size + in attr_create_nonres_log() local
2997 attr = kzalloc(asize, GFP_NOFS); in attr_create_nonres_log()
3002 attr->size = cpu_to_le32(asize); in attr_create_nonres_log()
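
attr_create_nonres_log() computes the new attribute's total length up front, allocates a zeroed buffer of exactly that many bytes, and stores the same length in the attribute's own size field so later walks can step over it. A tiny userspace sketch of the same idea, using calloc() in place of kzalloc(GFP_NOFS) and a simplified header:

#include <stdint.h>
#include <stdlib.h>

struct fake_attr_hdr {
	uint32_t type;
	uint32_t size;      /* total length of this attribute record */
	uint8_t  name_len;
	/* ... non-resident header fields ... */
};

static struct fake_attr_hdr *create_attr(uint32_t hdr_size, uint32_t name_size)
{
	uint32_t asize = hdr_size + name_size;  /* total on-disk length */
	struct fake_attr_hdr *attr = calloc(1, asize);

	if (!attr)
		return NULL;

	attr->size = asize;  /* record the length in the attribute itself */
	return attr;
}
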
3053 u32 nsize, t32, asize, used, esize, off, bits; in do_action() local
3234 asize = le32_to_cpu(attr2->size); in do_action()
3238 !IS_ALIGNED(asize, 8) || in do_action()
3239 Add2Ptr(attr2, asize) > Add2Ptr(lrh, rec_len) || in do_action()
3244 memmove(Add2Ptr(attr, asize), attr, used - roff); in do_action()
3245 memcpy(attr, attr2, asize); in do_action()
3247 rec->used = cpu_to_le32(used + asize); in do_action()
3270 asize = le32_to_cpu(attr->size); in do_action()
3276 rec->used = cpu_to_le32(used - asize); in do_action()
3280 memmove(attr, Add2Ptr(attr, asize), used - asize - roff); in do_action()
3291 asize = le32_to_cpu(attr->size); in do_action()
3295 if (nsize > asize) in do_action()
3300 if (nsize > asize && nsize - asize > record_size - used) in do_action()
3306 if (nsize < asize) { in do_action()
3311 memmove(Add2Ptr(attr, nsize), Add2Ptr(attr, asize), in do_action()
3312 used - le16_to_cpu(lrh->record_off) - asize); in do_action()
3314 rec->used = cpu_to_le32(used + nsize - asize); in do_action()
3339 asize = le32_to_cpu(attr->size); in do_action()
3343 aoff < le16_to_cpu(attr->nres.run_off) || aoff > asize || in do_action()
3344 (nsize > asize && nsize - asize > record_size - used)) { in do_action()
3350 memmove(Add2Ptr(attr, nsize), Add2Ptr(attr, asize), in do_action()
3351 used - le16_to_cpu(lrh->record_off) - asize); in do_action()
3352 rec->used = cpu_to_le32(used + nsize - asize); in do_action()
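
The do_action() fragments use asize while replaying log records against an in-memory MFT record: a new attribute image is inserted by sliding the tail of the record up by asize and growing rec->used, a deleted attribute is removed by sliding the tail down and shrinking rec->used, and a resize moves the tail by nsize - asize. A minimal sketch of the insert and remove halves of that pattern, with simplified stand-ins for the record layout and none of the alignment, offset, and free-space validation the kernel performs first:

#include <stdint.h>
#include <string.h>

struct fake_rec {
	uint32_t used;          /* bytes of data[] currently in use */
	uint32_t size;          /* capacity of data[] */
	uint8_t  data[1024];
};

/* Insert an attribute image of 'asize' bytes at offset 'roff'. */
static int insert_attr(struct fake_rec *rec, uint32_t roff,
		       const void *image, uint32_t asize)
{
	uint8_t *attr = rec->data + roff;

	if (asize > rec->size - rec->used)
		return -1;                              /* no room left */

	memmove(attr + asize, attr, rec->used - roff);  /* open a hole */
	memcpy(attr, image, asize);                     /* copy the new attribute in */
	rec->used += asize;
	return 0;
}

/* Remove the 'asize'-byte attribute that starts at offset 'roff'. */
static void remove_attr(struct fake_rec *rec, uint32_t roff, uint32_t asize)
{
	uint8_t *attr = rec->data + roff;

	memmove(attr, attr + asize, rec->used - asize - roff); /* close the hole */
	rec->used -= asize;
}
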