/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Traceprobe fetch helper inlines
 */

static nokprobe_inline void
fetch_store_raw(unsigned long val, struct fetch_insn *code, void *buf)
{
	switch (code->size) {
	case 1:
		*(u8 *)buf = (u8)val;
		break;
	case 2:
		*(u16 *)buf = (u16)val;
		break;
	case 4:
		*(u32 *)buf = (u32)val;
		break;
	case 8:
		//TBD: 32bit signed
		*(u64 *)buf = (u64)val;
		break;
	default:
		*(unsigned long *)buf = val;
	}
}

static nokprobe_inline void
fetch_apply_bitfield(struct fetch_insn *code, void *buf)
{
	switch (code->basesize) {
	case 1:
		*(u8 *)buf <<= code->lshift;
		*(u8 *)buf >>= code->rshift;
		break;
	case 2:
		*(u16 *)buf <<= code->lshift;
		*(u16 *)buf >>= code->rshift;
		break;
	case 4:
		*(u32 *)buf <<= code->lshift;
		*(u32 *)buf >>= code->rshift;
		break;
	case 8:
		*(u64 *)buf <<= code->lshift;
		*(u64 *)buf >>= code->rshift;
		break;
	}
}

/*
 * These functions must be defined for each callsite.
 * Return consumed dynamic data size (>= 0), or error (< 0).
 * If dest is NULL, don't store the result and return the required dynamic
 * data size instead.
 * An illustrative sketch of one possible implementation follows the
 * declarations below.
 */
static int
process_fetch_insn(struct fetch_insn *code, void *rec,
		   void *dest, void *base);
static nokprobe_inline int fetch_store_strlen(unsigned long addr);
static nokprobe_inline int
fetch_store_string(unsigned long addr, void *dest, void *base);
static nokprobe_inline int fetch_store_strlen_user(unsigned long addr);
static nokprobe_inline int
fetch_store_string_user(unsigned long addr, void *dest, void *base);
static nokprobe_inline int
probe_mem_read(void *dest, void *src, size_t size);
static nokprobe_inline int
probe_mem_read_user(void *dest, void *src, size_t size);
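/*
 * Illustrative only -- not part of this header's contract. A kernel-space
 * callsite (for example trace_kprobe.c) might back the probe_mem_read*()
 * hooks with the *_nofault copy helpers roughly as sketched below; the real
 * implementations live in the including .c files and may add extra checks
 * (address-space handling, MAX_STRING_SIZE clamping for strings, etc.).
 *
 *	static nokprobe_inline int
 *	probe_mem_read(void *dest, void *src, size_t size)
 *	{
 *		return copy_from_kernel_nofault(dest, src, size);
 *	}
 *
 *	static nokprobe_inline int
 *	probe_mem_read_user(void *dest, void *src, size_t size)
 *	{
 *		return copy_from_user_nofault(dest, (void __user *)src, size);
 *	}
 */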
static nokprobe_inline int
fetch_store_symstrlen(unsigned long addr)
{
	char namebuf[KSYM_SYMBOL_LEN];
	int ret;

	ret = sprint_symbol(namebuf, addr);
	if (ret < 0)
		return 0;

	return ret + 1;
}

/*
 * Fetch a null-terminated symbol string + offset. Caller MUST set *(u32 *)buf
 * with max length and relative data location.
 */
static nokprobe_inline int
fetch_store_symstring(unsigned long addr, void *dest, void *base)
{
	int maxlen = get_loc_len(*(u32 *)dest);
	void *__dest;

	if (unlikely(!maxlen))
		return -ENOMEM;

	__dest = get_loc_data(dest, base);

	return sprint_symbol(__dest, addr);
}

/* Common part of process_fetch_insn() */
static nokprobe_inline int
process_common_fetch_insn(struct fetch_insn *code, unsigned long *val)
{
	switch (code->op) {
	case FETCH_OP_IMM:
		*val = code->immediate;
		break;
	case FETCH_OP_COMM:
		*val = (unsigned long)current->comm;
		break;
	case FETCH_OP_DATA:
		*val = (unsigned long)code->data;
		break;
	default:
		return -EILSEQ;
	}
	return 0;
}

/* From the 2nd stage on, the routine is the same for every probe type */
static nokprobe_inline int
process_fetch_insn_bottom(struct fetch_insn *code, unsigned long val,
			  void *dest, void *base)
{
	struct fetch_insn *s3 = NULL;
	int total = 0, ret = 0, i = 0;
	u32 loc = 0;
	unsigned long lval = val;

stage2:
	/* 2nd stage: dereference memory if needed */
	do {
		if (code->op == FETCH_OP_DEREF) {
			lval = val;
			ret = probe_mem_read(&val, (void *)val + code->offset,
					     sizeof(val));
		} else if (code->op == FETCH_OP_UDEREF) {
			lval = val;
			ret = probe_mem_read_user(&val,
				(void *)val + code->offset, sizeof(val));
		} else
			break;
		if (ret)
			return ret;
		code++;
	} while (1);

	s3 = code;
stage3:
	/* 3rd stage: store value to buffer */
	if (unlikely(!dest)) {
		/* Sizing pass: only report the required dynamic data size */
		switch (code->op) {
		case FETCH_OP_ST_STRING:
			ret = fetch_store_strlen(val + code->offset);
			code++;
			goto array;
		case FETCH_OP_ST_USTRING:
			ret += fetch_store_strlen_user(val + code->offset);
			code++;
			goto array;
		case FETCH_OP_ST_SYMSTR:
			ret += fetch_store_symstrlen(val + code->offset);
			code++;
			goto array;
		default:
			return -EILSEQ;
		}
	}

	switch (code->op) {
	case FETCH_OP_ST_RAW:
		fetch_store_raw(val, code, dest);
		break;
	case FETCH_OP_ST_MEM:
		probe_mem_read(dest, (void *)val + code->offset, code->size);
		break;
	case FETCH_OP_ST_UMEM:
		probe_mem_read_user(dest, (void *)val + code->offset, code->size);
		break;
	case FETCH_OP_ST_STRING:
		loc = *(u32 *)dest;
		ret = fetch_store_string(val + code->offset, dest, base);
		break;
	case FETCH_OP_ST_USTRING:
		loc = *(u32 *)dest;
		ret = fetch_store_string_user(val + code->offset, dest, base);
		break;
	case FETCH_OP_ST_SYMSTR:
		loc = *(u32 *)dest;
		ret = fetch_store_symstring(val + code->offset, dest, base);
		break;
	default:
		return -EILSEQ;
	}
	code++;

	/* 4th stage: modify stored value if needed */
	if (code->op == FETCH_OP_MOD_BF) {
		fetch_apply_bitfield(code, dest);
		code++;
	}

array:
	/* The last stage: loop over array elements */
	if (code->op == FETCH_OP_LP_ARRAY) {
		total += ret;
		if (++i < code->param) {
			code = s3;
			if (s3->op != FETCH_OP_ST_STRING &&
			    s3->op != FETCH_OP_ST_USTRING) {
				dest += s3->size;
				val += s3->size;
				goto stage3;
			}
			/*
			 * String arrays are arrays of pointers: back up to
			 * the dereference op and fetch the next pointer.
			 */
			code--;
			val = lval + sizeof(char *);
			if (dest) {
				/* Advance to the next data_loc slot */
				dest += sizeof(u32);
				*(u32 *)dest = update_data_loc(loc, ret);
			}
			goto stage2;
		}
		code++;
		ret = total;
	}

	return code->op == FETCH_OP_END ? ret : -EILSEQ;
}
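/*
 * Illustrative only: a callsite's process_fetch_insn() typically performs
 * the 1st stage itself (pulling the initial value out of its own context,
 * e.g. a register or stack slot for kprobes), falls back to
 * process_common_fetch_insn() for the context-independent ops, and then
 * hands off to process_fetch_insn_bottom(). A rough sketch, assuming a
 * pt_regs-based context; real callsites handle more FETCH_OP_* cases:
 *
 *	static int
 *	process_fetch_insn(struct fetch_insn *code, void *rec, void *dest,
 *			   void *base)
 *	{
 *		struct pt_regs *regs = rec;
 *		unsigned long val;
 *		int ret;
 *
 *		switch (code->op) {
 *		case FETCH_OP_REG:
 *			val = regs_get_register(regs, code->param);
 *			break;
 *		case FETCH_OP_RETVAL:
 *			val = regs_return_value(regs);
 *			break;
 *		default:
 *			ret = process_common_fetch_insn(code, &val);
 *			if (ret < 0)
 *				return ret;
 *		}
 *		code++;
 *
 *		return process_fetch_insn_bottom(code, val, dest, base);
 *	}
 */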
/* Sum up total data length for dynamic arrays (strings) */
static nokprobe_inline int
__get_data_size(struct trace_probe *tp, struct pt_regs *regs)
{
	struct probe_arg *arg;
	int i, len, ret = 0;

	for (i = 0; i < tp->nr_args; i++) {
		arg = tp->args + i;
		if (unlikely(arg->dynamic)) {
			len = process_fetch_insn(arg->code, regs, NULL, NULL);
			if (len > 0)
				ret += len;
		}
	}

	return ret;
}

/* Store the value of each argument */
static nokprobe_inline void
store_trace_args(void *data, struct trace_probe *tp, void *rec,
		 int header_size, int maxlen)
{
	struct probe_arg *arg;
	void *base = data - header_size;
	void *dyndata = data + tp->size;
	u32 *dl;	/* Data location */
	int ret, i;

	for (i = 0; i < tp->nr_args; i++) {
		arg = tp->args + i;
		dl = data + arg->offset;
		/* Point to the dynamic data area if needed */
		if (unlikely(arg->dynamic))
			*dl = make_data_loc(maxlen, dyndata - base);
		ret = process_fetch_insn(arg->code, rec, dl, base);
		if (unlikely(ret < 0 && arg->dynamic)) {
			*dl = make_data_loc(0, dyndata - base);
		} else {
			dyndata += ret;
			maxlen -= ret;
		}
	}
}
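/*
 * Illustrative only: how a callsite typically uses the helpers above when
 * writing an event. Sizing and storing are split so the ring-buffer
 * reservation can cover the dynamic (string) data; names such as 'entry',
 * 'fbuffer' and 'tp' are placeholders loosely modeled on trace_kprobe.c.
 *
 *	dsize = __get_data_size(tp, regs);
 *	entry = trace_event_buffer_reserve(&fbuffer, trace_file,
 *					   sizeof(*entry) + tp->size + dsize);
 *	if (!entry)
 *		return;
 *	entry->ip = instruction_pointer(regs);
 *	store_trace_args(&entry[1], tp, regs, sizeof(*entry), dsize);
 *	trace_event_buffer_commit(&fbuffer);
 */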