// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
 */

#include <stdio.h>
#include <stdlib.h>

#define unlikely(cond) (cond)
#include <asm/insn.h>
#include "../../../arch/x86/lib/inat.c"
#include "../../../arch/x86/lib/insn.c"

#include "../../check.h"
#include "../../elf.h"
#include "../../arch.h"
#include "../../warn.h"

/*
 * Map an x86 register encoding (0-7) to its CFI register number; the second
 * column is used when the relevant REX bit extends the encoding to r8-r15.
 */
static unsigned char op_to_cfi_reg[][2] = {
	{CFI_AX, CFI_R8},
	{CFI_CX, CFI_R9},
	{CFI_DX, CFI_R10},
	{CFI_BX, CFI_R11},
	{CFI_SP, CFI_R12},
	{CFI_BP, CFI_R13},
	{CFI_SI, CFI_R14},
	{CFI_DI, CFI_R15},
};

static int is_x86_64(const struct elf *elf)
{
	switch (elf->ehdr.e_machine) {
	case EM_X86_64:
		return 1;
	case EM_386:
		return 0;
	default:
		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
		return -1;
	}
}

bool arch_callee_saved_reg(unsigned char reg)
{
	switch (reg) {
	case CFI_BP:
	case CFI_BX:
	case CFI_R12:
	case CFI_R13:
	case CFI_R14:
	case CFI_R15:
		return true;

	case CFI_AX:
	case CFI_CX:
	case CFI_DX:
	case CFI_SI:
	case CFI_DI:
	case CFI_SP:
	case CFI_R8:
	case CFI_R9:
	case CFI_R10:
	case CFI_R11:
	case CFI_RA:
	default:
		return false;
	}
}

unsigned long arch_dest_rela_offset(int addend)
{
	return addend + 4;
}

unsigned long arch_jump_destination(struct instruction *insn)
{
	return insn->offset + insn->len + insn->immediate;
}

/*
 * Allocate a stack_op, queue it on ops_list, and run the statement or block
 * that follows exactly once to fill it in (the for loop terminates after one
 * pass when op is reset to NULL).
 */
#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (list_add_tail(&op->list, ops_list); op; op = NULL)

int arch_decode_instruction(const struct elf *elf, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    unsigned int *len, enum insn_type *type,
			    unsigned long *immediate,
			    struct list_head *ops_list)
{
	struct insn insn;
	int x86_64, sign;
	unsigned char op1, op2, rex = 0, rex_b = 0, rex_r = 0, rex_w = 0,
		      rex_x = 0, modrm = 0, modrm_mod = 0, modrm_rm = 0,
		      modrm_reg = 0, sib = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	insn_init(&insn, sec->data->d_buf + offset, maxlen, x86_64);
	insn_get_length(&insn);

	if (!insn_complete(&insn)) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	*len = insn.length;
	*type = INSN_OTHER;

	if (insn.vex_prefix.nbytes)
		return 0;

	op1 = insn.opcode.bytes[0];
	op2 = insn.opcode.bytes[1];

	if (insn.rex_prefix.nbytes) {
		rex = insn.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	if (insn.modrm.nbytes) {
		modrm = insn.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm);
		modrm_rm = X86_MODRM_RM(modrm);
	}

	if (insn.sib.nbytes)
		sib = insn.sib.bytes[0];

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && !rex_b && modrm_mod == 3 && modrm_rm == 4) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;
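	/*
	 * Single-byte push/pop (0x50-0x57, 0x58-0x5f): the register is
	 * encoded in the low three bits of the opcode, with REX.B selecting
	 * r8-r15 (the second column of op_to_cfi_reg).
	 */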
	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = op_to_cfi_reg[op1 & 0x7][rex_b];
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = op_to_cfi_reg[op1 & 0x7][rex_b];
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x81:
	case 0x83:
		if (rex != 0x48)
			break;

		if (modrm == 0xe4) {
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = insn.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		if (modrm == 0xc4)
			sign = 1;
		else if (modrm == 0xec)
			sign = -1;
		else
			break;

		/* add/sub imm, %rsp */
		ADD_OP(op) {
			op->src.type = OP_SRC_ADD;
			op->src.reg = CFI_SP;
			op->src.offset = insn.immediate.value * sign;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		break;

	case 0x89:
		if (rex_w && !rex_r && modrm_mod == 3 && modrm_reg == 4) {

			/* mov %rsp, reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = CFI_SP;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = op_to_cfi_reg[modrm_rm][rex_b];
			}
			break;
		}

		if (rex_w && !rex_b && modrm_mod == 3 && modrm_rm == 4) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */
	case 0x88:
		if (!rex_b &&
		    (modrm_mod == 1 || modrm_mod == 2) && modrm_rm == 5) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = insn.displacement.value;
			}

		} else if (rex_w && !rex_b && modrm_rm == 4 && sib == 0x24) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = insn.displacement.value;
			}
		}

		break;

	case 0x8b:
		if (rex_w && !rex_b && modrm_mod == 1 && modrm_rm == 5) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
			}

		} else if (rex_w && !rex_b && sib == 0x24 &&
			   modrm_mod != 3 && modrm_rm == 4) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
			}
		}

		break;
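	/*
	 * Note: a SIB byte of 0x24 (no index, base = %rsp) is how plain
	 * %rsp-relative addressing is encoded, which is what the sib == 0x24
	 * checks above and below are matching.
	 */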
	case 0x8d:
		if (sib == 0x24 && rex_w && !rex_b && !rex_x) {

			ADD_OP(op) {
				if (!insn.displacement.value) {
					/* lea (%rsp), reg */
					op->src.type = OP_SRC_REG;
				} else {
					/* lea disp(%rsp), reg */
					op->src.type = OP_SRC_ADD;
					op->src.offset = insn.displacement.value;
				}
				op->src.reg = CFI_SP;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
			}

		} else if (rex == 0x48 && modrm == 0x65) {

			/* lea disp(%rbp), %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_BP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}

		} else if (rex == 0x49 && modrm == 0x62 &&
			   insn.displacement.value == -8) {

			/*
			 * lea -0x8(%r10), %rsp
			 *
			 * Restoring rsp back to its original value after a
			 * stack realignment.
			 */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_R10;
				op->src.offset = -8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}

		} else if (rex == 0x49 && modrm == 0x65 &&
			   insn.displacement.value == -16) {

			/*
			 * lea -0x10(%r13), %rsp
			 *
			 * Restoring rsp back to its original value after a
			 * stack realignment.
			 */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_R13;
				op->src.offset = -16;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}

		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		*type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:

		if (op2 == 0x01) {

			if (modrm == 0xca)
				*type = INSN_CLAC;
			else if (modrm == 0xcb)
				*type = INSN_STAC;

		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			*type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			*type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			*type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			*type = INSN_NOP;

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		ADD_OP(op)
			op->dest.type = OP_DEST_LEAVE;

		break;

	case 0xe3:
		/* jecxz/jrcxz */
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		*type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		*type = INSN_RETURN;
		break;
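	/*
	 * iretq pops five quadwords (RIP, CS, RFLAGS, RSP, SS), so the
	 * IRET-to-self below is modeled as "add $40, %rsp" for
	 * stack-tracking purposes.
	 */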
476 */ 477 sym = find_symbol_containing(sec, offset); 478 if (sym && sym->type == STT_FUNC) { 479 ADD_OP(op) { 480 /* add $40, %rsp */ 481 op->src.type = OP_SRC_ADD; 482 op->src.reg = CFI_SP; 483 op->src.offset = 5*8; 484 op->dest.type = OP_DEST_REG; 485 op->dest.reg = CFI_SP; 486 } 487 break; 488 } 489 490 /* fallthrough */ 491 492 case 0xca: /* retf */ 493 case 0xcb: /* retf */ 494 *type = INSN_CONTEXT_SWITCH; 495 break; 496 497 case 0xe8: 498 *type = INSN_CALL; 499 /* 500 * For the impact on the stack, a CALL behaves like 501 * a PUSH of an immediate value (the return address). 502 */ 503 ADD_OP(op) { 504 op->src.type = OP_SRC_CONST; 505 op->dest.type = OP_DEST_PUSH; 506 } 507 break; 508 509 case 0xfc: 510 *type = INSN_CLD; 511 break; 512 513 case 0xfd: 514 *type = INSN_STD; 515 break; 516 517 case 0xff: 518 if (modrm_reg == 2 || modrm_reg == 3) 519 520 *type = INSN_CALL_DYNAMIC; 521 522 else if (modrm_reg == 4) 523 524 *type = INSN_JUMP_DYNAMIC; 525 526 else if (modrm_reg == 5) 527 528 /* jmpf */ 529 *type = INSN_CONTEXT_SWITCH; 530 531 else if (modrm_reg == 6) { 532 533 /* push from mem */ 534 ADD_OP(op) { 535 op->src.type = OP_SRC_CONST; 536 op->dest.type = OP_DEST_PUSH; 537 } 538 } 539 540 break; 541 542 default: 543 break; 544 } 545 546 *immediate = insn.immediate.nbytes ? insn.immediate.value : 0; 547 548 return 0; 549 } 550 551 void arch_initial_func_cfi_state(struct cfi_init_state *state) 552 { 553 int i; 554 555 for (i = 0; i < CFI_NUM_REGS; i++) { 556 state->regs[i].base = CFI_UNDEFINED; 557 state->regs[i].offset = 0; 558 } 559 560 /* initial CFA (call frame address) */ 561 state->cfa.base = CFI_SP; 562 state->cfa.offset = 8; 563 564 /* initial RA (return address) */ 565 state->regs[16].base = CFI_CFA; 566 state->regs[16].offset = -8; 567 } 568