kernel_optimize_test/tools/objtool/special.c
Peter Zijlstra 908bd980a8 objtool,x86: Replace alternatives with .retpoline_sites
commit 134ab5bd1883312d7a4b3033b05c6b5a1bb8889b upstream.

Instead of writing complete alternatives, simply provide a list of all
the retpoline thunk calls. Then the kernel is free to do with them as
it pleases. Simpler code all-round.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Borislav Petkov <bp@suse.de>
Acked-by: Josh Poimboeuf <jpoimboe@redhat.com>
Tested-by: Alexei Starovoitov <ast@kernel.org>
Link: https://lore.kernel.org/r/20211026120309.850007165@infradead.org
[cascardo: fixed conflict because of missing
 8b946cc38e063f0f7bb67789478c38f6d7d457c9]
Signed-off-by: Thadeu Lima de Souza Cascardo <cascardo@canonical.com>
[bwh: Backported to 5.10: deleted functions had slightly different code]
Signed-off-by: Ben Hutchings <ben@decadent.org.uk>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
2022-07-25 11:26:23 +02:00
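
As a rough illustration of the idea in the commit message (this is not the
actual kernel or objtool code; the function name and types here are made-up
assumptions), a consumer of such a call-site list could walk a table of
32-bit self-relative offsets, each one pointing at a retpoline thunk call to
be rewritten:

#include <stdint.h>

/*
 * Sketch only: assumes the section holds one 32-bit self-relative offset per
 * retpoline thunk call site, which patching code walks at boot or module load
 * and rewrites however it sees fit.
 */
static void walk_retpoline_sites(int32_t *start, int32_t *end)
{
        int32_t *entry;

        for (entry = start; entry < end; entry++) {
                /* Each offset is relative to the table entry itself. */
                unsigned char *insn = (unsigned char *)entry + *entry;

                /* ... decode and patch the call/jmp at 'insn' ... */
                (void)insn;
        }
}

Keeping objtool's output down to one offset per call site leaves the patching
policy entirely to the kernel, which is the simplification the message above
describes.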

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
 */

/*
 * This file reads all the special sections which have alternate instructions
 * which can be patched in or redirected to at runtime.
 */
#include <stdlib.h>
#include <string.h>

#include "builtin.h"
#include "special.h"
#include "warn.h"
#include "arch_special.h"
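
/*
 * Each special_entry describes the record layout of one special section:
 * the size of a record and the offsets, within a record, of the original
 * and replacement instruction references (plus their lengths and the CPU
 * feature for alternative groups).
 */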
struct special_entry {
        const char *sec;
        bool group, jump_or_nop;
        unsigned char size, orig, new;
        unsigned char orig_len, new_len; /* group only */
        unsigned char feature; /* ALTERNATIVE macro CPU feature */
};
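
/*
 * The special sections objtool knows how to parse: runtime-patched
 * alternatives (.altinstructions), jump labels / static keys (__jump_table)
 * and exception table fixups (__ex_table).
 */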
struct special_entry entries[] = {
        {
                .sec = ".altinstructions",
                .group = true,
                .size = ALT_ENTRY_SIZE,
                .orig = ALT_ORIG_OFFSET,
                .orig_len = ALT_ORIG_LEN_OFFSET,
                .new = ALT_NEW_OFFSET,
                .new_len = ALT_NEW_LEN_OFFSET,
                .feature = ALT_FEATURE_OFFSET,
        },
        {
                .sec = "__jump_table",
                .jump_or_nop = true,
                .size = JUMP_ENTRY_SIZE,
                .orig = JUMP_ORIG_OFFSET,
                .new = JUMP_NEW_OFFSET,
        },
        {
                .sec = "__ex_table",
                .size = EX_ENTRY_SIZE,
                .orig = EX_ORIG_OFFSET,
                .new = EX_NEW_OFFSET,
        },
        {},
};
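
/*
 * Arch hook for interpreting the CPU feature field of an alternative entry;
 * the default (weak) implementation does nothing.
 */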
void __weak arch_handle_alternative(unsigned short feature, struct special_alt *alt)
{
}
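
/* Resolve a relocation to the section and offset it ultimately points at. */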
static void reloc_to_sec_off(struct reloc *reloc, struct section **sec,
                             unsigned long *off)
{
        *sec = reloc->sym->sec;
        *off = reloc->sym->offset + reloc->addend;
}
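
/*
 * Parse one record of a special section into a special_alt, resolving the
 * original and (if present) replacement instruction locations through their
 * relocations.
 */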
static int get_alt_entry(struct elf *elf, struct special_entry *entry,
                         struct section *sec, int idx,
                         struct special_alt *alt)
{
        struct reloc *orig_reloc, *new_reloc;
        unsigned long offset;

        offset = idx * entry->size;

        alt->group = entry->group;
        alt->jump_or_nop = entry->jump_or_nop;

        if (alt->group) {
                alt->orig_len = *(unsigned char *)(sec->data->d_buf + offset +
                                                   entry->orig_len);
                alt->new_len = *(unsigned char *)(sec->data->d_buf + offset +
                                                  entry->new_len);
        }

        if (entry->feature) {
                unsigned short feature;

                feature = *(unsigned short *)(sec->data->d_buf + offset +
                                              entry->feature);
                arch_handle_alternative(feature, alt);
        }

        orig_reloc = find_reloc_by_dest(elf, sec, offset + entry->orig);
        if (!orig_reloc) {
                WARN_FUNC("can't find orig reloc", sec, offset + entry->orig);
                return -1;
        }

        reloc_to_sec_off(orig_reloc, &alt->orig_sec, &alt->orig_off);
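
        /*
         * An alternative group with a zero-length replacement has nothing to
         * resolve: the original instructions simply get NOPed out at runtime.
         */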
        if (!entry->group || alt->new_len) {
                new_reloc = find_reloc_by_dest(elf, sec, offset + entry->new);
                if (!new_reloc) {
                        WARN_FUNC("can't find new reloc",
                                  sec, offset + entry->new);
                        return -1;
                }

                reloc_to_sec_off(new_reloc, &alt->new_sec, &alt->new_off);

                /* _ASM_EXTABLE_EX hack */
                if (alt->new_off >= 0x7ffffff0)
                        alt->new_off -= 0x7ffffff0;
        }

        return 0;
}
/*
 * Read all the special sections and create a list of special_alt structs which
 * describe all the alternate instructions which can be patched in or
 * redirected to at runtime.
 */
int special_get_alts(struct elf *elf, struct list_head *alts)
{
        struct special_entry *entry;
        struct section *sec;
        unsigned int nr_entries;
        struct special_alt *alt;
        int idx, ret;

        INIT_LIST_HEAD(alts);

        for (entry = entries; entry->sec; entry++) {
                sec = find_section_by_name(elf, entry->sec);
                if (!sec)
                        continue;

                if (sec->len % entry->size != 0) {
                        WARN("%s size not a multiple of %d",
                             sec->name, entry->size);
                        return -1;
                }

                nr_entries = sec->len / entry->size;

                for (idx = 0; idx < nr_entries; idx++) {
                        alt = malloc(sizeof(*alt));
                        if (!alt) {
                                WARN("malloc failed");
                                return -1;
                        }
                        memset(alt, 0, sizeof(*alt));

                        ret = get_alt_entry(elf, entry, sec, idx, alt);
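                        /* A positive return means "skip this entry". */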
                        if (ret > 0)
                                continue;
                        if (ret < 0)
                                return ret;

                        list_add_tail(&alt->list, alts);
                }
        }

        return 0;
}