asan: memtag-stack add support for MTE instructions
Memory tagging is used for detecting memory safety bugs. On AArch64, the
memory tagging extension (MTE) helps reduce the overhead of memory
tagging:
- CPU: MTE instructions for efficiently tagging and untagging memory.
- Memory: New memory type, Normal Tagged Memory, added to the Arm
Architecture.
The MEMory TAGging (MEMTAG) sanitizer uses the same infrastructure as
HWASAN. MEMTAG and HWASAN are both hardware-assisted solutions and
rely in part on the same sanitizer machinery. So, define new
constructs that allow MEMTAG and HWASAN to share the infrastructure:
- hwassist_sanitize_p () is true when either SANITIZE_MEMTAG or
SANITIZE_HWASAN is true.
- hwassist_sanitize_stack_p () is true when hwassist_sanitize_p () is
true and stack variables are to be sanitized (see the sketch below).
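In sketch form (a minimal illustration condensed from the new helpers added
to asan.cc further down; the real definitions consult sanitize_flags_p ()
via the existing HWASAN/MEMTAG predicates):

    /* Sketch only -- condensed from the asan.cc hunk in this patch.  */
    bool
    hwassist_sanitize_p ()
    {
      /* Either HWASAN or MEMTAG instrumentation is active.  */
      return hwasan_sanitize_p () || memtag_sanitize_p ();
    }

    bool
    hwassist_sanitize_stack_p ()
    {
      /* As above, and stack variables are being sanitized.  */
      return hwasan_sanitize_stack_p () || memtag_sanitize_stack_p ();
    }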
MEMTAG and HWASAN do have differences, however, and hence the need to
conditionalize on memtag_sanitize_p () in the relevant places. E.g.,
- Instead of generating the libcall __hwasan_tag_memory, MEMTAG needs
to invoke the target-specific hook TARGET_MEMTAG_TAG_MEMORY to tag
memory. A similar approach is taken for handle_builtin_alloca, where
target hooks are used instead of the gimple transformations.
- Add a new internal function HWASAN_ALLOCA_POISON to handle
dynamically allocated stack memory when the MEMTAG sanitizer is
enabled. At expansion time, this in turn allows us to invoke target
hooks to increment the tag and to use the generated tag to tag the
dynamically allocated memory.
The usual pattern:
irg x0, x0, x0
subg x0, x0, #16, #0
creates a tag in x0, and so on. For alloca, we need to apply the
generated tag to the new sp. In the absence of an extract-tag insn, the
implementation in expand_HWASAN_ALLOCA_POISON resorts to invoking irg
again; see the condensed sketch below.
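In rough outline (condensed from the expand_HWASAN_ALLOCA_POISON hunk in
internal-fn.cc below), the MEMTAG path of that expansion is:

    /* Sketch of the MEMTAG expansion of HWASAN_ALLOCA_POISON.  */
    rtx tagged_ptr
      = force_reg (Pmode, targetm.memtag.insert_random_tag (ptr, NULL_RTX));
    rtx tag = targetm.memtag.extract_tag (tagged_ptr, NULL_RTX);
    if (tagged_ptr != target)
      emit_move_insn (target, tagged_ptr);  /* Hand tagged pointer to users.  */
    /* Tag the alloca'd granules with the same tag.  */
    emit_insn (targetm.gen_tag_memory (ptr, tag, size));
    hwasan_increment_frame_tag ();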
gcc/
* asan.cc (handle_builtin_stack_restore): Accommodate MEMTAG
sanitizer.
(handle_builtin_alloca): Expand differently if MEMTAG sanitizer.
(get_mem_refs_of_builtin_call): Include MEMTAG along with
HWASAN.
(memtag_sanitize_stack_p): New definition.
(memtag_sanitize_allocas_p): Likewise.
(memtag_memintrin): Likewise.
(hwassist_sanitize_p): Likewise.
(hwassist_sanitize_stack_p): Likewise.
(report_error_func): Include MEMTAG along with HWASAN.
(build_check_stmt): Likewise.
(instrument_derefs): Like HWASAN, MEMTAG does not deal with globals
yet.
(instrument_builtin_call): Include MEMTAG along with HWASAN.
(maybe_instrument_call): Likewise.
(asan_expand_mark_ifn): Likewise.
(asan_expand_check_ifn): Likewise.
(asan_expand_poison_ifn): Expand differently if MEMTAG sanitizer.
(asan_instrument): Include MEMTAG along with HWASAN.
(hwasan_emit_prologue): Expand differently if MEMTAG sanitizer.
(hwasan_emit_untag_frame): Likewise.
* asan.h (memtag_sanitize_stack_p): New declaration.
(memtag_sanitize_allocas_p): Likewise.
(hwassist_sanitize_p): Likewise.
(hwassist_sanitize_stack_p): Likewise.
(asan_sanitize_use_after_scope): Include MEMTAG along with
HWASAN.
* cfgexpand.cc (align_local_variable): Likewise.
(expand_one_stack_var_at): Likewise.
(expand_stack_vars): Likewise.
(expand_one_stack_var_1): Likewise.
(init_vars_expansion): Likewise.
(expand_used_vars): Likewise.
(pass_expand::execute): Likewise.
* gimplify.cc (asan_poison_variable): Likewise.
* internal-fn.cc (expand_HWASAN_ALLOCA_POISON): New definition.
(expand_HWASAN_ALLOCA_UNPOISON): Expand differently if MEMTAG
sanitizer.
(expand_HWASAN_MARK): Likewise.
* internal-fn.def (HWASAN_ALLOCA_POISON): Define new.
* params.opt: Document new param.
* sanopt.cc (pass_sanopt::execute): Include MEMTAG along with
HWASAN.
* gcc.cc (sanitize_spec_function): Add check for memtag-stack.
* doc/tm.texi: Regenerate.
* target.def (extract_tag): Update documentation.
(add_tag): Likewise.
(insert_random_tag): Likewise.
Co-authored-by: Indu Bhagat <indu.bhagat@oracle.com>
Signed-off-by: Claudiu Zissulescu <claudiu.zissulescu-ianculescu@oracle.com>
gcc/asan.cc | 229
@@ -762,14 +762,15 @@ static void
handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
{
if (!iter
|| !(asan_sanitize_allocas_p () || hwasan_sanitize_allocas_p ()))
|| !(asan_sanitize_allocas_p () || hwasan_sanitize_allocas_p ()
|| memtag_sanitize_allocas_p ()))
return;

tree restored_stack = gimple_call_arg (call, 0);

gimple *g;

if (hwasan_sanitize_allocas_p ())
if (hwasan_sanitize_allocas_p () || memtag_sanitize_allocas_p ())
{
enum internal_fn fn = IFN_HWASAN_ALLOCA_UNPOISON;
/* There is only one piece of information `expand_HWASAN_ALLOCA_UNPOISON`
@@ -818,7 +819,8 @@ static void
handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
{
if (!iter
|| !(asan_sanitize_allocas_p () || hwasan_sanitize_allocas_p ()))
|| !(asan_sanitize_allocas_p () || hwasan_sanitize_allocas_p ()
|| memtag_sanitize_allocas_p ()))
return;

gassign *g;
@@ -842,23 +844,31 @@ handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
e = find_fallthru_edge (gsi_bb (*iter)->succs);
}

if (hwasan_sanitize_allocas_p ())
if (hwasan_sanitize_allocas_p () || memtag_sanitize_allocas_p ())
{
gimple_seq stmts = NULL;
location_t loc = gimple_location (gsi_stmt (*iter));
/*
HWASAN needs a different expansion.
/* HWASAN and MEMTAG need a different expansion.

addr = __builtin_alloca (size, align);

should be replaced by
in case of HWASAN, should be replaced by

new_size = size rounded up to HWASAN_TAG_GRANULE_SIZE byte alignment;
untagged_addr = __builtin_alloca (new_size, align);
tag = __hwasan_choose_alloca_tag ();
addr = ifn_HWASAN_SET_TAG (untagged_addr, tag);
__hwasan_tag_memory (untagged_addr, tag, new_size);
*/

in case of MEMTAG, should be replaced by

new_size = size rounded up to HWASAN_TAG_GRANULE_SIZE byte alignment;
untagged_addr = __builtin_alloca (new_size, align);
addr = ifn_HWASAN_ALLOCA_POISON (untagged_addr, new_size);

where a new tag is chosen and set on untagged_addr when
HWASAN_ALLOCA_POISON is expanded. */

/* Ensure alignment at least HWASAN_TAG_GRANULE_SIZE bytes so we start on
a tag granule. */
align = align > HWASAN_TAG_GRANULE_SIZE ? align : HWASAN_TAG_GRANULE_SIZE;
@@ -874,23 +884,30 @@ handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
as_combined_fn (BUILT_IN_ALLOCA_WITH_ALIGN), ptr_type,
new_size, build_int_cst (size_type_node, align));

/* Choose the tag.
Here we use an internal function so we can choose the tag at expand
time. We need the decision to be made after stack variables have been
assigned their tag (i.e. once the hwasan_frame_tag_offset variable has
been set to one after the last stack variables tag). */
tree tag = gimple_build (&stmts, loc, CFN_HWASAN_CHOOSE_TAG,
unsigned_char_type_node);
tree addr;

/* Add tag to pointer. */
tree addr
= gimple_build (&stmts, loc, CFN_HWASAN_SET_TAG, ptr_type,
untagged_addr, tag);
if (memtag_sanitize_p ())
addr = gimple_build (&stmts, loc, CFN_HWASAN_ALLOCA_POISON, ptr_type,
untagged_addr, new_size);
else
{
/* Choose the tag.
Here we use an internal function so we can choose the tag at expand
time. We need the decision to be made after stack variables have been
assigned their tag (i.e. once the hwasan_frame_tag_offset variable has
been set to one after the last stack variables tag). */
tree tag = gimple_build (&stmts, loc, CFN_HWASAN_CHOOSE_TAG,
unsigned_char_type_node);

/* Tag shadow memory.
NOTE: require using `untagged_addr` here for libhwasan API. */
gimple_build (&stmts, loc, as_combined_fn (BUILT_IN_HWASAN_TAG_MEM),
void_type_node, untagged_addr, tag, new_size);
/* Add tag to pointer. */
addr = gimple_build (&stmts, loc, CFN_HWASAN_SET_TAG, ptr_type,
untagged_addr, tag);

/* Tag shadow memory.
NOTE: require using `untagged_addr` here for libhwasan API. */
gimple_build (&stmts, loc, as_combined_fn (BUILT_IN_HWASAN_TAG_MEM),
void_type_node, untagged_addr, tag, new_size);
}

/* Insert the built up code sequence into the original instruction stream
the iterator points to. */
@@ -1104,7 +1121,7 @@ get_mem_refs_of_builtin_call (gcall *call,
for now we choose to just ignore `strlen` calls.
This decision was simply made because that means the special case is
limited to this one case of this one function. */
if (hwasan_sanitize_p ())
if (hwassist_sanitize_p ())
return false;
source0 = gimple_call_arg (call, 0);
len = gimple_call_lhs (call);
@@ -1886,6 +1903,83 @@ hwasan_memintrin (void)
return (hwasan_sanitize_p () && param_hwasan_instrument_mem_intrinsics);
}

/* MEMoryTAGging sanitizer (MEMTAG) uses a hardware-based capability known as
memory tagging to detect memory safety vulnerabilities. Similar to HWASAN,
it is also a probabilistic method.

MEMTAG relies on the optional extension in armv8.5a known as MTE (Memory
Tagging Extension). The extension is available in AArch64 only and
introduces two types of tags:
- Logical Address Tag - bits 56-59 (TARGET_MEMTAG_TAG_BITSIZE) of the
virtual address.
- Allocation Tag - 4 bits for each tag granule (TARGET_MEMTAG_GRANULE_SIZE
set to 16 bytes), stored separately.
Load / store instructions raise an exception if tags differ, thereby
providing a faster way (than HWASAN) to detect memory safety issues.
Further, new instructions are available in MTE to manipulate (generate,
update address with) tags. Load / store instructions with SP base register
and immediate offset do not check tags.

PS: Currently, MEMTAG sanitizer is capable of stack (variable / memory)
tagging only.

In general, detecting stack-related memory bugs requires the compiler to:
- ensure that each tag granule is only used by one variable at a time.
This includes alloca.
- Tag/Color: put tags into each stack variable pointer.
- Untag: the function epilogue will retag the memory.

MEMTAG sanitizer is based on the HWASAN sanitizer implementation
internally. Similar to HWASAN:
- Assigning an independently random tag to each variable is carried out by
keeping a tagged base pointer. A tagged base pointer allows addressing
variables with (addr offset, tag offset).
*/

/* Returns whether we are tagging pointers and checking those tags on memory
access. */
bool
memtag_sanitize_p ()
{
return sanitize_flags_p (SANITIZE_MEMTAG);
}

/* Are we tagging the stack? */
bool
memtag_sanitize_stack_p ()
{
return (sanitize_flags_p (SANITIZE_MEMTAG_STACK));
}

/* Are we tagging alloca objects? */
bool
memtag_sanitize_allocas_p (void)
{
return (memtag_sanitize_stack_p () && param_memtag_instrument_allocas);
}

/* Are we tagging mem intrinsics? */
bool
memtag_memintrin (void)
{
return (memtag_sanitize_p () && param_memtag_instrument_mem_intrinsics);
}

/* Returns whether we are tagging pointers and checking those tags on memory
access. */
bool
hwassist_sanitize_p ()
{
return (hwasan_sanitize_p () || memtag_sanitize_p ());
}

/* Are we tagging stack objects for hwasan or memtag? */
bool
hwassist_sanitize_stack_p ()
{
return (hwasan_sanitize_stack_p () || memtag_sanitize_stack_p ());
}

/* Insert code to protect stack vars. The prologue sequence should be emitted
directly, epilogue sequence returned. BASE is the register holding the
stack base, against which OFFSETS array offsets are relative to, OFFSETS
@@ -2416,7 +2510,7 @@ static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
int *nargs)
{
gcc_assert (!hwasan_sanitize_p ());
gcc_assert (!hwassist_sanitize_p ());

static enum built_in_function report[2][2][6]
= { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
@@ -2755,7 +2849,7 @@ build_check_stmt (location_t loc, tree base, tree len,
if (is_scalar_access)
flags |= ASAN_CHECK_SCALAR_ACCESS;

enum internal_fn fn = hwasan_sanitize_p ()
enum internal_fn fn = hwassist_sanitize_p ()
? IFN_HWASAN_CHECK
: IFN_ASAN_CHECK;

@@ -2855,7 +2949,7 @@ instrument_derefs (gimple_stmt_iterator *iter, tree t,
access is inside a global variable, then there's no point adding
instrumentation to check the access. N.b. hwasan currently never
sanitizes globals. */
if ((hwasan_sanitize_p () || !param_asan_globals)
if ((hwassist_sanitize_p () || !param_asan_globals)
&& is_global_var (inner))
return;
if (!TREE_STATIC (inner))
@@ -2954,7 +3048,8 @@ instrument_mem_region_access (tree base, tree len,
static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
if (!(asan_memintrin () || hwasan_memintrin ()))
if (!(asan_memintrin () || hwasan_memintrin ()
|| memtag_memintrin ()))
return false;

bool iter_advanced_p = false;
@@ -3108,7 +3203,7 @@ maybe_instrument_call (gimple_stmt_iterator *iter)
`longjmp`, thread exit, and exceptions in a different way. These
problems must be handled externally to the compiler, e.g. in the
language runtime. */
if (! hwasan_sanitize_p ())
if (! hwassist_sanitize_p ())
{
tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
gimple *g = gimple_build_call (decl, 0);
@@ -3861,7 +3956,7 @@ asan_expand_mark_ifn (gimple_stmt_iterator *iter)

gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);

if (hwasan_sanitize_p ())
if (hwassist_sanitize_p ())
{
gcc_assert (param_hwasan_instrument_stack);
gimple_seq stmts = NULL;
@@ -3959,7 +4054,7 @@ asan_expand_mark_ifn (gimple_stmt_iterator *iter)
bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
gcc_assert (!hwasan_sanitize_p ());
gcc_assert (!hwassist_sanitize_p ());
gimple *g = gsi_stmt (*iter);
location_t loc = gimple_location (g);
bool recover_p;
@@ -4234,7 +4329,7 @@ asan_expand_poison_ifn (gimple_stmt_iterator *iter,
int nargs;
bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
gcall *call;
if (hwasan_sanitize_p ())
if (hwassist_sanitize_p ())
{
tree fun = builtin_decl_implicit (BUILT_IN_HWASAN_TAG_MISMATCH4);
/* NOTE: hwasan has no __hwasan_report_* functions like asan does.
@@ -4335,7 +4430,7 @@ asan_expand_poison_ifn (gimple_stmt_iterator *iter,
static unsigned int
asan_instrument (void)
{
if (hwasan_sanitize_p ())
if (hwassist_sanitize_p ())
{
initialize_sanitizer_builtins ();
transform_statements ();
@@ -4381,7 +4476,7 @@ public:
opt_pass * clone () final override { return new pass_asan (m_ctxt); }
bool gate (function *) final override
{
return gate_asan () || gate_hwasan ();
return gate_asan () || gate_hwasan () || gate_memtag ();
}
unsigned int execute (function *) final override
{
@@ -4423,7 +4518,7 @@ public:
/* opt_pass methods: */
bool gate (function *) final override
{
return !optimize && (gate_asan () || gate_hwasan ());
return !optimize && (gate_asan () || gate_hwasan () || gate_memtag ());
}
unsigned int execute (function *) final override
{
@@ -4676,19 +4771,41 @@ hwasan_emit_prologue ()
gcc_assert (multiple_p (bot, HWASAN_TAG_GRANULE_SIZE));
gcc_assert (multiple_p (size, HWASAN_TAG_GRANULE_SIZE));

rtx fn = init_one_libfunc ("__hwasan_tag_memory");
rtx base_tag = targetm.memtag.extract_tag (cur.tagged_base, NULL_RTX);
rtx tag = plus_constant (QImode, base_tag, cur.tag_offset);
tag = hwasan_truncate_to_tag_size (tag, NULL_RTX);

rtx bottom = convert_memory_address (ptr_mode,
plus_constant (Pmode,
cur.untagged_base,
bot));
emit_library_call (fn, LCT_NORMAL, VOIDmode,
bottom, ptr_mode,
tag, QImode,
gen_int_mode (size, ptr_mode), ptr_mode);
if (memtag_sanitize_p ())
{
expand_operand ops[3];
rtx tagged_addr = gen_reg_rtx (ptr_mode);

/* Check if the required target instructions are present. */
gcc_assert (targetm.have_compose_tag ());
gcc_assert (targetm.have_tag_memory ());

/* AArch64 has addg/subg instructions which work directly
on a tagged pointer. */
create_output_operand (&ops[0], tagged_addr, ptr_mode);
create_input_operand (&ops[1], base_tag, ptr_mode);
create_integer_operand (&ops[2], cur.tag_offset);
expand_insn (targetm.code_for_compose_tag, 3, ops);

emit_insn (targetm.gen_tag_memory (bottom, tagged_addr,
gen_int_mode (size, ptr_mode)));
}
else
{
rtx fn = init_one_libfunc ("__hwasan_tag_memory");
rtx tag = plus_constant (QImode, base_tag, cur.tag_offset);
tag = hwasan_truncate_to_tag_size (tag, NULL_RTX);
emit_library_call (fn, LCT_NORMAL, VOIDmode,
bottom, ptr_mode,
tag, QImode,
gen_int_mode (size, ptr_mode), ptr_mode);
}
}
/* Clear the stack vars, we've emitted the prologue for them all now. */
hwasan_tagged_stack_vars.truncate (0);
@@ -4725,15 +4842,21 @@ hwasan_emit_untag_frame (rtx dynamic, rtx vars)
bot_rtx = vars;
}

rtx size_rtx = expand_simple_binop (ptr_mode, MINUS, top_rtx, bot_rtx,
NULL_RTX, /* unsignedp = */0,
OPTAB_DIRECT);
rtx size_rtx = simplify_gen_binary (MINUS, ptr_mode, top_rtx, bot_rtx);
if (!CONST_INT_P (size_rtx))
size_rtx = force_reg (ptr_mode, size_rtx);

rtx fn = init_one_libfunc ("__hwasan_tag_memory");
emit_library_call (fn, LCT_NORMAL, VOIDmode,
bot_rtx, ptr_mode,
HWASAN_STACK_BACKGROUND, QImode,
size_rtx, ptr_mode);
if (memtag_sanitize_p ())
emit_insn (targetm.gen_tag_memory (bot_rtx, HWASAN_STACK_BACKGROUND,
size_rtx));
else
{
rtx fn = init_one_libfunc ("__hwasan_tag_memory");
emit_library_call (fn, LCT_NORMAL, VOIDmode,
bot_rtx, ptr_mode,
HWASAN_STACK_BACKGROUND, QImode,
size_rtx, ptr_mode);
}

do_pending_stack_adjust ();
return end_sequence ();
@@ -4919,4 +5042,10 @@ gate_hwasan ()
return hwasan_sanitize_p ();
}

bool
gate_memtag ()
{
return memtag_sanitize_p ();
}

#include "gt-asan.h"
gcc/asan.h | 10
@@ -57,6 +57,14 @@ extern bool hwasan_expand_check_ifn (gimple_stmt_iterator *, bool);
extern bool hwasan_expand_mark_ifn (gimple_stmt_iterator *);
extern bool gate_hwasan (void);

extern bool memtag_sanitize_p (void);
extern bool memtag_sanitize_stack_p (void);
extern bool memtag_sanitize_allocas_p (void);
extern bool gate_memtag (void);

bool hwassist_sanitize_p (void);
bool hwassist_sanitize_stack_p (void);

extern gimple_stmt_iterator create_cond_insert_point
(gimple_stmt_iterator *, bool, bool, bool, basic_block *, basic_block *);

@@ -225,7 +233,7 @@ inline bool
asan_sanitize_use_after_scope (void)
{
return (flag_sanitize_address_use_after_scope
&& (asan_sanitize_stack_p () || hwasan_sanitize_stack_p ()));
&& (asan_sanitize_stack_p () || hwassist_sanitize_stack_p ()));
}

/* Return true if DECL should be guarded on the stack. */
gcc/cfgexpand.cc
@@ -383,7 +383,7 @@ align_local_variable (tree decl, bool really_expand)
else
align = LOCAL_DECL_ALIGNMENT (decl);

if (hwasan_sanitize_stack_p ())
if (hwassist_sanitize_stack_p ())
align = MAX (align, (unsigned) HWASAN_TAG_GRANULE_SIZE * BITS_PER_UNIT);

if (TREE_CODE (decl) != SSA_NAME && really_expand)
@@ -1337,7 +1337,7 @@ expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
/* If this fails, we've overflowed the stack frame. Error nicely? */
gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));

if (hwasan_sanitize_stack_p ())
if (hwassist_sanitize_stack_p ())
x = targetm.memtag.add_tag (base, offset,
hwasan_current_frame_tag ());
else
@@ -1472,14 +1472,14 @@ expand_stack_vars (bool (*pred) (unsigned), class stack_vars_data *data)
if (pred && !pred (i))
continue;

base = (hwasan_sanitize_stack_p ()
base = (hwassist_sanitize_stack_p ()
? hwasan_frame_base ()
: virtual_stack_vars_rtx);
alignb = stack_vars[i].alignb;
if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
{
poly_int64 hwasan_orig_offset;
if (hwasan_sanitize_stack_p ())
if (hwassist_sanitize_stack_p ())
{
/* There must be no tag granule "shared" between different
objects. This means that no HWASAN_TAG_GRANULE_SIZE byte
@@ -1578,7 +1578,7 @@ expand_stack_vars (bool (*pred) (unsigned), class stack_vars_data *data)
offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
base_align = crtl->max_used_stack_slot_alignment;

if (hwasan_sanitize_stack_p ())
if (hwassist_sanitize_stack_p ())
{
/* Align again since the point of this alignment is to handle
the "end" of the object (i.e. smallest address after the
@@ -1623,7 +1623,7 @@ expand_stack_vars (bool (*pred) (unsigned), class stack_vars_data *data)
large_alloc = aligned_upper_bound (large_alloc, alignb);
offset = large_alloc;
large_alloc += stack_vars[i].size;
if (hwasan_sanitize_stack_p ())
if (hwassist_sanitize_stack_p ())
{
/* An object with a large alignment requirement means that the
alignment requirement is greater than the required alignment
@@ -1654,7 +1654,7 @@ expand_stack_vars (bool (*pred) (unsigned), class stack_vars_data *data)
expand_one_stack_var_at (stack_vars[j].decl,
base, base_align, offset);
}
if (hwasan_sanitize_stack_p ())
if (hwassist_sanitize_stack_p ())
hwasan_increment_frame_tag ();
}

@@ -1747,7 +1747,7 @@ expand_one_stack_var_1 (tree var)
gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

rtx base;
if (hwasan_sanitize_stack_p ())
if (hwassist_sanitize_stack_p ())
{
/* Allocate zero bytes to align the stack. */
poly_int64 hwasan_orig_offset
@@ -1775,7 +1775,7 @@ expand_one_stack_var_1 (tree var)
expand_one_stack_var_at (var, base,
crtl->max_used_stack_slot_alignment, offset);

if (hwasan_sanitize_stack_p ())
if (hwassist_sanitize_stack_p ())
hwasan_increment_frame_tag ();
}

@@ -2379,7 +2379,7 @@ init_vars_expansion (void)
/* Initialize local stack smashing state. */
has_protected_decls = false;
has_short_buffer = false;
if (hwasan_sanitize_stack_p ())
if (hwassist_sanitize_stack_p ())
hwasan_record_frame_init ();
}

@@ -2705,13 +2705,14 @@ expand_used_vars (bitmap forced_stack_vars)
expand_stack_vars (NULL, &data);
}

if (hwasan_sanitize_stack_p ())
if (hwassist_sanitize_stack_p ())
hwasan_emit_prologue ();
if (asan_sanitize_allocas_p () && cfun->calls_alloca)
var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
virtual_stack_vars_rtx,
var_end_seq);
else if (hwasan_sanitize_allocas_p () && cfun->calls_alloca)
else if ((hwasan_sanitize_allocas_p () || memtag_sanitize_p ())
&& cfun->calls_alloca)
/* When using out-of-line instrumentation we only want to emit one function
call for clearing the tags in a region of shadow stack. When there are
alloca calls in this frame we want to emit a call using the
@@ -2719,7 +2720,7 @@ expand_used_vars (bitmap forced_stack_vars)
rtx we created in expand_stack_vars. */
var_end_seq = hwasan_emit_untag_frame (virtual_stack_dynamic_rtx,
virtual_stack_vars_rtx);
else if (hwasan_sanitize_stack_p ())
else if (hwassist_sanitize_stack_p ())
/* If no variables were stored on the stack, `hwasan_get_frame_extent`
will return NULL_RTX and hence `hwasan_emit_untag_frame` will return
NULL (i.e. an empty sequence). */
@@ -7310,7 +7311,7 @@ pass_expand::execute (function *fun)
emit_insn_after (var_ret_seq, after);
}

if (hwasan_sanitize_stack_p ())
if (hwassist_sanitize_stack_p ())
hwasan_maybe_emit_frame_base_init ();

/* Zap the tree EH table. */
gcc/doc/tm.texi
@@ -13001,6 +13001,8 @@ Return an RTX representing the value of @var{untagged} but with a
(possibly) random tag in it.
Put that value into @var{target} if it is convenient to do so.
This function is used to generate a tagged base for the current stack frame.
It is also used by memtag-stack sanitizer to emit specific memory tagging
instructions.
@end deftypefn

@deftypefn {Target Hook} rtx TARGET_MEMTAG_ADD_TAG (rtx @var{base}, poly_int64 @var{addr_offset}, uint8_t @var{tag_offset})
@@ -13011,7 +13013,7 @@ The resulting RTX must either be a valid memory address or be able to get
put into an operand with @code{force_operand}.

Unlike other memtag hooks, this must return an expression and not emit any
RTL.
RTL. In the case of memtag-stack sanitizer, this constraint is not enforced.
@end deftypefn

@deftypefn {Target Hook} rtx TARGET_MEMTAG_SET_TAG (rtx @var{untagged_base}, rtx @var{tag}, rtx @var{target})
@@ -13026,6 +13028,8 @@ The default of this hook is to set the top byte of @var{untagged_base} to
Return an RTX representing the tag stored in @var{tagged_pointer}.
Store the result in @var{target} if it is convenient.
The default represents the top byte of the original pointer.
In the case of memtag-stack sanitizer for targets that can process
tagged pointers (i.e. AArch64), this hook can return a tagged pointer.
@end deftypefn

@deftypefn {Target Hook} rtx TARGET_MEMTAG_UNTAGGED_POINTER (rtx @var{tagged_pointer}, rtx @var{target})
gcc/gcc.cc
@@ -10562,6 +10562,8 @@ sanitize_spec_function (int argc, const char **argv)
return (flag_sanitize & SANITIZE_KERNEL_ADDRESS) ? "" : NULL;
if (strcmp (argv[0], "kernel-hwaddress") == 0)
return (flag_sanitize & SANITIZE_KERNEL_HWADDRESS) ? "" : NULL;
if (strcmp (argv[0], "memtag-stack") == 0)
return (flag_sanitize & SANITIZE_MEMTAG_STACK) ? "" : NULL;
if (strcmp (argv[0], "thread") == 0)
return (flag_sanitize & SANITIZE_THREAD) ? "" : NULL;
if (strcmp (argv[0], "undefined") == 0)
gcc/gimplify.cc
@@ -1318,9 +1318,10 @@ asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,

/* It's necessary to have all stack variables aligned to ASAN granularity
bytes. */
gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
gcc_assert (!hwassist_sanitize_p () || hwassist_sanitize_stack_p ());
unsigned shadow_granularity
= hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
= (hwassist_sanitize_p ()
? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY);
if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);
|
||||
gcc_unreachable ();
|
||||
}
|
||||
|
||||
/* For hwasan stack tagging:
|
||||
Tag memory which is dynamically allocated. */
|
||||
static void
|
||||
expand_HWASAN_ALLOCA_POISON (internal_fn, gcall *gc)
|
||||
{
|
||||
gcc_assert (ptr_mode == Pmode);
|
||||
tree g_target = gimple_call_lhs (gc);
|
||||
tree g_ptr = gimple_call_arg (gc, 0);
|
||||
tree g_size = gimple_call_arg (gc, 1);
|
||||
|
||||
/* There is no target; this happens, usually, when we have an alloca of zero
|
||||
size. */
|
||||
if (!g_target)
|
||||
return;
|
||||
rtx target = expand_normal (g_target);
|
||||
rtx ptr = expand_normal (g_ptr);
|
||||
rtx size = expand_normal (g_size);
|
||||
|
||||
/* No size, nothing to do. */
|
||||
if (size == const0_rtx)
|
||||
return;
|
||||
|
||||
/* Get new tag for the alloca'd memory.
|
||||
Doing a regular add_tag () like so:
|
||||
rtx tag = targetm.memtag.add_tag (hwasan_frame_base (), 0,
|
||||
hwasan_current_frame_tag ());
|
||||
gets a new tag, which can be used for tagging memory. But for alloca, we
|
||||
need both tagged memory and a tagged pointer to pass to consumers. Invoke
|
||||
insert_random_tag () instead to add a random tag to ptr to get a tagged
|
||||
pointer that will work for both purposes. */
|
||||
rtx tagged_ptr
|
||||
= force_reg (Pmode, targetm.memtag.insert_random_tag (ptr, NULL_RTX));
|
||||
rtx tag = targetm.memtag.extract_tag (tagged_ptr, NULL_RTX);
|
||||
|
||||
if (memtag_sanitize_p ())
|
||||
{
|
||||
/* Need to put the tagged ptr into the `target` RTX for consumers
|
||||
of alloca'd memory. */
|
||||
if (tagged_ptr != target)
|
||||
emit_move_insn (target, tagged_ptr);
|
||||
/* Tag the memory. */
|
||||
emit_insn (targetm.gen_tag_memory (ptr, tag, size));
|
||||
hwasan_increment_frame_tag ();
|
||||
}
|
||||
else
|
||||
gcc_unreachable ();
|
||||
}
|
||||
|
||||
/* For hwasan stack tagging:
|
||||
Clear tags on the dynamically allocated space.
|
||||
For use after an object dynamically allocated on the stack goes out of
|
||||
@@ -761,14 +809,22 @@ expand_HWASAN_ALLOCA_UNPOISON (internal_fn, gcall *gc)
|
||||
tree restored_position = gimple_call_arg (gc, 0);
|
||||
rtx restored_rtx = expand_expr (restored_position, NULL_RTX, VOIDmode,
|
||||
EXPAND_NORMAL);
|
||||
rtx func = init_one_libfunc ("__hwasan_tag_memory");
|
||||
rtx off = expand_simple_binop (Pmode, MINUS, restored_rtx,
|
||||
stack_pointer_rtx, NULL_RTX, 0,
|
||||
OPTAB_WIDEN);
|
||||
emit_library_call_value (func, NULL_RTX, LCT_NORMAL, VOIDmode,
|
||||
virtual_stack_dynamic_rtx, Pmode,
|
||||
HWASAN_STACK_BACKGROUND, QImode,
|
||||
off, Pmode);
|
||||
|
||||
if (memtag_sanitize_p ())
|
||||
emit_insn (targetm.gen_tag_memory (virtual_stack_dynamic_rtx,
|
||||
HWASAN_STACK_BACKGROUND,
|
||||
off));
|
||||
else
|
||||
{
|
||||
rtx func = init_one_libfunc ("__hwasan_tag_memory");
|
||||
emit_library_call_value (func, NULL_RTX, LCT_NORMAL, VOIDmode,
|
||||
virtual_stack_dynamic_rtx, Pmode,
|
||||
HWASAN_STACK_BACKGROUND, QImode,
|
||||
off, Pmode);
|
||||
}
|
||||
}
|
||||
|
||||
/* For hwasan stack tagging:
|
||||
@@ -822,9 +878,14 @@ expand_HWASAN_MARK (internal_fn, gcall *gc)
|
||||
tree len = gimple_call_arg (gc, 2);
|
||||
rtx r_len = expand_normal (len);
|
||||
|
||||
rtx func = init_one_libfunc ("__hwasan_tag_memory");
|
||||
emit_library_call (func, LCT_NORMAL, VOIDmode, address, Pmode,
|
||||
tag, QImode, r_len, Pmode);
|
||||
if (memtag_sanitize_p ())
|
||||
emit_insn (targetm.gen_tag_memory (address, tag, r_len));
|
||||
else
|
||||
{
|
||||
rtx func = init_one_libfunc ("__hwasan_tag_memory");
|
||||
emit_library_call (func, LCT_NORMAL, VOIDmode, address, Pmode,
|
||||
tag, QImode, r_len, Pmode);
|
||||
}
|
||||
}
|
||||
|
||||
/* For hwasan stack tagging:
|
||||
|
||||
gcc/internal-fn.def
@@ -499,6 +499,7 @@ DEF_INTERNAL_FN (UBSAN_PTR, ECF_LEAF | ECF_NOTHROW, ". R . ")
DEF_INTERNAL_FN (UBSAN_OBJECT_SIZE, ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (ABNORMAL_DISPATCHER, ECF_NORETURN, NULL)
DEF_INTERNAL_FN (BUILTIN_EXPECT, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (HWASAN_ALLOCA_POISON, ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (HWASAN_ALLOCA_UNPOISON, ECF_LEAF | ECF_NOTHROW, ". R ")
DEF_INTERNAL_FN (HWASAN_CHOOSE_TAG, ECF_LEAF | ECF_NOTHROW, ". ")
DEF_INTERNAL_FN (HWASAN_CHECK, ECF_TM_PURE | ECF_LEAF | ECF_NOTHROW,
gcc/params.opt
@@ -102,6 +102,10 @@ Enable hwasan instrumentation of builtin functions.
Target Joined UInteger Var(param_memtag_instrument_allocas) Init(1) IntegerRange(0, 1) Param
When sanitizing using MTE instructions, add checks for all stack allocas.

-param=memtag-instrument-mem-intrinsics=
Common Joined UInteger Var(param_memtag_instrument_mem_intrinsics) Init(1) IntegerRange(0, 1) Param Optimization
When sanitizing using MTE instructions, include builtin functions.

-param=avg-loop-niter=
Common Joined UInteger Var(param_avg_loop_niter) Init(10) IntegerRange(1, 65536) Param Optimization
Average number of iterations of a loop.
gcc/sanopt.cc
@@ -1330,7 +1330,7 @@ pass_sanopt::execute (function *fun)
sanitize_asan_mark_poison ();
}

if (asan_sanitize_stack_p () || hwasan_sanitize_stack_p ())
if (asan_sanitize_stack_p () || hwassist_sanitize_stack_p ())
sanitize_rewrite_addressable_params (fun);

bool use_calls = param_asan_instrumentation_with_call_threshold < INT_MAX
gcc/target.def
@@ -7577,7 +7577,9 @@ DEFHOOK
"Return an RTX representing the value of @var{untagged} but with a\n\
(possibly) random tag in it.\n\
Put that value into @var{target} if it is convenient to do so.\n\
This function is used to generate a tagged base for the current stack frame.",
This function is used to generate a tagged base for the current stack frame.\n\
It is also used by memtag-stack sanitizer to emit specific memory tagging\n\
instructions.",
rtx, (rtx untagged, rtx target), default_memtag_insert_random_tag)

DEFHOOK
@@ -7589,7 +7591,7 @@ The resulting RTX must either be a valid memory address or be able to get\n\
put into an operand with @code{force_operand}.\n\
\n\
Unlike other memtag hooks, this must return an expression and not emit any\n\
RTL.",
RTL. In the case of memtag-stack sanitizer, this constraint is not enforced.",
rtx, (rtx base, poly_int64 addr_offset, uint8_t tag_offset),
default_memtag_add_tag)

@@ -7606,7 +7608,9 @@ DEFHOOK
(extract_tag,
"Return an RTX representing the tag stored in @var{tagged_pointer}.\n\
Store the result in @var{target} if it is convenient.\n\
The default represents the top byte of the original pointer.",
The default represents the top byte of the original pointer.\n\
In the case of memtag-stack sanitizer for targets that can process\n\
tagged pointers (i.e. AArch64), this hook can return a tagged pointer.",
rtx, (rtx tagged_pointer, rtx target), default_memtag_extract_tag)

DEFHOOK