61b258b0d2
Memory accesses in copy_mc_to_kernel() and copy_mc_to_user() are performed
by assembly routines and are invisible to KASAN, KCSAN, and KMSAN. Add
hooks from instrumented.h to tell the tools these functions have
memcpy/copy_from_user semantics.

The call to copy_mc_fragile() in copy_mc_fragile_handle_tail() is left
intact, because the latter is only called from the assembly implementation
of copy_mc_fragile(), so the memory accesses in it are covered by the
instrumentation in copy_mc_to_kernel() and copy_mc_to_user().

Link: https://lore.kernel.org/all/3b7dbd88-0861-4638-b2d2-911c97a4cadf@I-love.SAKURA.ne.jp/
Link: https://lkml.kernel.org/r/20240320101851.2589698-3-glider@google.com
Signed-off-by: Alexander Potapenko <glider@google.com>
Suggested-by: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Dmitry Vyukov <dvyukov@google.com>
Cc: Marco Elver <elver@google.com>
Cc: Tetsuo Handa <penguin-kernel@I-love.SAKURA.ne.jp>
Cc: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
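The hooks give the copies memcpy semantics for the tools: the "before" hook checks that the source and destination ranges are addressable (KASAN) and reports the accesses to the race detector (KCSAN), while the "after" hook lets KMSAN propagate initialization state for the bytes that were actually transferred. A rough sketch, assuming the hooks follow the pattern of the other helpers in <linux/instrumented.h> (the sketch_* names and bodies below are illustrative, not the upstream definitions):

#include <linux/kasan-checks.h>
#include <linux/kcsan-checks.h>
#include <linux/kmsan-checks.h>

/* Sketch only: approximates the memcpy-style hooks from <linux/instrumented.h>. */
static inline void sketch_instrument_memcpy_before(void *to, const void *from,
						   unsigned long n)
{
	kasan_check_write(to, n);	/* destination must be addressable */
	kasan_check_read(from, n);	/* source must be addressable */
	kcsan_check_write(to, n);	/* make the write visible to the race detector */
	kcsan_check_read(from, n);	/* make the read visible to the race detector */
}

static inline void sketch_instrument_memcpy_after(void *to, const void *from,
						  unsigned long n, unsigned long left)
{
	/* Only n - left bytes were copied; copy KMSAN shadow/origin for them. */
	kmsan_memmove(to, from, n - left);
}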
// SPDX-License-Identifier: GPL-2.0
/* Copyright(c) 2016-2020 Intel Corporation. All rights reserved. */

#include <linux/jump_label.h>
#include <linux/uaccess.h>
#include <linux/export.h>
#include <linux/instrumented.h>
#include <linux/string.h>
#include <linux/types.h>

#include <asm/mce.h>

#ifdef CONFIG_X86_MCE
static DEFINE_STATIC_KEY_FALSE(copy_mc_fragile_key);

void enable_copy_mc_fragile(void)
{
	static_branch_inc(&copy_mc_fragile_key);
}
#define copy_mc_fragile_enabled (static_branch_unlikely(&copy_mc_fragile_key))

/*
 * Similar to copy_user_handle_tail, probe for the write fault point, or
 * source exception point.
 */
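/*
 * Note: the copy_mc_fragile() call below is deliberately not wrapped in
 * instrument_memcpy_before()/instrument_memcpy_after(): this helper is
 * only reached from the assembly implementation of copy_mc_fragile(), so
 * its accesses are already reported by the instrumentation in
 * copy_mc_to_kernel() and copy_mc_to_user().
 */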
__visible notrace unsigned long
copy_mc_fragile_handle_tail(char *to, char *from, unsigned len)
{
	for (; len; --len, to++, from++)
		if (copy_mc_fragile(to, from, 1))
			break;
	return len;
}
#else
/*
 * No point in doing careful copying, or consulting a static key when
 * there is no #MC handler in the CONFIG_X86_MCE=n case.
 */
void enable_copy_mc_fragile(void)
{
}
#define copy_mc_fragile_enabled (0)
#endif

unsigned long copy_mc_enhanced_fast_string(void *dst, const void *src, unsigned len);

/**
 * copy_mc_to_kernel - memory copy that handles source exceptions
 *
 * @dst: destination address
 * @src: source address
 * @len: number of bytes to copy
 *
 * Call into the 'fragile' version on systems that benefit from avoiding
 * corner case poison consumption scenarios. For example, accessing
 * poison across 2 cachelines with a single instruction. Almost all
 * other use cases can use copy_mc_enhanced_fast_string() for a fast
 * recoverable copy, or fall back to plain memcpy.
 *
 * Return 0 for success, or number of bytes not copied if there was an
 * exception.
 */
unsigned long __must_check copy_mc_to_kernel(void *dst, const void *src, unsigned len)
{
	unsigned long ret;

	if (copy_mc_fragile_enabled) {
		instrument_memcpy_before(dst, src, len);
		ret = copy_mc_fragile(dst, src, len);
		instrument_memcpy_after(dst, src, len, ret);
		return ret;
	}
	if (static_cpu_has(X86_FEATURE_ERMS)) {
		instrument_memcpy_before(dst, src, len);
		ret = copy_mc_enhanced_fast_string(dst, src, len);
		instrument_memcpy_after(dst, src, len, ret);
		return ret;
	}
	memcpy(dst, src, len);
	return 0;
}
EXPORT_SYMBOL_GPL(copy_mc_to_kernel);

unsigned long __must_check copy_mc_to_user(void __user *dst, const void *src, unsigned len)
{
	unsigned long ret;

	if (copy_mc_fragile_enabled) {
		instrument_copy_to_user(dst, src, len);
		__uaccess_begin();
		ret = copy_mc_fragile((__force void *)dst, src, len);
		__uaccess_end();
		return ret;
	}

	if (static_cpu_has(X86_FEATURE_ERMS)) {
		instrument_copy_to_user(dst, src, len);
		__uaccess_begin();
		ret = copy_mc_enhanced_fast_string((__force void *)dst, src, len);
		__uaccess_end();
		return ret;
	}

	return copy_user_generic((__force void *)dst, src, len);
}
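A hypothetical caller sketch (the example_read_pmem() name and the -EIO policy are illustrative, not taken from this file) showing the documented return convention of copy_mc_to_kernel(): 0 on success, otherwise the number of bytes not copied:

/* Hypothetical example, not part of copy_mc.c; needs <linux/errno.h>. */
static int example_read_pmem(void *dst, const void *pmem_src, unsigned len)
{
	unsigned long rem = copy_mc_to_kernel(dst, pmem_src, len);

	if (rem)
		return -EIO;	/* an exception truncated the copy; only len - rem bytes arrived */
	return 0;
}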