Merge tag 'v4.7-rc6' into x86/mm, to merge fixes before applying new changes
[cascardo/linux.git] arch/x86/include/asm/uaccess.h
index 12f9653..d40ec72 100644
--- a/arch/x86/include/asm/uaccess.h
+++ b/arch/x86/include/asm/uaccess.h
@@ -5,6 +5,7 @@
  */
 #include <linux/errno.h>
 #include <linux/compiler.h>
+#include <linux/kasan-checks.h>
 #include <linux/thread_info.h>
 #include <linux/string.h>
 #include <asm/asm.h>
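The new include pulls in the KASAN region-check helpers used by the copy_from_user()/copy_to_user() hunks further down. As of this series, linux/kasan-checks.h declares roughly the following, so the calls compile away entirely when CONFIG_KASAN is off:

	#ifdef CONFIG_KASAN
	void kasan_check_read(const void *p, unsigned int size);
	void kasan_check_write(const void *p, unsigned int size);
	#else
	static inline void kasan_check_read(const void *p, unsigned int size) { }
	static inline void kasan_check_write(const void *p, unsigned int size) { }
	#endif
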
@@ -341,7 +342,26 @@ do {                                                                       \
 } while (0)
 
 #ifdef CONFIG_X86_32
-#define __get_user_asm_u64(x, ptr, retval, errret)     (x) = __get_user_bad()
+#define __get_user_asm_u64(x, ptr, retval, errret)                     \
+({                                                                     \
+       __typeof__(ptr) __ptr = (ptr);                                  \
+       asm volatile(ASM_STAC "\n"                                      \
+                    "1:        movl %2,%%eax\n"                        \
+                    "2:        movl %3,%%edx\n"                        \
+                    "3: " ASM_CLAC "\n"                                \
+                    ".section .fixup,\"ax\"\n"                         \
+                    "4:        mov %4,%0\n"                            \
+                    "  xorl %%eax,%%eax\n"                             \
+                    "  xorl %%edx,%%edx\n"                             \
+                    "  jmp 3b\n"                                       \
+                    ".previous\n"                                      \
+                    _ASM_EXTABLE(1b, 4b)                               \
+                    _ASM_EXTABLE(2b, 4b)                               \
+                    : "=r" (retval), "=A"(x)                           \
+                    : "m" (__m(__ptr)), "m" __m(((u32 *)(__ptr)) + 1), \
+                      "i" (errret), "0" (retval));                     \
+})
+
 #define __get_user_asm_ex_u64(x, ptr)                  (x) = __get_user_bad()
 #else
 #define __get_user_asm_u64(x, ptr, retval, errret) \
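How the 32-bit case works: i386 has no single instruction that loads a 64-bit value, so the macro issues two movl loads and lets GCC's "=A" constraint glue the result together ("=A" binds a 64-bit operand to the EDX:EAX register pair on 32-bit x86). Each load gets its own exception-table entry; if either one faults, the fixup code at label 4 stores errret into retval and zeroes both halves, so the destination never ends up holding a partial value. The asymmetric spelling of the two input constraints ("m" (__m(__ptr)) versus "m" __m(...)) is harmless, since __m() expands with its own parentheses. A minimal user-space sketch of the same register-pair trick (hypothetical code, build with gcc -m32; load_u64_halves is an illustrative name, not kernel code):

	#include <stdint.h>
	#include <stdio.h>

	static uint64_t load_u64_halves(const uint32_t *p)
	{
		uint64_t val;

		/*
		 * "=&A" pins val to EDX:EAX (early-clobbered, so the
		 * compiler cannot use those registers to address the
		 * inputs); the two movl instructions fill the halves,
		 * just as the kernel macro above does.
		 */
		asm("movl %1, %%eax\n\t"
		    "movl %2, %%edx"
		    : "=&A" (val)
		    : "m" (p[0]), "m" (p[1]));
		return val;
	}

	int main(void)
	{
		uint64_t x = 0x1122334455667788ULL;

		printf("%#llx\n",
		       (unsigned long long)load_u64_halves((const uint32_t *)&x));
		return 0;
	}
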
@@ -428,7 +448,7 @@ do {                                                                        \
 #define __get_user_nocheck(x, ptr, size)                               \
 ({                                                                     \
        int __gu_err;                                                   \
-       unsigned long __gu_val;                                         \
+       __inttype(*(ptr)) __gu_val;                                     \
        __uaccess_begin();                                              \
        __get_user_size(__gu_val, (ptr), (size), __gu_err, -EFAULT);    \
        __uaccess_end();                                                \
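This one-line change is what actually makes a 64-bit get_user() possible on 32-bit kernels: __gu_val used to be unsigned long, which is 32 bits on x86-32 and would have truncated the value that __get_user_asm_u64() just fetched. __inttype picks an integer type at least as wide as the object being read; elsewhere in this header it is defined roughly as:

	/*
	 * Either unsigned long, if the argument fits into it, or
	 * unsigned long long otherwise: wide enough for any integer
	 * the access macros are asked to fetch.
	 */
	#define __inttype(x) \
	__typeof__(__builtin_choose_expr(sizeof(x) > sizeof(0UL), 0ULL, 0UL))
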
@@ -721,6 +741,8 @@ copy_from_user(void *to, const void __user *from, unsigned long n)
 
        might_fault();
 
+       kasan_check_write(to, n);
+
        /*
         * While we would like to have the compiler do the checking for us
         * even in the non-constant size case, any false positives there are
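With that helper in place, copy_from_user() now asks KASAN to validate the kernel-side destination before any user data is copied in. A hypothetical, buggy-by-construction caller showing what the check catches (example_ioctl_read and buf are illustrative names, not part of this patch):

	long example_ioctl_read(const void __user *uptr, unsigned long n)
	{
		char buf[16];

		/*
		 * The "if (n > sizeof(buf)) return -EINVAL;" check is
		 * missing here: with CONFIG_KASAN=y, the new
		 * kasan_check_write(to, n) reports the out-of-bounds
		 * write against buf's redzone before the copy can
		 * corrupt the stack.
		 */
		if (copy_from_user(buf, uptr, n))
			return -EFAULT;
		return 0;
	}
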
@@ -754,6 +776,8 @@ copy_to_user(void __user *to, const void *from, unsigned long n)
 {
        int sz = __compiletime_object_size(from);
 
+       kasan_check_read(from, n);
+
        might_fault();
 
        /* See the comment in copy_from_user() above. */
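The read-side check is the mirror image: copy_to_user() now validates the kernel source buffer, catching over-reads that would otherwise leak adjacent kernel memory to user space. In the same hypothetical style as above:

	long example_ioctl_write(void __user *uptr)
	{
		char id[8];

		/*
		 * Over-read of an 8-byte object: kasan_check_read(from, n)
		 * flags the bad length before 24 extra bytes of kernel
		 * stack can leak to user space.
		 */
		return copy_to_user(uptr, id, 32) ? -EFAULT : 0;
	}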