Pileus Git - ~andy/linux/commitdiff
Merge branch 'x86/mpx' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
authorLinus Torvalds <torvalds@linux-foundation.org>
Mon, 20 Jan 2014 22:46:32 +0000 (14:46 -0800)
committerLinus Torvalds <torvalds@linux-foundation.org>
Mon, 20 Jan 2014 22:46:32 +0000 (14:46 -0800)
Pull x86 cpufeature and mpx updates from Peter Anvin:
 "This includes the basic infrastructure for MPX (Memory Protection
  Extensions) support, but does not include MPX support itself.  It is,
  however, a prerequisite for KVM support for MPX, which I believe will
  be pushed later this merge window by the KVM team.

  This includes moving the functionality in
  futex_atomic_cmpxchg_inatomic() into a new function in uaccess.h so it
  can be reused - this will be used by the final MPX patches.

  The actual MPX functionality (map management and so on) will be pushed
  in a future merge window, when ready"

* 'x86/mpx' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  x86/intel/mpx: Remove unused LWP structure
  x86, mpx: Add MPX related opcodes to the x86 opcode map
  x86: replace futex_atomic_cmpxchg_inatomic() with user_atomic_cmpxchg_inatomic
  x86: add user_atomic_cmpxchg_inatomic at uaccess.h
  x86, xsave: Support eager-only xsave features, add MPX support
  x86, cpufeature: Define the Intel MPX feature flag

arch/x86/include/asm/cpufeature.h
arch/x86/include/asm/futex.h
arch/x86/include/asm/processor.h
arch/x86/include/asm/uaccess.h
arch/x86/include/asm/xsave.h
arch/x86/kernel/xsave.c
arch/x86/lib/x86-opcode-map.txt

index 89270b4318db8b97ac467717b38863fa549435ad..e099f9502acec86cb5ec1412745d90384820e625 100644 (file)
 #define X86_FEATURE_ERMS       (9*32+ 9) /* Enhanced REP MOVSB/STOSB */
 #define X86_FEATURE_INVPCID    (9*32+10) /* Invalidate Processor Context ID */
 #define X86_FEATURE_RTM                (9*32+11) /* Restricted Transactional Memory */
+#define X86_FEATURE_MPX                (9*32+14) /* Memory Protection Extension */
 #define X86_FEATURE_RDSEED     (9*32+18) /* The RDSEED instruction */
 #define X86_FEATURE_ADX                (9*32+19) /* The ADCX and ADOX instructions */
 #define X86_FEATURE_SMAP       (9*32+20) /* Supervisor Mode Access Prevention */
index be27ba1e947a95b5b54f8783920404800752a8d6..b4c1f545343663057460376157de5d9d55a6f528 100644 (file)
@@ -110,26 +110,7 @@ static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
 static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                                                u32 oldval, u32 newval)
 {
-       int ret = 0;
-
-       if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
-               return -EFAULT;
-
-       asm volatile("\t" ASM_STAC "\n"
-                    "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
-                    "2:\t" ASM_CLAC "\n"
-                    "\t.section .fixup, \"ax\"\n"
-                    "3:\tmov     %3, %0\n"
-                    "\tjmp     2b\n"
-                    "\t.previous\n"
-                    _ASM_EXTABLE(1b, 3b)
-                    : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
-                    : "i" (-EFAULT), "r" (newval), "1" (oldval)
-                    : "memory"
-       );
-
-       *uval = oldval;
-       return ret;
+       return user_atomic_cmpxchg_inatomic(uval, uaddr, oldval, newval);
 }
 
 #endif
index a61b0717da3210db336919c9215c1ef01dee9e6d..fdedd38fd0fc7feeabc26f8b366b525b65685904 100644 (file)
@@ -370,6 +370,20 @@ struct ymmh_struct {
        u32 ymmh_space[64];
 };
 
+/* We don't support LWP yet: */
+struct lwp_struct {
+       u8 reserved[128];
+};
+
+struct bndregs_struct {
+       u64 bndregs[8];
+} __packed;
+
+struct bndcsr_struct {
+       u64 cfg_reg_u;
+       u64 status_reg;
+} __packed;
+
 struct xsave_hdr_struct {
        u64 xstate_bv;
        u64 reserved1[2];
@@ -380,6 +394,9 @@ struct xsave_struct {
        struct i387_fxsave_struct i387;
        struct xsave_hdr_struct xsave_hdr;
        struct ymmh_struct ymmh;
+       struct lwp_struct lwp;
+       struct bndregs_struct bndregs;
+       struct bndcsr_struct bndcsr;
        /* new processor state extensions will go here */
 } __attribute__ ((packed, aligned (64)));
 
index 6f1bb74d547beed3459f9bdb1eabff5965bc2881..0d592e0a5b84fa3c3738ce8400e34df5bb552eb0 100644 (file)
@@ -533,6 +533,98 @@ extern __must_check long strnlen_user(const char __user *str, long n);
 unsigned long __must_check clear_user(void __user *mem, unsigned long len);
 unsigned long __must_check __clear_user(void __user *mem, unsigned long len);
 
+extern void __cmpxchg_wrong_size(void)
+       __compiletime_error("Bad argument size for cmpxchg");
+
+#define __user_atomic_cmpxchg_inatomic(uval, ptr, old, new, size)      \
+({                                                                     \
+       int __ret = 0;                                                  \
+       __typeof__(ptr) __uval = (uval);                                \
+       __typeof__(*(ptr)) __old = (old);                               \
+       __typeof__(*(ptr)) __new = (new);                               \
+       switch (size) {                                                 \
+       case 1:                                                         \
+       {                                                               \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgb %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "q" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       case 2:                                                         \
+       {                                                               \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgw %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "r" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       case 4:                                                         \
+       {                                                               \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "r" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       case 8:                                                         \
+       {                                                               \
+               if (!IS_ENABLED(CONFIG_X86_64))                         \
+                       __cmpxchg_wrong_size();                         \
+                                                                       \
+               asm volatile("\t" ASM_STAC "\n"                         \
+                       "1:\t" LOCK_PREFIX "cmpxchgq %4, %2\n"          \
+                       "2:\t" ASM_CLAC "\n"                            \
+                       "\t.section .fixup, \"ax\"\n"                   \
+                       "3:\tmov     %3, %0\n"                          \
+                       "\tjmp     2b\n"                                \
+                       "\t.previous\n"                                 \
+                       _ASM_EXTABLE(1b, 3b)                            \
+                       : "+r" (__ret), "=a" (__old), "+m" (*(ptr))     \
+                       : "i" (-EFAULT), "r" (__new), "1" (__old)       \
+                       : "memory"                                      \
+               );                                                      \
+               break;                                                  \
+       }                                                               \
+       default:                                                        \
+               __cmpxchg_wrong_size();                                 \
+       }                                                               \
+       *__uval = __old;                                                \
+       __ret;                                                          \
+})
+
+#define user_atomic_cmpxchg_inatomic(uval, ptr, old, new)              \
+({                                                                     \
+       access_ok(VERIFY_WRITE, (ptr), sizeof(*(ptr))) ?                \
+               __user_atomic_cmpxchg_inatomic((uval), (ptr),           \
+                               (old), (new), sizeof(*(ptr))) :         \
+               -EFAULT;                                                \
+})
+
 /*
  * movsl can be slow when source and dest are not both 8-byte aligned
  */
index 0415cdabb5a663d1684284525a3fe24b7b3cd359..554738963b28cf47dd76fa9947338bb1eae5a0d0 100644 (file)
@@ -9,6 +9,8 @@
 #define XSTATE_FP      0x1
 #define XSTATE_SSE     0x2
 #define XSTATE_YMM     0x4
+#define XSTATE_BNDREGS 0x8
+#define XSTATE_BNDCSR  0x10
 
 #define XSTATE_FPSSE   (XSTATE_FP | XSTATE_SSE)
 
 #define XSAVE_YMM_SIZE     256
 #define XSAVE_YMM_OFFSET    (XSAVE_HDR_SIZE + XSAVE_HDR_OFFSET)
 
-/*
- * These are the features that the OS can handle currently.
- */
-#define XCNTXT_MASK    (XSTATE_FP | XSTATE_SSE | XSTATE_YMM)
+/* Supported features which support lazy state saving */
+#define XSTATE_LAZY    (XSTATE_FP | XSTATE_SSE | XSTATE_YMM)
+
+/* Supported features which require eager state saving */
+#define XSTATE_EAGER   (XSTATE_BNDREGS | XSTATE_BNDCSR)
+
+/* All currently supported features */
+#define XCNTXT_MASK    (XSTATE_LAZY | XSTATE_EAGER)
 
 #ifdef CONFIG_X86_64
 #define REX_PREFIX     "0x48, "
index 422fd82234700fe46f7ac12fe150665d0977e3b9..a4b451c6addfb7085a22c70b408614a58bf0ad7b 100644 (file)
@@ -562,6 +562,16 @@ static void __init xstate_enable_boot_cpu(void)
        if (cpu_has_xsaveopt && eagerfpu != DISABLE)
                eagerfpu = ENABLE;
 
+       if (pcntxt_mask & XSTATE_EAGER) {
+               if (eagerfpu == DISABLE) {
+                       pr_err("eagerfpu not present, disabling some xstate features: 0x%llx\n",
+                                       pcntxt_mask & XSTATE_EAGER);
+                       pcntxt_mask &= ~XSTATE_EAGER;
+               } else {
+                       eagerfpu = ENABLE;
+               }
+       }
+
        pr_info("enabled xstate_bv 0x%llx, cntxt size 0x%x\n",
                pcntxt_mask, xstate_size);
 }
index 533a85e3a07e7dfc4abc73fb68aed9fd74094d95..1a2be7c6895d811be12083b5dab49f92cfb8791f 100644 (file)
@@ -346,8 +346,8 @@ AVXcode: 1
 17: vmovhps Mq,Vq (v1) | vmovhpd Mq,Vq (66),(v1)
 18: Grp16 (1A)
 19:
-1a:
-1b:
+1a: BNDCL Ev,Gv | BNDCU Ev,Gv | BNDMOV Gv,Ev | BNDLDX Gv,Ev,Gv
+1b: BNDCN Ev,Gv | BNDMOV Ev,Gv | BNDMK Gv,Ev | BNDSTX Ev,GV,Gv
 1c:
 1d:
 1e: