diff options
Diffstat (limited to 'include/linux/compiler.h')
-rw-r--r--  include/linux/compiler.h | 114
1 file changed, 75 insertions(+), 39 deletions(-)
diff --git a/include/linux/compiler.h b/include/linux/compiler.h index 42506e4d1f53..18c80cfa4fc4 100644 --- a/include/linux/compiler.h +++ b/include/linux/compiler.h @@ -23,8 +23,8 @@ void ftrace_likely_update(struct ftrace_likely_data *f, int val,  #define __branch_check__(x, expect, is_constant) ({			\  			long ______r;					\  			static struct ftrace_likely_data		\ -				__attribute__((__aligned__(4)))		\ -				__attribute__((section("_ftrace_annotated_branch"))) \ +				__aligned(4)				\ +				__section("_ftrace_annotated_branch")	\  				______f = {				\  				.data.func = __func__,			\  				.data.file = __FILE__,			\ @@ -59,8 +59,8 @@ void ftrace_likely_update(struct ftrace_likely_data *f, int val,  	({								\  		int ______r;						\  		static struct ftrace_branch_data			\ -			__attribute__((__aligned__(4)))			\ -			__attribute__((section("_ftrace_branch")))	\ +			__aligned(4)					\ +			__section("_ftrace_branch")			\  			______f = {					\  				.func = __func__,			\  				.file = __FILE__,			\ @@ -99,22 +99,13 @@ void ftrace_likely_update(struct ftrace_likely_data *f, int val,   * unique, to convince GCC not to merge duplicate inline asm statements.   
*/  #define annotate_reachable() ({						\ -	asm volatile("%c0:\n\t"						\ -		     ".pushsection .discard.reachable\n\t"		\ -		     ".long %c0b - .\n\t"				\ -		     ".popsection\n\t" : : "i" (__COUNTER__));		\ +	asm volatile("ANNOTATE_REACHABLE counter=%c0"			\ +		     : : "i" (__COUNTER__));				\  })  #define annotate_unreachable() ({					\ -	asm volatile("%c0:\n\t"						\ -		     ".pushsection .discard.unreachable\n\t"		\ -		     ".long %c0b - .\n\t"				\ -		     ".popsection\n\t" : : "i" (__COUNTER__));		\ +	asm volatile("ANNOTATE_UNREACHABLE counter=%c0"			\ +		     : : "i" (__COUNTER__));				\  }) -#define ASM_UNREACHABLE							\ -	"999:\n\t"							\ -	".pushsection .discard.unreachable\n\t"				\ -	".long 999b - .\n\t"						\ -	".popsection\n\t"  #else  #define annotate_reachable()  #define annotate_unreachable() @@ -124,7 +115,10 @@ void ftrace_likely_update(struct ftrace_likely_data *f, int val,  # define ASM_UNREACHABLE  #endif  #ifndef unreachable -# define unreachable() do { annotate_reachable(); do { } while (1); } while (0) +# define unreachable() do {		\ +	annotate_unreachable();		\ +	__builtin_unreachable();	\ +} while (0)  #endif  /* @@ -146,7 +140,7 @@ void ftrace_likely_update(struct ftrace_likely_data *f, int val,  	extern typeof(sym) sym;					\  	static const unsigned long __kentry_##sym		\  	__used							\ -	__attribute__((section("___kentry" "+" #sym ), used))	\ +	__section("___kentry" "+" #sym )			\  	= (unsigned long)&sym;  #endif @@ -280,11 +274,65 @@ unsigned long read_word_at_a_time(const void *addr)  #endif /* __KERNEL__ */ -#endif /* __ASSEMBLY__ */ +/* + * Force the compiler to emit 'sym' as a symbol, so that we can reference + * it from inline assembler. Necessary in case 'sym' could be inlined + * otherwise, or eliminated entirely due to lack of references that are + * visible to the compiler. 
+ */ +#define __ADDRESSABLE(sym) \ +	static void * __section(".discard.addressable") __used \ +		__PASTE(__addressable_##sym, __LINE__) = (void *)&sym; -#ifndef __optimize -# define __optimize(level) -#endif +/** + * offset_to_ptr - convert a relative memory offset to an absolute pointer + * @off:	the address of the 32-bit offset value + */ +static inline void *offset_to_ptr(const int *off) +{ +	return (void *)((unsigned long)off + *off); +} + +#else /* __ASSEMBLY__ */ + +#ifdef __KERNEL__ +#ifndef LINKER_SCRIPT + +#ifdef CONFIG_STACK_VALIDATION +.macro ANNOTATE_UNREACHABLE counter:req +\counter: +	.pushsection .discard.unreachable +	.long \counter\()b -. +	.popsection +.endm + +.macro ANNOTATE_REACHABLE counter:req +\counter: +	.pushsection .discard.reachable +	.long \counter\()b -. +	.popsection +.endm + +.macro ASM_UNREACHABLE +999: +	.pushsection .discard.unreachable +	.long 999b - . +	.popsection +.endm +#else /* CONFIG_STACK_VALIDATION */ +.macro ANNOTATE_UNREACHABLE counter:req +.endm + +.macro ANNOTATE_REACHABLE counter:req +.endm + +.macro ASM_UNREACHABLE +.endm +#endif /* CONFIG_STACK_VALIDATION */ + +#endif /* LINKER_SCRIPT */ +#endif /* __KERNEL__ */ +#endif /* __ASSEMBLY__ */  /* Compile time object size, -1 for unknown */  #ifndef __compiletime_object_size @@ -295,29 +343,14 @@ unsigned long read_word_at_a_time(const void *addr)  #endif  #ifndef __compiletime_error  # define __compiletime_error(message) -/* - * Sparse complains of variable sized arrays due to the temporary variable in - * __compiletime_assert. Unfortunately we can't just expand it out to make - * sparse see a constant array size without breaking compiletime_assert on old - * versions of GCC (e.g. 4.2.4), so hide the array from sparse altogether. 
- */ -# ifndef __CHECKER__ -#  define __compiletime_error_fallback(condition) \ -	do { ((void)sizeof(char[1 - 2 * condition])); } while (0) -# endif -#endif -#ifndef __compiletime_error_fallback -# define __compiletime_error_fallback(condition) do { } while (0)  #endif  #ifdef __OPTIMIZE__  # define __compiletime_assert(condition, msg, prefix, suffix)		\  	do {								\ -		bool __cond = !(condition);				\  		extern void prefix ## suffix(void) __compiletime_error(msg); \ -		if (__cond)						\ +		if (!(condition))					\  			prefix ## suffix();				\ -		__compiletime_error_fallback(__cond);			\  	} while (0)  #else  # define __compiletime_assert(condition, msg, prefix, suffix) do { } while (0) @@ -342,4 +375,7 @@ unsigned long read_word_at_a_time(const void *addr)  	compiletime_assert(__native_word(t),				\  		"Need native word sized stores/loads for atomicity.") +/* &a[0] degrades to a pointer: a different type from an array */ +#define __must_be_array(a)	BUILD_BUG_ON_ZERO(__same_type((a), &(a)[0])) +  #endif /* __LINUX_COMPILER_H */  | 
