Merge branch 'for-next/linkage' into for-next/core

* for-next/linkage:
  arm64: module: remove (NOLOAD) from linker script
  linkage: remove SYM_FUNC_{START,END}_ALIAS()
  x86: clean up symbol aliasing
  arm64: clean up symbol aliasing
  linkage: add SYM_FUNC_ALIAS{,_LOCAL,_WEAK}()
commit 563c463595
Author: Will Deacon
Date:   2022-03-14 19:01:05 +00:00
27 changed files with 172 additions and 163 deletions
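As a quick orientation for the diffs that follow: the old pattern bracketed one function body with SYM_FUNC_START_ALIAS()/SYM_FUNC_END_ALIAS() pairs, whereas the new pattern defines the body once under its canonical name and declares each extra name afterwards with SYM_FUNC_ALIAS{,_LOCAL,_WEAK}(); SYM_END now also records the function size in .L__sym_size_<name> so that SYM_ALIAS can copy it to the alias. The sketch below is illustrative only, condensed from the arch/arm64/lib/memset.S hunk in this merge (EXPORT_SYMBOL lines and the real body omitted):

    /* old style (removed by this series) */
    SYM_FUNC_START_ALIAS(__memset)
    SYM_FUNC_START_WEAK_PI(memset)
        ... asm insns ...
    SYM_FUNC_END_PI(memset)
    SYM_FUNC_END_ALIAS(__memset)

    /* new style: one definition, aliases declared separately */
    SYM_FUNC_START(__pi_memset)
        ... asm insns ...
    SYM_FUNC_END(__pi_memset)
    SYM_FUNC_ALIAS(__memset, __pi_memset)
    SYM_FUNC_ALIAS_WEAK(memset, __pi_memset)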


@ -130,14 +130,13 @@ denoting a range of code via ``SYM_*_START/END`` annotations.
In fact, this kind of annotation corresponds to the now deprecated ``ENTRY``
and ``ENDPROC`` macros.
* ``SYM_FUNC_START_ALIAS`` and ``SYM_FUNC_START_LOCAL_ALIAS`` serve for those
who decided to have two or more names for one function. The typical use is::
* ``SYM_FUNC_ALIAS``, ``SYM_FUNC_ALIAS_LOCAL``, and ``SYM_FUNC_ALIAS_WEAK`` can
be used to define multiple names for a function. The typical use is::
SYM_FUNC_START_ALIAS(__memset)
SYM_FUNC_START(memset)
SYM_FUNC_START(__memset)
... asm insns ...
SYM_FUNC_END(memset)
SYM_FUNC_END_ALIAS(__memset)
SYM_FUNC_END(__memset)
SYM_FUNC_ALIAS(memset, __memset)
In this example, one can call ``__memset`` or ``memset`` with the same
result, except the debug information for the instructions is generated to


@ -39,28 +39,4 @@
SYM_START(name, SYM_L_WEAK, SYM_A_NONE) \
bti c ;
/*
* Annotate a function as position independent, i.e., safe to be called before
* the kernel virtual mapping is activated.
*/
#define SYM_FUNC_START_PI(x) \
SYM_FUNC_START_ALIAS(__pi_##x); \
SYM_FUNC_START(x)
#define SYM_FUNC_START_WEAK_PI(x) \
SYM_FUNC_START_ALIAS(__pi_##x); \
SYM_FUNC_START_WEAK(x)
#define SYM_FUNC_START_WEAK_ALIAS_PI(x) \
SYM_FUNC_START_ALIAS(__pi_##x); \
SYM_START(x, SYM_L_WEAK, SYM_A_ALIGN)
#define SYM_FUNC_END_PI(x) \
SYM_FUNC_END(x); \
SYM_FUNC_END_ALIAS(__pi_##x)
#define SYM_FUNC_END_ALIAS_PI(x) \
SYM_FUNC_END_ALIAS(x); \
SYM_FUNC_END_ALIAS(__pi_##x)
#endif


@ -1,8 +1,8 @@
SECTIONS {
#ifdef CONFIG_ARM64_MODULE_PLTS
.plt 0 (NOLOAD) : { BYTE(0) }
.init.plt 0 (NOLOAD) : { BYTE(0) }
.text.ftrace_trampoline 0 (NOLOAD) : { BYTE(0) }
.plt 0 : { BYTE(0) }
.init.plt 0 : { BYTE(0) }
.text.ftrace_trampoline 0 : { BYTE(0) }
#endif
#ifdef CONFIG_KASAN_SW_TAGS


@ -7,7 +7,8 @@
#include <asm/assembler.h>
#include <asm/alternative.h>
SYM_FUNC_START_PI(dcache_clean_inval_poc)
SYM_FUNC_START(__pi_dcache_clean_inval_poc)
dcache_by_line_op civac, sy, x0, x1, x2, x3
ret
SYM_FUNC_END_PI(dcache_clean_inval_poc)
SYM_FUNC_END(__pi_dcache_clean_inval_poc)
SYM_FUNC_ALIAS(dcache_clean_inval_poc, __pi_dcache_clean_inval_poc)


@ -14,7 +14,7 @@
* Parameters:
* x0 - dest
*/
SYM_FUNC_START_PI(clear_page)
SYM_FUNC_START(__pi_clear_page)
mrs x1, dczid_el0
tbnz x1, #4, 2f /* Branch if DC ZVA is prohibited */
and w1, w1, #0xf
@ -35,5 +35,6 @@ SYM_FUNC_START_PI(clear_page)
tst x0, #(PAGE_SIZE - 1)
b.ne 2b
ret
SYM_FUNC_END_PI(clear_page)
SYM_FUNC_END(__pi_clear_page)
SYM_FUNC_ALIAS(clear_page, __pi_clear_page)
EXPORT_SYMBOL(clear_page)


@ -17,7 +17,7 @@
* x0 - dest
* x1 - src
*/
SYM_FUNC_START_PI(copy_page)
SYM_FUNC_START(__pi_copy_page)
alternative_if ARM64_HAS_NO_HW_PREFETCH
// Prefetch three cache lines ahead.
prfm pldl1strm, [x1, #128]
@ -75,5 +75,6 @@ alternative_else_nop_endif
stnp x16, x17, [x0, #112 - 256]
ret
SYM_FUNC_END_PI(copy_page)
SYM_FUNC_END(__pi_copy_page)
SYM_FUNC_ALIAS(copy_page, __pi_copy_page)
EXPORT_SYMBOL(copy_page)


@ -38,7 +38,7 @@
.p2align 4
nop
SYM_FUNC_START_WEAK_PI(memchr)
SYM_FUNC_START(__pi_memchr)
and chrin, chrin, #0xff
lsr wordcnt, cntin, #3
cbz wordcnt, L(byte_loop)
@ -71,5 +71,6 @@ CPU_LE( rev tmp, tmp)
L(not_found):
mov result, #0
ret
SYM_FUNC_END_PI(memchr)
SYM_FUNC_END(__pi_memchr)
SYM_FUNC_ALIAS_WEAK(memchr, __pi_memchr)
EXPORT_SYMBOL_NOKASAN(memchr)


@ -32,7 +32,7 @@
#define tmp1 x7
#define tmp2 x8
SYM_FUNC_START_WEAK_PI(memcmp)
SYM_FUNC_START(__pi_memcmp)
subs limit, limit, 8
b.lo L(less8)
@ -134,6 +134,6 @@ L(byte_loop):
b.eq L(byte_loop)
sub result, data1w, data2w
ret
SYM_FUNC_END_PI(memcmp)
SYM_FUNC_END(__pi_memcmp)
SYM_FUNC_ALIAS_WEAK(memcmp, __pi_memcmp)
EXPORT_SYMBOL_NOKASAN(memcmp)


@ -57,10 +57,7 @@
The loop tail is handled by always copying 64 bytes from the end.
*/
SYM_FUNC_START_ALIAS(__memmove)
SYM_FUNC_START_WEAK_ALIAS_PI(memmove)
SYM_FUNC_START_ALIAS(__memcpy)
SYM_FUNC_START_WEAK_PI(memcpy)
SYM_FUNC_START(__pi_memcpy)
add srcend, src, count
add dstend, dstin, count
cmp count, 128
@ -241,12 +238,16 @@ L(copy64_from_start):
stp B_l, B_h, [dstin, 16]
stp C_l, C_h, [dstin]
ret
SYM_FUNC_END(__pi_memcpy)
SYM_FUNC_END_PI(memcpy)
EXPORT_SYMBOL(memcpy)
SYM_FUNC_END_ALIAS(__memcpy)
SYM_FUNC_ALIAS(__memcpy, __pi_memcpy)
EXPORT_SYMBOL(__memcpy)
SYM_FUNC_END_ALIAS_PI(memmove)
EXPORT_SYMBOL(memmove)
SYM_FUNC_END_ALIAS(__memmove)
SYM_FUNC_ALIAS_WEAK(memcpy, __memcpy)
EXPORT_SYMBOL(memcpy)
SYM_FUNC_ALIAS(__pi_memmove, __pi_memcpy)
SYM_FUNC_ALIAS(__memmove, __pi_memmove)
EXPORT_SYMBOL(__memmove)
SYM_FUNC_ALIAS_WEAK(memmove, __memmove)
EXPORT_SYMBOL(memmove)


@ -42,8 +42,7 @@ dst .req x8
tmp3w .req w9
tmp3 .req x9
SYM_FUNC_START_ALIAS(__memset)
SYM_FUNC_START_WEAK_PI(memset)
SYM_FUNC_START(__pi_memset)
mov dst, dstin /* Preserve return value. */
and A_lw, val, #255
orr A_lw, A_lw, A_lw, lsl #8
@ -202,7 +201,10 @@ SYM_FUNC_START_WEAK_PI(memset)
ands count, count, zva_bits_x
b.ne .Ltail_maybe_long
ret
SYM_FUNC_END_PI(memset)
EXPORT_SYMBOL(memset)
SYM_FUNC_END_ALIAS(__memset)
SYM_FUNC_END(__pi_memset)
SYM_FUNC_ALIAS(__memset, __pi_memset)
EXPORT_SYMBOL(__memset)
SYM_FUNC_ALIAS_WEAK(memset, __pi_memset)
EXPORT_SYMBOL(memset)


@ -18,7 +18,7 @@
* Returns:
* x0 - address of first occurrence of 'c' or 0
*/
SYM_FUNC_START_WEAK(strchr)
SYM_FUNC_START(__pi_strchr)
and w1, w1, #0xff
1: ldrb w2, [x0], #1
cmp w2, w1
@ -28,5 +28,7 @@ SYM_FUNC_START_WEAK(strchr)
cmp w2, w1
csel x0, x0, xzr, eq
ret
SYM_FUNC_END(strchr)
SYM_FUNC_END(__pi_strchr)
SYM_FUNC_ALIAS_WEAK(strchr, __pi_strchr)
EXPORT_SYMBOL_NOKASAN(strchr)


@ -41,7 +41,7 @@
/* Start of performance-critical section -- one 64B cache line. */
.align 6
SYM_FUNC_START_WEAK_PI(strcmp)
SYM_FUNC_START(__pi_strcmp)
eor tmp1, src1, src2
mov zeroones, #REP8_01
tst tmp1, #7
@ -171,6 +171,6 @@ L(loop_misaligned):
L(done):
sub result, data1, data2
ret
SYM_FUNC_END_PI(strcmp)
SYM_FUNC_END(__pi_strcmp)
SYM_FUNC_ALIAS_WEAK(strcmp, __pi_strcmp)
EXPORT_SYMBOL_NOHWKASAN(strcmp)


@ -79,7 +79,7 @@
whether the first fetch, which may be misaligned, crosses a page
boundary. */
SYM_FUNC_START_WEAK_PI(strlen)
SYM_FUNC_START(__pi_strlen)
and tmp1, srcin, MIN_PAGE_SIZE - 1
mov zeroones, REP8_01
cmp tmp1, MIN_PAGE_SIZE - 16
@ -208,6 +208,6 @@ L(page_cross):
csel data1, data1, tmp4, eq
csel data2, data2, tmp2, eq
b L(page_cross_entry)
SYM_FUNC_END_PI(strlen)
SYM_FUNC_END(__pi_strlen)
SYM_FUNC_ALIAS_WEAK(strlen, __pi_strlen)
EXPORT_SYMBOL_NOKASAN(strlen)


@ -44,7 +44,7 @@
#define endloop x15
#define count mask
SYM_FUNC_START_WEAK_PI(strncmp)
SYM_FUNC_START(__pi_strncmp)
cbz limit, L(ret0)
eor tmp1, src1, src2
mov zeroones, #REP8_01
@ -256,6 +256,6 @@ L(done_loop):
L(ret0):
mov result, #0
ret
SYM_FUNC_END_PI(strncmp)
SYM_FUNC_END(__pi_strncmp)
SYM_FUNC_ALIAS_WEAK(strncmp, __pi_strncmp)
EXPORT_SYMBOL_NOHWKASAN(strncmp)


@ -47,7 +47,7 @@ limit_wd .req x14
#define REP8_7f 0x7f7f7f7f7f7f7f7f
#define REP8_80 0x8080808080808080
SYM_FUNC_START_WEAK_PI(strnlen)
SYM_FUNC_START(__pi_strnlen)
cbz limit, .Lhit_limit
mov zeroones, #REP8_01
bic src, srcin, #15
@ -156,5 +156,7 @@ CPU_LE( lsr tmp2, tmp2, tmp4 ) /* Shift (tmp1 & 63). */
.Lhit_limit:
mov len, limit
ret
SYM_FUNC_END_PI(strnlen)
SYM_FUNC_END(__pi_strnlen)
SYM_FUNC_ALIAS_WEAK(strnlen, __pi_strnlen)
EXPORT_SYMBOL_NOKASAN(strnlen)


@ -18,7 +18,7 @@
* Returns:
* x0 - address of last occurrence of 'c' or 0
*/
SYM_FUNC_START_WEAK_PI(strrchr)
SYM_FUNC_START(__pi_strrchr)
mov x3, #0
and w1, w1, #0xff
1: ldrb w2, [x0], #1
@ -29,5 +29,6 @@ SYM_FUNC_START_WEAK_PI(strrchr)
b 1b
2: mov x0, x3
ret
SYM_FUNC_END_PI(strrchr)
SYM_FUNC_END(__pi_strrchr)
SYM_FUNC_ALIAS_WEAK(strrchr, __pi_strrchr)
EXPORT_SYMBOL_NOKASAN(strrchr)


@ -107,10 +107,11 @@ SYM_FUNC_END(icache_inval_pou)
* - start - virtual start address of region
* - end - virtual end address of region
*/
SYM_FUNC_START_PI(dcache_clean_inval_poc)
SYM_FUNC_START(__pi_dcache_clean_inval_poc)
dcache_by_line_op civac, sy, x0, x1, x2, x3
ret
SYM_FUNC_END_PI(dcache_clean_inval_poc)
SYM_FUNC_END(__pi_dcache_clean_inval_poc)
SYM_FUNC_ALIAS(dcache_clean_inval_poc, __pi_dcache_clean_inval_poc)
/*
* dcache_clean_pou(start, end)
@ -140,7 +141,7 @@ SYM_FUNC_END(dcache_clean_pou)
* - start - kernel start address of region
* - end - kernel end address of region
*/
SYM_FUNC_START_PI(dcache_inval_poc)
SYM_FUNC_START(__pi_dcache_inval_poc)
dcache_line_size x2, x3
sub x3, x2, #1
tst x1, x3 // end cache line aligned?
@ -158,7 +159,8 @@ SYM_FUNC_START_PI(dcache_inval_poc)
b.lo 2b
dsb sy
ret
SYM_FUNC_END_PI(dcache_inval_poc)
SYM_FUNC_END(__pi_dcache_inval_poc)
SYM_FUNC_ALIAS(dcache_inval_poc, __pi_dcache_inval_poc)
/*
* dcache_clean_poc(start, end)
@ -169,10 +171,11 @@ SYM_FUNC_END_PI(dcache_inval_poc)
* - start - virtual start address of region
* - end - virtual end address of region
*/
SYM_FUNC_START_PI(dcache_clean_poc)
SYM_FUNC_START(__pi_dcache_clean_poc)
dcache_by_line_op cvac, sy, x0, x1, x2, x3
ret
SYM_FUNC_END_PI(dcache_clean_poc)
SYM_FUNC_END(__pi_dcache_clean_poc)
SYM_FUNC_ALIAS(dcache_clean_poc, __pi_dcache_clean_poc)
/*
* dcache_clean_pop(start, end)
@ -183,13 +186,14 @@ SYM_FUNC_END_PI(dcache_clean_poc)
* - start - virtual start address of region
* - end - virtual end address of region
*/
SYM_FUNC_START_PI(dcache_clean_pop)
SYM_FUNC_START(__pi_dcache_clean_pop)
alternative_if_not ARM64_HAS_DCPOP
b dcache_clean_poc
alternative_else_nop_endif
dcache_by_line_op cvap, sy, x0, x1, x2, x3
ret
SYM_FUNC_END_PI(dcache_clean_pop)
SYM_FUNC_END(__pi_dcache_clean_pop)
SYM_FUNC_ALIAS(dcache_clean_pop, __pi_dcache_clean_pop)
/*
* __dma_flush_area(start, size)
@ -199,11 +203,12 @@ SYM_FUNC_END_PI(dcache_clean_pop)
* - start - virtual start address of region
* - size - size in question
*/
SYM_FUNC_START_PI(__dma_flush_area)
SYM_FUNC_START(__pi___dma_flush_area)
add x1, x0, x1
dcache_by_line_op civac, sy, x0, x1, x2, x3
ret
SYM_FUNC_END_PI(__dma_flush_area)
SYM_FUNC_END(__pi___dma_flush_area)
SYM_FUNC_ALIAS(__dma_flush_area, __pi___dma_flush_area)
/*
* __dma_map_area(start, size, dir)
@ -211,12 +216,13 @@ SYM_FUNC_END_PI(__dma_flush_area)
* - size - size of region
* - dir - DMA direction
*/
SYM_FUNC_START_PI(__dma_map_area)
SYM_FUNC_START(__pi___dma_map_area)
add x1, x0, x1
cmp w2, #DMA_FROM_DEVICE
b.eq __pi_dcache_inval_poc
b __pi_dcache_clean_poc
SYM_FUNC_END_PI(__dma_map_area)
SYM_FUNC_END(__pi___dma_map_area)
SYM_FUNC_ALIAS(__dma_map_area, __pi___dma_map_area)
/*
* __dma_unmap_area(start, size, dir)
@ -224,9 +230,10 @@ SYM_FUNC_END_PI(__dma_map_area)
* - size - size of region
* - dir - DMA direction
*/
SYM_FUNC_START_PI(__dma_unmap_area)
SYM_FUNC_START(__pi___dma_unmap_area)
add x1, x0, x1
cmp w2, #DMA_TO_DEVICE
b.ne __pi_dcache_inval_poc
ret
SYM_FUNC_END_PI(__dma_unmap_area)
SYM_FUNC_END(__pi___dma_unmap_area)
SYM_FUNC_ALIAS(__dma_unmap_area, __pi___dma_unmap_area)


@ -152,14 +152,13 @@ SYM_FUNC_END(startup_32)
#ifdef CONFIG_EFI_STUB
SYM_FUNC_START(efi32_stub_entry)
SYM_FUNC_START_ALIAS(efi_stub_entry)
add $0x4, %esp
movl 8(%esp), %esi /* save boot_params pointer */
call efi_main
/* efi_main returns the possibly relocated address of startup_32 */
jmp *%eax
SYM_FUNC_END(efi32_stub_entry)
SYM_FUNC_END_ALIAS(efi_stub_entry)
SYM_FUNC_ALIAS(efi_stub_entry, efi32_stub_entry)
#endif
.text


@ -535,7 +535,6 @@ SYM_CODE_END(startup_64)
#ifdef CONFIG_EFI_STUB
.org 0x390
SYM_FUNC_START(efi64_stub_entry)
SYM_FUNC_START_ALIAS(efi_stub_entry)
and $~0xf, %rsp /* realign the stack */
movq %rdx, %rbx /* save boot_params pointer */
call efi_main
@ -543,7 +542,7 @@ SYM_FUNC_START_ALIAS(efi_stub_entry)
leaq rva(startup_64)(%rax), %rax
jmp *%rax
SYM_FUNC_END(efi64_stub_entry)
SYM_FUNC_END_ALIAS(efi_stub_entry)
SYM_FUNC_ALIAS(efi_stub_entry, efi64_stub_entry)
#endif
.text


@ -1751,8 +1751,6 @@ SYM_FUNC_END(aesni_gcm_finalize)
#endif
SYM_FUNC_START_LOCAL_ALIAS(_key_expansion_128)
SYM_FUNC_START_LOCAL(_key_expansion_256a)
pshufd $0b11111111, %xmm1, %xmm1
shufps $0b00010000, %xmm0, %xmm4
@ -1764,7 +1762,7 @@ SYM_FUNC_START_LOCAL(_key_expansion_256a)
add $0x10, TKEYP
RET
SYM_FUNC_END(_key_expansion_256a)
SYM_FUNC_END_ALIAS(_key_expansion_128)
SYM_FUNC_ALIAS_LOCAL(_key_expansion_128, _key_expansion_256a)
SYM_FUNC_START_LOCAL(_key_expansion_192a)
pshufd $0b01010101, %xmm1, %xmm1


@ -27,8 +27,7 @@
* Output:
* rax original destination
*/
SYM_FUNC_START_ALIAS(__memcpy)
SYM_FUNC_START_WEAK(memcpy)
SYM_FUNC_START(__memcpy)
ALTERNATIVE_2 "jmp memcpy_orig", "", X86_FEATURE_REP_GOOD, \
"jmp memcpy_erms", X86_FEATURE_ERMS
@ -40,11 +39,12 @@ SYM_FUNC_START_WEAK(memcpy)
movl %edx, %ecx
rep movsb
RET
SYM_FUNC_END(memcpy)
SYM_FUNC_END_ALIAS(__memcpy)
EXPORT_SYMBOL(memcpy)
SYM_FUNC_END(__memcpy)
EXPORT_SYMBOL(__memcpy)
SYM_FUNC_ALIAS_WEAK(memcpy, __memcpy)
EXPORT_SYMBOL(memcpy)
/*
* memcpy_erms() - enhanced fast string memcpy. This is faster and
* simpler than memcpy. Use memcpy_erms when possible.


@ -24,7 +24,6 @@
* Output:
* rax: dest
*/
SYM_FUNC_START_WEAK(memmove)
SYM_FUNC_START(__memmove)
mov %rdi, %rax
@ -207,6 +206,7 @@ SYM_FUNC_START(__memmove)
13:
RET
SYM_FUNC_END(__memmove)
SYM_FUNC_END_ALIAS(memmove)
EXPORT_SYMBOL(__memmove)
SYM_FUNC_ALIAS_WEAK(memmove, __memmove)
EXPORT_SYMBOL(memmove)


@ -17,7 +17,6 @@
*
* rax original destination
*/
SYM_FUNC_START_WEAK(memset)
SYM_FUNC_START(__memset)
/*
* Some CPUs support enhanced REP MOVSB/STOSB feature. It is recommended
@ -42,10 +41,11 @@ SYM_FUNC_START(__memset)
movq %r9,%rax
RET
SYM_FUNC_END(__memset)
SYM_FUNC_END_ALIAS(memset)
EXPORT_SYMBOL(memset)
EXPORT_SYMBOL(__memset)
SYM_FUNC_ALIAS_WEAK(memset, __memset)
EXPORT_SYMBOL(memset)
/*
* ISO C memset - set a memory block to a byte value. This function uses
* enhanced rep stosb to override the fast string function.


@ -165,7 +165,18 @@
#ifndef SYM_END
#define SYM_END(name, sym_type) \
.type name sym_type ASM_NL \
.size name, .-name
.set .L__sym_size_##name, .-name ASM_NL \
.size name, .L__sym_size_##name
#endif
/* SYM_ALIAS -- use only if you have to */
#ifndef SYM_ALIAS
#define SYM_ALIAS(alias, name, sym_type, linkage) \
linkage(alias) ASM_NL \
.set alias, name ASM_NL \
.type alias sym_type ASM_NL \
.set .L__sym_size_##alias, .L__sym_size_##name ASM_NL \
.size alias, .L__sym_size_##alias
#endif
/* === code annotations === */
@ -200,30 +211,8 @@
SYM_ENTRY(name, linkage, SYM_A_NONE)
#endif
/*
* SYM_FUNC_START_LOCAL_ALIAS -- use where there are two local names for one
* function
*/
#ifndef SYM_FUNC_START_LOCAL_ALIAS
#define SYM_FUNC_START_LOCAL_ALIAS(name) \
SYM_START(name, SYM_L_LOCAL, SYM_A_ALIGN)
#endif
/*
* SYM_FUNC_START_ALIAS -- use where there are two global names for one
* function
*/
#ifndef SYM_FUNC_START_ALIAS
#define SYM_FUNC_START_ALIAS(name) \
SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN)
#endif
/* SYM_FUNC_START -- use for global functions */
#ifndef SYM_FUNC_START
/*
* The same as SYM_FUNC_START_ALIAS, but we will need to distinguish these two
* later.
*/
#define SYM_FUNC_START(name) \
SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN)
#endif
@ -236,7 +225,6 @@
/* SYM_FUNC_START_LOCAL -- use for local functions */
#ifndef SYM_FUNC_START_LOCAL
/* the same as SYM_FUNC_START_LOCAL_ALIAS, see comment near SYM_FUNC_START */
#define SYM_FUNC_START_LOCAL(name) \
SYM_START(name, SYM_L_LOCAL, SYM_A_ALIGN)
#endif
@ -259,22 +247,39 @@
SYM_START(name, SYM_L_WEAK, SYM_A_NONE)
#endif
/* SYM_FUNC_END_ALIAS -- the end of LOCAL_ALIASed or ALIASed function */
#ifndef SYM_FUNC_END_ALIAS
#define SYM_FUNC_END_ALIAS(name) \
SYM_END(name, SYM_T_FUNC)
#endif
/*
* SYM_FUNC_END -- the end of SYM_FUNC_START_LOCAL, SYM_FUNC_START,
* SYM_FUNC_START_WEAK, ...
*/
#ifndef SYM_FUNC_END
/* the same as SYM_FUNC_END_ALIAS, see comment near SYM_FUNC_START */
#define SYM_FUNC_END(name) \
SYM_END(name, SYM_T_FUNC)
#endif
/*
* SYM_FUNC_ALIAS -- define a global alias for an existing function
*/
#ifndef SYM_FUNC_ALIAS
#define SYM_FUNC_ALIAS(alias, name) \
SYM_ALIAS(alias, name, SYM_T_FUNC, SYM_L_GLOBAL)
#endif
/*
* SYM_FUNC_ALIAS_LOCAL -- define a local alias for an existing function
*/
#ifndef SYM_FUNC_ALIAS_LOCAL
#define SYM_FUNC_ALIAS_LOCAL(alias, name) \
SYM_ALIAS(alias, name, SYM_T_FUNC, SYM_L_LOCAL)
#endif
/*
* SYM_FUNC_ALIAS_WEAK -- define a weak global alias for an existing function
*/
#ifndef SYM_FUNC_ALIAS_WEAK
#define SYM_FUNC_ALIAS_WEAK(alias, name) \
SYM_ALIAS(alias, name, SYM_T_FUNC, SYM_L_WEAK)
#endif
/* SYM_CODE_START -- use for non-C (special) functions */
#ifndef SYM_CODE_START
#define SYM_CODE_START(name) \


@ -27,8 +27,7 @@
* Output:
* rax original destination
*/
SYM_FUNC_START_ALIAS(__memcpy)
SYM_FUNC_START_WEAK(memcpy)
SYM_FUNC_START(__memcpy)
ALTERNATIVE_2 "jmp memcpy_orig", "", X86_FEATURE_REP_GOOD, \
"jmp memcpy_erms", X86_FEATURE_ERMS
@ -40,11 +39,12 @@ SYM_FUNC_START_WEAK(memcpy)
movl %edx, %ecx
rep movsb
RET
SYM_FUNC_END(memcpy)
SYM_FUNC_END_ALIAS(__memcpy)
EXPORT_SYMBOL(memcpy)
SYM_FUNC_END(__memcpy)
EXPORT_SYMBOL(__memcpy)
SYM_FUNC_ALIAS_WEAK(memcpy, __memcpy)
EXPORT_SYMBOL(memcpy)
/*
* memcpy_erms() - enhanced fast string memcpy. This is faster and
* simpler than memcpy. Use memcpy_erms when possible.


@ -17,7 +17,6 @@
*
* rax original destination
*/
SYM_FUNC_START_WEAK(memset)
SYM_FUNC_START(__memset)
/*
* Some CPUs support enhanced REP MOVSB/STOSB feature. It is recommended
@ -42,10 +41,11 @@ SYM_FUNC_START(__memset)
movq %r9,%rax
RET
SYM_FUNC_END(__memset)
SYM_FUNC_END_ALIAS(memset)
EXPORT_SYMBOL(memset)
EXPORT_SYMBOL(__memset)
SYM_FUNC_ALIAS_WEAK(memset, __memset)
EXPORT_SYMBOL(memset)
/*
* ISO C memset - set a memory block to a byte value. This function uses
* enhanced rep stosb to override the fast string function.


@ -50,41 +50,32 @@
#ifndef SYM_END
#define SYM_END(name, sym_type) \
.type name sym_type ASM_NL \
.set .L__sym_size_##name, .-name ASM_NL \
.size name, .-name
#endif
/*
* SYM_FUNC_START_ALIAS -- use where there are two global names for one
* function
*/
#ifndef SYM_FUNC_START_ALIAS
#define SYM_FUNC_START_ALIAS(name) \
SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN)
/* SYM_ALIAS -- use only if you have to */
#ifndef SYM_ALIAS
#define SYM_ALIAS(alias, name, sym_type, linkage) \
linkage(alias) ASM_NL \
.set alias, name ASM_NL \
.type alias sym_type ASM_NL \
.set .L__sym_size_##alias, .L__sym_size_##name ASM_NL \
.size alias, .L__sym_size_##alias
#endif
/* SYM_FUNC_START -- use for global functions */
#ifndef SYM_FUNC_START
/*
* The same as SYM_FUNC_START_ALIAS, but we will need to distinguish these two
* later.
*/
#define SYM_FUNC_START(name) \
SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN)
#endif
/* SYM_FUNC_START_LOCAL -- use for local functions */
#ifndef SYM_FUNC_START_LOCAL
/* the same as SYM_FUNC_START_LOCAL_ALIAS, see comment near SYM_FUNC_START */
#define SYM_FUNC_START_LOCAL(name) \
SYM_START(name, SYM_L_LOCAL, SYM_A_ALIGN)
#endif
/* SYM_FUNC_END_ALIAS -- the end of LOCAL_ALIASed or ALIASed function */
#ifndef SYM_FUNC_END_ALIAS
#define SYM_FUNC_END_ALIAS(name) \
SYM_END(name, SYM_T_FUNC)
#endif
/* SYM_FUNC_START_WEAK -- use for weak functions */
#ifndef SYM_FUNC_START_WEAK
#define SYM_FUNC_START_WEAK(name) \
@ -96,9 +87,32 @@
* SYM_FUNC_START_WEAK, ...
*/
#ifndef SYM_FUNC_END
/* the same as SYM_FUNC_END_ALIAS, see comment near SYM_FUNC_START */
#define SYM_FUNC_END(name) \
SYM_END(name, SYM_T_FUNC)
#endif
/*
* SYM_FUNC_ALIAS -- define a global alias for an existing function
*/
#ifndef SYM_FUNC_ALIAS
#define SYM_FUNC_ALIAS(alias, name) \
SYM_ALIAS(alias, name, SYM_T_FUNC, SYM_L_GLOBAL)
#endif
/*
* SYM_FUNC_ALIAS_LOCAL -- define a local alias for an existing function
*/
#ifndef SYM_FUNC_ALIAS_LOCAL
#define SYM_FUNC_ALIAS_LOCAL(alias, name) \
SYM_ALIAS(alias, name, SYM_T_FUNC, SYM_L_LOCAL)
#endif
/*
* SYM_FUNC_ALIAS_WEAK -- define a weak global alias for an existing function
*/
#ifndef SYM_FUNC_ALIAS_WEAK
#define SYM_FUNC_ALIAS_WEAK(alias, name) \
SYM_ALIAS(alias, name, SYM_T_FUNC, SYM_L_WEAK)
#endif
#endif /* PERF_LINUX_LINKAGE_H_ */