/*
 * x86 semaphore implementation.
 *
 * (C) Copyright 1999 Linus Torvalds
 *
 * Portions Copyright 1999 Red Hat, Inc.
 *
 *	This program is free software; you can redistribute it and/or
 *	modify it under the terms of the GNU General Public License
 *	as published by the Free Software Foundation; either version
 *	2 of the License, or (at your option) any later version.
 *
 * rw semaphores implemented November 1999 by Benjamin LaHaise <bcrl@kvack.org>
 */

#include <linux/linkage.h>
#include <asm/alternative-asm.h>
#include <asm/frame.h>

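/*
 * __ASM_SEL() expands to its first argument on 32-bit and its second on
 * 64-bit, so these helpers name the register/instruction that is half
 * the native word size: %dx/decw on 32-bit kernels, %edx/decl on 64-bit
 * ones. call_rwsem_wake() below uses them to examine the count handed
 * over in %[e]dx.
 */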
#define __ASM_HALF_REG(reg)	__ASM_SEL(reg, e##reg)
#define __ASM_HALF_SIZE(inst)	__ASM_SEL(inst##w, inst##l)

#ifdef CONFIG_X86_32

/*
 * The semaphore operations have a special calling sequence that
 * allows us to do a simpler in-line version of them. These routines
 * need to convert that sequence back into the C sequence when
 * there is contention on the semaphore.
 *
 * %eax contains the semaphore pointer on entry. Save the C-clobbered
 * registers (%eax, %edx and %ecx), except %eax, which is either a return
 * value or just gets clobbered. The same is true for %edx, so make sure
 * GCC reloads it after the slow path by making it hold a temporary; for
 * an example, see ____down_write().
 */
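
/*
 * For orientation, the inline fast path that lands here looks roughly
 * like the following sketch of __down_read() (a sketch only, not the
 * literal code generated from asm/rwsem.h):
 *
 *	lock; incl (%eax)	# one more reader; %eax = sem
 *	jns 1f			# sign still clear: lock acquired
 *	call call_rwsem_down_read_failed
 * 1:
 */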

#define save_common_regs \
	pushl %ecx

#define restore_common_regs \
	popl %ecx

/* Avoid uglifying the argument copying x86-64 needs to do. */
.macro movq src, dst
.endm
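
/*
 * With the 32-bit regparm calling convention the slow-path C functions
 * already take their first argument in %eax, which is where the fast
 * path leaves the semaphore pointer, so the "movq %rax,%rdi" copies
 * below expand to nothing here.
 */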

#else

/*
 * x86-64 rwsem wrappers
 *
 * This interfaces the inline asm code to the slow-path
 * C routines. We need to save the call-clobbered regs
 * that the asm does not mark as clobbered, and move the
 * argument from %rax to %rdi.
 *
 * NOTE! We don't need to save %rax, because the functions
 * will always return the semaphore pointer in %rax (which
 * is also the input argument to these helpers).
 *
 * The following can clobber %rdx because the asm clobbers it:
 *   call_rwsem_down_write_failed
 *   call_rwsem_wake
 * but %rdi, %rsi, %rcx and %r8-%r11 always need saving.
 */

#define save_common_regs \
	pushq %rdi; \
	pushq %rsi; \
	pushq %rcx; \
	pushq %r8;  \
	pushq %r9;  \
	pushq %r10; \
	pushq %r11

#define restore_common_regs \
	popq %r11; \
	popq %r10; \
	popq %r9; \
	popq %r8; \
	popq %rcx; \
	popq %rsi; \
	popq %rdi

#endif

/* Fix up special calling conventions */
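/*
 * FRAME_BEGIN/FRAME_END set up and tear down a frame pointer on
 * CONFIG_FRAME_POINTER kernels so backtraces through these thunks
 * stay intact; otherwise they expand to nothing.
 */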
ENTRY(call_rwsem_down_read_failed)
	FRAME_BEGIN
	save_common_regs
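	/*
	 * The down_read fast path does not mark %[e|r]dx as clobbered,
	 * so preserve it by hand across the C call.
	 */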
	__ASM_SIZE(push,) %__ASM_REG(dx)
	movq %rax,%rdi
	call rwsem_down_read_failed
	__ASM_SIZE(pop,) %__ASM_REG(dx)
	restore_common_regs
	FRAME_END
	ret
ENDPROC(call_rwsem_down_read_failed)

ENTRY(call_rwsem_down_read_failed_killable)
	FRAME_BEGIN
	save_common_regs
	__ASM_SIZE(push,) %__ASM_REG(dx)
	movq %rax,%rdi
	call rwsem_down_read_failed_killable
	__ASM_SIZE(pop,) %__ASM_REG(dx)
	restore_common_regs
	FRAME_END
	ret
ENDPROC(call_rwsem_down_read_failed_killable)

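/*
 * No %[e|r]dx save in the write-path thunks: the down_write fast path
 * already marks %[e]dx as clobbered (it is used as a scratch register
 * there), as noted in the comments above.
 */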
ENTRY(call_rwsem_down_write_failed)
	FRAME_BEGIN
	save_common_regs
	movq %rax,%rdi
	call rwsem_down_write_failed
	restore_common_regs
	FRAME_END
	ret
ENDPROC(call_rwsem_down_write_failed)

ENTRY(call_rwsem_down_write_failed_killable)
	FRAME_BEGIN
	save_common_regs
	movq %rax,%rdi
	call rwsem_down_write_failed_killable
	restore_common_regs
	FRAME_END
	ret
ENDPROC(call_rwsem_down_write_failed_killable)

ENTRY(call_rwsem_wake)
	FRAME_BEGIN
	/* do nothing if still outstanding active readers */
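	/*
	 * The fast path hands over the pre-release count in %[e]dx
	 * (via the xadd in __up_read()/__up_write()); if decrementing
	 * its low, active half is non-zero, other tasks still hold the
	 * lock and no wakeup is needed yet.
	 */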
	__ASM_HALF_SIZE(dec) %__ASM_HALF_REG(dx)
	jnz 1f
	save_common_regs
	movq %rax,%rdi
	call rwsem_wake
	restore_common_regs
1:	FRAME_END
	ret
ENDPROC(call_rwsem_wake)

ENTRY(call_rwsem_downgrade_wake)
	FRAME_BEGIN
	save_common_regs
	__ASM_SIZE(push,) %__ASM_REG(dx)
	movq %rax,%rdi
	call rwsem_downgrade_wake
	__ASM_SIZE(pop,) %__ASM_REG(dx)
	restore_common_regs
	FRAME_END
	ret
ENDPROC(call_rwsem_downgrade_wake)