Thomas Gleixner | 767a67b | 2019-06-01 10:08:44 +0200 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0-only */ |
Jan Beulich | ec7fd34 | 2013-03-11 10:06:12 +0000 | [diff] [blame] | 2 | .text |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 3 | #include <linux/linkage.h> |
| 4 | #include <asm/segment.h> |
Jeremy Fitzhardinge | 0341c14 | 2009-02-13 11:14:01 -0800 | [diff] [blame] | 5 | #include <asm/page_types.h> |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 6 | |
# Copyright 2003, 2008 Pavel Machek <pavel@suse.cz>
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 8 | |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 9 | .code32 |
| 10 | ALIGN |
| 11 | |
# Entry point reached from the real-mode wakeup trampoline after the
# firmware resumes the CPU from ACPI S3.  We arrive in 32-bit protected
# mode with only a minimal environment: rebuild the segment registers
# and descriptor tables, restore the stack, verify saved_magic, then
# jump to the address save_registers recorded (ret_point).
SYM_CODE_START(wakeup_pmode_return)
	movw	$__KERNEL_DS, %ax	# kernel data segment for ss/fs/gs
	movw	%ax, %ss
	movw	%ax, %fs
	movw	%ax, %gs

	movw	$__USER_DS, %ax		# flat data segment for ds/es
	movw	%ax, %ds
	movw	%ax, %es

	# reload the idt/ldt, as we need the full 32 bit addresses
	lidt	saved_idt
	lldt	saved_ldt
	ljmp	$(__KERNEL_CS), $1f	# far jump reloads %cs with the kernel code segment
1:
	movl	%cr3, %eax		# write cr3 back to itself to flush the TLB
	movl	%eax, %cr3
	wbinvd				# write back and invalidate caches

	# and restore the stack ... but you need gdt for this to work
	movl	saved_context_esp, %esp

	# Sanity check: the suspend path set saved_magic before sleeping.
	# %cs override: ds-relative addressing is not trusted until verified.
	movl	%cs:saved_magic, %eax
	cmpl	$0x12345678, %eax
	jne	bogus_magic

	# jump to place where we left off
	movl	saved_eip, %eax
	jmp	*%eax
SYM_CODE_END(wakeup_pmode_return)
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 42 | |
# saved_magic mismatch: the machine state is not what the suspend path
# left behind, so resuming would execute garbage.  Spin forever instead.
bogus_magic:
	jmp	bogus_magic
| 45 | |
| 46 | |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 47 | |
# Record the CPU context that wakeup_pmode_return needs in order to
# resume: descriptor-table registers, the task register, the caller's
# stack pointer and callee-saved GPRs, eflags, and the resume address.
# Called from do_suspend_lowlevel just before entering S3.
save_registers:
	sidt	saved_idt		# store IDT register
	sldt	saved_ldt		# store LDT selector
	str	saved_tss		# store task register (TSS selector)

	leal	4(%esp), %eax		# caller's esp: skip our own return address
	movl	%eax, saved_context_esp
	movl	%ebx, saved_context_ebx
	movl	%ebp, saved_context_ebp
	movl	%esi, saved_context_esi
	movl	%edi, saved_context_edi
	pushfl				# capture eflags via the stack ...
	popl	saved_context_eflags	# ... there is no direct eflags store

	movl	$ret_point, saved_eip	# where wakeup_pmode_return jumps after resume
	RET
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 64 | |
| 65 | |
# Counterpart of save_registers: reload the callee-saved GPRs and
# eflags captured before suspend.  esp and the descriptor tables were
# already restored by wakeup_pmode_return before we got here.
restore_registers:
	movl	saved_context_ebp, %ebp
	movl	saved_context_ebx, %ebx
	movl	saved_context_esi, %esi
	movl	saved_context_edi, %edi
	pushl	saved_context_eflags	# restore eflags last, via the stack
	popfl
	RET
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 74 | |
# Low-level ACPI suspend entry point, called from the ACPI sleep code.
# Saves processor and register state, then asks the firmware to enter
# S3.  On success execution resumes at ret_point (via the wakeup
# trampoline and wakeup_pmode_return); on failure the sleep call
# returns here and we fall through to the same recovery path.
SYM_CODE_START(do_suspend_lowlevel)
	call	save_processor_state
	call	save_registers
	pushl	$3			# argument: ACPI sleep state S3
	call	x86_acpi_enter_sleep_state
	addl	$4, %esp		# pop the argument

	# In case of S3 failure, we'll emerge here.  Jump
	# to ret_point to recover
	jmp	ret_point
	.p2align 4,,7
ret_point:
	call	restore_registers
	call	restore_processor_state
	RET
SYM_CODE_END(do_suspend_lowlevel)
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 91 | |
	.data
ALIGN
# Set by the suspend path; checked against 0x12345678 on wakeup to
# verify we really came through the expected resume sequence.
SYM_DATA(saved_magic,	.long	0)
saved_eip:		.long 0		# resume address loaded by wakeup_pmode_return

# saved registers
saved_idt:	.long	0,0		# room for the 6-byte IDTR image
saved_ldt:	.long	0		# LDT selector
saved_tss:	.long	0		# task register (TSS selector)
| 101 | |