/*
 * hal_cpu.S - Miscellaneous CPU functions
 *
 * Copyright (c) 2017 Maxime Villard
 *
 * This file is part of ALMOS-MKH.
 *
 * ALMOS-MKH is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2.0 of the License.
 *
 * ALMOS-MKH is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with ALMOS-MKH; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */
---|
| 21 | |
---|
| 22 | #define x86_ASM |
---|
| 23 | #include <hal_boot.h> |
---|
[85] | 24 | #include <hal_segmentation.h> |
---|
[166] | 25 | #include <hal_register.h> |
---|
[339] | 26 | #include <hal_kentry.h> |
---|
[29] | 27 | |
---|
/*
 * void lgdt(void *rd)
 * In:  %rdi = pointer to a GDT region descriptor (limit + base)
 * Load the GDT register from *rd, then reload the stale data segment
 * selectors so they pick up the new descriptors.
 * NOTE(review): %cs is not reloaded here -- presumably the kernel code
 * selector slot is unchanged across the lgdt; confirm at call sites.
 */
ASM_ENTRY(lgdt)
	lgdt	(%rdi)
	/* Reload the prefetch queue */
	jmp	1f
	nop
1:	/* Reload stale selectors */
	movl	$GDT_FIXED_SEL(GDT_KDATA_SEL, SEL_KPL),%eax
	movl	%eax,%ds
	movl	%eax,%es
	movl	%eax,%ss
	ret
---|
| 39 | |
---|
/*
 * void lidt(void *rd)
 * In:  %rdi = pointer to an IDT region descriptor (limit + base)
 * Load the IDT register from *rd.
 */
ASM_ENTRY(lidt)
	lidt	(%rdi)
	ret
---|
| 43 | |
---|
/*
 * void ltr(uint16_t sel)
 * In:  %di = TSS segment selector
 * Load the Task Register with the given selector.
 */
ASM_ENTRY(ltr)
	ltr	%di
	ret
---|
| 47 | |
---|
/*
 * void invlpg(vaddr_t va)
 * In:  %rdi = virtual address
 * Invalidate the TLB entry covering the given virtual address.
 */
ASM_ENTRY(invlpg)
	invlpg	(%rdi)
	ret
---|
| 51 | |
---|
/*
 * void sti(void)
 * Enable maskable hardware interrupts (set RFLAGS.IF).
 */
ASM_ENTRY(sti)
	sti
	ret
---|
| 55 | |
---|
/*
 * void cli(void)
 * Disable maskable hardware interrupts (clear RFLAGS.IF).
 */
ASM_ENTRY(cli)
	cli
	ret
---|
| 59 | |
---|
/*
 * uint64_t rdtsc(void)
 * Out: %rax = 64-bit timestamp counter value
 * RDTSC returns the low 32 bits in %eax and the high 32 bits in %edx.
 * Writing %eax implicitly zeroes the upper half of %rax (x86-64
 * zero-extension rule), so the previous "xorq %rax,%rax" was dead code
 * and has been removed; the two halves are simply merged below.
 */
ASM_ENTRY(rdtsc)
	rdtsc
	shlq	$32,%rdx		/* move high half into bits 63:32 */
	orq	%rdx,%rax		/* %rax = (edx << 32) | eax */
	ret
---|
| 66 | |
---|
/*
 * uint8_t in8(uint16_t port)
 * In:  %rdi = I/O port number (only %dx is used by inb)
 * Out: %rax = byte read, zero-extended
 * The xor is required: inb writes only %al, leaving bits 63:8 of %rax
 * unchanged, so %rax must be cleared first for a clean return value.
 */
ASM_ENTRY(in8)
	movq	%rdi,%rdx
	xorq	%rax,%rax
	inb	%dx,%al
	ret
---|
| 72 | |
---|
/*
 * uint16_t in16(uint16_t port)
 * In:  %rdi = I/O port number (only %dx is used by inw)
 * Out: %rax = word read, zero-extended
 * The xor is required: inw writes only %ax, leaving bits 63:16 of %rax
 * unchanged, so %rax must be cleared first for a clean return value.
 */
ASM_ENTRY(in16)
	movq	%rdi,%rdx
	xorq	%rax,%rax
	inw	%dx,%ax
	ret
---|
| 78 | |
---|
/*
 * void out8(uint16_t port, uint8_t val)
 * In:  %rdi = I/O port number, %rsi = value (only %al is emitted)
 * Write one byte to an I/O port.
 */
ASM_ENTRY(out8)
	movq	%rdi,%rdx
	movq	%rsi,%rax
	outb	%al,%dx
	ret
---|
| 84 | |
---|
/*
 * void out16(uint16_t port, uint16_t val)
 * In:  %rdi = I/O port number, %rsi = value (only %ax is emitted)
 * Write one word to an I/O port.
 */
ASM_ENTRY(out16)
	movq	%rdi,%rdx
	movq	%rsi,%rax
	outw	%ax,%dx
	ret
---|
| 90 | |
---|
/*
 * uint64_t rdmsr(uint32_t msr)
 * In:  %rdi = MSR number (moved into %ecx for RDMSR)
 * Out: %rax = 64-bit MSR value
 * RDMSR returns the low 32 bits in %eax and the high 32 bits in %edx.
 * Writing %eax implicitly zeroes the upper half of %rax, so the
 * previous "xorq %rax,%rax" was dead code and has been removed.
 */
ASM_ENTRY(rdmsr)
	movq	%rdi,%rcx
	rdmsr
	shlq	$32,%rdx		/* move high half into bits 63:32 */
	orq	%rdx,%rax		/* %rax = (edx << 32) | eax */
	ret
---|
| 98 | |
---|
/*
 * void wrmsr(uint32_t msr, uint64_t val)
 * In:  %rdi = MSR number, %rsi = 64-bit value
 * WRMSR takes the MSR number in %ecx and the value split as %edx:%eax,
 * so the 64-bit argument is duplicated and the copy shifted right to
 * produce the high half.
 */
ASM_ENTRY(wrmsr)
	movq	%rdi,%rcx
	movq	%rsi,%rax		/* low 32 bits (as %eax) */
	movq	%rsi,%rdx
	shrq	$32,%rdx		/* high 32 bits (as %edx) */
	wrmsr
	ret
---|
| 106 | |
---|
/*
 * void mfence(void)
 * Full memory fence: serialize all prior loads and stores against all
 * subsequent ones.
 */
ASM_ENTRY(mfence)
	mfence
	ret
---|
| 110 | |
---|
/*
 * uint64_t rcr0(void)
 * Out: %rax = current value of %cr0
 */
ASM_ENTRY(rcr0)
	movq	%cr0,%rax
	ret
---|
| 114 | |
---|
/*
 * uint64_t rcr2(void)
 * Out: %rax = current value of %cr2 (faulting address after a #PF)
 */
ASM_ENTRY(rcr2)
	movq	%cr2,%rax
	ret
---|
| 118 | |
---|
/*
 * uint64_t rcr3(void)
 * Out: %rax = current value of %cr3 (page-table base)
 */
ASM_ENTRY(rcr3)
	movq	%cr3,%rax
	ret
---|
| 122 | |
---|
/*
 * void lcr3(uint64_t val)
 * In:  %rdi = new %cr3 value (page-table base)
 * Loading %cr3 also flushes all non-global TLB entries.
 */
ASM_ENTRY(lcr3)
	movq	%rdi,%cr3
	ret
---|
| 126 | |
---|
/*
 * uint64_t rcr4(void)
 * Out: %rax = current value of %cr4
 */
ASM_ENTRY(rcr4)
	movq	%cr4,%rax
	ret
---|
| 130 | |
---|
/*
 * void lcr4(uint64_t val)
 * In:  %rdi = new %cr4 value
 */
ASM_ENTRY(lcr4)
	movq	%rdi,%cr4
	ret
---|
| 134 | |
---|
/*
 * void cpuid(uint64_t leaf, uint64_t subleaf, uint32_t *regs)
 * In:  %rdi = CPUID leaf (-> %eax), %rsi = subleaf (-> %ecx),
 *      %rdx = pointer to a 4 x 32-bit output buffer
 * Out: regs[0..3] = EAX, EBX, ECX, EDX returned by CPUID
 * %rbx is callee-saved in the SysV ABI but clobbered by CPUID, so it is
 * stashed in %r8 (untouched by CPUID) and restored afterwards. The
 * destination pointer is moved out of %rdx before CPUID overwrites it.
 */
ASM_ENTRY(cpuid)
	movq	%rbx,%r8		/* preserve callee-saved %rbx */
	movq	%rdi,%rax		/* leaf */
	movq	%rsi,%rcx		/* subleaf */
	movq	%rdx,%rsi		/* save output pointer before clobber */
	cpuid
	movl	%eax,0(%rsi)
	movl	%ebx,4(%rsi)
	movl	%ecx,8(%rsi)
	movl	%edx,12(%rsi)
	movq	%r8,%rbx		/* restore %rbx */
	ret
---|
| 147 | |
---|
/*
 * void tlbflushg(void)
 * Flush ALL TLB entries, including global ones. To flush global
 * entries, we must toggle the CR4_PGE flag in %cr4: clearing it
 * invalidates global translations, and restoring the original value
 * re-enables global pages.
 */
ASM_ENTRY(tlbflushg)
	movq	%cr4,%rax		/* %rax = original %cr4 */
	movq	%rax,%rdx
	andq	$~CR4_PGE,%rdx
	movq	%rdx,%cr4		/* PGE off: globals invalidated */
	movq	%rax,%cr4		/* restore original %cr4 */
	ret
---|
| 158 | |
---|
/*
 * void tlbflush(void)
 * Flush all non-global TLB entries by reloading %cr3 with its own
 * value (global pages survive this).
 */
ASM_ENTRY(tlbflush)
	movq	%cr3,%rax
	movq	%rax,%cr3
	ret
---|
| 163 | |
---|
/*
 * void clts(void)
 * Clear the Task-Switched flag (CR0.TS).
 */
ASM_ENTRY(clts)
	clts
	ret
---|
| 167 | |
---|
/*
 * void stts(void)
 * Set the Task-Switched flag (CR0.TS) via a read-modify-write of %cr0.
 */
ASM_ENTRY(stts)
	movq	%cr0,%rax
	orq	$CR0_TS,%rax
	movq	%rax,%cr0
	ret
---|
| 173 | |
---|
/*
 * void pause(void)
 * Spin-wait hint for busy-wait loops.
 */
ASM_ENTRY(pause)
	pause
	ret
---|
| 177 | |
---|
/*
 * void wbinvd(void)
 * Write back and invalidate all processor caches.
 */
ASM_ENTRY(wbinvd)
	wbinvd
	ret
---|
| 181 | |
---|
[94] | 182 | /* -------------------------------------------------------------------------- */ |
---|
| 183 | |
---|
/*
 * void cpu_context_switch(ctx *oldctx, ctx *newctx)
 * In:  %rdi = context of the outgoing thread, %rsi = context of the
 *      incoming thread
 *
 * Hand the CPU from the old thread to the new one. The trick: a fake
 * interrupt (iretq) frame is built on the OLD thread's trap-frame area,
 * with %rip pointing at thr_resume and tf_rsp pointing at the current
 * stack (where %r12/%r13 were just pushed). When the old thread is
 * eventually switched back in by the same mechanism, the iretq below
 * lands in thr_resume, which pops %r12/%r13 and returns to the original
 * caller. Only callee-saved registers that this function uses need to
 * be saved; INTR_SAVE_REGS captures the rest into the trap frame.
 */
ASM_ENTRY(cpu_context_switch)
	pushq	%r12
	pushq	%r13

	movq	%rdi,%r13		/* oldctx */
	movq	%rsi,%r12		/* newctx */

	/*
	 * Save the current stack in %rdx, and switch to the trap frame of
	 * the old thread (pushes below fill it from its top downwards).
	 */
	movq	%rsp,%rdx
	movq	CTX_TF(%r13),%rsp
	addq	$TF_SIZE,%rsp		/* end of the structure */

	/*
	 * Save the TLS intr flag of the old thread, and install the new
	 * thread's one.
	 */
	movq	TLSVAR(INTR),%rax
	movq	%rax,CTX_INTR(%r13)
	movq	CTX_INTR(%r12),%rax
	movq	%rax,TLSVAR(INTR)

	/* Build the trap frame (hardware iretq layout, top-down) */
	movl	%ss,%eax
	pushq	%rax			/* tf_ss */
	pushq	%rdx			/* tf_rsp: old thread's stack */
	pushfq				/* tf_rflags */
	movl	%cs,%eax
	pushq	%rax			/* tf_cs */
	movabsq	$thr_resume,%rax
	pushq	%rax			/* tf_rip: resume point */
	pushq	$0			/* tf_err */
	pushq	$T_ASTFLT		/* tf_trapno */
	INTR_SAVE_REGS

	/*
	 * Switch rsp0: store the new thread's kernel stack top through the
	 * per-cpu RSP0 pointer (NOTE(review): presumably the TSS rsp0
	 * slot -- confirm against the TLS layout).
	 */
	movq	CTX_RSP0(%r12),%rax
	movq	TLSVAR(RSP0),%rdx
	movq	%rax,(%rdx)

	/* Switch to the new thread's trap frame */
	movq	CTX_TF(%r12),%rsp

	/*
	 * Restore the new thread's context, and jump into it.
	 */
	INTR_RESTORE_REGS
	addq	$16,%rsp		/* skip tf_trapno and tf_err */
	iretq

thr_resume:
	/*
	 * Resume point for a thread switched back in: the iretq above has
	 * restored its stack, so only pop %r13/%r12 (reverse push order)
	 * and return to the original caller of cpu_context_switch().
	 */
	popq	%r13
	popq	%r12

	ret
---|
| 243 | |
---|
| 244 | /* -------------------------------------------------------------------------- */ |
---|
| 245 | |
---|
/*
 * uint32_t atomic_cas_32(uint32_t *ptr, uint32_t expected, uint32_t new)
 * In:  %rdi = ptr, %esi = expected, %edx = new
 * Out: %eax = previous value of *ptr
 * CMPXCHG compares %eax with *ptr; if equal it stores %edx, otherwise
 * it loads the current value into %eax. Either way %eax holds the old
 * value on return; the caller checks success with (old == expected).
 */
ASM_ENTRY(atomic_cas_32)
	movl	%esi,%eax
	lock
	cmpxchgl %edx,(%rdi)
	/* %eax now contains the old value */
	ret
---|
| 252 | |
---|
/*
 * uint32_t atomic_add_32(uint32_t *ptr, uint32_t val)
 * In:  %rdi = ptr, %esi = val
 * Out: %eax = previous value of *ptr
 * XADD atomically exchanges and adds: *ptr += val, and the old value of
 * *ptr lands in %eax.
 */
ASM_ENTRY(atomic_add_32)
	movl	%esi,%eax
	lock
	xaddl	%eax,(%rdi)
	/* %eax now contains the old value */
	ret
---|
| 259 | |
---|
/*
 * uint32_t atomic_and_32(uint32_t *ptr, uint32_t val)
 * In:  %rdi = ptr, %esi = val
 * Out: %eax = previous value of *ptr
 * Classic CAS loop: compute old & val, then try to install it. On CAS
 * failure CMPXCHG reloads the current value into %eax, so the loop
 * simply retries with the fresh value until the store wins.
 */
ASM_ENTRY(atomic_and_32)
	movl	(%rdi),%eax
1:
	movl	%eax,%ecx
	andl	%esi,%ecx		/* %ecx = old & val */
	lock
	cmpxchgl %ecx,(%rdi)
	jnz	1b			/* lost the race: retry with new %eax */
	/* %eax now contains the old value */
	ret
---|
| 270 | |
---|
/*
 * uint32_t atomic_or_32(uint32_t *ptr, uint32_t val)
 * In:  %rdi = ptr, %esi = val
 * Out: %eax = previous value of *ptr
 * Classic CAS loop: compute old | val, then try to install it. On CAS
 * failure CMPXCHG reloads the current value into %eax, so the loop
 * simply retries with the fresh value until the store wins.
 */
ASM_ENTRY(atomic_or_32)
	movl	(%rdi),%eax
1:
	movl	%eax,%ecx
	orl	%esi,%ecx		/* %ecx = old | val */
	lock
	cmpxchgl %ecx,(%rdi)
	jnz	1b			/* lost the race: retry with new %eax */
	/* %eax now contains the old value */
	ret
---|
| 281 | |
---|