^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1) /* SPDX-License-Identifier: GPL-2.0-or-later */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3) * This file contains miscellaneous low-level functions.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 4) * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 5) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 6) * Largely rewritten by Cort Dougan (cort@cs.nmt.edu)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 7) * and Paul Mackerras.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 8) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 9) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 10)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 11) #include <linux/sys.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 12) #include <asm/unistd.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 13) #include <asm/errno.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 14) #include <asm/reg.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 15) #include <asm/page.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 16) #include <asm/cache.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 17) #include <asm/cputable.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 18) #include <asm/mmu.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 19) #include <asm/ppc_asm.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 20) #include <asm/thread_info.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 21) #include <asm/asm-offsets.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 22) #include <asm/processor.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 23) #include <asm/bug.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 24) #include <asm/ptrace.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 25) #include <asm/export.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 26) #include <asm/feature-fixups.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 27)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 28) .text
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 29)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 30) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 31) * We store the saved ksp_limit in the unused part
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 32) * of the STACK_FRAME_OVERHEAD
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 33) */
/*
 * void call_do_softirq(void *sp);
 *
 * Run __do_softirq() on the stack given in r3, then switch back to the
 * caller's stack.  The task's ksp_limit is pointed at the new stack for
 * the duration; the old limit is parked at offset 8 of the new frame.
 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 34) _GLOBAL(call_do_softirq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 35) mflr r0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 36) stw r0,4(r1) /* save return address in caller's frame */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 37) lwz r10,THREAD+KSP_LIMIT(r2) /* r10 = current ksp_limit */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 38) stw r3, THREAD+KSP_LIMIT(r2) /* limit = base of new stack */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 39) stwu r1,THREAD_SIZE-STACK_FRAME_OVERHEAD(r3) /* link back-chain, point r3 at new frame */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 40) mr r1,r3 /* switch stack pointer */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 41) stw r10,8(r1) /* stash old ksp_limit in the new frame */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 42) bl __do_softirq
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 43) lwz r10,8(r1) /* recover old ksp_limit */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 44) lwz r1,0(r1) /* follow back-chain to original stack */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 45) lwz r0,4(r1) /* reload saved return address */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 46) stw r10,THREAD+KSP_LIMIT(r2) /* restore ksp_limit */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 47) mtlr r0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 48) blr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 49)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 50) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 51) * void call_do_irq(struct pt_regs *regs, void *sp);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 52) */
/*
 * Same stack-switch dance as call_do_softirq above, but for __do_irq():
 * r3 = regs (passed through), r4 = new stack.  The old ksp_limit is
 * saved at offset 8 of the new frame and restored before returning.
 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 53) _GLOBAL(call_do_irq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 54) mflr r0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 55) stw r0,4(r1) /* save return address in caller's frame */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 56) lwz r10,THREAD+KSP_LIMIT(r2) /* r10 = current ksp_limit */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 57) stw r4, THREAD+KSP_LIMIT(r2) /* limit = base of new stack */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 58) stwu r1,THREAD_SIZE-STACK_FRAME_OVERHEAD(r4) /* link back-chain, point r4 at new frame */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 59) mr r1,r4 /* switch stack pointer */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 60) stw r10,8(r1) /* stash old ksp_limit in the new frame */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 61) bl __do_irq
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 62) lwz r10,8(r1) /* recover old ksp_limit */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 63) lwz r1,0(r1) /* follow back-chain to original stack */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 64) lwz r0,4(r1) /* reload saved return address */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 65) stw r10,THREAD+KSP_LIMIT(r2) /* restore ksp_limit */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 66) mtlr r0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 67) blr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 68)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 69) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 70) * This returns the high 64 bits of the product of two 64-bit numbers.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 71) */
/*
 * u64 mulhdu(u64 a, u64 b) on 32-bit ABI:
 *   a = r3 (high) : r4 (low),  b = r5 (high) : r6 (low)
 *   returns high 64 bits of a*b in r3 (high) : r4 (low).
 * Partial products whose low word of b or high word of a is zero are
 * skipped via the cr0/cr1 tests set up at entry.
 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 72) _GLOBAL(mulhdu)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 73) cmpwi r6,0 /* cr0: low word of b == 0 ? */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 74) cmpwi cr1,r3,0 /* cr1: high word of a == 0 ? */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 75) mr r10,r4 /* keep a.lo, r4 is overwritten next */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 76) mulhwu r4,r4,r5 /* r4 = hi32(a.lo * b.hi) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 77) beq 1f /* b.lo == 0: skip a.lo*b.lo terms */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 78) mulhwu r0,r10,r6 /* r0 = hi32(a.lo * b.lo) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 79) mullw r7,r10,r5 /* r7 = lo32(a.lo * b.hi) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 80) addc r7,r0,r7 /* bits 32..63 of product, carry out */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 81) addze r4,r4 /* propagate carry into result low */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 82) 1: beqlr cr1 /* all done if high part of A is 0 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 83) mullw r9,r3,r5 /* r9 = lo32(a.hi * b.hi) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 84) mulhwu r10,r3,r5 /* r10 = hi32(a.hi * b.hi) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 85) beq 2f /* cr0 still: b.lo == 0, skip a.hi*b.lo */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 86) mullw r0,r3,r6 /* r0 = lo32(a.hi * b.lo) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 87) mulhwu r8,r3,r6 /* r8 = hi32(a.hi * b.lo) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 88) addc r7,r0,r7 /* accumulate into bits 32..63 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 89) adde r4,r4,r8 /* carry-chain into result low word */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 90) addze r10,r10 /* and into result high word */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 91) 2: addc r4,r4,r9 /* result.lo += lo32(a.hi*b.hi) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 92) addze r3,r10 /* result.hi = hi32(a.hi*b.hi) + carry */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 93) blr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 94)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 95) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 96) * reloc_got2 runs through the .got2 section adding an offset
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 97) * to each entry.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 98) */
/*
 * r3 = offset to add to every 32-bit .got2 entry.
 * __got2_start/__got2_end are link-time addresses; the bl/mflr pair
 * below measures how far we are actually running from the link address
 * so the table can be found before relocation is complete.
 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 99) _GLOBAL(reloc_got2)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 100) mflr r11 /* preserve LR, clobbered by bl 1f */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 101) lis r7,__got2_start@ha
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 102) addi r7,r7,__got2_start@l
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 103) lis r8,__got2_end@ha
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 104) addi r8,r8,__got2_end@l
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 105) subf r8,r7,r8 /* size of .got2 in bytes */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 106) srwi. r8,r8,2 /* number of word entries; sets cr0 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 107) beqlr /* empty table: nothing to do */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 108) mtctr r8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 109) bl 1f /* LR = run-time address of label 1 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 110) 1: mflr r0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 111) lis r4,1b@ha
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 112) addi r4,r4,1b@l /* r4 = link-time address of label 1 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 113) subf r0,r4,r0 /* r0 = run-time minus link-time delta */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 114) add r7,r0,r7 /* r7 = run-time address of .got2 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 115) 2: lwz r0,0(r7)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 116) add r0,r0,r3 /* add relocation offset to entry */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 117) stw r0,0(r7)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 118) addi r7,r7,4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 119) bdnz 2b
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 120) mtlr r11 /* restore caller's LR */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 121) blr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 122)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 123) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 124) * call_setup_cpu - call the setup_cpu function for this cpu
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 125) * r3 = data offset, r24 = cpu number
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 126) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 127) * Setup function is called with:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 128) * r3 = data offset
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 129) * r4 = ptr to CPU spec (relocated)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 130) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 131) _GLOBAL(call_setup_cpu)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 132) addis r4,r3,cur_cpu_spec@ha /* address cur_cpu_spec via data offset */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 133) addi r4,r4,cur_cpu_spec@l
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 134) lwz r4,0(r4) /* r4 = unrelocated cpu_spec pointer */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 135) add r4,r4,r3 /* relocate it */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 136) lwz r5,CPU_SPEC_SETUP(r4) /* r5 = setup_cpu function pointer */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 137) cmpwi 0,r5,0 /* test BEFORE relocating the pointer */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 138) add r5,r5,r3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 139) beqlr /* no setup function: return */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 140) mtctr r5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 141) bctr /* tail-call setup(r3=offset, r4=spec) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 142)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 143) #if defined(CONFIG_CPU_FREQ_PMAC) && defined(CONFIG_PPC_BOOK3S_32)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 144)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 145) /* This gets called by via-pmu.c to switch the PLL selection
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 146) * on 750fx CPU. This function should really be moved to some
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 147) * other place (as should most of the cpufreq code in via-pmu).
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 148) */
/*
 * r3 = PLL selector (0 = PLL0, non-zero = PLL1).  Runs with external
 * interrupts masked; BTIC is disabled before switching to PLL1 and
 * re-enabled after switching to PLL0.
 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 149) _GLOBAL(low_choose_750fx_pll)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 150) /* Clear MSR:EE */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 151) mfmsr r7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 152) rlwinm r0,r7,0,17,15 /* mask out the EE bit */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 153) mtmsr r0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 154)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 155) /* If switching to PLL1, disable HID0:BTIC */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 156) cmplwi cr0,r3,0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 157) beq 1f
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 158) mfspr r5,SPRN_HID0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 159) rlwinm r5,r5,0,27,25 /* clear the BTIC bit */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 160) sync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 161) mtspr SPRN_HID0,r5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 162) isync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 163) sync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 164)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 165) 1:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 166) /* Calc new HID1 value */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 167) mfspr r4,SPRN_HID1 /* Build a HID1:PS bit from parameter */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 168) rlwinm r5,r3,16,15,15 /* Clear out HID1:PS from value read */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 169) rlwinm r4,r4,0,16,14 /* Could have I used rlwimi here ? */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 170) or r4,r4,r5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 171) mtspr SPRN_HID1,r4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 172)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 173) #ifdef CONFIG_SMP
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 174) /* Store new HID1 image */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 175) lwz r6,TASK_CPU(r2) /* index nap_save_hid1[] by cpu number */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 176) slwi r6,r6,2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 177) #else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 178) li r6, 0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 179) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 180) addis r6,r6,nap_save_hid1@ha
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 181) stw r4,nap_save_hid1@l(r6) /* remember HID1 for nap wakeup */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 182)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 183) /* If switching to PLL0, enable HID0:BTIC */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 184) cmplwi cr0,r3,0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 185) bne 1f
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 186) mfspr r5,SPRN_HID0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 187) ori r5,r5,HID0_BTIC
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 188) sync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 189) mtspr SPRN_HID0,r5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 190) isync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 191) sync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 192)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 193) 1:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 194) /* Return */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 195) mtmsr r7 /* restore original MSR (re-enables EE) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 196) blr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 197)
/*
 * r3 = DFS (dynamic frequency switching) selector bit for the 7447A,
 * inserted into HID1 bit 9.  External interrupts are masked across the
 * SPR update.
 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 198) _GLOBAL(low_choose_7447a_dfs)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 199) /* Clear MSR:EE */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 200) mfmsr r7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 201) rlwinm r0,r7,0,17,15 /* mask out the EE bit */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 202) mtmsr r0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 203)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 204) /* Calc new HID1 value */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 205) mfspr r4,SPRN_HID1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 206) insrwi r4,r3,1,9 /* insert parameter into bit 9 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 207) sync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 208) mtspr SPRN_HID1,r4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 209) sync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 210) isync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 211)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 212) /* Return */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 213) mtmsr r7 /* restore original MSR (re-enables EE) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 214) blr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 215)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 216) #endif /* CONFIG_CPU_FREQ_PMAC && CONFIG_PPC_BOOK3S_32 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 217)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 218) #ifdef CONFIG_40x
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 219)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 220) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 221) * Do an IO access in real mode
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 222) */
/*
 * u8 real_readb(volatile u8 *addr);  r3 = address, byte returned in r3.
 * Temporarily turns off MSR:DR (data translation) so the load goes to
 * the physical address; sync/isync fence the MSR changes and the access.
 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 223) _GLOBAL(real_readb)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 224) mfmsr r7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 225) rlwinm r0,r7,0,~MSR_DR /* clear data relocation bit */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 226) sync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 227) mtmsr r0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 228) sync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 229) isync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 230) lbz r3,0(r3) /* real-mode byte load */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 231) sync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 232) mtmsr r7 /* restore translation */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 233) sync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 234) isync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 235) blr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 236) _ASM_NOKPROBE_SYMBOL(real_readb)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 237)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 238) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 239) * Do an IO access in real mode
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 240) */
/*
 * void real_writeb(u8 data, volatile u8 *addr);  r3 = value, r4 = address.
 * Counterpart of real_readb above: store a byte with MSR:DR disabled.
 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 241) _GLOBAL(real_writeb)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 242) mfmsr r7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 243) rlwinm r0,r7,0,~MSR_DR /* clear data relocation bit */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 244) sync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 245) mtmsr r0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 246) sync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 247) isync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 248) stb r3,0(r4) /* real-mode byte store */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 249) sync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 250) mtmsr r7 /* restore translation */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 251) sync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 252) isync
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 253) blr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 254) _ASM_NOKPROBE_SYMBOL(real_writeb)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 255)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 256) #endif /* CONFIG_40x */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 257)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 258) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 259) * Copy a whole page. We use the dcbz instruction on the destination
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 260) * to reduce memory traffic (it eliminates the unnecessary reads of
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 261) * the destination into cache). This requires that the destination
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 262) * is cacheable.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 263) */
/*
 * Copy 16 bytes from r4+4..r4+16 to r3+4..r3+16; the lwzu/stwu on the
 * last pair advance r4 and r3 by 16 for the next iteration.
 * Clobbers r6-r9.
 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 264) #define COPY_16_BYTES \
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 265) lwz r6,4(r4); \
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 266) lwz r7,8(r4); \
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 267) lwz r8,12(r4); \
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 268) lwzu r9,16(r4); \
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 269) stw r6,4(r3); \
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 270) stw r7,8(r3); \
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 271) stw r8,12(r3); \
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 272) stwu r9,16(r3)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 273)
/*
 * void copy_page(void *to, void *from);  r3 = dest, r4 = src.
 * Copies PAGE_SIZE bytes one cache line at a time: dcbz clears the
 * destination line (avoiding the read-for-ownership traffic), dcbt
 * prefetches the source MAX_COPY_PREFETCH lines ahead.  cr0:eq tracks
 * whether we are in the main loop or the no-more-prefetch tail.
 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 274) _GLOBAL(copy_page)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 275) rlwinm r5, r3, 0, L1_CACHE_BYTES - 1 /* r5 = dest misalignment within a line */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 276) addi r3,r3,-4 /* pre-bias for stwu in COPY_16_BYTES */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 277)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 278) 0: twnei r5, 0 /* WARN if r3 is not cache aligned */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 279) EMIT_BUG_ENTRY 0b,__FILE__,__LINE__, BUGFLAG_WARNING
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 280)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 281) addi r4,r4,-4 /* pre-bias source likewise */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 282)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 283) li r5,4 /* dcbz offset (undoes the -4 bias) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 284)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 285) #if MAX_COPY_PREFETCH > 1
/* Warm up: touch the first MAX_COPY_PREFETCH source lines. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 286) li r0,MAX_COPY_PREFETCH
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 287) li r11,4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 288) mtctr r0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 289) 11: dcbt r11,r4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 290) addi r11,r11,L1_CACHE_BYTES
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 291) bdnz 11b
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 292) #else /* MAX_COPY_PREFETCH == 1 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 293) dcbt r5,r4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 294) li r11,L1_CACHE_BYTES+4 /* prefetch one line ahead */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 295) #endif /* MAX_COPY_PREFETCH */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 296) li r0,PAGE_SIZE/L1_CACHE_BYTES - MAX_COPY_PREFETCH
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 297) crclr 4*cr0+eq /* eq=0: first pass, prefetching */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 298) 2:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 299) mtctr r0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 300) 1:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 301) dcbt r11,r4 /* prefetch source ahead */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 302) dcbz r5,r3 /* zero dest line, skip its read */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 303) COPY_16_BYTES
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 304) #if L1_CACHE_BYTES >= 32
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 305) COPY_16_BYTES
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 306) #if L1_CACHE_BYTES >= 64
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 307) COPY_16_BYTES
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 308) COPY_16_BYTES
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 309) #if L1_CACHE_BYTES >= 128
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 310) COPY_16_BYTES
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 311) COPY_16_BYTES
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 312) COPY_16_BYTES
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 313) COPY_16_BYTES
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 314) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 315) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 316) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 317) bdnz 1b
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 318) beqlr /* second pass done: return */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 319) crnot 4*cr0+eq,4*cr0+eq /* mark second (tail) pass */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 320) li r0,MAX_COPY_PREFETCH /* remaining lines, already prefetched */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 321) li r11,4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 322) b 2b
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 323) EXPORT_SYMBOL(copy_page)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 324)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 325) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 326) * Extended precision shifts.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 327) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 328) * Updated to be valid for shift counts from 0 to 63 inclusive.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 329) * -- Gabriel
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 330) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 331) * R3/R4 has 64 bit value
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 332) * R5 has shift count
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 333) * result in R3/R4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 334) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 335) * ashrdi3: arithmetic right shift (sign propagation)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 336) * lshrdi3: logical right shift
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 337) * ashldi3: left shift
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 338) */
/* Arithmetic right shift of r3:r4 by r5 (0..63); see block comment above. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 339) _GLOBAL(__ashrdi3)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 340) subfic r6,r5,32 # r6 = 32 - count
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 341) srw r4,r4,r5 # LSW = count > 31 ? 0 : LSW >> count
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 342) addi r7,r5,32 # could be xori, or addi with -32
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 343) slw r6,r3,r6 # t1 = count > 31 ? 0 : MSW << (32-count)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 344) rlwinm r8,r7,0,32 # t3 = (count < 32) ? 32 : 0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 345) sraw r7,r3,r7 # t2 = MSW >> (count-32)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 346) or r4,r4,r6 # LSW |= t1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 347) slw r7,r7,r8 # t2 = (count < 32) ? 0 : t2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 348) sraw r3,r3,r5 # MSW = MSW >> count
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 349) or r4,r4,r7 # LSW |= t2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 350) blr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 351) EXPORT_SYMBOL(__ashrdi3)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 352)
/* Left shift of r3:r4 by r5 (0..63); see block comment above. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 353) _GLOBAL(__ashldi3)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 354) subfic r6,r5,32 # r6 = 32 - count
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 355) slw r3,r3,r5 # MSW = count > 31 ? 0 : MSW << count
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 356) addi r7,r5,32 # could be xori, or addi with -32
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 357) srw r6,r4,r6 # t1 = count > 31 ? 0 : LSW >> (32-count)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 358) slw r7,r4,r7 # t2 = count < 32 ? 0 : LSW << (count-32)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 359) or r3,r3,r6 # MSW |= t1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 360) slw r4,r4,r5 # LSW = LSW << count
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 361) or r3,r3,r7 # MSW |= t2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 362) blr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 363) EXPORT_SYMBOL(__ashldi3)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 364)
/* Logical right shift of r3:r4 by r5 (0..63); see block comment above. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 365) _GLOBAL(__lshrdi3)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 366) subfic r6,r5,32 # r6 = 32 - count
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 367) srw r4,r4,r5 # LSW = count > 31 ? 0 : LSW >> count
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 368) addi r7,r5,32 # could be xori, or addi with -32
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 369) slw r6,r3,r6 # t1 = count > 31 ? 0 : MSW << (32-count)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 370) srw r7,r3,r7 # t2 = count < 32 ? 0 : MSW >> (count-32)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 371) or r4,r4,r6 # LSW |= t1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 372) srw r3,r3,r5 # MSW = MSW >> count
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 373) or r4,r4,r7 # LSW |= t2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 374) blr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 375) EXPORT_SYMBOL(__lshrdi3)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 376)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 377) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 378) * 64-bit comparison: __cmpdi2(s64 a, s64 b)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 379) * Returns 0 if a < b, 1 if a == b, 2 if a > b.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 380) */
/* a = r3:r4, b = r5:r6 (high:low); result in r3. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 381) _GLOBAL(__cmpdi2)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 382) cmpw r3,r5 /* signed compare of high words */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 383) li r3,1 /* tentatively "equal" */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 384) bne 1f /* high words differ: decide from cr0 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 385) cmplw r4,r6 /* unsigned compare of low words */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 386) beqlr /* equal: return 1 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 387) 1: li r3,0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 388) bltlr /* a < b: return 0 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 389) li r3,2 /* a > b: return 2 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 390) blr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 391) EXPORT_SYMBOL(__cmpdi2)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 392) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 393) * 64-bit comparison: __ucmpdi2(u64 a, u64 b)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 394) * Returns 0 if a < b, 1 if a == b, 2 if a > b.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 395) */
/* Like __cmpdi2 but both word compares are unsigned (cmplw). */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 396) _GLOBAL(__ucmpdi2)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 397) cmplw r3,r5 /* unsigned compare of high words */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 398) li r3,1 /* tentatively "equal" */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 399) bne 1f /* high words differ: decide from cr0 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 400) cmplw r4,r6 /* unsigned compare of low words */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 401) beqlr /* equal: return 1 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 402) 1: li r3,0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 403) bltlr /* a < b: return 0 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 404) li r3,2 /* a > b: return 2 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 405) blr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 406) EXPORT_SYMBOL(__ucmpdi2)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 407)
/*
 * u64 __bswapdi2(u64 v): byte-reverse a 64-bit value.
 * Input r3:r4 (high:low); each word is byte-swapped with the classic
 * rotlwi + two rlwimi sequence, and the two words are exchanged.
 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 408) _GLOBAL(__bswapdi2)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 409) rotlwi r9,r4,8 /* rotate low word left 8 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 410) rotlwi r10,r3,8 /* rotate high word left 8 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 411) rlwimi r9,r4,24,0,7 /* fix byte 0 of swapped low */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 412) rlwimi r10,r3,24,0,7 /* fix byte 0 of swapped high */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 413) rlwimi r9,r4,24,16,23 /* fix byte 2 of swapped low */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 414) rlwimi r10,r3,24,16,23 /* fix byte 2 of swapped high */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 415) mr r3,r9 /* new high = swapped old low */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 416) mr r4,r10 /* new low = swapped old high */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 417) blr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 418) EXPORT_SYMBOL(__bswapdi2)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 419)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 420) #ifdef CONFIG_SMP
/*
 * Entry point for a secondary CPU resuming: rebuild a pristine stack
 * frame at the top of the current thread's stack and enter
 * start_secondary(), which must not return.
 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 421) _GLOBAL(start_secondary_resume)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 422) /* Reset stack */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 423) rlwinm r1, r1, 0, 0, 31 - THREAD_SHIFT /* round r1 down to stack base */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 424) addi r1,r1,THREAD_SIZE-STACK_FRAME_OVERHEAD /* fresh frame at stack top */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 425) li r3,0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 426) stw r3,0(r1) /* Zero the stack frame pointer */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 427) bl start_secondary
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 428) b . /* never reached */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 429) #endif /* CONFIG_SMP */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 430)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 431) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 432) * This routine is just here to keep GCC happy - sigh...
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 433) */
/* Empty stub: some GCC configurations emit a call to __main from main(). */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 434) _GLOBAL(__main)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 435) blr