/*
 * include/asm-xtensa/delay.h
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2001 - 2005 Tensilica Inc.
 *
 */

#ifndef _XTENSA_DELAY_H
#define _XTENSA_DELAY_H

#include <asm/timex.h>
#include <asm/param.h>

extern unsigned long loops_per_jiffy;

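/*
 * Busy-wait for roughly 'loops' processor cycles: the loop below takes
 * two cycles per iteration, and a constant count below two collapses to
 * a single nop.
 */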
static inline void __delay(unsigned long loops)
{
	if (__builtin_constant_p(loops) && loops < 2)
		__asm__ __volatile__ ("nop");
	else if (loops >= 2)
		/* 2 cycles per loop. */
		__asm__ __volatile__ ("1: addi %0, %0, -2; bgeui %0, 2, 1b"
				: "+r" (loops));
}

/* Deliberately undefined functions; calls to these fail at link time. */
void __bad_udelay(void);
void __bad_ndelay(void);

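/*
 * Upper bounds for compile-time-constant delays; anything longer should
 * not busy-wait here (a sleeping interface such as msleep() or
 * usleep_range() is the usual choice outside atomic context).
 */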
#define __MAX_UDELAY 30000
#define __MAX_NDELAY 30000

static inline void __udelay(unsigned long usecs)
{
	unsigned long start = get_ccount();
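	/*
	 * cycles ~= usecs * ccount_freq / 10^6: the combined shift by 20
	 * approximates the division by 10^6, and the inner shift keeps the
	 * 32-bit multiplication from overflowing for legitimate usec values.
	 */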
	unsigned long cycles = (usecs * (ccount_freq >> 15)) >> 5;

	/* Note: all variables are unsigned (can wrap around)! */
	while (((unsigned long)get_ccount()) - start < cycles)
		cpu_relax();
}

static inline void udelay(unsigned long usec)
{
	if (__builtin_constant_p(usec) && usec >= __MAX_UDELAY)
		__bad_udelay();
	else
		__udelay(usec);
}

static inline void __ndelay(unsigned long nsec)
{
	/*
	 * Inner shift makes sure multiplication doesn't overflow
	 * for legitimate nsec values
	 */
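	/* The combined shift by 30 approximates a division by 10^9. */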
	unsigned long cycles = (nsec * (ccount_freq >> 15)) >> 15;
	__delay(cycles);
}

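/*
 * Defining ndelay as a macro tells <linux/delay.h> that this architecture
 * provides its own ndelay(), so the generic fallback there is not used.
 */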
#define ndelay(n) ndelay(n)

static inline void ndelay(unsigned long nsec)
{
	if (__builtin_constant_p(nsec) && nsec >= __MAX_NDELAY)
		__bad_ndelay();
	else
		__ndelay(nsec);
}

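/*
 * Usage sketch (hypothetical caller): these helpers busy-wait, so they
 * are meant for short pauses, e.g. while polling hardware in atomic
 * context:
 *
 *	udelay(10);	spin for roughly 10 microseconds
 *	ndelay(200);	spin for roughly 200 nanoseconds
 */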
#endif