Mirror of https://github.com/torvalds/linux.git, synced 2024-12-25 12:21:37 +00:00
f3f3149f35
As suggested by Ingo, remove all references to tsc from init/calibrate.c. TSC is x86 specific, and using tsc in variable names in a generic file should be avoided. lpj_tsc is now called lpj_fine, since it is related to fine tuning of the lpj value. Also, tsc_rate_* is now called timer_rate_*.

Signed-off-by: Alok N Kataria <akataria@vmware.com>
Cc: Arjan van de Ven <arjan@infradead.org>
Cc: Daniel Hecht <dhecht@vmware.com>
Cc: Tim Mann <mann@vmware.com>
Cc: Zach Amsden <zach@vmware.com>
Cc: Sahil Rihan <srihan@vmware.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
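To illustrate what the lpj_fine rename is about, here is a simplified, self-contained sketch (ordinary userspace C, not the real init/calibrate.c; the helper name calibrate_by_looping and the constants are made up): when timer code has already supplied a fine-grained loops-per-jiffy estimate via lpj_fine, the calibration step can simply adopt it instead of measuring loops_per_jiffy the slow way.

/* Simplified illustration only -- not the actual kernel code. */
#include <stdio.h>

static unsigned long lpj_fine;        /* set by timer code when a precise value is known */
static unsigned long loops_per_jiffy; /* value the delay loops rely on */

static unsigned long calibrate_by_looping(void)
{
	/* Stand-in for the traditional trial-and-error calibration loop. */
	return 4096UL * 1000UL;
}

static void calibrate_delay(void)
{
	if (lpj_fine)
		loops_per_jiffy = lpj_fine; /* fine-tuned value wins */
	else
		loops_per_jiffy = calibrate_by_looping();
}

int main(void)
{
	lpj_fine = 2494000; /* pretend the timer code measured this */
	calibrate_delay();
	printf("loops_per_jiffy = %lu\n", loops_per_jiffy);
	return 0;
}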
55 lines · 1.3 KiB · C
#ifndef _LINUX_DELAY_H
#define _LINUX_DELAY_H

/*
 * Copyright (C) 1993 Linus Torvalds
 *
 * Delay routines, using a pre-computed "loops_per_jiffy" value.
 */

#include <linux/kernel.h>

extern unsigned long loops_per_jiffy;

#include <asm/delay.h>

/*
 * Using udelay() for intervals greater than a few milliseconds can
 * risk overflow for high loops_per_jiffy (high bogomips) machines. The
 * mdelay() provides a wrapper to prevent this. For delays greater
 * than MAX_UDELAY_MS milliseconds, the wrapper is used. Architecture
 * specific values can be defined in asm-???/delay.h as an override.
 * The 2nd mdelay() definition ensures GCC will optimize away the
 * while loop for the common cases where n <= MAX_UDELAY_MS -- Paul G.
 */

#ifndef MAX_UDELAY_MS
#define MAX_UDELAY_MS 5
#endif

#ifndef mdelay
#define mdelay(n) (\
	(__builtin_constant_p(n) && (n)<=MAX_UDELAY_MS) ? udelay((n)*1000) : \
	({unsigned long __ms=(n); while (__ms--) udelay(1000);}))
#endif

#ifndef ndelay
static inline void ndelay(unsigned long x)
{
	udelay(DIV_ROUND_UP(x, 1000));
}
#define ndelay(x) ndelay(x)
#endif

extern unsigned long lpj_fine;
void calibrate_delay(void);
void msleep(unsigned int msecs);
unsigned long msleep_interruptible(unsigned int msecs);

static inline void ssleep(unsigned int seconds)
{
	msleep(seconds * 1000);
}

#endif /* defined(_LINUX_DELAY_H) */
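For context, here is a minimal usage sketch (a hypothetical delay_demo module, not part of delay.h; only the delay calls themselves come from this header) showing when each primitive is typically chosen: ndelay()/udelay() for short busy-waits that are safe in atomic context, mdelay() when a longer busy-wait is unavoidable, and msleep()/ssleep() in process context where sleeping is allowed.

/* Hypothetical example module -- for illustration only. */
#include <linux/module.h>
#include <linux/init.h>
#include <linux/delay.h>

static int __init delay_demo_init(void)
{
	/* Sub-microsecond settle time: busy-wait, safe in atomic context. */
	ndelay(400);

	/* A few microseconds: still a busy-wait via the arch udelay(). */
	udelay(10);

	/*
	 * Longer busy-waits go through mdelay(); for constant n <= MAX_UDELAY_MS
	 * the macro collapses to a single udelay(n * 1000), otherwise it loops
	 * one millisecond at a time to avoid loops_per_jiffy overflow.
	 */
	mdelay(20);

	/* In process context, prefer sleeping to burning CPU cycles. */
	msleep(100);
	ssleep(1);

	return 0;
}

static void __exit delay_demo_exit(void)
{
}

module_init(delay_demo_init);
module_exit(delay_demo_exit);
MODULE_LICENSE("GPL");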