/*
 * x86 TSC related functions
 */
#ifndef _ASM_X86_TSC_H
#define _ASM_X86_TSC_H

#include <asm/processor.h>

#define NS_SCALE 10 /* 2^10, carefully chosen */
#define US_SCALE 32 /* 2^32, arbitrarily chosen */
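
/*
 * Illustrative note (not part of the original header): NS_SCALE and US_SCALE
 * are binary fixed-point shift factors used for cycle/time conversions, so a
 * pre-scaled multiply plus a shift can stand in for a 64-bit division, e.g.:
 *
 *      ns ~= (cycles * ((NSEC_PER_MSEC << NS_SCALE) / tsc_khz)) >> NS_SCALE
 *
 * The actual scaling arithmetic lives in the TSC/sched_clock implementation,
 * not in this header.
 */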

/*
 * Standard way to access the cycle counter.
 */
typedef unsigned long long cycles_t;

extern unsigned int cpu_khz;
extern unsigned int tsc_khz;

extern void disable_TSC(void);

static inline cycles_t get_cycles(void)
{
#ifndef CONFIG_X86_TSC
        if (!boot_cpu_has(X86_FEATURE_TSC))
                return 0;
#endif

        return rdtsc();
}
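
/*
 * Usage sketch (illustrative only, not part of this header): get_cycles()
 * returns raw TSC counts, so the cost of a code region can be taken as a
 * simple difference, e.g.:
 *
 *      cycles_t t1 = get_cycles();
 *      do_something();         (placeholder for the measured code)
 *      cycles_t t2 = get_cycles();
 *      pr_debug("took %llu cycles\n", t2 - t1);
 *
 * On !CONFIG_X86_TSC kernels without the TSC feature this reads as 0.
 */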

extern struct system_counterval_t convert_art_to_tsc(u64 art);
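
/*
 * Background note (added for clarity, based on Intel's ART documentation):
 * the Always Running Timer (ART) relates to the TSC via the CPUID leaf 0x15
 * ratio, roughly TSC = ART * (CPUID.15H:EBX / CPUID.15H:EAX) + offset.
 * convert_art_to_tsc() performs that conversion so drivers can feed ART
 * timestamps into get_device_system_crosststamp().
 */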

extern void tsc_early_delay_calibrate(void);
extern void tsc_init(void);
extern void mark_tsc_unstable(char *reason);
extern int unsynchronized_tsc(void);
extern int check_tsc_unstable(void);
extern unsigned long native_calibrate_cpu(void);
extern unsigned long native_calibrate_tsc(void);
extern unsigned long long native_sched_clock_from_tsc(u64 tsc);
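
/*
 * Usage sketch (illustrative only): the calibration helpers return a
 * frequency in kHz, while native_sched_clock_from_tsc() maps a raw TSC
 * value onto the sched_clock() nanosecond timeline, e.g. for hardware
 * supplied timestamps:
 *
 *      u64 tsc = <raw TSC value captured by hardware>;
 *      u64 ns  = native_sched_clock_from_tsc(tsc);
 *
 * The result should be treated like any other sched_clock() value.
 */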

extern int tsc_clocksource_reliable;

/*
 * Boot-time check whether the TSCs are synchronized across
 * all CPUs/cores:
 */
#ifdef CONFIG_X86_TSC
extern bool tsc_store_and_check_tsc_adjust(bool bootcpu);
extern void tsc_verify_tsc_adjust(bool resume);
extern void check_tsc_sync_source(int cpu);
extern void check_tsc_sync_target(void);
#else
static inline bool tsc_store_and_check_tsc_adjust(bool bootcpu) { return false; }
static inline void tsc_verify_tsc_adjust(bool resume) { }
static inline void check_tsc_sync_source(int cpu) { }
static inline void check_tsc_sync_target(void) { }
#endif
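
/*
 * Note (added for clarity): the !CONFIG_X86_TSC stubs above let common CPU
 * bringup code call these helpers unconditionally, e.g.:
 *
 *      check_tsc_sync_target();
 *
 * without wrapping every call site in #ifdef CONFIG_X86_TSC.
 */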

extern int notsc_setup(char *);
extern void tsc_save_sched_clock_state(void);
extern void tsc_restore_sched_clock_state(void);

unsigned long cpu_khz_from_msr(void);

#endif /* _ASM_X86_TSC_H */