#ifndef BENCHMARK_CYCLECLOCK_H_
#define BENCHMARK_CYCLECLOCK_H_

#include <cstdint>

#include "benchmark/benchmark.h"
#include "internal_macros.h"
#if defined(BENCHMARK_OS_MACOSX)
#include <mach/mach_time.h>
#endif
// Declare the MSVC intrinsic for reading the TSC on targets where the
// inline-asm paths below are not available.
#if defined(COMPILER_MSVC) && !defined(_M_IX86) && !defined(_M_ARM64) && \
    !defined(_M_ARM64EC)
extern "C" uint64_t __rdtsc();
#pragma intrinsic(__rdtsc)
#endif
#if !defined(BENCHMARK_OS_WINDOWS) || defined(BENCHMARK_OS_MINGW)
#include <sys/time.h>

#include <ctime>
#endif
#ifdef BENCHMARK_OS_EMSCRIPTEN
#include <emscripten.h>
#endif

namespace benchmark {
namespace cycleclock {
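// This should return the number of cycles since power-on. Thread-safe.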
inline BENCHMARK_ALWAYS_INLINE int64_t Now() {
#if defined(BENCHMARK_OS_MACOSX)
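  // This case goes first so that every Mac, regardless of architecture,
  // returns "mach time units" since startup. Note that this counter pauses
  // while the machine sleeps; it neither keeps counting nor resets to zero.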
  return static_cast<int64_t>(mach_absolute_time());
#elif defined(BENCHMARK_OS_EMSCRIPTEN)
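  // This case goes above the x86-specific ones because old versions of
  // Emscripten define __x86_64__ even though they have nothing to do with it.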
  return static_cast<int64_t>(emscripten_get_now() * 1e+6);
#elif defined(__i386__)
  int64_t ret;
  __asm__ volatile("rdtsc" : "=A"(ret));
  return ret;
#elif defined(__x86_64__) || defined(__amd64__)
  uint64_t low, high;
  __asm__ volatile("rdtsc" : "=a"(low), "=d"(high));
  return static_cast<int64_t>((high << 32) | low);
#elif defined(__powerpc__) || defined(__ppc__)
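  // This returns a time-base, which is not always precisely a cycle-count.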
#if defined(__powerpc64__) || defined(__ppc64__)
  int64_t tb;
  asm volatile("mfspr %0, 268" : "=r"(tb));
  return tb;
#else
  uint32_t tbl, tbu0, tbu1;
  asm volatile(
      "mftbu %0\n"
      "mftb %1\n"
      "mftbu %2"
      : "=r"(tbu0), "=r"(tbl), "=r"(tbu1));
  // If the upper word changed between the two mftbu reads, an overflow
  // occurred; zero out tbl so the (tbu1, 0) pair is returned instead.
  tbl &= -static_cast<int32_t>(tbu0 == tbu1);
  // High 32 bits are in tbu1; low 32 bits are in tbl (tbu0 is no longer
  // needed).
  return (static_cast<uint64_t>(tbu1) << 32) | tbl;
#endif
#elif defined(__sparc__)
  int64_t tick;
  asm(".byte 0x83, 0x41, 0x00, 0x00");  // rd %tick, %g1 (encoded directly)
  asm("mov %%g1, %0" : "=r"(tick));
  return tick;
#elif defined(__ia64__)
  int64_t itc;
  asm("mov %0 = ar.itc" : "=r"(itc));
  return itc;
#elif defined(COMPILER_MSVC) && defined(_M_IX86)
  // Older MSVC compilers don't support the __rdtsc intrinsic properly,
  // so pull the TSC out with inline _asm instead; the value is left in
  // EDX:EAX, which is how a 64-bit return value is conveyed.
  _asm rdtsc
#elif defined(COMPILER_MSVC) && (defined(_M_ARM64) || defined(_M_ARM64EC))
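  // Read the virtual counter via the ARM64 CNTVCT status register.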
  int64_t virtual_timer_value;
  virtual_timer_value = _ReadStatusReg(ARM64_CNTVCT);
  return virtual_timer_value;
#elif defined(COMPILER_MSVC)
  return __rdtsc();
#elif defined(BENCHMARK_OS_NACL)
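  // The Native Client validator on ARM rejects MRC instructions, and
  // Portable Native Client provides no cycle-counter mnemonics at all,
  // so fall back to the monotonic clock.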
  struct timespec ts = {0, 0};
  clock_gettime(CLOCK_MONOTONIC, &ts);
  return static_cast<int64_t>(ts.tv_sec) * 1000000000 + ts.tv_nsec;
#elif defined(__aarch64__)
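  // The ARMv8 system timer runs at a fixed frequency (readable via CNTFRQ)
  // that is independent of the CPU clock, so this is a timer rather than a
  // true cycle counter.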
  int64_t virtual_timer_value;
  asm volatile("mrs %0, cntvct_el0" : "=r"(virtual_timer_value));
  return virtual_timer_value;
#elif defined(__ARM_ARCH)
  // V6 is the earliest architecture with a standard cycle counter.
#if (__ARM_ARCH >= 6)
  uint32_t pmccntr;
  uint32_t pmuseren;
  uint32_t pmcntenset;
  // Read the user mode perf monitor counter access permissions.
  asm volatile("mrc p15, 0, %0, c9, c14, 0" : "=r"(pmuseren));
  if (pmuseren & 1) {  // Allows reading perfmon counters for user mode code.
    asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r"(pmcntenset));
    if (pmcntenset & 0x80000000ul) {  // Is it counting?
      asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r"(pmccntr));
      // The counter is set up to count every 64th cycle.
      return static_cast<int64_t>(pmccntr) * 64;
    }
  }
#endif
  struct timeval tv;
  gettimeofday(&tv, nullptr);
  return static_cast<int64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
#elif defined(__mips__) || defined(__m68k__)
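  // mips apparently only allows rdtsc for superusers, so we fall back to
  // gettimeofday; it's possible clock_gettime would be better.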
  struct timeval tv;
  gettimeofday(&tv, nullptr);
  return static_cast<int64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
#elif defined(__loongarch__) || defined(__csky__)
  struct timeval tv;
  gettimeofday(&tv, nullptr);
  return static_cast<int64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
#elif defined(__s390__)  // Covers both s390 and s390x.
  // Return the CPU clock via STORE CLOCK.
  uint64_t tsc;
#if defined(BENCHMARK_OS_ZOS)
  // z/OS HLASM syntax.
  asm(" stck %0" : "=m"(tsc) : : "cc");
#else
  // Linux on Z syntax.
  asm("stck %0" : "=Q"(tsc) : : "cc");
#endif
  return static_cast<int64_t>(tsc);
#elif defined(__riscv)  // RISC-V
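  // RDCYCLE is a privileged instruction on recent Linux kernels (6.6+), so
  // read the timer via RDTIME (and RDTIMEH on riscv32) instead.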
#if __riscv_xlen == 32
  uint32_t cycles_lo, cycles_hi0, cycles_hi1;
  // Use the same overflow-handling strategy as the PowerPC code above:
  // if the two high-word reads disagree, mask the low word to zero.
  // Implemented in assembly to avoid the compiler branching.
  asm volatile(
      "rdtimeh %0\n"
      "rdtime %1\n"
      "rdtimeh %2\n"
      "sub %0, %0, %2\n"
      "seqz %0, %0\n"
      "sub %0, zero, %0\n"
      "and %1, %1, %0\n"
      : "=r"(cycles_hi0), "=r"(cycles_lo), "=r"(cycles_hi1));
  return static_cast<int64_t>((static_cast<uint64_t>(cycles_hi1) << 32) |
                              cycles_lo);
#else
  uint64_t cycles;
  asm volatile("rdtime %0" : "=r"(cycles));
  return static_cast<int64_t>(cycles);
#endif
#elif defined(__e2k__) || defined(__elbrus__)
  struct timeval tv;
  gettimeofday(&tv, nullptr);
  return static_cast<int64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
#elif defined(__hexagon__)
  uint64_t pcycle;
  asm volatile("%0 = C15:14" : "=r"(pcycle));
  return static_cast<int64_t>(pcycle);
#elif defined(__alpha__)
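  // Alpha's PCC cycle counter is an unsigned 32-bit value that wraps roughly
  // every four seconds, making it unreliable for tick counts beyond that
  // range, so use gettimeofday instead.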
  struct timeval tv;
  gettimeofday(&tv, nullptr);
  return static_cast<int64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
#elif defined(__hppa__) || defined(__linux__)
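  // Fallback for other architectures with a recent enough Linux kernel:
  // read the monotonic clock.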
  struct timespec ts = {0, 0};
  clock_gettime(CLOCK_MONOTONIC, &ts);
  return static_cast<int64_t>(ts.tv_sec) * 1000000000 + ts.tv_nsec;
#else
// The soft failover to a generic implementation is automatic only for ARM.
// For other platforms the developer is expected to attempt a fast
// implementation first and fall back to a generic version only when nothing
// better is available.
#error You need to define CycleTimer for your OS and CPU
#endif
}
}  // namespace cycleclock
}  // namespace benchmark

#endif  // BENCHMARK_CYCLECLOCK_H_