#ifndef BENCHMARK_CYCLECLOCK_H_
#define BENCHMARK_CYCLECLOCK_H_

#include <cstdint>

#include "benchmark/benchmark.h"
#include "internal_macros.h"

#if defined(BENCHMARK_OS_MACOSX)
#include <mach/mach_time.h>
#endif

// Declare the MSVC __rdtsc intrinsic for the targets that use it below;
// 32-bit x86 and ARM64 take dedicated paths instead.
#if defined(COMPILER_MSVC) && !defined(_M_IX86) && !defined(_M_ARM64) && \
    !defined(_M_ARM64EC)
extern "C" uint64_t __rdtsc();
#pragma intrinsic(__rdtsc)
#endif

#if !defined(BENCHMARK_OS_WINDOWS) || defined(BENCHMARK_OS_MINGW)
#include <sys/time.h>
#include <sys/types.h>
#endif

#ifdef BENCHMARK_OS_EMSCRIPTEN
#include <emscripten.h>
#endif

namespace benchmark {
namespace cycleclock {

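// Now() returns a fast, monotonically increasing tick count read from the
// cheapest high-resolution counter the target exposes. The unit is
// architecture-dependent (CPU cycles, a fixed-frequency timer, or wall-clock
// microseconds/nanoseconds where no counter is accessible), so callers take
// differences of two readings and convert them with a separately measured
// ticks-per-second rate. For example (illustrative only):
//
//   int64_t start = cycleclock::Now();
//   DoWork();
//   int64_t elapsed_ticks = cycleclock::Now() - start;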
inline BENCHMARK_ALWAYS_INLINE int64_t Now() {
#if defined(BENCHMARK_OS_MACOSX)
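  // Use mach_absolute_time() for every Mac, regardless of architecture: it
  // counts "mach time units" since boot, which are converted elsewhere using
  // the mach timebase rather than a CPU clock frequency (which can change,
  // e.g. due to power saving).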
  return static_cast<int64_t>(mach_absolute_time());
#elif defined(BENCHMARK_OS_EMSCRIPTEN)
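  // emscripten_get_now() reports wall-clock time in milliseconds as a
  // double; scale it so the returned ticks are roughly nanoseconds. This
  // branch sits above the x86 ones because old Emscripten toolchains also
  // define __x86_64__.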
  return static_cast<int64_t>(emscripten_get_now() * 1e+6);
#elif defined(__i386__)
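  // RDTSC places the 64-bit time-stamp counter in EDX:EAX; on 32-bit x86 the
  // "=A" constraint maps that register pair onto the 64-bit 'ret'.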
  int64_t ret;
  __asm__ volatile("rdtsc" : "=A"(ret));
  return ret;
#elif (defined(__x86_64__) || defined(__amd64__)) && !defined(__arm64ec__)
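  // On x86-64 the "=A" constraint does not combine EDX:EAX into one 64-bit
  // value, so read the two 32-bit halves separately and reassemble them.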
  uint64_t low, high;
  __asm__ volatile("rdtsc" : "=a"(low), "=d"(high));
  return static_cast<int64_t>((high << 32) | low);
#elif defined(__powerpc__) || defined(__ppc__)
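  // PowerPC exposes the Time Base (SPR 268), a constant-rate counter that is
  // not necessarily the core clock.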
#if defined(__powerpc64__) || defined(__ppc64__)
  int64_t tb;
  asm volatile("mfspr %0, 268" : "=r"(tb));
  return tb;
#else
  // On 32-bit PowerPC the 64-bit Time Base must be read as two halves.
  // Re-read the upper half to detect a carry between the reads; if one
  // happened, the branch-free mask below zeroes the lower half.
  uint32_t tbl, tbu0, tbu1;
  asm volatile(
      "mftbu %0\n"
      "mftb %1\n"
      "mftbu %2"
      : "=r"(tbu0), "=r"(tbl), "=r"(tbu1));
  tbl &= -static_cast<int32_t>(tbu0 == tbu1);
  return (static_cast<uint64_t>(tbu1) << 32) | tbl;
#endif
#elif defined(__sparc__)
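  // The .byte sequence encodes "rd %tick, %g1", which some assemblers do not
  // accept as a mnemonic; the counter is then copied out of %g1.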
  int64_t tick;
  asm(".byte 0x83, 0x41, 0x00, 0x00");
  asm("mov %%g1, %0" : "=r"(tick));
  return tick;
#elif defined(__ia64__)
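  // ar.itc is the Itanium Interval Time Counter.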
  int64_t itc;
  asm("mov %0 = ar.itc" : "=r"(itc));
  return itc;
#elif defined(COMPILER_MSVC) && defined(_M_IX86)
  // 32-bit MSVC: bare RDTSC leaves the result in EDX:EAX, which is also the
  // register pair used to return a 64-bit value, so no explicit return
  // statement is needed.
  _asm rdtsc
#elif defined(COMPILER_MSVC) && (defined(_M_ARM64) || defined(_M_ARM64EC))
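  // MSVC on ARM64 has no inline assembly; _ReadStatusReg(ARM64_CNTVCT) reads
  // the same virtual counter (CNTVCT_EL0) as the aarch64 asm path below.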
  int64_t virtual_timer_value;
  virtual_timer_value = _ReadStatusReg(ARM64_CNTVCT);
  return virtual_timer_value;
#elif defined(COMPILER_MSVC)
  return __rdtsc();
#elif defined(BENCHMARK_OS_NACL)
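  // Under Native Client the remaining architectures cannot use their native
  // counter sequences (the validator rejects them), so fall back to the
  // monotonic clock.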
  struct timespec ts = {0, 0};
  clock_gettime(CLOCK_MONOTONIC, &ts);
  return static_cast<int64_t>(ts.tv_sec) * 1000000000 + ts.tv_nsec;
#elif defined(__aarch64__) || defined(__arm64ec__)
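  // CNTVCT_EL0 is the system virtual counter: readable from user space and
  // ticking at a fixed frequency (CNTFRQ_EL0) rather than the CPU clock.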
  int64_t virtual_timer_value;
  asm volatile("mrs %0, cntvct_el0" : "=r"(virtual_timer_value));
  return virtual_timer_value;
#elif defined(__ARM_ARCH)
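  // 32-bit ARM: try the PMU cycle counter (PMCCNTR), which is only readable
  // from user space if the kernel has enabled user access; otherwise fall
  // back to gettimeofday().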
#if (__ARM_ARCH >= 6)  // V6 is the earliest architecture with the PMU counter.
  uint32_t pmccntr;
  uint32_t pmuseren;
  uint32_t pmcntenset;
  // Read the user-mode access permission bit for the performance monitors.
  asm volatile("mrc p15, 0, %0, c9, c14, 0" : "=r"(pmuseren));
  if (pmuseren & 1) {  // User-mode reads of the counters are allowed.
    asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r"(pmcntenset));
    if (pmcntenset & 0x80000000ul) {  // The cycle counter is enabled.
      asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r"(pmccntr));
      // The counter is configured to count every 64th cycle.
      return static_cast<int64_t>(pmccntr) * 64;
    }
  }
#endif
  struct timeval tv;
  gettimeofday(&tv, nullptr);
  return static_cast<int64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
#elif defined(__mips__) || defined(__m68k__)
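  // No user-space cycle counter is reliably available here, so settle for
  // microsecond-resolution wall-clock time.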
  struct timeval tv;
  gettimeofday(&tv, nullptr);
  return static_cast<int64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
#elif defined(__loongarch__) || defined(__csky__)
  struct timeval tv;
  gettimeofday(&tv, nullptr);
  return static_cast<int64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
#elif defined(__s390__)
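  // Covers both s390 and s390x: STCK stores the 64-bit TOD clock.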
  uint64_t tsc;
#if defined(BENCHMARK_OS_ZOS)
  // z/OS (HLASM-style) syntax.
  asm(" stck %0" : "=m"(tsc) : : "cc");
#else
  asm("stck %0" : "=Q"(tsc) : : "cc");
#endif
  return static_cast<int64_t>(tsc);
#elif defined(__riscv)
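  // RDTIME reads the 64-bit time CSR. On RV32 it is exposed as two 32-bit
  // halves (time / timeh), so re-read the high half to detect a wrap between
  // the reads, mirroring the 32-bit PowerPC strategy above.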
#if __riscv_xlen == 32
  uint32_t cycles_lo, cycles_hi0, cycles_hi1;
  asm volatile(
      "rdtimeh %0\n"
      "rdtime %1\n"
      "rdtimeh %2\n"
      : "=r"(cycles_hi0), "=r"(cycles_lo), "=r"(cycles_hi1));
  // If the high half changed between the reads, the low half wrapped; the
  // branch-free mask below zeroes it in that case.
  cycles_lo &= -static_cast<int32_t>(cycles_hi0 == cycles_hi1);
  return static_cast<int64_t>((static_cast<uint64_t>(cycles_hi1) << 32) |
                              cycles_lo);
#else
  uint64_t cycles;
  asm volatile("rdtime %0" : "=r"(cycles));
  return static_cast<int64_t>(cycles);
#endif
#elif defined(__e2k__) || defined(__elbrus__)
  struct timeval tv;
  gettimeofday(&tv, nullptr);
  return static_cast<int64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
#elif defined(__hexagon__)
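  // Hexagon exposes the free-running processor cycle count (PCYCLE) through
  // the C15:14 register pair.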
  uint64_t pcycle;
  asm volatile("%0 = C15:14" : "=r"(pcycle));
  return static_cast<int64_t>(pcycle);
#elif defined(__alpha__)
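  // Alpha's processor cycle counter (RPCC) is only 32 bits wide and wraps
  // after a few seconds, so wall-clock time is the more usable option here.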
  struct timeval tv;
  gettimeofday(&tv, nullptr);
  return static_cast<int64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
#elif defined(__hppa__) || defined(__linux__)
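  // Generic fallback for any remaining Linux architecture: the monotonic
  // clock is not a cycle count, but it is cheap and always available.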
  struct timespec ts = {0, 0};
  clock_gettime(CLOCK_MONOTONIC, &ts);
  return static_cast<int64_t>(ts.tv_sec) * 1000000000 + ts.tv_nsec;
#else
#error You need to define CycleTimer for your OS and CPU
#endif
}

}  // namespace cycleclock
}  // namespace benchmark

#endif  // BENCHMARK_CYCLECLOCK_H_