#include <ctime>     // timespec, clock_gettime, nanosleep, CLOCK_MONOTONIC
#include <iostream>
/// Sleeps for the requested number of microseconds via nanosleep(), measures
/// the real elapsed wall time with CLOCK_MONOTONIC, prints both, and returns
/// the measured elapsed time in microseconds.
auto sleep_microseconds = [](long microseconds)
{
    // Split the request into whole seconds and the nanosecond remainder:
    // tv_nsec must be < 1'000'000'000, otherwise nanosleep fails with EINVAL
    // (the original `microseconds * 1000` broke for requests >= 1e6 us and
    // could overflow int).
    timespec request{};
    request.tv_sec  = microseconds / 1000000;
    request.tv_nsec = (microseconds % 1000000) * 1000L;

    timespec before{}, after{};
    clock_gettime(CLOCK_MONOTONIC, &before);
    // NOTE: nanosleep may return early if interrupted by a signal; for this
    // benchmark we deliberately measure whatever actually elapsed.
    nanosleep(&request, nullptr);
    clock_gettime(CLOCK_MONOTONIC, &after);

    // The elapsed time must include the tv_sec difference; subtracting only
    // tv_nsec (as the original did) is wrong — and can go negative — whenever
    // the sleep crosses a second boundary.
    const long elapsed_us =
        (after.tv_sec - before.tv_sec) * 1000000L +
        (after.tv_nsec - before.tv_nsec) / 1000;
    std::cout << "in=" << microseconds << " us out=" << elapsed_us << " us\n";
    return elapsed_us;
};
int main(int, char**) {
    // Benchmark the sleep overhead for requests of 1 us up to 131072 us
    // (2^17), doubling the requested duration each iteration.
    for (long request_us = 1; request_us <= 131072; request_us *= 2) {
        sleep_microseconds(request_us);
    }
}
https://godbolt.org/z/5nenb9hsv
Na Linuksie
in=1 us out=71 us
in=2 us out=67 us
in=4 us out=68 us
in=8 us out=73 us
in=16 us out=79 us
in=32 us out=97 us
in=64 us out=131 us
in=128 us out=195 us
in=256 us out=324 us
in=512 us out=584 us
in=1024 us out=1096 us
in=2048 us out=2124 us
in=4096 us out=4169 us
in=8192 us out=8269 us
in=16384 us out=16466 us
in=32768 us out=32847 us
in=65536 us out=65625 us
in=131072 us out=134902 us
Na Windowsie
in=1 us out=0 us
in=2 us out=0 us
in=4 us out=0 us
in=8 us out=0 us
in=16 us out=0 us
in=32 us out=0 us
in=64 us out=0 us
in=128 us out=0 us
in=256 us out=0 us
in=512 us out=0 us
in=1024 us out=9114 us
in=2048 us out=15425 us
in=4096 us out=14444 us
in=8192 us out=15026 us
in=16384 us out=28964 us
in=32768 us out=46046 us
in=65536 us out=78900 us
in=131072 us out=141530 us
Rozumiem, że Windows nie jest systemem czasu rzeczywistego, ale aż takie różnice?