Ubuntu
#include <chrono>
#include <cstdlib>
#include <iostream>
#include <limits>
#include <limits.h>
#include <uuid/uuid.h>
using namespace std;
void testTime(int x);

// Entry point.
// Usage: ./h1 <rounds>
// Parses the round count from argv[1] and hands it to testTime().
// Returns 0 on success, 1 when the argument is missing.
int main(int argc, char **argv)
{
    // The original dereferenced argv[1] unconditionally, which is
    // undefined behavior (usually a segfault) when run with no argument.
    if (argc < 2)
    {
        std::cerr << "usage: " << argv[0] << " <rounds>" << std::endl;
        return 1;
    }
    // Note: atoi yields 0 for non-numeric input, so bad input simply
    // runs zero timing rounds rather than failing loudly.
    int rounds = std::atoi(argv[1]);
    testTime(rounds);
    return 0;
}
// Runs `x` timing rounds. First prints the iteration count
// (numeric_limits<uint32_t>::max() == 4294967295), then for each round
// spins an empty loop that many times and prints
// "<round>,<ms> milliseconds,<ns> nanos!" with the elapsed wall time.
//
// NOTE(review): with optimization enabled (-O1 and above) the compiler
// may delete the empty loop entirely, making every measurement ~0.
// The numbers quoted in this article only hold for an unoptimized build.
void testTime(int x)
{
    // max() is a static constexpr member: access it once via the scope
    // operator instead of constructing a numeric_limits temporary and
    // re-evaluating max() on every loop test, as the original did.
    constexpr uint32_t kIterations = std::numeric_limits<uint32_t>::max();
    std::cout << kIterations << std::endl;

    for (int round = 0; round < x; round++)
    {
        const auto start = std::chrono::steady_clock::now();
        for (uint32_t j = 0; j < kIterations; j++)
        {
            // Intentionally empty: the loop overhead itself is what is
            // being timed.
        }
        const auto end = std::chrono::steady_clock::now();
        std::cout << round << ","
                  << std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count()
                  << " milliseconds,"
                  << std::chrono::duration_cast<std::chrono::nanoseconds>(end - start).count()
                  << " nanos!" << std::endl;
    }
}
Compile
g++ -std=c++2a *.cpp -o h1 -luuid
Run
./h1 10
Snapshot

As the snapshot above illustrates, running the increment loop 4,294,967,295 times (the maximum value of uint32_t) on Ubuntu 20.04 takes C++ approximately 2.2–2.3 seconds per round.
原创文章,作者:jamestackk,如若转载,请注明出处:https://blog.ytso.com/tech/aiops/277033.html