Benchmark Code
Spire_Jeff
Posts: 1,917
A question was raised in the optimization code thread about how I was benchmarking the code. Because of that, I am posting the code I will be using from now on for testing chunks of code. I welcome any suggestions on how to improve the code and I also welcome anyone out there to test bits of code and post the results.
Also, let me know if something is not clear.
Oh, in case it isn't as clear as I think it is: I am running the tests from the diagnostics window by emulating device 10001. After you have run the tests a few times, you can emulate a push on channel 100 to print your reports to the diagnostics window.
Jeff
P.S.
I am going to rerun the For Loop tests with this code and post the results momentarily.
PROGRAM_NAME='timelineBenchmark'

//**********************************************************************
// Benchmark harness for timing chunks of NetLinx code.
//
// Usage: give a test an ID (1..MAX_NUM_TESTS) and a name via TestName(),
// then wrap the code under test between TestStart(id) and TestFinish(id).
// Elapsed time is measured by creating a repeating timeline whose ID is
// the test ID and reading TIMELINE_GET() when the test finishes.
// Results go to the diagnostics window via send_string 0.
//
// Channel map (emulate pushes on dvTP):
//   1-3     : run your tests (wire your calls into these handlers)
//   100     : print reports for every test that has data
//   101-120 : print the report for test (channel - 100) only
//   255     : reset all test data
//**********************************************************************

DEFINE_DEVICE

dvTP = 10001:1:0    // emulated touch panel used to trigger tests/reports

DEFINE_CONSTANT

//Benchmark Constants
integer MAX_NUM_TESTS = 20;    // highest usable test ID; also the timeline ID range

//Place test constants below

DEFINE_TYPE

DEFINE_VARIABLE

//Benchmark Variables DO NOT CHANGE
char sTestDescription[MAX_NUM_TESTS][30]                        // human-readable name per test ID
VOLATILE LONG lTL_Times[5] = {10000,10000,10000,10000,10000}    // dummy timeline schedule; only TIMELINE_GET's elapsed value is used
LONG lTestTime[MAX_NUM_TESTS][6] //1-5 = last values, 6 = running avg of all
LONG lTestAverageCount[MAX_NUM_TESTS] = 0                       // number of runs folded into slot [6]
VOLATILE INTEGER nTestIndex[MAX_NUM_TESTS]                      // next slot (1..5) in the circular history
VOLATILE INTEGER nWORK_IN_PROGRESS[MAX_NUM_TESTS]               // 1 while a test's timeline is running

//**********************
//Benchmark functions.
//Call the TestStart function immediately before code to be tested.
//Call the TestFinish function immediately after code to be tested.
//**********************

// Assign a display name to test ID nMethod (shown in every report line).
define_function TestName(integer nMethod, char sName[30]){
    sTestDescription[nMethod] = sName;
}

define_function TestReset(integer nMethod){//Clears averages and sets pointers to first spot.
    stack_var integer x;
    nTestIndex[nMethod] = 1;
    nWORK_IN_PROGRESS[nMethod] = 0;
    // Kill any leftover timeline for this ID; if none exists this just
    // logs a diagnostics error and is otherwise harmless.
    timeline_kill(nMethod);
    lTestAverageCount[nMethod] = 0;
    // Clear the 5-run history and the running average (slot 6).
    for(x=6;x;x--){
        lTestTime[nMethod][x] = 0;
    }
    send_string 0,"'*********************************************************'";
    send_string 0,"'* TEST ',itoa(nMethod),' RESET: ',sTestDescription[nMethod]";
    send_string 0,"'*********************************************************'";
}

// Begin timing test nMethod: announce it and start the stopwatch timeline.
define_function TestStart(integer nMethod){
    // Defensive clamp in case the circular index was never initialised
    // (e.g. TestReset was not called after boot).
    IF((nTestIndex[nMethod] > 5) or (nTestIndex[nMethod] < 1))
        nTestIndex[nMethod] = 1;
    send_string 0,"'*********************************************************'";
    send_string 0,"'* TEST ',itoa(nMethod),' STARTING: ',sTestDescription[nMethod]";
    send_string 0,"'*********************************************************'";
    nWork_In_Progress[nMethod] = 1;
    // The timeline is only a millisecond counter: the schedule contents
    // don't matter because TestFinish reads TIMELINE_GET before any
    // event would fire.
    // NOTE(review): if TestStart is called twice without an intervening
    // TestFinish, the timeline ID already exists and this create will
    // fail - confirm whether that case needs a TIMELINE_KILL first.
    TIMELINE_CREATE(type_cast(nMethod),lTL_Times,5,TIMELINE_RELATIVE,TIMELINE_REPEAT);
}

// Stop timing test nMethod, record the elapsed ms into the circular
// 5-run history, and fold it into the running average (slot 6).
define_function TestFinish(integer nMethod){
    TIMELINE_PAUSE(nMethod);
    lTestTime[nMethod][nTestIndex[nMethod]] = TIMELINE_GET(nMethod);
    TIMELINE_KILL(nMethod);
    // Incremental running average: avg' = (avg*count + new) / (count+1).
    // NOTE(review): this is integer division each run, so rounding error
    // can accumulate across many runs; a running total would be exact.
    lTestTime[nMethod][6] = ((lTestTime[nMethod][6]*lTestAverageCount[nMethod]) + lTestTime[nMethod][nTestIndex[nMethod]])/(lTestAverageCount[nMethod] + 1);
    lTestAverageCount[nMethod]++;
    send_string 0,"'*********************************************************'";
    send_string 0,"'* TEST ',itoa(nMethod),' FINISHED: ',sTestDescription[nMethod]";
    send_string 0,"'* Last run time: ',itoa(lTestTime[nMethod][nTestIndex[nMethod]]),'ms - ',sTestDescription[nMethod]";
    send_string 0,"'* Average run time: ',itoa(lTestTime[nMethod][6]),'ms - over ',itoa(lTestAverageCount[nMethod]),' tests - ',sTestDescription[nMethod]";
    send_string 0,"'*********************************************************'";
    // Advance the circular index 1->2->...->5->1.
    IF((nTestIndex[nMethod] >= 5) or (nTestIndex[nMethod] < 1))
        nTestIndex[nMethod] = 1;
    ELSE
        nTestIndex[nMethod]++;
    nWORK_IN_PROGRESS[nMethod] = 0;
}

// Print a report to diagnostics: for nMethod > 0, that single test;
// for nMethod == 0, every test that has at least one recorded run.
define_function TestPrintReport(integer nMethod){
    stack_var integer x;
    if(nMethod){
        send_string 0,"'*********************************************************'";
        send_string 0,"'* TEST ',itoa(nMethod),' REPORT: ',sTestDescription[nMethod]";
        send_string 0,"'* Most recent 5 runs:'";
        send_string 0,"'* 1: ',itoa(lTestTime[nMethod][1]),'ms'";
        send_string 0,"'* 2: ',itoa(lTestTime[nMethod][2]),'ms'";
        send_string 0,"'* 3: ',itoa(lTestTime[nMethod][3]),'ms'";
        send_string 0,"'* 4: ',itoa(lTestTime[nMethod][4]),'ms'";
        send_string 0,"'* 5: ',itoa(lTestTime[nMethod][5]),'ms'";
        send_string 0,"'*----------------------------------------------------------'";
        send_string 0,"'* Average run time: ',itoa(lTestTime[nMethod][6]),'ms - over ',itoa(lTestAverageCount[nMethod]),' tests'";
        send_string 0,"'*********************************************************'";
    }
    else{
        for(x=1;x<=MAX_NUM_TESTS;x++){
            // A non-zero first slot means this test has run at least once.
            if(lTestTime[x][1]){
                send_string 0,"'*********************************************************'";
                send_string 0,"'* TEST ',itoa(x),' REPORT: ',sTestDescription[x]";
                send_string 0,"'* Most recent 5 runs:'";
                send_string 0,"'* 1: ',itoa(lTestTime[x][1]),'ms'";
                send_string 0,"'* 2: ',itoa(lTestTime[x][2]),'ms'";
                send_string 0,"'* 3: ',itoa(lTestTime[x][3]),'ms'";
                send_string 0,"'* 4: ',itoa(lTestTime[x][4]),'ms'";
                send_string 0,"'* 5: ',itoa(lTestTime[x][5]),'ms'";
                send_string 0,"'*----------------------------------------------------------'";
                send_string 0,"'* Average run time: ',itoa(lTestTime[x][6]),'ms - over ',itoa(lTestAverageCount[x]),' tests'";
                send_string 0,"'*********************************************************'";
            }
        }
    }
}

DEFINE_FUNCTION PutYourFunctionsBelow(){//example function for testing
    TestName(20,'Sample Test');//20 is the test ID. Change this accordingly. start at 1. MAX_NUM_TESTS is the highest ID allowed.
    TestStart(20);
    //Test code goes here!
    TestFinish(20);
}

DEFINE_START

DEFINE_EVENT

BUTTON_EVENT[dvTP,1]
{ //Run Test
    PUSH:
    {
    }
}

BUTTON_EVENT[dvTP,2]
{ //Run Test
    PUSH:
    {
    }
}

BUTTON_EVENT[dvTP,3]
{ //Run Test
    PUSH:
    {
    }
}

BUTTON_EVENT[dvTP,100]
BUTTON_EVENT[dvTP,101]
BUTTON_EVENT[dvTP,102]
BUTTON_EVENT[dvTP,103]
BUTTON_EVENT[dvTP,104]
BUTTON_EVENT[dvTP,105]
BUTTON_EVENT[dvTP,106]
BUTTON_EVENT[dvTP,107]
BUTTON_EVENT[dvTP,108]
BUTTON_EVENT[dvTP,109]
BUTTON_EVENT[dvTP,110]
BUTTON_EVENT[dvTP,111]
BUTTON_EVENT[dvTP,112]
BUTTON_EVENT[dvTP,113]
BUTTON_EVENT[dvTP,114]
BUTTON_EVENT[dvTP,115]
BUTTON_EVENT[dvTP,116]
BUTTON_EVENT[dvTP,117]
BUTTON_EVENT[dvTP,118]
BUTTON_EVENT[dvTP,119]
BUTTON_EVENT[dvTP,120]{
    //Print reports. channel 100 prints all valid reports.
    //channel 101-120 print a report only for a single test (channel-100)
    PUSH:
    {
        TestPrintReport(button.input.channel - 100);
    }
}

button_event[dvTp,255]{
    //Reset Test Data
    push:{
        stack_var integer x;
        for(x=MAX_NUM_TESTS;x;x--){
            TestReset(x);
        }
    }
}
Also, let me know if something is not clear.
Oh, in case it isn't as clear as I think it is: I am running the tests from the diagnostics window by emulating device 10001. After you have run the tests a few times, you can emulate a push on channel 100 to print your reports to the diagnostics window.
Jeff
P.S.
I am going to rerun the For Loop tests with this code and post the results momentarily.
0
Comments
The timeline method seems to be a little more accurate than the other benchmarking code I received: the same tests report slightly shorter run times when measured with timelines.
I also added a test comparing ON[]/OFF[] against direct assignment (Var = TRUE / Var = FALSE). ON and OFF appear to be slightly quicker than assignment — I would guess they need one less cycle each, since the difference is about 95 ms over 70,000 iterations.
Jeff
On a side note: have you found any differences using DEFINE_CALL compared to DEFINE_FUNCTION?