I have the following code running on my dual-core machine.
When I run one or two instances of the application on the same PC, I get the expected timer resolution of 100 ms. However, when I run three instances of the same application on the same PC, the timer resolution degrades to more than 100 ms. Is it possible at all to make all three instances of the application run with the same 100 ms resolution? Is this related to the number of cores on my machine?
#include <signal.h>
#include <stdio.h>
#include <string.h>
#include <sys/time.h>
void timer_handler(int signum)
{
   double time;
   struct timeval tv;
   // obtain the current time
   gettimeofday(&tv, NULL);
   time = tv.tv_sec + tv.tv_usec / 1e6;
   printf("timer_handler at time = %lf\n", time);
}
int main(void)
{
   struct sigaction sa;
   struct itimerval timer;

   // install the SIGALRM handler
   memset(&sa, 0, sizeof(sa));
   sa.sa_handler = &timer_handler;
   sigaction(SIGALRM, &sa, NULL);

   // fire first after 100 ms, then every 100 ms
   timer.it_value.tv_sec = 0;
   timer.it_value.tv_usec = 100000;
   timer.it_interval.tv_sec = 0;
   timer.it_interval.tv_usec = 100000;
   setitimer(ITIMER_REAL, &timer, NULL);

   for (;;);  // busy-wait so the process stays alive

   return 0;
}
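
For reference, here is the same program with the busy loop swapped for pause(), so each instance sleeps between ticks instead of spinning at 100% CPU. This is just a sketch of what I'm considering; I haven't verified that it restores the 100 ms resolution with three instances running:

#include <signal.h>
#include <stdio.h>
#include <string.h>
#include <sys/time.h>
#include <unistd.h>

static void timer_handler(int signum)
{
   (void) signum;  // unused
   struct timeval tv;
   gettimeofday(&tv, NULL);
   printf("timer_handler at time = %lf\n", tv.tv_sec + tv.tv_usec / 1e6);
}

int main(void)
{
   struct sigaction sa;
   struct itimerval timer;

   memset(&sa, 0, sizeof(sa));
   sa.sa_handler = &timer_handler;
   sigaction(SIGALRM, &sa, NULL);

   timer.it_value.tv_sec = 0;
   timer.it_value.tv_usec = 100000;
   timer.it_interval.tv_sec = 0;
   timer.it_interval.tv_usec = 100000;
   setitimer(ITIMER_REAL, &timer, NULL);

   for (;;)
      pause();  // sleep until the next SIGALRM instead of spinning

   return 0;
}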