Im practising in parallel programming using MPI and i developed a programm that is calculating the average and how many numbers of the array are greater or fewer than the average. Unfortunately when im trying to run it on linux ubuntu system i get this *Caught signal 11 (Segmentation fault: address not mapped to object at address 0xff00000107), backtrace(tid: 5699).
#include <stdio.h>
#include <stdlib.h>
#include "mpi.h"
int main(int argc, char** argv){
    int p, plithos, i, k, count_max,count_min, sunexeia;
    int *pinakas;
    int *final_res;
    int loc_matrix[100];
    int loc_num;
    int my_rank;
    int root, local_sum, sum, j, source;
    int tag1=50;            
    int tag2=60;            
    int tag3=70;            
    int tag4=80;
    
    float average;
    
    MPI_Status status;
    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &my_rank);
    MPI_Comm_size(MPI_COMM_WORLD, &p);  
    
    if (my_rank == 0) {
        printf("please give the size of the array: ");
        scanf("%d", &plithos);
        pinakas = (int *) malloc(plithos * sizeof(int)); 
        for (i = 0; i < plithos; i++) {
            printf("Give the %d number: ", i);
            scanf("%d", &pinakas[i]);
        }
    }
    root = 0;
    MPI_Bcast(&plithos, 1, MPI_INT, root, MPI_COMM_WORLD); // here we are sending the size of the array to the processors
    
    loc_num = plithos / p;
    root = 0; 
    MPI_Scatter(&pinakas, loc_num, MPI_INT, &loc_matrix, loc_num, MPI_INT, root, MPI_COMM_WORLD); // here we are sending the amount of tasks that every processor must have
    
    
    local_sum=0; // here all processors will calculate their sums
    if (my_rank == 0) {
        int start = 0;
        int end = loc_num;
        for (i = start; i < end; i++){
            local_sum += pinakas[i];
        }
    }
    else{
        int start = my_rank * loc_num;
        int end = my_rank + loc_num;
        for (i = start; i < end; i++){
            local_sum += pinakas[i];
        }
    }
    MPI_Reduce(&local_sum, &sum, 1, MPI_INT, MPI_SUM, 0, MPI_COMM_WORLD); //here with the reduce we are caclulating all the sums from all processors
    
    if (my_rank == 0) { // processor 0 finds the average 
        average = (double)sum / plithos;
    printf("The average is: %f\n", average);
    }
    
    root = 0;
    MPI_Bcast(&average, 1, MPI_FLOAT, root, MPI_COMM_WORLD); //edo stelnoume tin mesi timi stis upoloipes diergasies
    
    if (my_rank = 0){ //edo h diergasia 0 tha upologisei posa stoixeia exoun min kai max > tou average
        for(i=0; i<plithos; i++){
            if(pinakas[i]> average){
                count_max = count_max +1;
            }
            if(pinakas[i]< average){
                count_min = count_min +1;
            }
        }
        printf("To plithos ton stoixeion pou exoun megaluteri timi apo tin mesi timi einai: %d", count_max);
        printf("To plithos ton stoixeion pou exoun mikroteri timi apo tin mesi timi einai: %d", count_min);
    }
    
    
MPI_Finalize();
}
    
 
    