
Parallel Distributed Computing

CASE STUDY - 1

Name : Abhishek.C
Reg.No : 16BCE0564
Slot : l3+l4

Q1)Suppose we toss darts randomly at a square dartboard, whose bullseye is at the origin,
and whose sides are 2 feet in length. Suppose also that there is a circle inscribed in the
square dartboard. The radius of the circle is 1 foot, and its area is π square feet. If the points
that are hit by the darts are uniformly distributed (and we always hit the square), then the
number of darts that hit inside the circle should approximately satisfy the equation

Number in circle / Total number of tosses = π/4


since the ratio of the area of the circle to the area of the square is π/4

We can use this formula to estimate the value of π with a random number generator:
number_in_circle = 0;
for (toss = 0; toss < number_of_tosses; toss++) {
    x = random double between -1 and 1;
    y = random double between -1 and 1;
    distance_squared = x * x + y * y;
    if (distance_squared <= 1) number_in_circle++;
}
pi_estimate = 4 * number_in_circle / (double) number_of_tosses;
This is called a “Monte Carlo” method, since it uses randomness. Write a program that uses the
above Monte Carlo method to estimate π (MPI / Pthreads/OpenMP).

ALGORITHM :

Start

Step 1.The main thread should read in the total number of tosses and print
the estimate.
Step 2.You can use point-to-point or collective communications.
Step 3.Vary the number of darts (points) used: Ndarts = 10^n, where n = 3, 4, 5, ..., Nmax
Step 4.You may want to use long long ints for the number of hits in the
circle and the number of tosses, since both may have to be very
large to get a reasonable estimate of π.
Step 5.Vary the number of PEs: p = [1, 2, 4, 8, 16]
Step 6.Time the job runs.
Step 7.Run jobs on the queue.
Step 8.Find a reference value for π to the limits of a double precision
number.
Step 9.Estimate π to the limits of a double precision number.
Step 10.Calculate the value for π and the error of your estimate as a
function of the number of darts (points) used.
Step 11.Calculate the error of your estimate: Err = π_ref − π_measured
Step 12.Create summary tables of your test data and results.
Step 13.Plot the error as a function of the number of processors and number
of points.
Step 14.Plot the runtime as a function of the number of processors and
number of points.
End

CODE :
#include "mpi.h"
#include <stdio.h>
#include <stdlib.h>
#include <sys/time.h>
#include <time.h>
#include <float.h>
#include <string.h>

double square_distance(double x, double y) {
    return (x * x) + (y * y);
}

long long int random_toss_generator(long long int num_tosses, int rank) {

    long long int toss, number_in_circle = 0;
    double x, y, distance_squared;

    /* Seed each rank differently so the processes do not all generate the
       same sequence of tosses */
    srand((unsigned)time(NULL) + rank);
    // printf(" Number of tosses %lld \n ", num_tosses);
    for (toss = 0; toss < num_tosses; toss++) {
        x = (((double)rand() / RAND_MAX) * 2) - 1;
        y = (((double)rand() / RAND_MAX) * 2) - 1;
        // printf(" %lf %lf \n ", x, y);
        distance_squared = square_distance(x, y);
        // printf("%lf\n", distance_squared);

        if (distance_squared <= 1) {
            number_in_circle++;
        }
    }
    // printf(" Rank %d has %lld within circle \n", rank, number_in_circle);
    return number_in_circle;
}

double estimate_pi(long long int num_tosses, long long int global_toss_sum, int numtasks) {
    /* Each of the numtasks processes performed num_tosses tosses, so the total
       number of tosses is num_tosses * numtasks.  Cast to double to avoid
       integer division truncating the result to 0. */
    double pi = 4.0 * (double)global_toss_sum / ((double)num_tosses * numtasks);
    return pi;
}

int main(int argc, char *argv[])
{
    int numtasks, rank;
    long long int num_tosses, number_in_circle, global_toss_sum;
    double pi;

    /* Setup MPI */
    MPI_Init(&argc, &argv);

    /* Determine number of tasks and rank */
    MPI_Comm_size(MPI_COMM_WORLD, &numtasks);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    if (rank == 0) {
        /* Master reads number of tosses */
        printf(" Enter the number of tosses: ");
        scanf("%lld", &num_tosses);
    }

    /* Broadcast the number of tosses to the other processes */
    MPI_Bcast(&num_tosses, 1, MPI_LONG_LONG_INT, 0, MPI_COMM_WORLD);

    /* Each process counts how many of its random tosses land inside the circle */
    number_in_circle = random_toss_generator(num_tosses, rank);
    printf(" Rank %d has %lld tosses in the circle \n ", rank, number_in_circle);

    /* Reduce to find the global sum on rank 0 */
    MPI_Reduce(&number_in_circle, &global_toss_sum, 1, MPI_LONG_LONG_INT, MPI_SUM,
               0, MPI_COMM_WORLD);

    /* Master estimates and prints the result for pi */
    if (rank == 0) {
        printf(" Global sum of tosses: %lld \n", global_toss_sum);
        printf(" Number of tasks %d \n", numtasks);
        pi = estimate_pi(num_tosses, global_toss_sum, numtasks);
        printf(" Pi value is %lf \n ", pi);
    }

    MPI_Finalize();
    return 0;
}
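Steps 6, 8 and 11 of the algorithm also ask for timing the runs and for the error against a double-precision reference value of π, which the program above does not measure. The fragment below is a minimal sketch of how this could be added inside main(), assuming the variables rank, num_tosses, number_in_circle, global_toss_sum, numtasks and pi declared above are in scope; MPI_Barrier() and MPI_Wtime() are standard MPI calls.

/* Sketch only: timing (step 6) and error measurement (steps 8-11) around the
   toss loop and the reduction; assumes the declarations from main() above. */
const double PI_REF = 3.141592653589793;  /* double-precision reference for pi */
double start, finish;

MPI_Barrier(MPI_COMM_WORLD);              /* synchronise before timing */
start = MPI_Wtime();

number_in_circle = random_toss_generator(num_tosses, rank);
MPI_Reduce(&number_in_circle, &global_toss_sum, 1, MPI_LONG_LONG_INT, MPI_SUM,
           0, MPI_COMM_WORLD);

finish = MPI_Wtime();

if (rank == 0) {
    pi = estimate_pi(num_tosses, global_toss_sum, numtasks);
    printf(" Pi estimate %.15lf, error %.3e, time %lf s\n",
           pi, PI_REF - pi, finish - start);
}

A possible way to build and run the program, assuming the source is saved as monte_carlo_pi.c (a name chosen here only for illustration) and an MPI toolchain providing mpicc and mpirun is installed; -np is varied over p = 1, 2, 4, 8, 16 as in step 5:

mpicc -g -Wall -o monte_carlo_pi monte_carlo_pi.c
mpirun -np 4 ./monte_carlo_pi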
OUTPUT :

Q2) Assume that a program generates large quantities of floating point data that is stored in
an array. In order to determine the distribution of the data, we can make a histogram of the
data. To make a histogram, we simply divide the range of the data up into equal sized
subintervals, or bins; determine the number of measurements in each bin; and plot a bar
graph showing the relative sizes of the bins. Use MPI to implement the histogram.

ALGORITHM :
Start
Step 1.Get the inputs from the user:
1.1 the number of measurements, data_count
1.2 an array of data_count floats, data
1.3 the minimum value for the bin containing the smallest values, min_meas
1.4 the maximum value for the bin containing the largest values, max_meas
1.5 the number of bins, bin_count
Step 2.The output will be an array containing the number of elements of data that lie in each
bin, where bin_maxes is an array of bin_count floats and bin_counts is an array of
bin_count ints.
Step 3.The array bin_maxes will store the upper bound for each bin, and bin_counts will
store the number of data elements in each bin, where
bin_width = (max_meas - min_meas)/bin_count
Step 4.Then bin_maxes is initialized.
Step 5.By convention, bin b holds all the measurements in the range
bin_maxes[b-1] <= measurement < bin_maxes[b]
Step 6.The Find_bin function returns the bin that data[i] belongs to. It can be a simple
linear search: search through bin_maxes until a bin b is found that satisfies
bin_maxes[b-1] <= data[i] < bin_maxes[b]
Step 7.Elements of data are assigned to the processes/threads so that each process/thread
gets roughly the same number of elements.
Step 8.Each process/thread is responsible for updating its loc_bin_cts array on the basis
of its assigned elements.
End

CODE:

#include <stdio.h>
#include <stdlib.h>
#include <mpi.h>

void Get_input(int* bin_count_p, float* min_meas_p, float* max_meas_p,
        int* data_count_p, int* local_data_count_p, int my_rank,
        int comm_sz, MPI_Comm comm);

void Gen_data(float local_data[], int local_data_count, int data_count,
        float min_meas, float max_meas, int my_rank, MPI_Comm comm);

void Set_bins(float bin_maxes[], int loc_bin_cts[], float min_meas,
        float max_meas, int bin_count, int my_rank, MPI_Comm comm);

void Find_bins(int bin_counts[], float local_data[], int loc_bin_cts[],
        int local_data_count, float bin_maxes[], int bin_count,
        float min_meas, MPI_Comm comm);

int Which_bin(float data, float bin_maxes[], int bin_count, float min_meas);

void Print_histo(float bin_maxes[], int bin_counts[], int bin_count,
        float min_meas);

void e(int error);

int main(int argc, char* argv[]) {

    int bin_count;
    float min_meas;
    float max_meas;
    float* bin_maxes;
    int* bin_counts;
    int* loc_bin_cts;
    int data_count;
    int local_data_count;
    float* data;
    float* local_data;
    int my_rank;
    int comm_sz;
    MPI_Comm comm;

    // Initialize MPI
    e(MPI_Init(&argc, &argv));
    comm = MPI_COMM_WORLD;
    e(MPI_Comm_size(comm, &comm_sz));
    e(MPI_Comm_rank(comm, &my_rank));

    // Get user inputs for bin_count, max_meas, min_meas, and data_count
    Get_input(&bin_count, &min_meas, &max_meas, &data_count,
            &local_data_count, my_rank, comm_sz, comm);

    // Allocate arrays
    bin_maxes = malloc(bin_count*sizeof(float));
    bin_counts = malloc(bin_count*sizeof(int));
    loc_bin_cts = malloc(bin_count*sizeof(int));
    data = malloc(data_count*sizeof(float));
    local_data = malloc(local_data_count*sizeof(float));

    // Set up the bins, generate and scatter the data, then count locally
    Set_bins(bin_maxes, loc_bin_cts, min_meas, max_meas, bin_count, my_rank, comm);
    Gen_data(local_data, local_data_count, data_count, min_meas, max_meas, my_rank, comm);
    Find_bins(bin_counts, local_data, loc_bin_cts, local_data_count, bin_maxes,
            bin_count, min_meas, comm);

    // Sum the local bin counts into the global histogram on rank 0
    e(MPI_Reduce(loc_bin_cts, bin_counts, bin_count, MPI_INT, MPI_SUM, 0, comm));
    if (my_rank == 0)
        Print_histo(bin_maxes, bin_counts, bin_count, min_meas);

    free(bin_maxes);
    free(bin_counts);
    free(loc_bin_cts);
    free(data);
    free(local_data);
    MPI_Finalize();
    return 0;
} /* main */

void e(int error) {
    if (error != MPI_SUCCESS) {
        fprintf(stderr, "Error starting MPI program, Terminating.\n");
        MPI_Abort(MPI_COMM_WORLD, error);
        MPI_Finalize();
        exit(1);
    }
}

/* Print out the histogram */
void Print_histo(
        float bin_maxes[]  /* in */,
        int   bin_counts[] /* in */,
        int   bin_count    /* in */,
        float min_meas     /* in */) {

    int width = 40;
    int max = 0;
    int row_width;
    int i;
    int j;

    // get max count
    for (i = 0; i < bin_count; i++) {
        if (bin_counts[i] > max)
            max = bin_counts[i];
    }
    for (i = 0; i < bin_count; i++) {
        printf("%10.3f |", bin_maxes[i]);
        row_width = (float) bin_counts[i] / (float) max * (float) width;
        for (j = 0; j < row_width; j++) {
            printf("#");
        }
        printf(" %d\n", bin_counts[i]);
    }
} /* Print_histo */

/* Find the appropriate bin for each element of local_data and increment that
   bin's local count */
void Find_bins(
        int   bin_counts[]     /* out */,
        float local_data[]     /* in  */,
        int   loc_bin_cts[]    /* out */,
        int   local_data_count /* in  */,
        float bin_maxes[]      /* in  */,
        int   bin_count        /* in  */,
        float min_meas         /* in  */,
        MPI_Comm comm) {

    int i;
    int bin;

    for (i = 0; i < local_data_count; i++) {
        bin = Which_bin(local_data[i], bin_maxes, bin_count, min_meas);
        loc_bin_cts[bin]++;
    }
} /* Find_bins */

/* Return the bin that a single data value falls into (linear search through
   bin_maxes) */
int Which_bin(float data, float bin_maxes[], int bin_count,
        float min_meas) {

    int i;
    for (i = 0; i < bin_count - 1; i++) {
        if (data <= bin_maxes[i]) break;
    }
    return i;
} /* Which_bin */

/* Initialize each bin: compute the upper bound of every bin and zero the
   local counts */
void Set_bins(
        float bin_maxes[]   /* out */,
        int   loc_bin_cts[] /* out */,
        float min_meas      /* in  */,
        float max_meas      /* in  */,
        int   bin_count     /* in  */,
        int   my_rank       /* in  */,
        MPI_Comm comm       /* in  */) {

    float range = max_meas - min_meas;
    float interval = range / bin_count;

    int i;
    for (i = 0; i < bin_count; i++) {
        bin_maxes[i] = interval * (float)(i+1) + min_meas;
        loc_bin_cts[i] = 0;
    }
} /* Set_bins */

/* Generate random data on rank 0 and scatter it to all processes */
void Gen_data(
        float local_data[]     /* out */,
        int   local_data_count /* in  */,
        int   data_count       /* in  */,
        float min_meas         /* in  */,
        float max_meas         /* in  */,
        int   my_rank          /* in  */,
        MPI_Comm comm          /* in  */) {

    float* data = NULL;  /* only allocated and filled on rank 0 */
    if (my_rank == 0) {
        float range = max_meas - min_meas;
        data = malloc(data_count*sizeof(float));

        int i;
        for (i = 0; i < data_count; i++) {
            data[i] = (float) rand() / (float) RAND_MAX * range + min_meas;
        }
    }
    /* Each process receives local_data_count values */
    e(MPI_Scatter(data, local_data_count, MPI_FLOAT, local_data, local_data_count,
            MPI_FLOAT, 0, comm));
    if (my_rank == 0) free(data);
} /* Gen_data */

/* Get user inputs for bin_count, max_meas, min_meas, and data_count */
void Get_input(
        int*   bin_count_p,        /* out */
        float* min_meas_p,         /* out */
        float* max_meas_p,         /* out */
        int*   data_count_p,       /* out */
        int*   local_data_count_p, /* out */
        int    my_rank,            /* in  */
        int    comm_sz,            /* in  */
        MPI_Comm comm              /* in  */) {

    if (my_rank == 0) {
        printf("Number of bins (int): ");
        scanf("%d", bin_count_p);
        printf("Minimum value (float): ");
        scanf("%f", min_meas_p);
        printf("Maximum value (float): ");
        scanf("%f", max_meas_p);
        // Make sure min < max: swap the values (not the pointers) if needed
        if (*max_meas_p < *min_meas_p) {
            float temp = *max_meas_p;
            *max_meas_p = *min_meas_p;
            *min_meas_p = temp;
        }
        printf("Number of values (int): ");
        scanf("%d", data_count_p);

        // Make sure data_count is a multiple of comm_sz
        *local_data_count_p = *data_count_p / comm_sz;
        *data_count_p = *local_data_count_p * comm_sz;
        printf("\n");
    }
    e(MPI_Bcast(bin_count_p, 1, MPI_INT, 0, comm));
    e(MPI_Bcast(min_meas_p, 1, MPI_FLOAT, 0, comm));
    e(MPI_Bcast(max_meas_p, 1, MPI_FLOAT, 0, comm));
    e(MPI_Bcast(data_count_p, 1, MPI_INT, 0, comm));
    e(MPI_Bcast(local_data_count_p, 1, MPI_INT, 0, comm));
}
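A possible way to build and run this program, assuming the source is saved as histogram.c (a name chosen here only for illustration) and mpicc/mpirun are available; the process count 4 is only an example:

mpicc -g -Wall -o histogram histogram.c
mpirun -np 4 ./histogram

Rank 0 then prompts for the number of bins, the minimum and maximum measurement values, and the number of values, and prints one row per bin with '#' characters scaled to the largest bin count, followed by the count itself.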
OUTPUT :
