#include "../../../squiggle.h"
#include "../../../squiggle_more.h"
#include <stdio.h>
#include <stdlib.h>

int main()
{
    /* Question: can we parallelize this?
       A = normal(5,2)
       B = min(A)
       B * 20
    */
    // Note: the samplers below are defined inside main(); nested functions are a GCC extension.

    /* Option 1: parallelize taking from n samples */
    // Question being asked: what is the distribution of sampling 1000 times and taking the min?
    double sample_min_of_n(uint64_t* seed, int n)
    {
        // Draw n samples from normal(5,2), advancing the seed each time, and keep the smallest.
        double min = sample_normal(5, 2, seed);
        for (int i = 0; i < (n - 1); i++) {
            double sample = sample_normal(5, 2, seed);
            if (sample < min) {
                min = sample;
            }
        }
        return min;
    }
    double sampler_min_of_1000(uint64_t* seed)
    {
        return sample_min_of_n(seed, 1000);
    }

    int n_samples = 10000, n_threads = 16;
    double* results = malloc(n_samples * sizeof(double));
    parallel_sampler(sampler_min_of_1000, results, n_threads, n_samples);
    printf("Mean of the distribution of (taking the min of 1000 samples of a normal(5,2)): %f\n", array_mean(results, n_samples));
    free(results);

    /* Option 2: take the min from n samples cleverly using parallelism */
    // Question being asked: can we take the min of n samples cleverly?
    double sample_n_parallel(int n)
    {
        // Split the n draws across 16 threads; the remainder is handled serially.
        int n_threads = 16;
        int quotient = n / 16;
        int remainder = n % 16;

        uint64_t seed = 100;
        // Note: sample_min_of_n always draws at least one sample, so when remainder == 0
        // this takes one extra draw beyond the requested n; negligible for a min over many samples.
        double result_remainder = sample_min_of_n(&seed, remainder);

        double sample_min_of_quotient(uint64_t* seed)
        {
            double result = sample_min_of_n(seed, quotient);
            // printf("Result: %f\n", result);
            return result;
        }
        double* results = malloc(n_threads * sizeof(double));
        parallel_sampler(sample_min_of_quotient, results, n_threads, n_threads);

        // Take the min across the per-thread minima, then fold in the serial remainder.
        double min = results[0];
        for (int i = 1; i < n_threads; i++) {
            if (min > results[i]) {
                min = results[i];
            }
        }
        if (min > result_remainder) {
            min = result_remainder;
        }
        free(results);

        return min;
    }
    printf("Minimum of 1M samples of normal(5,2): %f\n", sample_n_parallel(1000 * 1000));
}
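
/*
   Build note (an assumed invocation, not verified against this repository's makefile):

       gcc -O3 example.c ../../../squiggle.c ../../../squiggle_more.c -lm -o example

   parallel_sampler and array_mean come from squiggle_more; depending on how the
   parallelism is implemented there, an extra flag such as -fopenmp or -lpthread
   may be required.
*/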