#include "../../../squiggle.h" #include "../../../squiggle_more.h" #include #include int main() { /* Question: can we parallelize this? A = normal(5,2) B = min(A) B * 20 */ /* Option 1: parallelize taking from n samples */ // Question being asked: what is the distribution of sampling 1000 times and taking the min? double sample_min_of_n(uint64_t * seed, int n) { double min = sample_normal(5, 2, seed); for (int i = 0; i < (n - 2); i++) { double sample = sample_normal(5, 2, seed); if (sample < min) { min = sample; } } return min; } double sample_min_of_1000(uint64_t * seed) { return sample_min_of_n(seed, 1000); } int n_samples = 1000000, n_threads = 16; double* results = malloc((size_t)n_samples * sizeof(double)); sampler_parallel(sample_min_of_1000, results, n_threads, n_samples); printf("Mean of the distribution of (taking the min of 1000 samples of a normal(5,2)): %f\n", array_mean(results, n_samples)); free(results); /* Option 2: take the min from n samples cleverly using parallelism */ // Question being asked: can we take the min of n samples cleverly? double sample_n_parallel(int n) { int n_threads = 16; int quotient = n / 16; int remainder = n % 16; uint64_t seed = 1000; double result_remainder = sample_min_of_n(&seed, remainder); double sample_min_of_quotient(uint64_t * seed) { return sample_min_of_n(seed, quotient); } double* results_quotient = malloc((size_t)quotient * sizeof(double)); sampler_parallel(sample_min_of_quotient, results_quotient, n_threads, quotient); double min = results_quotient[0]; for (int i = 1; i < quotient; i++) { if (min > results_quotient[i]) { min = results_quotient[i]; } } if (min > result_remainder) { min = result_remainder; } free(results_quotient); return min; } printf("Minimum of 1M samples of normal(5,2): %f\n", sample_n_parallel(1000000)); }