Compare commits
10 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 69f58d7435 | |
| | fe9ba337d8 | |
| | 95c3ba7798 | |
| | 82293a196c | |
| | 0a75c1b637 | |
| | 9207c34bf2 | |
| | bb3e2630f3 | |
| | bd647fdaef | |
| | 947374d097 | |
| | 8def7b59bf | |
functions/ftx-grants.csv (new file, 113 lines)

@@ -0,0 +1,113 @@
{ from: 'FTX FF', to: 'School of Thinking', date: '2022-07-27', amount: 250000, description: 'This regrant will support a global media outreach project to create high quality video and social media content about rationalism, longtermism and Effective Altruism.' },
{ from: 'FTX FF', to: 'Legal Services Planning Grant', date: '2022-07-27', amount: 100000, description: 'This regrant will support six months of research on topics including how legal services can be effectively provided to the Effective Altruism community, materials to be included in a legal services handbook for EA organizations, novel legal questions particular to the EA community that might benefit from further research initiatives, and ways to create an effective EA professional network for practicing lawyers.' },
{ from: 'FTX FF', to: 'Manifold Markets', date: '2022-07-27', amount: 1000000, description: 'This regrant will support Manifold Markets in building a play-money prediction market platform. The platform is also experimenting with impact certificates and charity prediction markets.' },
{ from: 'FTX FF', to: 'David Xu', date: '2022-07-27', amount: 50000, description: 'This regrant will support six months of research on AI safety.' },
{ from: 'FTX FF', to: 'Trojan Detection Challenge at NeurIPS 2022', date: '2022-07-27', amount: 50000, description: 'This regrant will support prizes for a trojan detection competition at NeurIPS, which involves identifying whether a deep neural network will suddenly change behavior if certain unknown conditions are met.' },
{ from: 'FTX FF', to: 'Effective Altruism Office Zurich', date: '2022-07-27', amount: 52000, description: 'This regrant will support renting and furnishing an office space for a year.' },
{ from: 'FTX FF', to: 'Akash Wasil', date: '2022-07-27', amount: 26000, description: 'This regrant will support an individual working on supporting students who are interested in focusing their careers on the world’s most pressing problems.' },
{ from: 'FTX FF', to: 'Fiona Pollack', date: '2022-07-27', amount: 30000, description: 'This regrant will support six months of salary for an individual working to support Harvard students interested in working on the world’s most pressing problems and protecting and improving the long term future.' },
{ from: 'FTX FF', to: 'Peter McLaughlin', date: '2022-07-27', amount: 46000, description: 'This regrant will support six months of research on criticisms of effective altruism.' },
{ from: 'FTX FF', to: 'Dwarkesh Patel', date: '2022-07-27', amount: 76000, description: 'This regrant will support a promising podcaster to hire a research assistant and editor, purchase equipment, and cover travel to meet guests in person. The podcast covers technological progress, existential risk, economic growth, and the long term future.' },
{ from: 'FTX FF', to: 'ALERT', date: '2022-07-27', amount: 150000, description: 'This regrant will support the creation of the Active Longtermist Emergency Response Team, an organization to rapidly manage emerging global events like Covid-19.' },
{ from: 'FTX FF', to: 'EA Critiques and Red Teaming Prize', date: '2022-07-27', amount: 100000, description: 'This regrant will support prize money for a writing contest for critically engaging with theory or work in Effective Altruism. The goal of the contest is to produce thoughtful, action oriented critiques.' },
{ from: 'FTX FF', to: 'Federation for American Scientists', date: '2022-07-27', amount: 1000000, description: 'This regrant will support a researcher and research assistant to work on high-skill immigration and AI policy at FAS for three years.' },
{ from: 'FTX FF', to: 'Ought', date: '2022-07-27', amount: 5000000, description: 'This regrant will support Ought’s work building Elicit, a language-model based research assistant. This work contributes to research on reducing alignment risk through scaling human supervision via process-based systems.' },
{ from: 'FTX FF', to: 'ML Safety Scholars Program', date: '2022-07-27', amount: 490000, description: 'This regrant will fund a summer program for up to 100 students to spend 9 weeks studying machine learning, deep learning, and technical topics in safety.' },
{ from: 'FTX FF', to: 'AntiEntropy', date: '2022-07-27', amount: 120000, description: 'This regrant will support a project to create and house operations-related resources and guidance for EA-aligned organizations.' },
{ from: 'FTX FF', to: 'Everett Smith', date: '2022-07-27', amount: 35000, description: 'This regrant will support a policy retreat on governing artificial intelligence.' },
{ from: 'FTX FF', to: 'Olle Häggström, Chalmers University of Technology', date: '2022-07-27', amount: 380000, description: 'This regrant will support research on statistical arguments relating to existential risk and work on risks from artificial intelligence, as well as outreach, supervision, and policy work on these topics.' },
{ from: 'FTX FF', to: 'Essay Contest on Existential Risk in US Cost Benefit Analysis', date: '2022-07-27', amount: 137500, description: 'This regrant will support an essay contest on “Accounting for Existential Risks in US Cost-Benefit Analysis,” with the aim of contributing to the revision of OMB Circular-A4, a document which guides US government cost-benefit analysis. The Legal Priorities Project is administering the contest.' },
{ from: 'FTX FF', to: 'MineRL BASALT competition at NeurIPS', date: '2022-07-27', amount: 155000, description: 'This regrant will support a NeurIPS competition applying human feedback in a non-language-model setting, specifically pretrained models in Minecraft. The grant will be administered by the Berkeley Existential Risk Initiative.' },
{ from: 'FTX FF', to: 'QURI', date: '2022-07-27', amount: 200000, description: 'This regrant will support QURI to develop a programming language called "Squiggle" as a tool for probabilistic estimation. The hope is this will be a useful tool for forecasting and fermi estimates.' },
{ from: 'FTX FF', to: 'Andi Peng', date: '2022-07-27', amount: 42600, description: 'This regrant will support four months of salary and compute for research on AI alignment.' },
{ from: 'FTX FF', to: 'CSIS', date: '2022-07-27', amount: 75000, description: 'This regrant will support initiatives including a CSIS public event focused on the importance of investments in human capital to ensure US national security; roundtables with policymakers, immigration experts, national security professionals, and company representatives to discuss key policy actions that should be taken to bolster US national security through immigration reform; and two episodes of the “Vying for Talent” podcast focusing on the importance of foreign talent in bolstering America’s innovative capacity.' },
{ from: 'FTX FF', to: 'Aaron Scher', date: '2022-07-27', amount: 28500, description: 'This regrant will support a summer of research on AI alignment in Berkeley.' },
{ from: 'FTX FF', to: 'Kris Shrishak', date: '2022-07-27', amount: 28000, description: 'This regrant will support research on how cryptography might be applied to AI safety research.' },
{ from: 'FTX FF', to: 'AI Impacts', date: '2022-07-27', amount: 250000, description: 'This regrant will support rerunning the highly-cited survey “When Will AI Exceed Human Performance? Evidence from AI Experts” from 2016, analysis, and publication of results.' },
{ from: 'FTX FF', to: 'Chinmay Ingalagavi', date: '2022-07-27', amount: 50000, description: 'This regrant will support a Masters at LSE for a talented STEM student.' },
{ from: 'FTX FF', to: 'Apart Research', date: '2022-07-27', amount: 95000, description: 'This regrant will support the creation of an AI Safety organization which will create a platform to share AI safety research ideas and educational materials, connect people working on AI safety, and bring new people into the field.' },
{ from: 'FTX FF', to: 'Tereza Flidrova', date: '2022-07-27', amount: 32000, description: 'This regrant will support a one year master’s program in architecture for a student interested in building civilizational shelters.' },
{ from: 'FTX FF', to: 'J. Peter Scoblic', date: '2022-07-27', amount: 25000, description: 'This regrant will fund a nuclear risk expert to construct nuclear war-related forecasting questions and provide forecasts and explanations on key nuclear war questions.' },
{ from: 'FTX FF', to: 'AI Risk Public Materials Competition', date: '2022-07-27', amount: 40000, description: 'This regrant will support two competitions to produce better public materials on the existential risk from AI.' },
{ from: 'FTX FF', to: 'Moncef Slaoui', date: '2022-07-27', amount: 150000, description: 'This regrant will fund the writing of Slaoui’s memoir, especially including his experience directing Operation Warp Speed.' },
{ from: 'FTX FF', to: 'Artificial Intelligence Summer Residency Program', date: '2022-07-27', amount: 60000, description: 'This regrant will support a six week summer residency in Berkeley on AI safety.' },
{ from: 'FTX FF', to: 'Public Editor', date: '2022-07-27', amount: 500000, description: 'This regrant will support a project to use a combination of human feedback and Machine Learning to label misinformation and reasoning errors in popular news articles.' },
{ from: 'FTX FF', to: 'The Good Ancestors Project', date: '2022-07-27', amount: 75000, description: 'This regrant will support the creation of The Good Ancestors Project, an Australian-based organization to host research and community building on topics relevant to making the long term future go well.' },
{ from: 'FTX FF', to: 'Thomas Kwa', date: '2022-07-27', amount: 37500, description: 'This regrant will support three months of research on AI safety.' },
{ from: 'FTX FF', to: 'Joshua Greene, Harvard University', date: '2022-07-27', amount: 250000, description: 'This regrant will support the real-world testing and roll-out of Red Brain, Blue Brain, an online quiz designed to reduce negative partisanship between Democrats and Republicans in the US.' },
{ from: 'FTX FF', to: 'Braden Leach', date: '2022-07-27', amount: 175000, description: 'This regrant supported a recent law school graduate to work on biosecurity. Braden will research and write at the Johns Hopkins Center for Health Security.' },
{ from: 'FTX FF', to: 'Adversarial Robustness Prizes at ECCV', date: '2022-07-27', amount: 30000, description: 'This regrant will support three prizes for the best papers on adversarial robustness research at a workshop at ECCV, the main fall computer vision conference. The best papers are selected to have higher relevance to long-term threat models than usual adversarial robustness papers.' },
{ from: 'FTX FF', to: 'Confido Institute', date: '2022-07-27', amount: 190000, description: 'The Confido Institute is working on developing a user-friendly interactive app, Confido, for making forecasts and communicating beliefs and uncertainty within groups and organizations. They are also building interactive educational programs about forecasting and working with uncertainty based around this app.' },
{ from: 'FTX FF', to: 'Supporting Agent Foundations AI safety research at ALTER', date: '2022-07-27', amount: 200000, description: 'This regrant will support 1.5-3 years of salary for a mathematics researcher to work with Vanessa Kosoy on the learning-theoretic AI safety agenda.' },
{ from: 'FTX FF', to: 'Modeling Transformative AI Risks (Aryeh Englander, Sammy Martin, Analytica Consulting)', date: '2022-07-27', amount: 272000, description: 'This regrant will support two AI researchers, one or two additional assistants, and a consulting firm to continue to build out and fully implement the quantitative model for how to understand risks and interventions around AI safety, expanding on their earlier research on “Modeling Transformative AI Risk.”' },
{ from: 'FTX FF', to: 'Impact Markets', date: '2022-07-27', amount: 215000, description: 'This regrant will support the creation of an “impact market.” The hope is to improve charity fundraising by allowing profit-motivated investors to earn returns by investing in charitable projects that are eventually deemed impactful.' },
{ from: 'FTX FF', to: 'AI Alignment Prize on Inverse Scaling', date: '2022-07-27', amount: 250000, description: 'This regrant will support prizes for a contest to find tasks where larger language models do worse (“inverse scaling”).' },
{ from: 'FTX FF', to: 'Swift Centre for Applied Forecasting', date: '2022-07-27', amount: 2000000, description: 'This regrant will support the creation of the Swift Centre for Applied Forecasting, including salary for a director and a team of expert forecasters. They will forecast trends from Our World in Data charts, as well as other topics related to ensuring the long term future goes well, with a particular focus on explaining the “why” of forecast estimates.' },
{ from: 'FTX FF', to: 'Lawrence Newport', date: '2022-07-27', amount: 95000, description: 'This regrant will support the launch and first year of a youtube channel focusing on video essays presented by Dr Lawrence Newport on longtermism, the future of humanity, and related topics.' },
{ from: 'FTX FF', to: 'Aidan O’Gara', date: '2022-07-27', amount: 46000, description: 'This regrant will fund salary, compute, and a scholarship for an undergraduate student doing career development and research on language model safety.' },
{ from: 'FTX FF', to: 'Legal Priorities Project', date: '2022-07-27', amount: 480000, description: 'We recommended a grant to support the Legal Priorities Project’s ongoing research and outreach activities. This will allow LPP to pay two new hires and to put on a summer institute for non-US law students in Oxford.' },
{ from: 'FTX FF', to: 'Oded Galor, Brown University', date: '2022-07-27', amount: 500000, description: 'We recommended a grant to support two years of academic research on long-term economic growth.' },
{ from: 'FTX FF', to: 'The Atlas Fellowship', date: '2022-07-27', amount: 5000000, description: 'We recommended a grant to support scholarships for talented and promising high school students to use towards educational opportunities and enrolling in a summer program.' },
{ from: 'FTX FF', to: 'Sherlock Biosciences', date: '2022-07-27', amount: 2000000, description: 'We recommended an investment to support the development of universal CRISPR-based diagnostics, including paper-based diagnostics that can be used in developing-country settings without electricity.' },
{ from: 'FTX FF', to: 'Rethink Priorities', date: '2022-07-27', amount: 700000, description: 'We recommended a grant to support Rethink’s research and projects aimed at improving humanity’s long-term prospects.' },
{ from: 'FTX FF', to: 'SecureBio', date: '2022-07-27', amount: 1200000, description: 'We recommended a grant to support the hiring of several key staff for Dr. Kevin Esvelt’s pandemic prevention work. SecureBio is working to implement universal DNA synthesis screening, build a reliable early warning system, and coordinate the development of improved personal protective equipment and its delivery to essential workers when needed.' },
{ from: 'FTX FF', to: 'Lionel Levine, Cornell University', date: '2022-07-27', amount: 1500000, description: 'We recommended a grant to Cornell University to support Prof. Levine, as well as students and collaborators, to work on alignment theory research at the Cornell math department.' },
{ from: 'FTX FF', to: 'Claudia Shi, Academic CS Research at Columbia University', date: '2022-07-27', amount: 100000, description: 'We recommended a grant to pay for research assistants over three years to support the work of a PhD student working on AI safety at Columbia University.' },
{ from: 'FTX FF', to: 'Institute for Progress', date: '2022-07-27', amount: 480000, description: 'We recommended a grant to support the Institute’s research and policy engagement work on high skilled immigration, biosecurity, and pandemic prevention.' },
{ from: 'FTX FF', to: 'Good Judgment Project', date: '2022-07-27', amount: 300000, description: 'We recommended a grant to support a Good Judgment initiative to produce forecasts on 10 Our World in Data data sets/charts.' },
{ from: 'FTX FF', to: 'Peter Hrosso, Researcher', date: '2022-07-27', amount: 230000, description: 'We recommended a grant to support a project aimed at training large language models to represent the probability distribution over question answers in a prediction market.' },
{ from: 'FTX FF', to: 'Michael Jacob, MITRE', date: '2022-07-27', amount: 485000, description: 'We recommended a grant to support research that we hope will be used to help strengthen the bioweapons convention and guide proactive actions to better secure those facilities or stop the dangerous work being done there.' },
{ from: 'FTX FF', to: 'Charity Entrepreneurship', date: '2022-07-27', amount: 470000, description: 'We recommended a grant to support the incubation of new charities that will work on health security.' },
{ from: 'FTX FF', to: 'Michael Robkin', date: '2022-07-27', amount: 200000, description: 'We recommended an investment to support the creation of Pretty Good PPE that is comfortable, storable, simple, and inexpensive. PGPPE aims to provide protection that is better than disposable masks and cheaper than both hazmat suits and N95s.' },
{ from: 'FTX FF', to: 'Legal Priorities Project', date: '2022-07-27', amount: 700000, description: 'This grant will support one year of operating expenses and salaries at the Legal Priorities Project, a longtermist legal research and field-building organization.' },
{ from: 'FTX FF', to: 'AI Safety Camp', date: '2022-07-27', amount: 290000, description: 'We recommended a grant to partially support the salaries for AI Safety Camp’s two directors and to support logistical expenses at its physical camp.' },
{ from: 'FTX FF', to: 'Anca Dragan, UC Berkeley', date: '2022-07-27', amount: 800000, description: 'We recommended a grant to support a project to develop interactive AI algorithms for alignment that can uncover the causal features in human reward systems, and thereby help AI systems learn underlying human values that generalize to new situations.' },
{ from: 'FTX FF', to: 'Association for Long Term Existence and Resilience', date: '2022-07-27', amount: 320000, description: 'We recommended a grant to support ALTER, an academic research and advocacy organization, which hopes to investigate, demonstrate, and foster useful ways to improve the future in the short term, and to safeguard and improve the long-term trajectory of humanity. The organization’s initial focus is building bridges to academia via conferences and grants to find researchers who can focus on AI safety, and on policy for reducing biorisk.' },
{ from: 'FTX FF', to: 'Manifold Markets', date: '2022-07-27', amount: 500000, description: 'We recommended a grant to support Manifold Markets in building a charity prediction market, as an experiment for enabling effective forecasters to direct altruistic donations.' },
{ from: 'FTX FF', to: 'Guoliang (Greg) Liu, Virginia Tech', date: '2022-07-27', amount: 500000, description: 'We recommended a grant to support a project to develop a new material -- an ultra-thin polymer-based thin film -- for use in next-generation Personal Protective Equipment which is both more effective and more comfortable.' },
{ from: 'FTX FF', to: 'Stimson South Asia Program', date: '2022-07-27', amount: 250000, description: 'We recommended a grant to support the identification and implementation of promising confidence-building measures to reduce conflict between India and Pakistan.' },
{ from: 'FTX FF', to: 'Prometheus Science Bowl', date: '2022-07-27', amount: 100000, description: 'We recommended a grant to support a competition for work on Eliciting Latent Knowledge, an open problem in AI alignment, for talented high school and college students who are participating in Prometheus Science Bowl.' },
{ from: 'FTX FF', to: 'Maxwell Tabarrok', date: '2022-07-27', amount: 7500, description: 'We recommended a grant to support this student to spend a summer at the Future of Humanity Institute at Oxford University researching differential tech development and the connection between existential risks to humanity and economic growth.' },
{ from: 'FTX FF', to: 'HelixNano', date: '2022-07-27', amount: 10000000, description: 'We recommended an investment to support Helix Nano running preclinical and Phase 1 trials of a pan-variant Covid-19 vaccine.' },
{ from: 'FTX FF', to: 'Giving What We Can', date: '2022-07-27', amount: 700000, description: 'We recommended a grant to support Giving What We Can’s mission to create a world in which giving effectively and significantly is a cultural norm.' },
{ from: 'FTX FF', to: 'Gabriel Recchia, University of Cambridge', date: '2022-07-27', amount: 380000, description: 'We recommended a grant to support research on how to fine-tune GPT-3 models to identify flaws in other fine-tuned language models’ arguments for the correctness of their outputs, and to test whether these help nonexpert humans successfully judge such arguments.' },
{ from: 'FTX FF', to: 'Simon Institute for Longterm Governance', date: '2022-07-27', amount: 820000, description: 'We recommended a grant to support SI’s policy work with the United Nations system on the prevention of existential risks to humanity.' },
{ from: 'FTX FF', to: 'Centre for Effective Altruism', date: '2022-07-27', amount: 13940000, description: 'We recommended a grant for general support for their activities, including running conferences, supporting student groups, and maintaining online resources.' },
{ from: 'FTX FF', to: 'Nonlinear', date: '2022-07-27', amount: 250000, description: 'We recommended a grant to support the maintenance of a library of high-quality audio content on the world’s most pressing problems, and a fund to provide productivity-enhancing equipment and support staff for people working on important social issues.' },
{ from: 'FTX FF', to: 'Konstantinos Konstantinidis', date: '2022-07-27', amount: 85000, description: 'We recommended a grant to support two years of research on the impacts of disruptive space technologies, nuclear risk, and mitigating risks from future space-based weapons.' },
{ from: 'FTX FF', to: 'Apollo Academic Surveys', date: '2022-07-27', amount: 250000, description: 'We recommended a grant to support Apollo’s work aggregating the views of academic experts in many different fields and making them freely available online.' },
{ from: 'FTX FF', to: 'AI Safety Support', date: '2022-07-27', amount: 200000, description: 'We recommended a grant for general funding for community building and managing the talent pipeline for AI alignment researchers. AI Safety Support’s work includes one-on-one coaching, events, and research training programs.' },
{ from: 'FTX FF', to: 'Daniel Brown, University of Utah', date: '2022-07-27', amount: 280000, description: 'We recommended a grant to support research on value alignment in AI systems, practical algorithms for efficient value alignment verification, and user studies and experiments to test these algorithms.' },
{ from: 'FTX FF', to: 'Khalil Lab at Boston University', date: '2022-07-27', amount: 1550000, description: 'We recommended a grant to support the development of a cheap, scalable, and decentralized platform for the rapid generation of disease-neutralizing therapeutic antibodies.' },
{ from: 'FTX FF', to: 'Sergey Levine, UC Berkeley', date: '2022-07-27', amount: 600000, description: 'We recommended a grant to support a project to study how large language models integrated with offline reinforcement learning pose a risk of machine deception and persuasion.' },
{ from: 'FTX FF', to: 'Non-trivial Pursuits', date: '2022-07-27', amount: 1000000, description: 'We recommended a grant to support outreach to help students to learn about career options, develop their skills, and plan their careers to work on the world’s most pressing problems.' },
{ from: 'FTX FF', to: 'Rational Animations', date: '2022-07-27', amount: 400000, description: 'We recommended a grant to support the creation of animated videos on topics related to rationality and effective altruism to explain these topics for a broader audience.' },
{ from: 'FTX FF', to: 'Justin Mares, Biotech Researcher', date: '2022-07-27', amount: 140000, description: 'We recommended a grant to support research on the feasibility of inactivating viruses via electromagnetic radiation.' },
{ from: 'FTX FF', to: 'Lightcone Infrastructure', date: '2022-07-27', amount: 2000000, description: 'We recommended a grant to support Lightcone’s ongoing projects including running the LessWrong forum, hosting conferences and events, and maintaining an office space for Effective Altruist organizations.' },
{ from: 'FTX FF', to: 'Confirm Solutions', date: '2022-07-27', amount: 1000000, description: 'We recommended an investment in Confirm Solutions, a public-benefit corporation, to support development of statistical models and software tools that can automate parts of the regulatory process for complex clinical trials. We anticipate that this work can help to speed up approvals of new vaccines and medical treatments while enhancing their statistical rigor.' },
{ from: 'FTX FF', to: 'High Impact Athletes', date: '2022-07-27', amount: 350000, description: 'We recommended a grant to support HIA’s work encouraging professional athletes to donate more of their earnings to high impact charities and causes, and to promote a culture of giving among their fans.' },
{ from: 'FTX FF', to: 'High Impact Professionals', date: '2022-07-27', amount: 320000, description: 'We recommended a grant to support HIP’s work recruiting EA working professionals to use more of their resources, including their careers, to focus on the world’s most pressing problems.' },
{ from: 'FTX FF', to: 'Berkeley Existential Risk Initiative', date: '2022-07-27', amount: 100000, description: 'We recommended a grant to support BERI in hiring a second core operations employee to contribute to BERI’s work supporting university research groups.' },
{ from: 'FTX FF', to: 'Nathan Young', date: '2022-07-27', amount: 182000, description: 'We recommended a grant to support the creation of a website for collaboratively creating public forecasting questions for a range of prediction aggregators and markets.' },
{ from: 'FTX FF', to: 'Bear F. Braumoeller, Department of Political Science, The Ohio State University', date: '2022-07-27', amount: 388080, description: 'We recommended a grant to support a postdoc and two research assistants for Professor Braumoeller’s MESO Lab for two years to carry out research on international orders and how they affect the probability of war.' },
{ from: 'FTX FF', to: 'Siddharth Hiregowdara, AI Safety Introductory Materials', date: '2022-07-27', amount: 100000, description: 'We recommended a grant to support the production of high quality materials for learning about AI safety work.' },
{ from: 'FTX FF', to: 'Longview', date: '2022-07-27', amount: 15000000, description: 'We recommended a grant to support Longview’s independent grantmaking on global priorities research, nuclear weapons policy, and other longtermist issues.' },
{ from: 'FTX FF', to: 'Global Guessing', date: '2022-07-27', amount: 336000, description: 'We recommended a grant to support Global Guessing’s forecasting coverage on the Russian invasion of Ukraine, which they will also use to build tools and infrastructure to support future forecasting work.' },
{ from: 'FTX FF', to: 'Brian Christian, Author', date: '2022-07-27', amount: 300000, description: 'We recommended a grant to support the completion of a book which explores the nature of human values and the implications for aligning AI with human preferences.' },
{ from: 'FTX FF', to: 'Sage', date: '2022-07-27', amount: 700000, description: 'We recommended a grant to support the creation of a pilot version of a forecasting platform, and a paid forecasting team, to make predictions about questions relevant to high-impact research.' },
{ from: 'FTX FF', to: 'EffiSciences', date: '2022-07-27', amount: 135000, description: 'We recommended a grant to support EffiSciences’s work promoting high impact research on global priorities (e.g. AI safety, biosecurity, and climate change) among French students and academics, and building up a community of people willing to work on important topics.' },
{ from: 'FTX FF', to: 'Anysphere', date: '2022-07-27', amount: 200000, description: 'We recommended an investment to build a communication platform that provably leaks zero metadata.' },
{ from: 'FTX FF', to: '1Day Sooner', date: '2022-07-27', amount: 350000, description: 'We recommended a grant to support 1DS’ work on pandemic preparedness, including advocacy for advance market purchase commitments, collaboration with the UK Pandemic Ethics Accelerator on challenge studies, and advocacy with 1Day Africa and the West African Health Organization for a global pandemic insurance fund.' },
{ from: 'FTX FF', to: 'Cecil Abungu, Centre for the Study of Existential Risk, University of Cambridge', date: '2022-07-27', amount: 160000, description: 'We recommended a grant to Cecil Abungu, Visiting Researcher at the Centre for the Study of Existential Risk and Research Affiliate at the Legal Priorities Project, to support the writing and publication of a book on longtermist currents in historical African thought.' },
{ from: 'FTX FF', to: 'Luke Hewitt', date: '2022-07-27', amount: 150000, description: 'We recommended a grant to support the development and application of a Minimum Viable Product of a data-driven approach to improving advocacy in areas of importance to societal well-being such as immigration policy.' },
{ from: 'FTX FF', to: 'Dr. Emilio I. Alarcón, University of Ottawa Heart Institute & University of Ottawa', date: '2022-07-27', amount: 250000, description: 'This grant will support a project to develop new plastic surfaces incorporating molecules that can be activated with low-energy visible light to eradicate bacteria and kill viruses continuously. If successful, this project will change how plastic surfaces are currently decontaminated.' },
{ from: 'FTX FF', to: 'Rajalakshmi Children Foundation', date: '2022-07-27', amount: 200000, description: 'We recommended a grant to support the identification of children in India from under-resourced areas who excel in math, science, and technology, and enable them to obtain high quality online education by digitally connecting them with mentors and teachers.' },
{ from: 'FTX FF', to: 'Nikki Teran, Institute for Progress', date: '2022-07-27', amount: 135000, description: 'We recommended a grant to support the creation of biosecurity policy priorities via conversations with experts in security, technology, policy, and advocacy. It will develop position papers, research papers, and agendas for the biosecurity community.' },
{ from: 'FTX FF', to: 'James Lin', date: '2022-07-27', amount: 190000, description: 'We recommended a grant to allow a reputable technology publication to engage 2-5 undergraduate student interns to write about topics including AI safety, alternative proteins, and biosecurity.' },
{ from: 'FTX FF', to: 'Ray Amjad', date: '2022-07-27', amount: 300000, description: 'We recommended a grant to support the creation of a talent search organization which will help identify top young students around the world through a free-to-use website consisting of both challenging math and physics olympiad-style problems and discussion forums. Efforts will be particularly focused across India and China. These students will later be connected to support and programs so they can go on to work on the world’s most pressing issues.' },
{ from: 'FTX FF', to: 'The Center for Election Science', date: '2022-07-27', amount: 300000, description: 'We recommended a grant to support the development of statewide ballot initiatives to institute approval voting. Approval voting is a simple voting method reform that lets voters select all the candidates they wish.' },
{ from: 'FTX FF', to: 'AVECRIS Pte. Ltd.', date: '2022-07-27', amount: 3600000, description: 'We recommended an investment in AVECRIS’s Project DOOR to support the development of a next generation genetic vaccine platform that aims to allow for highly distributed vaccine production using AVECRIS’s advanced DNA vector delivery technology.' },
{ from: 'FTX FF', to: 'Council on Strategic Risks', date: '2022-07-27', amount: 400000, description: 'We recommended a grant to support a project which will develop and advance ideas for strengthening regional and multilateral cooperation for addressing biological risks and filling gaps in current international institutions. These efforts include promoting the creation of a center with the capacity to rapidly respond to emerging infectious disease threats to prioritize blunting the impact of such events as well as quickly saving lives, and cooperative mechanisms to enhance biosafety and biosecurity while reducing the potential risks of spaces such as high-containment laboratories.' },
{ from: 'FTX FF', to: 'Effective Ideas Blog Prize', date: '2022-07-27', amount: 900000, description: 'Longview Philanthropy and the Future Fund recommended a grant to support prizes for outstanding writing which encourages a broader public conversation around effective altruism and longtermism.' },
{ from: 'FTX FF', to: 'Pathos Labs, PopShift', date: '2022-07-27', amount: 50000, description: 'We recommended a grant to support Pathos Labs to produce a PopShift convening connecting experts on the future of technology and existential risks with television writers to inspire new ideas for their shows.' },
{ from: 'FTX FF', to: 'Piezo Therapeutics', date: '2022-07-27', amount: 1000000, description: 'We recommended an investment to support work on technology for delivering mRNA vaccines without lipid nanoparticles with the aim of making vaccines more safe, affordable, and scalable.' },
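Despite the .csv extension, the file above is not comma-separated: each row is a JavaScript-style object literal, apparently meant to be pasted into an array in the functions codebase. A minimal TypeScript sketch of the implied record shape; the Grant and ftxGrants names are illustrative and do not appear in this diff:

```ts
// Hypothetical type for the rows in functions/ftx-grants.csv above.
// Field names come from the rows themselves; the type and array names
// are assumptions, not definitions from this diff.
type Grant = {
  from: string;        // always 'FTX FF' in this file
  to: string;          // grant recipient
  date: string;        // ISO date string, e.g. '2022-07-27'
  amount: number;      // grant size in USD
  description: string;
};

const ftxGrants: Grant[] = [
  { from: 'FTX FF', to: 'Manifold Markets', date: '2022-07-27', amount: 1000000,
    description: 'This regrant will support Manifold Markets in building a play-money prediction market platform.' },
  // ...remaining rows from the file above
];
```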
@@ -42,8 +42,10 @@
    "zod": "3.17.2"
  },
  "devDependencies": {
    "@types/cheerio": "^0.22.31",
    "@types/mailgun-js": "0.22.12",
    "@types/module-alias": "2.0.1",
    "cheerio": "^1.0.0-rc.12",
    "firebase-functions-test": "0.3.3"
  },
  "private": true
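The hunk above (the file name is not shown in this view, though the contents look like the functions package.json) appears to add cheerio and @types/cheerio alongside the existing devDependencies, which suggests the grant data was scraped from an HTML page. A hypothetical sketch of such a scrape, assuming a locally saved page and made-up selectors; no scraping script actually appears in this diff:

```ts
// Illustrative only: a possible use of the cheerio devDependency to turn a
// saved copy of the FTX Future Fund grants page into the tab-separated file
// added below. Selectors, input file, and output path are all assumptions.
import { readFileSync, writeFileSync } from "fs";
import * as cheerio from "cheerio";

const html = readFileSync("ftx-grants.html", "utf8");
const $ = cheerio.load(html);

const rows: string[] = ["title\tdescription\tdate\tamount\tareasOfInterest\tlink"];
$(".grant-card").each((_, el) => {
  const text = (sel: string) => $(el).find(sel).text().trim();
  rows.push([
    text(".title"),
    text(".description"),
    text(".date"),
    text(".amount"),
    text(".areas"),
    $(el).find("a").attr("href") ?? "",
  ].join("\t"));
});

writeFileSync("functions/src/scripts/grants/ftx-grants.csv", rows.join("\n"));
```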
functions/src/scripts/grants/ftx-grants.csv (new file, 114 lines)

@@ -0,0 +1,114 @@
title description date amount areasOfInterest link
School of Thinking This regrant will support a global media outreach project to create high quality video, podcast, and social media content about Effective Altruism. May 2022 $250,000 Values and Reflective Processes|Effective Altruism
Legal Services Planning Grant This regrant will support six months of research on topics including how legal services can be effectively provided to the Effective Altruism community, materials to be included in a legal services handbook for EA organizations, novel legal questions particular to the EA community that might benefit from further research initiatives, and ways to create an effective EA professional network for practicing lawyers. May 2022 $100,000 Effective Altruism
Manifold Markets This regrant will support Manifold Markets in building a play-money prediction market platform. The platform is also experimenting with impact certificates and charity prediction markets. March 2022 $1,000,000 Epistemic Institutions https://manifold.markets/
David Xu This regrant will support six months of research on AI safety. March 2022 $50,000 Artificial Intelligence
Trojan Detection Challenge at NeurIPS 2022 This regrant will support prizes for a trojan detection competition at NeurIPS, which involves identifying whether a deep neural network will suddenly change behavior if certain unknown conditions are met. May 2022 $50,000 Artificial Intelligence
Effective Altruism Office Zurich This regrant will support renting and furnishing an office space for a year. May 2022 $52,000 Effective Altruism
Akash Wasil This regrant will support an individual working on supporting students who are interested in focusing their careers on the world’s most pressing problems. March 2022 $26,000 Empowering Exceptional People|Effective Altruism
Fiona Pollack This regrant will support six months of salary for an individual working to support Harvard students interested in working on the world’s most pressing problems and protecting and improving the long term future. April 2022 $30,000 Empowering Exceptional People|Effective Altruism
Peter McLaughlin This regrant will support six months of research on criticisms of effective altruism. April 2022 $46,000 Research That Can Help Us Improve
Dwarkesh Patel This regrant will support a promising podcaster to hire a research assistant and editor, purchase equipment, and cover travel to meet guests in person. The podcast covers technological progress, existential risk, economic growth, and the long term future. April 2022 $76,000 Values and Reflective Processes
ALERT This regrant will support the creation of the Active Longtermist Emergency Response Team, an organization to rapidly manage emerging global events like Covid-19. May 2022 $150,000 https://forum.effectivealtruism.org/posts/sgcxDwyD2KL6BHH2C/case-for-emergency-response-teams
EA Critiques and Red Teaming Prize This regrant will support prize money for a writing contest for critically engaging with theory or work in Effective Altruism. The goal of the contest is to produce thoughtful, action oriented critiques. May 2022 $100,000 Effective Altruism https://forum.effectivealtruism.org/posts/8hvmvrgcxJJ2pYR4X/announcing-a-contest-ea-criticism-and-red-teaming
Federation for American Scientists This regrant will support a researcher and research assistant to work on high-skill immigration and AI policy at FAS for three years. May 2022 $1,000,000 Artificial Intelligence|Economic Growth https://fas.org/
Ought This regrant will support Ought’s work building Elicit, a language-model based research assistant. This work contributes to research on reducing alignment risk through scaling human supervision via process-based systems. May 2022 $5,000,000 Artificial Intelligence
ML Safety Scholars Program This regrant will fund a summer program for up to 100 students to spend 9 weeks studying machine learning, deep learning, and technical topics in safety. April 2022 $490,000 Artificial Intelligence|Empowering Exceptional People https://course.mlsafety.org
AntiEntropy This regrant will support a project to create and house operations-related resources and guidance for EA-aligned organizations. March 2022 $120,000 Effective Altruism https://resourceportal.antientropy.org/
Everett Smith This regrant will support a policy retreat on governing artificial intelligence. May 2022 $35,000 Artificial Intelligence
Olle Häggström, Chalmers University of Technology This regrant will support research on statistical arguments relating to existential risk and work on risks from artificial intelligence, as well as outreach, supervision, and policy work on these topics. May 2022 $380,000 Artificial Intelligence|Effective Altruism|Research That Can Help Us Improve
Essay Contest on Existential Risk in US Cost Benefit Analysis This regrant will support an essay contest on “Accounting for Existential Risks in US Cost-Benefit Analysis,” with the aim of contributing to the revision of OMB Circular-A4, a document which guides US government cost-benefit analysis. The Legal Priorities Project is administering the contest. May 2022 $137,500 Epistemic Institutions|Values and Reflective Processes https://www.legalpriorities.org/competition.html
MineRL BASALT competition at NeurIPS This regrant will support a NeurIPS competition applying human feedback in a non-language-model setting, specifically pretrained models in Minecraft. The grant will be administered by the Berkeley Existential Risk Initiative. May 2022 $155,000 Artificial Intelligence https://minerl.io/basalt/
QURI This regrant will support QURI to develop a programming language called "Squiggle" as a tool for probabilistic estimation. The hope is this will be a useful tool for forecasting and fermi estimates. May 2022 $200,000 Epistemic Institutions|Research That Can Help Us Improve https://www.squiggle-language.com/
Andi Peng This regrant will support four months of salary and compute for research on AI alignment. May 2022 $42,600 Artificial Intelligence
CSIS This regrant will support initiatives including a CSIS public event focused on the importance of investments in human capital to ensure US national security; roundtables with policymakers, immigration experts, national security professionals, and company representatives to discuss key policy actions that should be taken to bolster US national security through immigration reform; and two episodes of the “Vying for Talent” podcast focusing on the importance of foreign talent in bolstering America’s innovative capacity. May 2022 $75,000 Economic Growth|Great Power Relations
Aaron Scher This regrant will support a summer of research on AI alignment in Berkeley. May 2022 $28,500 Artificial Intelligence
Kris Shrishak This regrant will support research on how cryptography might be applied to AI safety research. April 2022 $28,000 Artificial Intelligence
AI Impacts This regrant will support rerunning the highly-cited survey “When Will AI Exceed Human Performance? Evidence from AI Experts” from 2016, analysis, and publication of results. June 2022 $250,000 Artificial Intelligence|Research That Can Help Us Improve
Chinmay Ingalagavi This regrant will support a Masters at LSE for a talented STEM student. May 2022 $50,000 Empowering Exceptional People
Apart Research This regrant will support the creation of an AI Safety organization which will create a platform to share AI safety research ideas and educational materials, connect people working on AI safety, and bring new people into the field. May 2022 $95,000 Artificial Intelligence https://apartresearch.com/
Tereza Flidrova This regrant will support a one year master’s program in architecture for a student interested in building civilizational shelters. May 2022 $32,000 Biorisk and Recovery from Catastrophe
J. Peter Scoblic This regrant will fund a nuclear risk expert to construct nuclear war-related forecasting questions and provide forecasts and explanations on key nuclear war questions. May 2022 $25,000 Epistemic Institutions|Great Power Relations
AI Risk Public Materials Competition This regrant will support two competitions to produce better public materials on the existential risk from AI. April 2022 $40,000 Artificial Intelligence
Moncef Slaoui This regrant will fund the writing of Slaoui's memoir, especially including his experience directing Operation Warp Speed. May 2022 $150,000 Biorisk and Recovery from Catastrophe
Artificial Intelligence Summer Residency Program This regrant will support a six week summer residency in Berkeley on AI safety. May 2022 $60,000 Artificial Intelligence
Public Editor This regrant will support a project to use a combination of human feedback and Machine Learning to label misinformation and reasoning errors in popular news articles. March 2022 $500,000 Epistemic Institutions https://publiceditor.io
The Good Ancestors Project This regrant will support the creation of The Good Ancestors Project, an Australian-based organization to host research and community building on topics relevant to making the long term future go well. May 2022 $75,000 Effective Altruism https://goodancestorsproject.org.au
Thomas Kwa This regrant will support three months of research on AI safety. April 2022 $37,500 Artificial Intelligence
Joshua Greene, Harvard University This regrant will support the real-world testing and roll-out of 'Red Brain, Blue Brain', an online quiz designed to reduce negative partisanship between Democrats and Republicans in the US. March 2022 $250,000 Values and Reflective Processes
Braden Leach This regrant supported a recent law school graduate to work on biosecurity. Braden will research and write at the Johns Hopkins Center for Health Security. April 2022 $175,000 Biorisk and Recovery from Catastrophe|Empowering Exceptional People
Adversarial Robustness Prizes at ECCV This regrant will support three prizes for the best papers on adversarial robustness research at a workshop at ECCV, the main fall computer vision conference. The best papers are selected to have higher relevance to long-term threat models than usual adversarial robustness papers. April 2022 $30,000 Artificial Intelligence
Confido Institute The Confido Institute is working on developing a user-friendly interactive app, Confido, for making forecasts and communicating beliefs and uncertainty within groups and organizations. They are also building interactive educational programs about forecasting and working with uncertainty based around this app. May 2022 $190,000 Epistemic Institutions https://confido.tools/
Supporting Agent Foundations AI safety research at ALTER This regrant will support 1.5-3 years of salary for a mathematics researcher to work with Vanessa Kosoy on the learning-theoretic AI safety agenda. April 2022 $200,000 Artificial Intelligence https://www.lesswrong.com/posts/it5odhMKY6xYLrFZD/closed-hiring-a-mathematician-to-work-on-the-learning
Modeling Transformative AI Risks (Aryeh Englander, Sammy Martin, Analytica Consulting) This regrant will support two AI researchers, one or two additional assistants, and a consulting firm to continue to build out and fully implement the quantitative model for how to understand risks and interventions around AI safety, expanding on their earlier research on “Modeling Transformative AI Risk.” May 2022 $272,000 Artificial Intelligence|Research That Can Help Us Improve https://www.alignmentforum.org/s/aERZoriyHfCqvWkzg
Impact Markets This regrant will support the creation of an “impact market.” The hope is to improve charity fundraising by allowing profit-motivated investors to earn returns by investing in charitable projects that are eventually deemed impactful. March 2022 $215,000 Effective Altruism|Research That Can Help Us Improve https://impactmarkets.io/
AI Alignment Prize on Inverse Scaling This regrant will support prizes for a contest to find tasks where larger language models do worse (“inverse scaling”). May 2022 $250,000 Artificial Intelligence
Swift Centre for Applied Forecasting This regrant will support the creation of the Swift Centre for Applied Forecasting, including salary for a director and a team of expert forecasters. They will forecast trends from Our World in Data charts, as well as other topics related to ensuring the long term future goes well, with a particular focus on explaining the “why” of forecast estimates. March 2022 $2,000,000 Epistemic Institutions https://www.swiftcentre.org/
Lawrence Newport This regrant will support the launch and first year of a youtube channel focusing on video essays presented by Dr Lawrence Newport on longtermism, the future of humanity, and related topics. March 2022 $95,000 Effective Altruism
Aidan O’Gara This regrant will fund salary, compute, and a scholarship for an undergraduate student doing career development and research on language model safety. May 2022 $46,000 Artificial Intelligence
Legal Priorities Project We recommended a grant to support the Legal Priorities Project’s ongoing research and outreach activities. This will allow LPP to pay two new hires and to put on a summer institute for non-US law students in Oxford. April 2022 $480,000 https://www.legalpriorities.org/
Oded Galor, Brown University We recommended a grant to support two years of academic research on long-term economic growth. January 2022 $500,000 Economic Growth|Research That Can Help Us Improve
The Atlas Fellowship We recommended a grant to support scholarships for talented and promising high school students to use towards educational opportunities and enrolling in a summer program. January 2022 $5,000,000 Empowering Exceptional People https://www.atlasfellowship.org/
Sherlock Biosciences We recommended an investment to support the development of universal CRISPR-based diagnostics, including paper-based diagnostics that can be used in developing-country settings without electricity. February 2022 $2,000,000 Biorisk and Recovery from Catastrophe https://sherlock.bio/
Rethink Priorities We recommended a grant to support Rethink’s research and projects aimed at improving humanity’s long-term prospects. March 2022 $700,000 Research That Can Help Us Improve
SecureDNA We recommended a grant to support the hiring of several key staff for Dr. Kevin Esvelt’s pandemic prevention work. SecureDNA is working to implement universal DNA synthesis screening, build a reliable early warning system, and coordinate the development of improved personal protective equipment and its delivery to essential workers when needed. March 2022 $1,200,000 Biorisk and Recovery from Catastrophe
Lionel Levine, Cornell University We recommended a grant to Cornell University to support Prof. Levine, as well as students and collaborators, to work on alignment theory research at the Cornell math department. April 2022 $1,500,000 Artificial Intelligence
Claudia Shi, Academic CS Research at Columbia University We recommended a grant to pay for research assistants over three years to support the work of a PhD student working on AI safety at Columbia University. April 2022 $100,000 Artificial Intelligence
Institute for Progress We recommended a grant to support the Institute’s research and policy engagement work on high skilled immigration, biosecurity, and pandemic prevention. May 2022 $480,000 Biorisk and Recovery from Catastrophe|Economic Growth https://progress.institute/
Good Judgment Project We recommended a grant to support a Good Judgment initiative to produce forecasts on 10 Our World in Data data sets/charts. May 2022 $300,000 Epistemic Institutions
Peter Hrosso, Researcher We recommended a grant to support a project aimed at training large language models to represent the probability distribution over question answers in a prediction market. May 2022 $230,000 Epistemic Institutions
Michael Jacob, MITRE We recommended a grant to support research that we hope will be used to help strengthen the bioweapons convention and guide proactive actions to better secure those facilities or stop the dangerous work being done there. May 2022 $485,000 Biorisk and Recovery from Catastrophe
Charity Entrepreneurship We recommended a grant to support the incubation of new charities that will work on health security. May 2022 $470,000 Biorisk and Recovery from Catastrophe|Effective Altruism
Michael Robkin We recommended an investment to support the creation of Pretty Good PPE that is comfortable, storable, simple, and inexpensive. PGPPE aims to provide protection that is better than disposable masks and cheaper than both hazmat suits and N95s. May 2022 $200,000 Biorisk and Recovery from Catastrophe
Legal Priorities Project This grant will support one year of operating expenses and salaries at the Legal Priorities Project, a longtermist legal research and field-building organization. June 2022 $700,000 https://www.legalpriorities.org/
AI Safety Camp We recommended a grant to partially support the salaries for AI Safety Camp’s two directors and to support logistical expenses at its physical camp. June 2022 $290,000 Artificial Intelligence
Anca Dragan, UC Berkeley We recommended a grant to support a project to develop interactive AI algorithms for alignment that can uncover the causal features in human reward systems, and thereby help AI systems learn underlying human values that generalize to new situations. May 2022 $800,000 Artificial Intelligence
Association for Long Term Existence and Resilience We recommended a grant to support ALTER, an academic research and advocacy organization, which hopes to investigate, demonstrate, and foster useful ways to improve the future in the short term, and to safeguard and improve the long-term trajectory of humanity. The organization's initial focus is building bridges to academia via conferences and grants to find researchers who can focus on AI safety, and on policy for reducing biorisk. May 2022 $320,000 Artificial Intelligence|Biorisk and Recovery from Catastrophe https://alter.org.il
Manifold Markets We recommended a grant to support Manifold Markets in building a charity prediction market, as an experiment for enabling effective forecasters to direct altruistic donations. May 2022 $500,000 Epistemic Institutions https://manifold.markets/
Guoliang (Greg) Liu, Virginia Tech We recommended a grant to support a project to develop a new material -- an ultra-thin polymer-based thin film -- for use in next-generation Personal Protective Equipment which is both more effective and more comfortable. May 2022 $500,000 Biorisk and Recovery from Catastrophe
Stimson South Asia Program We recommended a grant to support the identification and implementation of promising confidence-building measures to reduce conflict between India and Pakistan. April 2022 $250,000 Great Power Relations
Prometheus Science Bowl We recommended a grant to support a competition for work on Eliciting Latent Knowledge, an open problem in AI alignment, for talented high school and college students who are participating in Prometheus Science Bowl. May 2022 $100,000 Artificial Intelligence|Empowering Exceptional People
Maxwell Tabarrok We recommended a grant to support this student to spend a summer at the Future of Humanity Institute at Oxford University researching differential tech development and the connection between existential risks to humanity and economic growth. May 2022 $7,500 Economic Growth|Research That Can Help Us Improve
HelixNano We recommended an investment to support Helix Nano running preclinical and Phase 1 trials of a pan-variant Covid-19 vaccine. January 2022 $10,000,000 Biorisk and Recovery from Catastrophe
Giving What We Can We recommended a grant to support Giving What We Can’s mission to create a world in which giving effectively and significantly is a cultural norm. May 2022 $700,000 Effective Altruism https://www.givingwhatwecan.org/
Gabriel Recchia, University of Cambridge We recommended a grant to support research on how to fine-tune GPT-3 models to identify flaws in other fine-tuned language models' arguments for the correctness of their outputs, and to test whether these help nonexpert humans successfully judge such arguments. May 2022 $380,000 Artificial Intelligence
Simon Institute for Longterm Governance We recommended a grant to support SI’s policy work with the United Nations system on the prevention of existential risks to humanity. April 2022 $820,000 Great Power Relations https://www.simoninstitute.ch/
Centre for Effective Altruism We recommended a grant for general support for their activities, including running conferences, supporting student groups, and maintaining online resources. March 2022 $13,940,000 Effective Altruism https://www.centreforeffectivealtruism.org/
|
||||
Nonlinear We recommended a grant to support the maintenance of a library of high-quality audio content on the world’s most pressing problems, and a fund to provide productivity-enhancing equipment and support staff for people working on important social issues. April 2022 $250,000 Effective Altruism
|
||||
Konstantinos Konstantinidis We recommended a grant to support two years of research on the impacts of disruptive space technologies, nuclear risk, and mitigating risks from future space-based weapons. May 2022 $85,000 Great Power Relations|Space Governance
|
||||
Apollo Academic Surveys We recommended a grant to support Apollo’s work aggregating the views of academic experts in many different fields and making them freely available online. May 2022 $250,000 Epistemic Institutions https://www.apollosurveys.org/
|
||||
AI Safety Support We recommended a grant for general funding for community building and managing the talent pipeline for AI alignment researchers. AI Safety Support’s work includes one-on-one coaching, events, and research training programs. May 2022 $200,000 Artificial Intelligence
|
||||
Daniel Brown, University of Utah We recommended a grant to support research on value alignment in AI systems, practical algorithms for efficient value alignment verification, and user studies and experiments to test these algorithms. May 2022 $280,000 Artificial Intelligence
|
||||
Khalil Lab at Boston University We recommended a grant to support the development of a cheap, scalable, and decentralized platform for the rapid generation of disease-neutralizing therapeutic antibodies. May 2022 $1,550,000 Biorisk and Recovery from Catastrophe https://www.bu.edu/khalillab/
|
||||
Sergey Levine, UC Berkeley We recommended a grant to support a project to study how large language models integrated with offline reinforcement learning pose a risk of machine deception and persuasion. June 2022 $600,000 Artificial Intelligence
|
||||
Non-trivial Pursuits We recommended a grant to support outreach to help students to learn about career options, develop their skills, and plan their careers to work on the world’s most pressing problems. May 2022 $1,000,000 Empowering Exceptional People|Effective Altruism https://non-trivial.org/
|
||||
Rational Animations We recommended a grant to support the creation of animated videos on topics related to rationality and effective altruism to explain these topics for a broader audience. May 2022 $400,000 Effective Altruism
|
||||
Justin Mares, Biotech Researcher We recommended a grant to support research on the feasibility of inactivating viruses via electromagnetic radiation. May 2022 $140,000 Biorisk and Recovery from Catastrophe
|
||||
Lightcone Infrastructure We recommended a grant to support Lightcone’s ongoing projects including running the LessWrong forum, hosting conferences and events, and maintaining an office space for Effective Altruist organizations. February 2022 $2,000,000 Artificial Intelligence|Effective Altruism https://www.lightconeinfrastructure.com/
|
||||
Confirm Solutions We recommended an investment in Confirm Solutions, a public-benefit corporation, to support development of statistical models and software tools that can automate parts of the regulatory process for complex clinical trials. We anticipate that this work can help to speed up approvals of new vaccines and medical treatments while enhancing their statistical rigor. May 2022 $1,000,000 Biorisk and Recovery from Catastrophe
|
||||
High Impact Athletes We recommended a grant to support HIA’s work encouraging professional athletes to donate more of their earnings to high impact charities and causes, and to promote a culture of giving among their fans. April 2022 $350,000 Effective Altruism https://highimpactathletes.org/
|
||||
High Impact Professionals We recommended a grant to support HIP’s work recruiting EA working professionals to use more of their resources, including their careers, to focus on the world’s most pressing problems. May 2022 $320,000 Empowering Exceptional People
|
||||
Berkeley Existential Risk Initiative We recommended a grant to support BERI in hiring a second core operations employee to contribute to BERI’s work supporting university research groups. March 2022 $100,000 https://existence.org
|
||||
Nathan Young We recommended a grant to support the creation of a website for collaboratively creating public forecasting questions for a range of prediction aggregators and markets. June 2022 $182,000 Epistemic Institutions
|
||||
Bear F. Braumoeller, Department of Political Science, The Ohio State University We recommended a grant to support a postdoc and two research assistants for Professor Braumoeller’s MESO Lab for two years to carry out research on international orders and how they affect the probability of war. April 2022 $388,080 Great Power Relations https://www.themesolab.com/
|
||||
Siddharth Hiregowdara, AI Safety Introductory Materials We recommended a grant to support the production of high quality materials for learning about AI safety work. March 2022 $100,000 Artificial Intelligence
|
||||
Longview We recommended a grant to support Longview’s independent grantmaking on global priorities research, nuclear weapons policy, and other longtermist issues. February 2022 $15,000,000 https://www.longview.org/
|
||||
Global Guessing We recommended a grant to support Global Guessing’s forecasting coverage on the Russian invasion of Ukraine, which they will also use to build tools and infrastructure to support future forecasting work. May 2022 $336,000 Epistemic Institutions|Great Power Relations
|
||||
Brian Christian, Author We recommended a grant to support the completion of a book which explores the nature of human values and the implications for aligning AI with human preferences. May 2022 $300,000 Artificial Intelligence
|
||||
Sage We recommended a grant to support the creation of a pilot version of a forecasting platform, and a paid forecasting team, to make predictions about questions relevant to high-impact research. May 2022 $700,000 Epistemic Institutions
|
||||
EffiSciences We recommended a grant to support EffiSciences’s work promoting high impact research on global priorities (e.g. AI safety, biosecurity, and climate change) among French students and academics, and building up a community of people willing to work on important topics. April 2022 $135,000 Effective Altruism https://www.effisciences.org/
|
||||
Anysphere We recommended an investment to build a communication platform that provably leaks zero metadata. May 2022 $200,000 https://anysphere.co
|
||||
1Day Sooner We recommended a grant to support 1DS’ work on pandemic preparedness, including advocacy for advance market purchase commitments, collaboration with the UK Pandemic Ethics Accelerator on challenge studies, and advocacy with 1Day Africa and the West African Health Organization for a global pandemic insurance fund. June 2022 $350,000 Biorisk and Recovery from Catastrophe https://www.1daysooner.org/
|
||||
Cecil Abungu, Centre for the Study of Existential Risk, University of Cambridge We recommended a grant to Cecil Abungu, Visiting Researcher at the Centre for the Study of Existential Risk and Research Affiliate at the Legal Priorities Project, to support the writing and publication of a book on longtermist currents in historical African thought. May 2022 $160,000
|
||||
Luke Hewitt We recommended a grant to support the development and application of a Minimum Viable Product of a data-driven approach to improving advocacy in areas of importance to societal well-being such as immigration policy. March 2022 $150,000 Epistemic Institutions|Empowering Exceptional People
|
||||
Dr. Emilio I. Alarcón, University of Ottawa Heart Institute & University of Ottawa This grant will support a project to develop new plastic surfaces incorporating molecules that can be activated with low-energy visible light to eradicate bacteria and kill viruses continuously. If successful, this project will change how plastic surfaces are currently decontaminated. March 2022 $250,000 Biorisk and Recovery from Catastrophe https://www.beatsresearch.com/
|
||||
Rajalakshmi Children Foundation We recommended a grant to support the identification of children in India from under-resourced areas who excel in math, science, and technology, and enable them to obtain high quality online education by digitally connecting them with mentors and teachers. May 2022 $200,000 Empowering Exceptional People https://www.rajalakshmifoundation.in/pratibhaposhak
|
||||
Nikki Teran, Institute for Progress We recommended a grant to support the creation of biosecurity policy priorities via conversations with experts in security, technology, policy, and advocacy. It will develop position papers, research papers, and agendas for the biosecurity community. May 2022 $135,000 Biorisk and Recovery from Catastrophe
|
||||
James Lin We recommended a grant to allow a reputable technology publication to engage 2-5 undergraduate student interns to write about topics including AI safety, alternative proteins, and biosecurity. May 2022 $190,000 Artificial Intelligence|Biorisk and Recovery from Catastrophe
|
||||
Ray Amjad We recommended a grant to support the creation of a talent search organization which will help identify top young students around the world through a free to use website consisting of both challenging math and physics olympiad-style problems and discussion forums. Efforts will be particularly focused across India and China. These students will later be connected to support and programs so they can go on to work on the world's most pressing issues. May 2022 $300,000 Empowering Exceptional People
|
||||
The Center for Election Science We recommended a grant to support the development of statewide ballot initiatives to institute approval voting. Approval voting is a simple voting method reform that lets voters select all the candidates they wish. March 2022 $300,000 Epistemic Institutions|Values and Reflective Processes https://electionscience.org/
|
||||
AVECRIS Pte. Ltd. We recommended an investment in AVECRIS’s Project DOOR to support the development of a next generation genetic vaccine platform that aims to allow for highly distributed vaccine production using AVECRIS’s advanced DNA vector delivery technology. May 2022 $3,600,000 Biorisk and Recovery from Catastrophe
|
||||
Council on Strategic Risks We recommended a grant to support a project which will develop and advance ideas for strengthening regional and multilateral cooperation for addressing biological risks and filling gaps in current international institutions. These efforts include promoting the creation of a center with the capacity to rapidly respond to emerging infectious disease threats to prioritize blunting the impact of such events as well as quickly saving lives, and cooperative mechanisms to enhance biosafety and biosecurity while reducing the potential risks of spaces such as high-containment laboratories. May 2022 $400,000 Biorisk and Recovery from Catastrophe
|
||||
Effective Ideas Blog Prize Longview Philanthropy and the Future Fund recommended a grant to support prizes for outstanding writing which encourages a broader public conversation around effective altruism and longtermism. January 2022 $900,000 Values and Reflective Processes|Effective Altruism https://effectiveideas.org/
|
||||
Pathos Labs, PopShift We recommended a grant to support Pathos Labs to produce a PopShift convening connecting experts on the future of technology and existential risks with television writers to inspire new ideas for their shows. $50,000 Values and Reflective Processes
|
||||
Piezo Therapeutics We recommended an investment to support work on technology for delivering mRNA vaccines without lipid nanoparticles with the aim of making vaccines more safe, affordable, and scalable. May 2022 $1,000,000 Biorisk and Recovery from Catastrophe
|
125
functions/src/scripts/grants/scrape-ftx.ts
Normal file
|
@ -0,0 +1,125 @@
// Run with `npx ts-node src/scripts/grants/scrape-ftx.ts`

import * as cheerio from 'cheerio'
import * as fs from 'fs'

type FtxGrant = {
  title: string
  description: string
  date: string
  amount: string
  // Joined string separated with '|', e.g. 'Great Power Relations|Space Governance'
  areasOfInterest: string
  link?: string
}

function elToFtxGrant($: cheerio.Root, el: cheerio.Element): FtxGrant {
  const $el = $(el)
  const title = $el.find('h2.grant-card__title').text().trim()
  const description = $el.find('.grant-card__description').text().trim()
  const date = $el.find('.grant-card__date').text().trim()
  const amount = $el.find('.grant-card__amount').text().trim()
  const areasOfInterest = $el
    .find('.area-of-interest__title')
    // Remove all leading and trailing whitespace
    .map((_, el) => $(el).text().trim())
    .get()
    .join('|')
  const link = $el.find('a.grant-card__link').attr('href')?.trim()

  return {
    title,
    description,
    date,
    amount,
    areasOfInterest,
    link,
  } as FtxGrant
}

async function scrapeFtx() {
  const resp = await fetch('https://ftxfuturefund.org/all-grants/#grants')
  const text = await resp.text()
  const $ = cheerio.load(text)
  // Drop single quotes so descriptions can sit inside single-quoted string literals
  const strip = (text: string) => text.replace(/'/g, '')
  // Convert a dollar string like '$1,000,000' into the number 1000000
  const toNum = (text: string) => Number(text.replace(/[^0-9.-]+/g, ''))

  // Parse Grant objects from each <div class="grant-card"> using cheerio.
  // Despite the name, csvLines holds TS object-literal lines, not CSV rows.
  const csvLines = [
    // Add a header row
    // 'title\tdescription\tdate\tamount\tareasOfInterest\tlink',
    ...$('div.grant-card')
      .map((_, el) => elToFtxGrant($, el))
      .get()
      .map(
        (grant) =>
          // Emit each grant as one object-literal line
          `{ from: 'FTX FF', to: '${
            grant.title
          }', date: '2022-07-27', amount: ${toNum(
            grant.amount
          )}, description: '${strip(grant.description)}' },`
      ),
  ]
  console.log(csvLines.join('\n'))
  fs.writeFileSync(
    '../web/lib/util/ftx-grants.ts',
    'export const grants = [\n' + csvLines.join('\n') + '\n]'
  )
}

if (require.main === module) {
  scrapeFtx().then(() => process.exit())
}

/*
Example html grant card, for reference:
<div class="grant-card" style="order: -1">
  <div class="grant-card__date">
    March 2022
  </div>
  <h2 class="grant-card__title">
    Manifold Markets
  </h2>
  <div class="grant-card__description">
    <p>This regrant will support Manifold Markets in building a play-money prediction market platform. The platform is also experimenting with impact certificates and charity prediction markets.</p>
  </div>
  <div class="grant-card__amount">
    $1,000,000
  </div>
  <a href="https://manifold.markets/" class="grant-card__link">
    manifold.markets </a>
  <div class="grant-card__areas-of-interest">
    <a href="https://ftx.tghp.co.uk/area-of-interest/#institution-epistemic-institutions" class="area-of-interest">
      <div class="area-of-interest__icon">
        <!-- (inline SVG icon markup omitted for brevity) -->
      </div>
      <div class="area-of-interest__title">
        Epistemic Institutions
      </div>
    </a>
  </div>
</div>
*/
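Note that the generator above hard-codes date: '2022-07-27' (the date of the scrape) even though elToFtxGrant already extracts each card's date text. A small helper along the following lines could map the scraped 'March 2022' style text onto an ISO-style date instead; this is only a sketch, not part of the diff, and toIsoDate / MONTHS are made-up names:

// Hypothetical helper (not part of this diff): convert grant-card date text
// like 'March 2022' into an ISO-style date, so the generated entries could
// use grant.date instead of the hard-coded scrape date.
const MONTHS = [
  'January', 'February', 'March', 'April', 'May', 'June',
  'July', 'August', 'September', 'October', 'November', 'December',
]

function toIsoDate(cardDate: string): string {
  const [monthName, year] = cardDate.trim().split(/\s+/)
  const month = MONTHS.indexOf(monthName) + 1
  // Fall back to the scrape date if the text doesn't parse as 'Month YYYY'
  if (!month || !year) return '2022-07-27'
  return `${year}-${String(month).padStart(2, '0')}-01`
}

// Example: toIsoDate('March 2022') === '2022-03-01'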
67
functions/src/scripts/grants/scrape-sff.ts
Normal file
|
@ -0,0 +1,67 @@
// Run with `npx ts-node src/scripts/grants/scrape-sff.ts`

import * as cheerio from 'cheerio'
import * as fs from 'fs'

type SffGrant = {
  round: string
  source: string
  organization: string
  amount: string
  receivingCharity: string
  purpose: string
}

/*
Example tr for a SffGrant:
<tr>
  <td style="text-align: left">SFF-2019-Q3</td>
  <td style="text-align: left">SFF DAF</td>
  <td style="text-align: left">80,000 Hours</td>
  <td style="text-align: left">$280,000</td>
  <td style="text-align: left">Centre for Effective Altruism, USA</td>
  <td style="text-align: left">General Support</td>
</tr>
*/
function trToSffGrant($: cheerio.Root, el: cheerio.Element): SffGrant {
  const $el = $(el)
  const round = $el.find('td').eq(0).text().trim()
  const source = $el.find('td').eq(1).text().trim()
  const organization = $el.find('td').eq(2).text().trim()
  const amount = $el.find('td').eq(3).text().trim()
  const receivingCharity = $el.find('td').eq(4).text().trim()
  const purpose = $el.find('td').eq(5).text().trim()

  return {
    // TODO: Map rounds to dates
    round,
    source,
    organization,
    amount,
    receivingCharity,
    purpose,
  } as SffGrant
}

async function scrapeSff() {
  const resp = await fetch('http://survivalandflourishing.fund/')
  const text = await resp.text()
  const $ = cheerio.load(text)
  // Parse SffGrants from the <tr> rows using cheerio
  const grants = $('tr')
    .map((_, el) => trToSffGrant($, el))
    .get()
  const csvLines = [
    // Header row
    'round\tsource\torganization\tamount\treceivingCharity\tpurpose',
    ...grants.map((grant) =>
      // Join all attributes with tabs, to avoid comma issues
      Object.values(grant).join('\t')
    ),
  ]
  fs.writeFileSync('sff-grants.csv', csvLines.join('\n'))
}

if (require.main === module) {
  scrapeSff().then(() => process.exit())
}
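trToSffGrant above carries a "TODO: Map rounds to dates". Since the SFF site only publishes round labels, one plausible approximation (a sketch only, not part of this diff; roundToDate is a made-up name) is to map each round to the first month of its quarter or half-year:

// Hypothetical helper (not part of this diff): approximate an ISO date from
// an SFF round label such as 'SFF-2019-Q3' or 'SFF-2021-H2'.
function roundToDate(round: string): string | undefined {
  const match = round.match(/SFF-(\d{4})-(Q|H)(\d)/)
  if (!match) return undefined
  const [, year, period, n] = match
  // Quarters start in months 1/4/7/10; half-years start in months 1/7.
  const month = period === 'Q' ? (Number(n) - 1) * 3 + 1 : (Number(n) - 1) * 6 + 1
  return `${year}-${String(month).padStart(2, '0')}-01`
}

// Example: roundToDate('SFF-2019-Q3') === '2019-07-01'
//          roundToDate('SFF-2021-H2') === '2021-07-01'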
132
functions/src/scripts/grants/sff-grants.csv
Normal file
|
@ -0,0 +1,132 @@
|
|||
round source organization amount receivingCharity purpose
|
||||
|
||||
SFF-2019-Q3 SFF DAF 80,000 Hours $280,000 Centre for Effective Altruism, USA General Support
|
||||
SFF-2019-Q3 SFF DAF Center For Applied Rationality $110,000 SFF DAF General Support
|
||||
SFF-2019-Q3 SFF DAF Centre for the Study of Existential Risk $40,000 Chancellor, Masters and Scholars of the University of Cambridge General Support
|
||||
SFF-2019-Q3 SFF DAF Future of Life Institute $130,000 Future of Life Institute General Support
|
||||
SFF-2019-Q3 SFF DAF Global Catastrophic Risks Institute $60,000 Social and Environmental Entrepreneurs General Support
|
||||
SFF-2019-Q3 SFF DAF LessWrong 2.0 $260,000 Center for Applied Rationality General Support
|
||||
SFF-2019-Q4 SFF DAF 80,000 Hours $40,000 Centre for Effective Altruism, USA General Support of 80,000 Hours
|
||||
SFF-2019-Q4 SFF DAF AI Impacts $70,000 Machine Intelligence Research Institute General Support of AI Impacts
|
||||
SFF-2019-Q4 SFF DAF AAAI/ACM conference on AI, Ethics and Society (AIES 2020) $20,000 Association for the Advancement of Artificial Intelligence General Support of AIES
|
||||
SFF-2019-Q4 SFF DAF Alliance to Feed the Earth in Disasters (ALLFED) $10,000 Social and Environmental Entrepreneurs (SEE) General Support of ALLFED
|
||||
SFF-2019-Q4 SFF DAF Center For Applied Rationality $150,000 Center For Applied Rationality General Support
|
||||
SFF-2019-Q4 SFF DAF Charter Cities Institute $60,000 Center for Innovative Governance Research General Support of the Charter Cities Institute
|
||||
SFF-2019-Q4 SFF DAF Centre for the Study of Existential Risk, University of Cambridge $50,000 Cambridge in America General Support of Centre for the Study of Existential Risk, University of Cambridge
|
||||
SFF-2019-Q4 SFF DAF Earth Law Center $30,000 Earth Law Center General Support
|
||||
SFF-2019-Q4 SFF DAF Global Catastrophic Risk Institute $30,000 Social and Environmental Entrepreneurs General Support of GCRI
|
||||
SFF-2019-Q4 SFF DAF Longevity Research Institute $160,000 Longevity Research Institute General Support
|
||||
SFF-2019-Q4 SFF DAF Median Group $50,000 Median Group General Support
|
||||
SFF-2019-Q4 SFF DAF Modeling Cooperation $50,000 Convergence Analysis General Support of Modeling Cooperation
|
||||
SFF-2019-Q4 SFF DAF OAK (Optimizing Awakening and Kindness) $100,000 Center for Mindful Learning General Support of OAK (Optimizing Awakening and Kindness)
|
||||
SFF-2019-Q4 SFF DAF Ought Inc. $100,000 Ought Inc. General Support
|
||||
SFF-2020-H1 SFF DAF LessWrong $290,000 Center for Applied Rationality General support of LessWrong
|
||||
SFF-2020-H1 SFF DAF Machine Intelligence Research Institute $20,000 Machine Intelligence Research Institute General support
|
||||
SFF-2020-H1 SFF DAF Quantified Uncertainty Research Institute $120,000 Quantified Uncertainty Research Institute General support
|
||||
SFF-2020-H1 SFF DAF 80,000 Hours $120,000 Centre for Effective Altruism USA General support of 80,000 Hours
|
||||
SFF-2020-H1 SFF DAF Future of Life Institute $100,000 Future of Life Institute General support
|
||||
SFF-2020-H1 Jaan Tallinn LessWrong $110,000 Center for Applied Rationality General support of LessWrong
|
||||
SFF-2020-H1 Jaan Tallinn Machine Intelligence Research Institute $280,000 Machine Intelligence Research Institute General support
|
||||
SFF-2020-H1 Jaan Tallinn Quantified Uncertainty Research Institute $60,000 Quantified Uncertainty Research Institute General support
|
||||
SFF-2020-H1 Jaan Tallinn 80,000 Hours $30,000 Centre for Effective Altruism USA General support of 80,000 Hours
|
||||
SFF-2020-H1 Jaan Tallinn The Future Society $130,000 The Future Society, Inc. General support of The Future Society
|
||||
SFF-2020-H1 Jaan Tallinn Future of Life Institute $30,000 Future of Life Institute General support
|
||||
SFF-2020-H1 Jaan Tallinn Global Catastrophic Risk Institute $90,000 Social and Environmental Entrepreneurs General support of Global Catastrophic Risk Institute
|
||||
SFF-2020-H1 Jaan Tallinn Leverage Research $80,000 Leverage Research General support
|
||||
SFF-2020-H1 Jaan Tallinn AI Impacts $40,000 Machine Intelligence Research Institute General support of AI Impacts
|
||||
SFF-2020-H1 Jaan Tallinn Milan Griffes to pursue an MHS in Mental Health $30,000 Johns Hopkins University General support of Milan Griffes to pursue an MHS in Mental Health
|
||||
SFF-2020-H1 Jaan Tallinn BERI/CSER Collaboration $20,000 Berkeley Existential Risk Initiative General support of BERI/CSER Collaboration
|
||||
SFF-2020-H1 Jaan Tallinn Metamorphic Group LLC $10,000 Children, Families, and Communities General support of Metamorphic Group LLC
|
||||
SFF-2020-H1 Jaan Tallinn Convergence Analysis $10,000 Convergence Analysis General support
|
||||
SFF-2020-H1 Jed McCaleb LessWrong $30,000 Center for Applied Rationality General support of LessWrong
|
||||
SFF-2020-H1 Jed McCaleb Machine Intelligence Research Institute $40,000 Machine Intelligence Research Institute General support
|
||||
SFF-2020-H1 Jed McCaleb Quantified Uncertainty Research Institute $20,000 Quantified Uncertainty Research Institute General support
|
||||
SFF-2020-H1 Jed McCaleb 80,000 Hours $30,000 Centre for Effective Altruism USA General support of 80,000 Hours
|
||||
SFF-2020-H1 Jed McCaleb The Future Society $30,000 The Future Society, Inc. General support of The Future Society
|
||||
SFF-2020-H1 Jed McCaleb Future of Life Institute $10,000 Future of Life Institute General support
|
||||
SFF-2020-H1 Jed McCaleb Global Catastrophic Risk Institute $50,000 Social and Environmental Entrepreneurs General support of Global Catastrophic Risk Institute
|
||||
SFF-2020-H1 Jed McCaleb AI Impacts $20,000 Machine Intelligence Research Institute General support of AI Impacts
|
||||
SFF-2020-H1 Jed McCaleb Metamorphic Group LLC $10,000 Children, Families, and Communities General support of Metamorphic Group LLC
|
||||
SFF-2020-H1 Jed McCaleb Future of Humanity Foundation $10,000 Future of Humanity Foundation General support
|
||||
SFF-2020-H2 SFF DAF Future of Life Institute $23,000 Future of Life Institute General support
|
||||
SFF-2020-H2 SFF DAF Center for Applied Rationality $212,000 Center for Applied Rationality General support
|
||||
SFF-2020-H2 SAF Future of Humanity Institute, Research Scholars Programme $218,000 University of Oxford General support of the Research Scholars Programme at Future of Humanity Institute
|
||||
SFF-2020-H2 SFF DAF Center for Innovative Governance Research $58,000 Center for Innovative Governance Research General support
|
||||
SFF-2020-H2 SFF DAF The Center for Election Science $44,000 The Center for Election Science General support
|
||||
SFF-2020-H2 SFF DAF The Future Society, Inc. $37,000 The Future Society, Inc. General support
|
||||
SFF-2020-H2 SFF DAF Generation Pledge $37,000 Centre for Effective Altruism General support of Generation Pledge at the Centre for Effective Altruism
|
||||
SFF-2020-H2 SFF DAF Center for Applied Utilitarianism $21,000 N/A General support of Center for Applied Utilitarianism
|
||||
SFF-2020-H2 Jed McCaleb Topos Institute $144,000 Topos Institute Conditional support
|
||||
SFF-2020-H2 Jed McCaleb Center for Applied Rationality $23,000 Center for Applied Rationality General support
|
||||
SFF-2020-H2 Jed McCaleb Stanford Existential Risks Initiative $56,000 Stanford University General support of Stanford Existential Risks Initiative
|
||||
SFF-2020-H2 Jed McCaleb RadicalxChange Foundation Ltd. $13,000 RadicalxChange Foundation Ltd. General support
|
||||
SFF-2020-H2 Jed McCaleb The Roots of Progress $14,000 Mercatus Center Inc General support of The Roots of Progress
|
||||
SFF-2020-H2 Jaan Tallinn Center for Human-Compatible AI $779,000 UC Berkeley Foundation General support of Center for Human-Compatible AI
|
||||
SFF-2020-H2 Jaan Tallinn Machine Intelligence Research Institute $543,000 Machine Intelligence Research Institute General support
|
||||
SFF-2020-H2 Jaan Tallinn Future of Life Institute $347,000 Future of Life Institute General support
|
||||
SFF-2020-H2 Jaan Tallinn Topos Institute $151,000 Topos Institute Conditional support
|
||||
SFF-2020-H2 Jaan Tallinn Center for Applied Rationality $19,000 Center for Applied Rationality General support
|
||||
SFF-2020-H2 Jaan Tallinn Future of Humanity Institute: Research Scholars Programme $30,000 University of Oxford General support of the Research Scholars Programme at Future of Humanity Institute
|
||||
SFF-2020-H2 Jaan Tallinn Berkeley Existential Risk Initiative $247,000 Berkeley Existential Risk Initiative General support
|
||||
SFF-2020-H2 Jaan Tallinn Global Catastrophic Risk Institute $209,000 Social and Environmental Entrepreneurs General support of Global Catastrophic Risk Institute
|
||||
SFF-2020-H2 Jaan Tallinn Stanford Existential Risks Initiative $69,000 Stanford University General support of Stanford Existential Risks Initiative
|
||||
SFF-2020-H2 Jaan Tallinn Median Group $98,000 Median Foundation General support of Median Group
|
||||
SFF-2020-H2 Jaan Tallinn Modeling Cooperation $74,000 Convergence Analysis General support of Modeling Cooperation
|
||||
SFF-2020-H2 Jaan Tallinn Rethink Priorities $57,000 Rethink Priorities General support
|
||||
SFF-2020-H2 Jaan Tallinn RadicalxChange Foundation Ltd. $26,000 RadicalxChange Foundation Ltd. General support
|
||||
SFF-2020-H2 Jaan Tallinn The Roots of Progress $21,000 Mercatus Center Inc General support of The Roots of Progress
|
||||
SFF-2020-H2 Jaan Tallinn AAAI/ACM Conference on Artificial Intelligence, Ethics and Society $20,000 Association for the Advancement of Artificial Intelligence General support of AAAI/ACM Conference on Artificial Intelligence
|
||||
SFF-2020-H2 Jaan Tallinn Effective Thesis $15,000 N/A General Support
|
||||
SFF-2021-H1 Jaan Tallinn Center For Applied Rationality $1,207,000 Center For Applied Rationality General Support
|
||||
SFF-2021-H1 Jaan Tallinn LessWrong $1,055,000 Center For Applied Rationality General support of LessWrong
|
||||
SFF-2021-H1 Jaan Tallinn Alpenglow Group Limited $1,013,000 The Centre for Effective Altruism General support of Alpenglow Group Limited
|
||||
SFF-2021-H1 Jaan Tallinn David Krueger’s Research Group at Cambridge $1,000,000 Cambridge in America General support of David Krueger’s Research Group
|
||||
SFF-2021-H1 Jaan Tallinn Dr. Andrew Critch at CHAI, UC Berkeley $898,000 UC Berkeley Foundation General support of Dr. Andrew Critch at CHAI, UC Berkeley
|
||||
SFF-2021-H1 Jaan Tallinn Long-Term Future Fund $675,000 Centre for Effective Altruism General support of Long-Term Future Fund
|
||||
SFF-2021-H1 Jaan Tallinn BERI-FHI Collaboration $478,000 Berkeley Existential Risk Initiative General support of BERI-FHI Collaboration
|
||||
SFF-2021-H1 Jaan Tallinn Expii, Inc. $347,000 Idea Foundry General support of Expii, Inc.
|
||||
SFF-2021-H1 Jaan Tallinn BERI-SERI Collaboration $333,000 Berkeley Existential Risk Initiative General support of BERI-SERI Collaboration
|
||||
SFF-2021-H1 Jaan Tallinn Generation Pledge $291,000 Generation Pledge, Inc. General Support
|
||||
SFF-2021-H1 Jaan Tallinn Legal Priorities Project $265,000 Legal Priorities, Inc. General Support
|
||||
SFF-2021-H1 Jaan Tallinn Centre for the Governance of AI, Future of Humanity Institute $253,000 Berkeley Existential Risk Initiative General support of Centre for the Governance of AI, Future of Humanity Institute
|
||||
SFF-2021-H1 Jaan Tallinn AI Impacts $221,000 Machine Intelligence Research Institute General support of AI Impacts
|
||||
SFF-2021-H1 Jaan Tallinn AI Safety Support $200,000 Rethink Charity General support of AI Safety Support
|
||||
SFF-2021-H1 Jaan Tallinn Alliance to Feed the Earth in Disasters $175,000 Players Philanthropy Fund General support of Alliance to Feed the Earth in Disasters
|
||||
SFF-2021-H1 Jaan Tallinn All-Party Parliamentary Group for Future Generations $171,000 Founders for Good General support of All-Party Parliamentary Group for Future Generations
|
||||
SFF-2021-H1 Jaan Tallinn New Science Research, Inc. $147,000 New Science Research, Inc. General Support
|
||||
SFF-2021-H1 Jaan Tallinn Centre for the Study of Existential Risk, University of Cambridge $145,000 Cambridge in America General support of Centre for the Study of Existential Risk, University of Cambridge
|
||||
SFF-2021-H1 Jaan Tallinn Center for Innovative Governance Research (dba Charter Cities Institute) $137,000 Center for Innovative Governance Research (dba Charter Cities Institute) General Support
|
||||
SFF-2021-H1 Jaan Tallinn Moonlight Institute $127,000 Moonlight Institute General Support
|
||||
SFF-2021-H1 Jaan Tallinn Convergence: Project AI Clarity $103,000 Convergence Analysis General support of Convergence: Project AI Clarity
|
||||
SFF-2021-H1 Jaan Tallinn Viento $102,000 Social Good Fund General support of Viento
|
||||
SFF-2021-H1 Jaan Tallinn Centre for Enabling EA Learning & Research $61,000 Centre for Enabling EA Learning & Research General Support
|
||||
SFF-2021-H1 Jaan Tallinn Global Catastrophic Risk Institute $48,000 Social and Environmental Entrepreneurs General support of Global Catastrophic Risk Institute
|
||||
SFF-2021-H1 Jaan Tallinn BERI-CSER Collaboration $37,000 Berkeley Existential Risk Initiative General support of BERI-CSER Collaboration
|
||||
SFF-2021-H1 Jaan Tallinn Convergence $13,000 Convergence Analysis General Support of Convergence
|
||||
SFF-2021-H1 Jaan Tallinn Laboratory for Social Minds at Carnegie Mellon University $11,000 Carnegie Mellon University (CMU) General support of Laboratory for Social Minds at Carnegie Mellon University
|
||||
SFF-2021-H1 Jed McCaleb AI Impacts $82,000 Machine Intelligence Research Institute General support of AI Impacts
|
||||
SFF-2021-H1 Jed McCaleb New Science Research, Inc. $51,000 New Science Research, Inc. General Support
|
||||
SFF-2021-H1 Jed McCaleb Expii, Inc. $41,000 Idea Foundry General support of Expii, Inc.
|
||||
SFF-2021-H1 Jed McCaleb Centre for Enabling EA Learning & Research $21,000 Centre for Enabling EA Learning & Research General Support
|
||||
SFF-2021-H1 Jed McCaleb BERI-FHI Collaboration $17,000 Berkeley Existential Risk Initiative General support of BERI-FHI Collaboration
|
||||
SFF-2021-H1 Jed McCaleb Centre for the Governance of AI, Future of Humanity Institute $17,000 Berkeley Existential Risk Initiative General support of Centre for the Governance of AI, Future of Humanity Institute
|
||||
SFF-2021-H1 Jed McCaleb BERI-SERI Collaboration $14,000 Berkeley Existential Risk Initiative General support of BERI-SERI Collaboration
|
||||
SFF-2021-H2 Jaan Tallinn Long-Term Future Fund $1,417,000 Center For Effective Altruism General support of Long-Term Future Fund
|
||||
SFF-2021-H2 Jaan Tallinn Center on Long-Term Risk $1,218,000 Effective Altruism Foundation, Inc. General support of Center on Long-Term Risk
|
||||
SFF-2021-H2 Jaan Tallinn Alliance to Feed the Earth in Disasters $979,000 Players Philanthropy Fund General support of Alliance to Feed the Earth in Disasters
|
||||
SFF-2021-H2 Jaan Tallinn Alpenglow Group Limited $885,000 The Centre for Effective Altruism General support of Alpenglow Group Limited
|
||||
SFF-2021-H2 Jaan Tallinn Lightcone Infrastructure $380,000 Center for Applied Rationality General support of Lightcone Infrastructure
|
||||
SFF-2021-H2 The Casey and Family Foundation Lightcone Infrastructure $500,000 Center for Applied Rationality General support of Lightcone Infrastructure
|
||||
SFF-2021-H2 Jaan Tallinn EA Infrastructure Fund $699,000 Centre For Effective Altruism Usa Inc. General support of EA Infrastructure Fund
|
||||
SFF-2021-H2 Jaan Tallinn Centre for the Governance of AI. $591,000 Centre for Effective Altruism General support of Centre for the Governance of AI
|
||||
SFF-2021-H2 Jaan Tallinn Ought Inc. $542,000 Ought Inc. General support
|
||||
SFF-2021-H2 Jaan Tallinn New Science Research, Inc. $500,000 New Science Research, Inc. General support
|
||||
SFF-2021-H2 Jaan Tallinn BERI-CHAI Collaboration $248,000 Berkeley Existential Risk Initiative General support of BERI-CHAI Collaboration
|
||||
SFF-2021-H2 Jed McCaleb BERI-CHAI Collaboration $250,000 Berkeley Existential Risk Initiative General support of BERI-CHAI Collaboration
|
||||
SFF-2021-H2 Jaan Tallinn AI Objectives Institute $485,000 Foresight Institute General support of AI Objectives Institute
|
||||
SFF-2021-H2 Jaan Tallinn Topos Institute $450,000 Topos Institute General support
|
||||
SFF-2021-H2 Jaan Tallinn AI Safety Camp $130,000 Rethink Charity General support of AI Safety Camp
|
||||
SFF-2021-H2 Jaan Tallinn Emergent Ventures India $115,000 The Mercatus Center General support of Emergent Ventures India
|
||||
SFF-2021-H2 Jaan Tallinn European Biostasis Foundation $103,000 European Biostasis Foundation General support
|
||||
SFF-2021-H2 Jaan Tallinn Modeling Cooperation $83,000 Convergence Analysis General support of Modeling Cooperation
|
||||
SFF-2021-H2 Jaan Tallinn Research on AI & International Relations $34,000 Convergence Analysis General support of Research on AI & International Relations
|
|
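For reference, the file above is tab-separated with the header row that scrape-sff.ts writes, so it can be read back into plain objects keyed by column name. The helper below is a sketch, not part of this diff (readTsv is a made-up name), and assumes well-formed rows with no embedded tabs:

// Hypothetical helper (not part of this diff): parse the tab-separated
// sff-grants.csv written by scrape-sff.ts into objects keyed by the header.
import * as fs from 'fs'

function readTsv(path: string): Record<string, string>[] {
  const [header, ...rows] = fs.readFileSync(path, 'utf8').split('\n')
  const keys = header.split('\t')
  return rows
    .filter((row) => row.trim() !== '')
    .map((row) => {
      const values = row.split('\t')
      return Object.fromEntries(keys.map((key, i) => [key, values[i] ?? '']))
    })
}

// Example: readTsv('sff-grants.csv')[0].organization === '80,000 Hours'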
54
web/pages/grants/GranteeCard.tsx
Normal file
|
@ -0,0 +1,54 @@
import Link from 'next/link'
import Image from 'next/image'

import { Grantee } from '.'
import { Row } from 'web/components/layout/row'
import { formatLargeNumber } from 'common/util/format'

export default function GranteeCard(props: { grantee: Grantee }) {
  const { grantee } = props
  const { slug, photo, preview, totalReceived } = grantee

  return (
    <Link href={`/grants/${slug}`} passHref>
      <div className="card card-compact transition:shadow flex-1 cursor-pointer border-2 bg-white hover:shadow-md">
        <div className="px-8">
          <figure className="relative h-32">
            {photo ? (
              <Image src={photo} alt="" layout="fill" objectFit="contain" />
            ) : (
              <div className="h-full w-full bg-gradient-to-r from-slate-300 to-indigo-200">
                <div className="absolute inset-0 flex items-center justify-center p-2 text-2xl font-light">
                  {grantee.name}
                </div>
              </div>
            )}
          </figure>
        </div>
        <div className="card-body">
          <div className="line-clamp-4 text-sm">{preview}</div>
          {totalReceived > 0 && (
            <Row className="mt-4 flex-1 items-end justify-center gap-6 text-gray-900">
              <Row className="items-baseline gap-1">
                <span className="text-3xl font-semibold">
                  {formatUsd(totalReceived)}
                </span>
                raised
              </Row>
              {/* {match && (
                <Col className="text-gray-500">
                  <span className="text-xl">+{formatUsd(match)}</span>
                  <span className="">match</span>
                </Col>
              )} */}
            </Row>
          )}
        </div>
      </div>
    </Link>
  )
}

function formatUsd(usd: number) {
  return `$${formatLargeNumber(usd)}`
}
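GranteeCard and the ftxGrants file below both import their types from the grants index page, which is not included in this diff. Inferred from how the fields are used, the shapes are roughly as follows; this is a sketch only, and the real definitions in web/pages/grants/index.tsx may differ:

// Hypothetical sketches (not part of this diff), inferred from usage in
// GranteeCard.tsx and ftxGrants.ts; the real definitions may have more fields.
type Grant = {
  from: string // funder label, e.g. 'FTX FF'
  to: string // grantee name
  date: string // ISO date string, e.g. '2022-07-27'
  amount: number // USD
  description: string
}

type Grantee = {
  name: string
  slug: string // used for the /grants/[slug] route
  preview: string // short blurb shown on the card
  photo?: string // image URL; a gradient placeholder is rendered when absent
  totalReceived: number // USD, presumably summed from matching grants
}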
908
web/pages/grants/ftxGrants.ts
Normal file
|
@ -0,0 +1,908 @@
|
|||
import { Grant } from './index'
|
||||
|
||||
export const ftxGrants: Grant[] = [
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'School of Thinking',
|
||||
date: '2022-07-27',
|
||||
amount: 250000,
|
||||
description:
|
||||
'This regrant will support a global media outreach project to create high quality video and social media content about rationalism, longtermism and Effective Altruism.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Legal Services Planning Grant',
|
||||
date: '2022-07-27',
|
||||
amount: 100000,
|
||||
description:
|
||||
'This regrant will support six months of research on topics including how legal services can be effectively provided to the Effective Altruism community, materials to be included in a legal services handbook for EA organizations, novel legal questions particular to the EA community that might benefit from further research initiatives, and ways to create an effective EA professional network for practicing lawyers.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Manifold Markets',
|
||||
date: '2022-07-27',
|
||||
amount: 1000000,
|
||||
description:
|
||||
'This regrant will support Manifold Markets in building a play-money prediction market platform. The platform is also experimenting with impact certificates and charity prediction markets.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'David Xu',
|
||||
date: '2022-07-27',
|
||||
amount: 50000,
|
||||
description:
|
||||
'This regrant will support six months of research on AI safety.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Trojan Detection Challenge at NeurIPS 2022',
|
||||
date: '2022-07-27',
|
||||
amount: 50000,
|
||||
description:
|
||||
'This regrant will support prizes for a trojan detection competition at NeurIPS, which involves identifying whether a deep neural network will suddenly change behavior if certain unknown conditions are met.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Effective Altruism Office Zurich',
|
||||
date: '2022-07-27',
|
||||
amount: 52000,
|
||||
description:
|
||||
'This regrant will support renting and furnishing an office space for a year.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Akash Wasil',
|
||||
date: '2022-07-27',
|
||||
amount: 26000,
|
||||
description:
|
||||
'This regrant will support an individual working on supporting students who are interested in focusing their careers on the world’s most pressing problems.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Fiona Pollack',
|
||||
date: '2022-07-27',
|
||||
amount: 30000,
|
||||
description:
|
||||
'This regrant will support six months of salary for an individual working to support Harvard students interested in working on the world’s most pressing problems and protecting and improving the long term future.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Peter McLaughlin',
|
||||
date: '2022-07-27',
|
||||
amount: 46000,
|
||||
description:
|
||||
'This regrant will support six months of research on criticisms of effective altruism.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Dwarkesh Patel',
|
||||
date: '2022-07-27',
|
||||
amount: 76000,
|
||||
description:
|
||||
'This regrant will support a promising podcaster to hire a research assistant and editor, purchase equipment, and cover travel to meet guests in person. The podcast covers technological progress, existential risk, economic growth, and the long term future.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'ALERT',
|
||||
date: '2022-07-27',
|
||||
amount: 150000,
|
||||
description:
|
||||
'This regrant will support the creation of the Active Longtermist Emergency Response Team, an organization to rapidly manage emerging global events like Covid-19.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'EA Critiques and Red Teaming Prize',
|
||||
date: '2022-07-27',
|
||||
amount: 100000,
|
||||
description:
|
||||
'This regrant will support prize money for a writing contest for critically engaging with theory or work in Effective Altruism. The goal of the contest is to produce thoughtful, action oriented critiques.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Federation for American Scientists',
|
||||
date: '2022-07-27',
|
||||
amount: 1000000,
|
||||
description:
|
||||
'This regrant will support a researcher and research assistant to work on high-skill immigration and AI policy at FAS for three years.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Ought',
|
||||
date: '2022-07-27',
|
||||
amount: 5000000,
|
||||
description:
|
||||
'This regrant will support Ought’s work building Elicit, a language-model based research assistant. This work contributes to research on reducing alignment risk through scaling human supervision via process-based systems.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'ML Safety Scholars Program',
|
||||
date: '2022-07-27',
|
||||
amount: 490000,
|
||||
description:
|
||||
'This regrant will fund a summer program for up to 100 students to spend 9 weeks studying machine learning, deep learning, and technical topics in safety.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'AntiEntropy',
|
||||
date: '2022-07-27',
|
||||
amount: 120000,
|
||||
description:
|
||||
'This regrant will support a project to create and house operations-related resources and guidance for EA-aligned organizations.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Everett Smith',
|
||||
date: '2022-07-27',
|
||||
amount: 35000,
|
||||
description:
|
||||
'This regrant will support a policy retreat on governing artificial intelligence.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Olle Häggström, Chalmers University of Technology',
|
||||
date: '2022-07-27',
|
||||
amount: 380000,
|
||||
description:
|
||||
'This regrant will support research on statistical arguments relating to existential risk and work on risks from artificial intelligence, as well as outreach, supervision, and policy work on these topics.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Essay Contest on Existential Risk in US Cost Benefit Analysis',
|
||||
date: '2022-07-27',
|
||||
amount: 137500,
|
||||
description:
|
||||
'This regrant will support an essay contest on “Accounting for Existential Risks in US Cost-Benefit Analysis,” with the aim of contributing to the revision of OMB Circular-A4, a document which guides US government cost-benefit analysis. The Legal Priorities Project is administering the contest.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'MineRL BASALT competition at NeurIPS',
|
||||
date: '2022-07-27',
|
||||
amount: 155000,
|
||||
description:
|
||||
'This regrant will support a NeurIPS competition applying human feedback in a non-language-model setting, specifically pretrained models in Minecraft. The grant will be administered by the Berkeley Existential Risk Initiative.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'QURI',
|
||||
date: '2022-07-27',
|
||||
amount: 200000,
|
||||
description:
|
||||
'This regrant will support QURI to develop a programming language called "Squiggle" as a tool for probabilistic estimation. The hope is this will be a useful tool for forecasting and fermi estimates.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Andi Peng',
|
||||
date: '2022-07-27',
|
||||
amount: 42600,
|
||||
description:
|
||||
'This regrant will support four months of salary and compute for research on AI alignment.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'CSIS',
|
||||
date: '2022-07-27',
|
||||
amount: 75000,
|
||||
description:
|
||||
'This regrant will support initiatives including a CSIS public event focused on the importance of investments in human capital to ensure US national security; roundtables with policymakers, immigration experts, national security professionals, and company representatives to discuss key policy actions that should be taken to bolster US national security through immigration reform; and two episodes of the “Vying for Talent” podcast focusing on the importance of foreign talent in bolstering America’s innovative capacity.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Aaron Scher',
|
||||
date: '2022-07-27',
|
||||
amount: 28500,
|
||||
description:
|
||||
'This regrant will support a summer of research on AI alignment in Berkeley.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Kris Shrishak',
|
||||
date: '2022-07-27',
|
||||
amount: 28000,
|
||||
description:
|
||||
'This regrant will support research on how cryptography might be applied to AI safety research.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'AI Impacts',
|
||||
date: '2022-07-27',
|
||||
amount: 250000,
|
||||
description:
|
||||
'This regrant will support rerunning the highly-cited survey “When Will AI Exceed Human Performance? Evidence from AI Experts” from 2016, analysis, and publication of results.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Chinmay Ingalagavi',
|
||||
date: '2022-07-27',
|
||||
amount: 50000,
|
||||
description:
|
||||
'This regrant will support a Masters at LSE for a talented STEM student.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Apart Research',
|
||||
date: '2022-07-27',
|
||||
amount: 95000,
|
||||
description:
|
||||
'This regrant will support the creation of an AI Safety organization which will create a platform to share AI safety research ideas and educational materials, connect people working on AI safety, and bring new people into the field.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Tereza Flidrova',
|
||||
date: '2022-07-27',
|
||||
amount: 32000,
|
||||
description:
|
||||
'This regrant will support a one year master’s program in architecture for a student interested in building civilizational shelters.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'J. Peter Scoblic',
|
||||
date: '2022-07-27',
|
||||
amount: 25000,
|
||||
description:
|
||||
'This regrant will fund a nuclear risk expert to construct nuclear war-related forecasting questions and provide forecasts and explanations on key nuclear war questions.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'AI Risk Public Materials Competition',
|
||||
date: '2022-07-27',
|
||||
amount: 40000,
|
||||
description:
|
||||
'This regrant will support two competitions to produce better public materials on the existential risk from AI.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Moncef Slaoui',
|
||||
date: '2022-07-27',
|
||||
amount: 150000,
|
||||
description:
|
||||
'This regrant will fund the writing of Slaouis memoir, especially including his experience directing Operation Warp Speed.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Artificial Intelligence Summer Residency Program',
|
||||
date: '2022-07-27',
|
||||
amount: 60000,
|
||||
description:
|
||||
'This regrant will support a six week summer residency in Berkeley on AI safety.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Public Editor',
|
||||
date: '2022-07-27',
|
||||
amount: 500000,
|
||||
description:
|
||||
'This regrant will support a project to use a combination of human feedback and Machine Learning to label misinformation and reasoning errors in popular news articles.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'The Good Ancestors Project',
|
||||
date: '2022-07-27',
|
||||
amount: 75000,
|
||||
description:
|
||||
'This regrant will support the creation of The Good Ancestors Project, an Australian-based organization to host research and community building on topics relevant to making the long term future go well.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Thomas Kwa',
|
||||
date: '2022-07-27',
|
||||
amount: 37500,
|
||||
description:
|
||||
'This regrant will support three months of research on AI safety.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Joshua Greene, Harvard University',
|
||||
date: '2022-07-27',
|
||||
amount: 250000,
|
||||
description:
|
||||
'This regrant will support the real-world testing and roll-out of Red Brain, Blue Brain, an online quiz designed to reduce negative partisanship between Democrats and Republicans in the US.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Braden Leach',
|
||||
date: '2022-07-27',
|
||||
amount: 175000,
|
||||
description:
|
||||
'This regrant supported a recent law school graduate to work on biosecurity. Braden will research and write at the Johns Hopkins Center for Health Security.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Adversarial Robustness Prizes at ECCV',
|
||||
date: '2022-07-27',
|
||||
amount: 30000,
|
||||
description:
|
||||
'This regrant will support three prizes for the best papers on adversarial robustness research at a workshop at ECCV, the main fall computer vision conference. The best papers are selected to have higher relevance to long-term threat models than usual adversarial robustness papers.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Confido Institute',
|
||||
date: '2022-07-27',
|
||||
amount: 190000,
|
||||
description:
|
||||
'The Confido Institute is working on developing a user-friendly interactive app, Confido, for making forecasts and communicating beliefs and uncertainty within groups and organizations. They are also building interactive educational programs about forecasting and working with uncertainty based around this app.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Supporting Agent Foundations AI safety research at ALTER',
|
||||
date: '2022-07-27',
|
||||
amount: 200000,
|
||||
description:
|
||||
'This regrant will support 1.5-3 years of salary for a mathematics researcher to work with Vanessa Kosoy on the learning-theoretic AI safety agenda.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Modeling Transformative AI Risks (Aryeh Englander, Sammy Martin, Analytica Consulting)',
|
||||
date: '2022-07-27',
|
||||
amount: 272000,
|
||||
description:
|
||||
'This regrant will support two AI researchers, one or two additional assistants, and a consulting firm to continue to build out and fully implement the quantitative model for how to understand risks and interventions around AI safety, expanding on their earlier research on “Modeling Transformative AI Risk.”',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Impact Markets',
|
||||
date: '2022-07-27',
|
||||
amount: 215000,
|
||||
description:
|
||||
'This regrant will support the creation of an “impact market.” The hope is to improve charity fundraising by allowing profit-motivated investors to earn returns by investing in charitable projects that are eventually deemed impactful.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'AI Alignment Prize on Inverse Scaling',
|
||||
date: '2022-07-27',
|
||||
amount: 250000,
|
||||
description:
|
||||
'This regrant will support prizes for a contest to find tasks where larger language models do worse (“inverse scaling”).',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Swift Centre for Applied Forecasting',
|
||||
date: '2022-07-27',
|
||||
amount: 2000000,
|
||||
description:
|
||||
'This regrant will support the creation of the Swift Centre for Applied Forecasting, including salary for a director and a team of expert forecasters. They will forecast trends from Our World in Data charts, as well as other topics related to ensuring the long term future goes well, with a particular focus on explaining the “why” of forecast estimates.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Lawrence Newport',
|
||||
date: '2022-07-27',
|
||||
amount: 95000,
|
||||
description:
|
||||
'This regrant will support the launch and first year of a youtube channel focusing on video essays presented by Dr Lawrence Newport on longtermism, the future of humanity, and related topics.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Aidan O’Gara',
|
||||
date: '2022-07-27',
|
||||
amount: 46000,
|
||||
description:
|
||||
'This regrant will fund salary, compute, and a scholarship for an undergraduate student doing career development and research on language model safety.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Legal Priorities Project',
|
||||
date: '2022-07-27',
|
||||
amount: 480000,
|
||||
description:
|
||||
'We recommended a grant to support the Legal Priorities Project’s ongoing research and outreach activities. This will allow LPP to pay two new hires and to put on a summer institute for non-US law students in Oxford.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Oded Galor, Brown University',
|
||||
date: '2022-07-27',
|
||||
amount: 500000,
|
||||
description:
|
||||
'We recommended a grant to support two years of academic research on long-term economic growth.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'The Atlas Fellowship',
|
||||
date: '2022-07-27',
|
||||
amount: 5000000,
|
||||
description:
|
||||
'We recommended a grant to support scholarships for talented and promising high school students to use towards educational opportunities and enrolling in a summer program.',
|
||||
},
|
||||
{
|
||||
from: 'FTX FF',
|
||||
to: 'Sherlock Biosciences',
|
||||
date: '2022-07-27',
|
||||
amount: 2000000,
|
||||
description:
|
||||
'We recommended an investment to support the development of universal CRISPR-based diagnostics, including paper-based diagnostics that can be used in developing-country settings without electricity.',
|
||||
},
|
||||
{
  from: 'FTX FF',
  to: 'Rethink Priorities',
  date: '2022-07-27',
  amount: 700000,
  description:
    'We recommended a grant to support Rethink’s research and projects aimed at improving humanity’s long-term prospects.',
},
{
  from: 'FTX FF',
  to: 'SecureBio',
  date: '2022-07-27',
  amount: 1200000,
  description:
    'We recommended a grant to support the hiring of several key staff for Dr. Kevin Esvelt’s pandemic prevention work. SecureBio is working to implement universal DNA synthesis screening, build a reliable early warning system, and coordinate the development of improved personal protective equipment and its delivery to essential workers when needed.',
},
{
  from: 'FTX FF',
  to: 'Lionel Levine, Cornell University',
  date: '2022-07-27',
  amount: 1500000,
  description:
    'We recommended a grant to Cornell University to support Prof. Levine, as well as students and collaborators, to work on alignment theory research at the Cornell math department.',
},
{
  from: 'FTX FF',
  to: 'Claudia Shi, Academic CS Research at Columbia University',
  date: '2022-07-27',
  amount: 100000,
  description:
    'We recommended a grant to pay for research assistants over three years to support the work of a PhD student working on AI safety at Columbia University.',
},
{
  from: 'FTX FF',
  to: 'Institute for Progress',
  date: '2022-07-27',
  amount: 480000,
  description:
    'We recommended a grant to support the Institute’s research and policy engagement work on high-skilled immigration, biosecurity, and pandemic prevention.',
},
{
  from: 'FTX FF',
  to: 'Good Judgment Project',
  date: '2022-07-27',
  amount: 300000,
  description:
    'We recommended a grant to support a Good Judgment initiative to produce forecasts on 10 Our World in Data data sets/charts.',
},
{
  from: 'FTX FF',
  to: 'Peter Hrosso, Researcher',
  date: '2022-07-27',
  amount: 230000,
  description:
    'We recommended a grant to support a project aimed at training large language models to represent the probability distribution over question answers in a prediction market.',
},
{
  from: 'FTX FF',
  to: 'Michael Jacob, MITRE',
  date: '2022-07-27',
  amount: 485000,
  description:
    'We recommended a grant to support research that we hope will be used to help strengthen the bioweapons convention and guide proactive actions to better secure those facilities or stop the dangerous work being done there.',
},
{
  from: 'FTX FF',
  to: 'Charity Entrepreneurship',
  date: '2022-07-27',
  amount: 470000,
  description:
    'We recommended a grant to support the incubation of new charities that will work on health security.',
},
{
  from: 'FTX FF',
  to: 'Michael Robkin',
  date: '2022-07-27',
  amount: 200000,
  description:
    'We recommended an investment to support the creation of Pretty Good PPE that is comfortable, storable, simple, and inexpensive. PGPPE aims to provide protection that is better than disposable masks and cheaper than both hazmat suits and N95s.',
},
{
  from: 'FTX FF',
  to: 'Legal Priorities Project',
  date: '2022-07-27',
  amount: 700000,
  description:
    'This grant will support one year of operating expenses and salaries at the Legal Priorities Project, a longtermist legal research and field-building organization.',
},
{
  from: 'FTX FF',
  to: 'AI Safety Camp',
  date: '2022-07-27',
  amount: 290000,
  description:
    'We recommended a grant to partially support the salaries for AI Safety Camp’s two directors and to support logistical expenses at its physical camp.',
},
{
  from: 'FTX FF',
  to: 'Anca Dragan, UC Berkeley',
  date: '2022-07-27',
  amount: 800000,
  description:
    'We recommended a grant to support a project to develop interactive AI algorithms for alignment that can uncover the causal features in human reward systems, and thereby help AI systems learn underlying human values that generalize to new situations.',
},
{
  from: 'FTX FF',
  to: 'Association for Long Term Existence and Resilience',
  date: '2022-07-27',
  amount: 320000,
  description:
    'We recommended a grant to support ALTER, an academic research and advocacy organization, which hopes to investigate, demonstrate, and foster useful ways to improve the future in the short term, and to safeguard and improve the long-term trajectory of humanity. The organization’s initial focus is building bridges to academia via conferences and grants to find researchers who can focus on AI safety, and on policy for reducing biorisk.',
},
{
  from: 'FTX FF',
  to: 'Manifold Markets',
  date: '2022-07-27',
  amount: 500000,
  description:
    'We recommended a grant to support Manifold Markets in building a charity prediction market, as an experiment for enabling effective forecasters to direct altruistic donations.',
},
{
  from: 'FTX FF',
  to: 'Guoliang (Greg) Liu, Virginia Tech',
  date: '2022-07-27',
  amount: 500000,
  description:
    'We recommended a grant to support a project to develop a new material -- an ultra-thin polymer-based thin film -- for use in next-generation Personal Protective Equipment which is both more effective and more comfortable.',
},
{
  from: 'FTX FF',
  to: 'Stimson South Asia Program',
  date: '2022-07-27',
  amount: 250000,
  description:
    'We recommended a grant to support the identification and implementation of promising confidence-building measures to reduce conflict between India and Pakistan.',
},
{
  from: 'FTX FF',
  to: 'Prometheus Science Bowl',
  date: '2022-07-27',
  amount: 100000,
  description:
    'We recommended a grant to support a competition for work on Eliciting Latent Knowledge, an open problem in AI alignment, for talented high school and college students who are participating in Prometheus Science Bowl.',
},
{
  from: 'FTX FF',
  to: 'Maxwell Tabarrok',
  date: '2022-07-27',
  amount: 7500,
  description:
    'We recommended a grant to support this student to spend a summer at the Future of Humanity Institute at Oxford University researching differential tech development and the connection between existential risks to humanity and economic growth.',
},
{
  from: 'FTX FF',
  to: 'HelixNano',
  date: '2022-07-27',
  amount: 10000000,
  description:
    'We recommended an investment to support Helix Nano running preclinical and Phase 1 trials of a pan-variant Covid-19 vaccine.',
},
{
  from: 'FTX FF',
  to: 'Giving What We Can',
  date: '2022-07-27',
  amount: 700000,
  description:
    'We recommended a grant to support Giving What We Can’s mission to create a world in which giving effectively and significantly is a cultural norm.',
},
{
  from: 'FTX FF',
  to: 'Gabriel Recchia, University of Cambridge',
  date: '2022-07-27',
  amount: 380000,
  description:
    'We recommended a grant to support research on how to fine-tune GPT-3 models to identify flaws in other fine-tuned language models’ arguments for the correctness of their outputs, and to test whether these help nonexpert humans successfully judge such arguments.',
},
{
  from: 'FTX FF',
  to: 'Simon Institute for Longterm Governance',
  date: '2022-07-27',
  amount: 820000,
  description:
    'We recommended a grant to support SI’s policy work with the United Nations system on the prevention of existential risks to humanity.',
},
{
  from: 'FTX FF',
  to: 'Centre for Effective Altruism',
  date: '2022-07-27',
  amount: 13940000,
  description:
    'We recommended a grant for general support for their activities, including running conferences, supporting student groups, and maintaining online resources.',
},
{
  from: 'FTX FF',
  to: 'Nonlinear',
  date: '2022-07-27',
  amount: 250000,
  description:
    'We recommended a grant to support the maintenance of a library of high-quality audio content on the world’s most pressing problems, and a fund to provide productivity-enhancing equipment and support staff for people working on important social issues.',
},
{
  from: 'FTX FF',
  to: 'Konstantinos Konstantinidis',
  date: '2022-07-27',
  amount: 85000,
  description:
    'We recommended a grant to support two years of research on the impacts of disruptive space technologies, nuclear risk, and mitigating risks from future space-based weapons.',
},
{
  from: 'FTX FF',
  to: 'Apollo Academic Surveys',
  date: '2022-07-27',
  amount: 250000,
  description:
    'We recommended a grant to support Apollo’s work aggregating the views of academic experts in many different fields and making them freely available online.',
},
{
  from: 'FTX FF',
  to: 'AI Safety Support',
  date: '2022-07-27',
  amount: 200000,
  description:
    'We recommended a grant for general funding for community building and managing the talent pipeline for AI alignment researchers. AI Safety Support’s work includes one-on-one coaching, events, and research training programs.',
},
{
  from: 'FTX FF',
  to: 'Daniel Brown, University of Utah',
  date: '2022-07-27',
  amount: 280000,
  description:
    'We recommended a grant to support research on value alignment in AI systems, practical algorithms for efficient value alignment verification, and user studies and experiments to test these algorithms.',
},
{
  from: 'FTX FF',
  to: 'Khalil Lab at Boston University',
  date: '2022-07-27',
  amount: 1550000,
  description:
    'We recommended a grant to support the development of a cheap, scalable, and decentralized platform for the rapid generation of disease-neutralizing therapeutic antibodies.',
},
{
  from: 'FTX FF',
  to: 'Sergey Levine, UC Berkeley',
  date: '2022-07-27',
  amount: 600000,
  description:
    'We recommended a grant to support a project to study how large language models integrated with offline reinforcement learning pose a risk of machine deception and persuasion.',
},
{
  from: 'FTX FF',
  to: 'Non-trivial Pursuits',
  date: '2022-07-27',
  amount: 1000000,
  description:
    'We recommended a grant to support outreach to help students to learn about career options, develop their skills, and plan their careers to work on the world’s most pressing problems.',
},
{
  from: 'FTX FF',
  to: 'Rational Animations',
  date: '2022-07-27',
  amount: 400000,
  description:
    'We recommended a grant to support the creation of animated videos on topics related to rationality and effective altruism to explain these topics for a broader audience.',
},
{
  from: 'FTX FF',
  to: 'Justin Mares, Biotech Researcher',
  date: '2022-07-27',
  amount: 140000,
  description:
    'We recommended a grant to support research on the feasibility of inactivating viruses via electromagnetic radiation.',
},
{
  from: 'FTX FF',
  to: 'Lightcone Infrastructure',
  date: '2022-07-27',
  amount: 2000000,
  description:
    'We recommended a grant to support Lightcone’s ongoing projects including running the LessWrong forum, hosting conferences and events, and maintaining an office space for Effective Altruist organizations.',
},
{
  from: 'FTX FF',
  to: 'Confirm Solutions',
  date: '2022-07-27',
  amount: 1000000,
  description:
    'We recommended an investment in Confirm Solutions, a public-benefit corporation, to support development of statistical models and software tools that can automate parts of the regulatory process for complex clinical trials. We anticipate that this work can help to speed up approvals of new vaccines and medical treatments while enhancing their statistical rigor.',
},
{
  from: 'FTX FF',
  to: 'High Impact Athletes',
  date: '2022-07-27',
  amount: 350000,
  description:
    'We recommended a grant to support HIA’s work encouraging professional athletes to donate more of their earnings to high impact charities and causes, and to promote a culture of giving among their fans.',
},
{
  from: 'FTX FF',
  to: 'High Impact Professionals',
  date: '2022-07-27',
  amount: 320000,
  description:
    'We recommended a grant to support HIP’s work recruiting EA working professionals to use more of their resources, including their careers, to focus on the world’s most pressing problems.',
},
{
  from: 'FTX FF',
  to: 'Berkeley Existential Risk Initiative',
  date: '2022-07-27',
  amount: 100000,
  description:
    'We recommended a grant to support BERI in hiring a second core operations employee to contribute to BERI’s work supporting university research groups.',
},
{
  from: 'FTX FF',
  to: 'Nathan Young',
  date: '2022-07-27',
  amount: 182000,
  description:
    'We recommended a grant to support the creation of a website for collaboratively creating public forecasting questions for a range of prediction aggregators and markets.',
},
{
  from: 'FTX FF',
  to: 'Bear F. Braumoeller, Department of Political Science, The Ohio State University',
  date: '2022-07-27',
  amount: 388080,
  description:
    'We recommended a grant to support a postdoc and two research assistants for Professor Braumoeller’s MESO Lab for two years to carry out research on international orders and how they affect the probability of war.',
},
{
  from: 'FTX FF',
  to: 'Siddharth Hiregowdara, AI Safety Introductory Materials',
  date: '2022-07-27',
  amount: 100000,
  description:
    'We recommended a grant to support the production of high quality materials for learning about AI safety work.',
},
{
  from: 'FTX FF',
  to: 'Longview',
  date: '2022-07-27',
  amount: 15000000,
  description:
    'We recommended a grant to support Longview’s independent grantmaking on global priorities research, nuclear weapons policy, and other longtermist issues.',
},
{
  from: 'FTX FF',
  to: 'Global Guessing',
  date: '2022-07-27',
  amount: 336000,
  description:
    'We recommended a grant to support Global Guessing’s forecasting coverage on the Russian invasion of Ukraine, which they will also use to build tools and infrastructure to support future forecasting work.',
},
{
  from: 'FTX FF',
  to: 'Brian Christian, Author',
  date: '2022-07-27',
  amount: 300000,
  description:
    'We recommended a grant to support the completion of a book which explores the nature of human values and the implications for aligning AI with human preferences.',
},
{
  from: 'FTX FF',
  to: 'Sage',
  date: '2022-07-27',
  amount: 700000,
  description:
    'We recommended a grant to support the creation of a pilot version of a forecasting platform, and a paid forecasting team, to make predictions about questions relevant to high-impact research.',
},
{
  from: 'FTX FF',
  to: 'EffiSciences',
  date: '2022-07-27',
  amount: 135000,
  description:
    'We recommended a grant to support EffiSciences’s work promoting high impact research on global priorities (e.g. AI safety, biosecurity, and climate change) among French students and academics, and building up a community of people willing to work on important topics.',
},
{
  from: 'FTX FF',
  to: 'Anysphere',
  date: '2022-07-27',
  amount: 200000,
  description:
    'We recommended an investment to build a communication platform that provably leaks zero metadata.',
},
{
  from: 'FTX FF',
  to: '1Day Sooner',
  date: '2022-07-27',
  amount: 350000,
  description:
    'We recommended a grant to support 1DS’ work on pandemic preparedness, including advocacy for advance market purchase commitments, collaboration with the UK Pandemic Ethics Accelerator on challenge studies, and advocacy with 1Day Africa and the West African Health Organization for a global pandemic insurance fund.',
},
{
  from: 'FTX FF',
  to: 'Cecil Abungu, Centre for the Study of Existential Risk, University of Cambridge',
  date: '2022-07-27',
  amount: 160000,
  description:
    'We recommended a grant to Cecil Abungu, Visiting Researcher at the Centre for the Study of Existential Risk and Research Affiliate at the Legal Priorities Project, to support the writing and publication of a book on longtermist currents in historical African thought.',
},
{
  from: 'FTX FF',
  to: 'Luke Hewitt',
  date: '2022-07-27',
  amount: 150000,
  description:
    'We recommended a grant to support the development and application of a Minimum Viable Product of a data-driven approach to improving advocacy in areas of importance to societal well-being such as immigration policy.',
},
{
  from: 'FTX FF',
  to: 'Dr. Emilio I. Alarcón, University of Ottawa Heart Institute & University of Ottawa',
  date: '2022-07-27',
  amount: 250000,
  description:
    'This grant will support a project to develop new plastic surfaces incorporating molecules that can be activated with low-energy visible light to eradicate bacteria and kill viruses continuously. If successful, this project will change how plastic surfaces are currently decontaminated.',
},
{
  from: 'FTX FF',
  to: 'Rajalakshmi Children Foundation',
  date: '2022-07-27',
  amount: 200000,
  description:
    'We recommended a grant to support the identification of children in India from under-resourced areas who excel in math, science, and technology, and enable them to obtain high quality online education by digitally connecting them with mentors and teachers.',
},
{
  from: 'FTX FF',
  to: 'Nikki Teran, Institute for Progress',
  date: '2022-07-27',
  amount: 135000,
  description:
    'We recommended a grant to support the creation of biosecurity policy priorities via conversations with experts in security, technology, policy, and advocacy. It will develop position papers, research papers, and agendas for the biosecurity community.',
},
{
  from: 'FTX FF',
  to: 'James Lin',
  date: '2022-07-27',
  amount: 190000,
  description:
    'We recommended a grant to allow a reputable technology publication to engage 2-5 undergraduate student interns to write about topics including AI safety, alternative proteins, and biosecurity.',
},
{
  from: 'FTX FF',
  to: 'Ray Amjad',
  date: '2022-07-27',
  amount: 300000,
  description:
    'We recommended a grant to support the creation of a talent search organization which will help identify top young students around the world through a free-to-use website consisting of both challenging math and physics olympiad-style problems and discussion forums. Efforts will be particularly focused across India and China. These students will later be connected to support and programs so they can go on to work on the world’s most pressing issues.',
},
{
  from: 'FTX FF',
  to: 'The Center for Election Science',
  date: '2022-07-27',
  amount: 300000,
  description:
    'We recommended a grant to support the development of statewide ballot initiatives to institute approval voting. Approval voting is a simple voting method reform that lets voters select all the candidates they wish.',
},
{
  from: 'FTX FF',
  to: 'AVECRIS Pte. Ltd.',
  date: '2022-07-27',
  amount: 3600000,
  description:
    'We recommended an investment in AVECRIS’s Project DOOR to support the development of a next generation genetic vaccine platform that aims to allow for highly distributed vaccine production using AVECRIS’s advanced DNA vector delivery technology.',
},
{
  from: 'FTX FF',
  to: 'Council on Strategic Risks',
  date: '2022-07-27',
  amount: 400000,
  description:
    'We recommended a grant to support a project which will develop and advance ideas for strengthening regional and multilateral cooperation for addressing biological risks and filling gaps in current international institutions. These efforts include promoting the creation of a center with the capacity to rapidly respond to emerging infectious disease threats to prioritize blunting the impact of such events as well as quickly saving lives, and cooperative mechanisms to enhance biosafety and biosecurity while reducing the potential risks of spaces such as high-containment laboratories.',
},
{
  from: 'FTX FF',
  to: 'Effective Ideas Blog Prize',
  date: '2022-07-27',
  amount: 900000,
  description:
    'Longview Philanthropy and the Future Fund recommended a grant to support prizes for outstanding writing which encourages a broader public conversation around effective altruism and longtermism.',
},
{
  from: 'FTX FF',
  to: 'Pathos Labs, PopShift',
  date: '2022-07-27',
  amount: 50000,
  description:
    'We recommended a grant to support Pathos Labs to produce a PopShift convening connecting experts on the future of technology and existential risks with television writers to inspire new ideas for their shows.',
},
{
  from: 'FTX FF',
  to: 'Piezo Therapeutics',
  date: '2022-07-27',
  amount: 1000000,
  description:
    'We recommended an investment to support work on technology for delivering mRNA vaccines without lipid nanoparticles with the aim of making vaccines more safe, affordable, and scalable.',
},
]
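Each entry above matches the Grant record declared in web/pages/grants/index.tsx below. As a minimal sketch only (it assumes the array is exported as ftxGrants, the name the page below imports; totalRecommended and totalFor are illustrative names, not part of the repo), the listed amounts can be tallied like this:

import { ftxGrants } from './ftxGrants' // assumed export of the grant list above

// Illustrative only: sum every recommended amount in the list (USD).
const totalRecommended = ftxGrants.reduce((sum, grant) => sum + grant.amount, 0)

// Illustrative only: total recommended to a single recipient name.
const totalFor = (name: string) =>
  ftxGrants
    .filter((grant) => grant.to === name)
    .reduce((sum, grant) => sum + grant.amount, 0)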
107
web/pages/grants/index.tsx
Normal file

@@ -0,0 +1,107 @@
import { searchInAny } from 'common/util/parse'
import { debounce, sortBy } from 'lodash'
import { useMemo, useState } from 'react'
import { Col } from 'web/components/layout/col'
import { Page } from 'web/components/page'
import { Title } from 'web/components/title'
import { ftxGrants } from './ftxGrants'
import GranteeCard from './GranteeCard'

export type Grantee = {
  name: string // Better be unique lol
  // slug = name.toLowerCase().replace(/\s/g, '-')
  slug: string
  website?: string
  photo?: string
  preview: string
  description: string
  grantsReceived: Grant[]
  totalReceived: number
}

export type Grant = {
  date: string // in YYYY-MM-DD format
  amount: number // in USD
  from: 'FTX FF' | 'SFF' | 'OP'
  to: string // The name of the receiving charity
  description: string // Why the grant was given; if stated
}

// const grantees: Grantee[] = [
//   {
//     name: 'Manifold Markets',
//     slug: 'manifold-markets',
//     website: 'https://manifold.markets',
//     preview: '',
//     description: '',
//     grantsReceived: [
//       {
//         date: '2022-03-01',
//         amount: 500000,
//         from: 'FTX FF',
//         to: 'Manifold Markets',
//         description: 'Because you guys are awesome!',
//       },
//     ],
//   },
// ]

const grantees = grantsToGrantees(ftxGrants)

function grantsToGrantees(grantsList: Grant[]) {
  const grantees = [] as Grantee[]
  for (const grant of grantsList) {
    const name = grant.to
    let grantee: Grantee | undefined = grantees.find((g) => g.name === name)
    if (!grantee) {
      grantee = {
        name,
        slug: name.toLowerCase().replace(/\s/g, '-'),
        preview: grant.description,
        description: grant.description,
        grantsReceived: [],
        totalReceived: 0,
      }
      grantees.push(grantee)
    }
    grantee.grantsReceived.push(grant)
    grantee.totalReceived += grant.amount
  }
  console.log(grantees)
  return grantees
}

export default function Grants() {
  const [query, setQuery] = useState('')
  const debouncedQuery = debounce(setQuery, 50)

  const filteredGrantees = useMemo(() => {
    const g = grantees.filter((grantee) =>
      searchInAny(query, grantee.name, grantee.description)
    )
    return sortBy(g, 'totalReceived').reverse()
  }, [query])

  return (
    <Page>
      <Col className="w-full rounded px-4 py-6 sm:px-8 xl:w-[125%]">
        <Col className="">
          <Title className="!mt-0" text="EA Grants Database" />

          <input
            type="text"
            onChange={(e) => debouncedQuery(e.target.value)}
            placeholder="Find a charity"
            className="input input-bordered mb-6 w-full"
          />

          <div className="grid max-w-xl grid-flow-row grid-cols-1 gap-4 lg:max-w-full lg:grid-cols-2 xl:grid-cols-3">
            {filteredGrantees.map((grantee) => (
              <GranteeCard grantee={grantee} key={grantee.name} />
            ))}
          </div>
        </Col>
      </Col>
    </Page>
  )
}
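As a worked sketch of what grantsToGrantees produces (only the names, dates, and amounts are taken from the rows above; everything else is illustrative), a recipient listed twice, such as the Legal Priorities Project ($480,000 and $700,000 on 2022-07-27), folds into one Grantee whose slug is derived from its name:

// Illustrative sketch of the aggregated record for a grantee with two rows in ftxGrants.
const example: Grantee = {
  name: 'Legal Priorities Project',
  slug: 'legal-priorities-project', // name.toLowerCase().replace(/\s/g, '-')
  preview: '…', // description of the first grant encountered
  description: '…', // same string; later grants do not overwrite it
  grantsReceived: [/* both Grant rows from the list above */],
  totalReceived: 1180000, // 480000 + 700000
}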
20
yarn.lock

@@ -3156,6 +3156,13 @@
  dependencies:
    "@types/node" "*"

"@types/cheerio@^0.22.31":
  version "0.22.31"
  resolved "https://registry.yarnpkg.com/@types/cheerio/-/cheerio-0.22.31.tgz#b8538100653d6bb1b08a1e46dec75b4f2a5d5eb6"
  integrity sha512-Kt7Cdjjdi2XWSfrZ53v4Of0wG3ZcmaegFXjMmz9tfNrZSkzzo36G0AL1YqSdcIA78Etjt6E609pt5h1xnQkPUw==
  dependencies:
    "@types/node" "*"

"@types/connect-history-api-fallback@^1.3.5":
  version "1.3.5"
  resolved "https://registry.yarnpkg.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz#d1f7a8a09d0ed5a57aee5ae9c18ab9b803205dae"

@@ -4457,6 +4464,19 @@ cheerio@^1.0.0-rc.10:
    parse5-htmlparser2-tree-adapter "^7.0.0"
    tslib "^2.4.0"

cheerio@^1.0.0-rc.12:
  version "1.0.0-rc.12"
  resolved "https://registry.yarnpkg.com/cheerio/-/cheerio-1.0.0-rc.12.tgz#788bf7466506b1c6bf5fae51d24a2c4d62e47683"
  integrity sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==
  dependencies:
    cheerio-select "^2.1.0"
    dom-serializer "^2.0.0"
    domhandler "^5.0.3"
    domutils "^3.0.1"
    htmlparser2 "^8.0.1"
    parse5 "^7.0.0"
    parse5-htmlparser2-tree-adapter "^7.0.0"

chokidar@^3.4.2, chokidar@^3.5.2, chokidar@^3.5.3:
  version "3.5.3"
  resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd"