@article{31819, author = {Anubhav Jain and Shyue Ping Ong and Wei Chen and Bharat Medasani and Xiaohui Qu and Michael Kocher and Miriam Brafman and Guido Petretto and Gian-Marco Rignanese and Geoffroy Hautier and Daniel Gunter and Kristin A. Persson}, title = {FireWorks: a dynamic workflow system designed for high-throughput applications}, abstract = {
This paper introduces FireWorks, a workflow software for running high-throughput calculation workflows at supercomputing centers. FireWorks has been used to complete over 50 million CPU-hours worth of computational chemistry and materials science calculations at the National Energy Research Supercomputing Center. It has been designed to serve the demanding high-throughput computing needs of these applications, with extensive support for (i) concurrent execution through job packing, (ii) failure detection and correction, (iii) provenance and reporting for long-running projects, (iv) automated duplicate detection, and (v) dynamic workflows (i.e., modifying the workflow graph during runtime). We have found that these features are highly relevant to enabling modern data-driven and high-throughput science applications, and we discuss our implementation strategy that rests on Python and NoSQL databases (MongoDB). Finally, we present performance data and limitations of our approach along with planned future work.
}, year = {2015}, journal = {Concurrency and Computation: Practice and Experience}, volume = {27}, pages = {5037--5059}, month = dec, doi = {10.1002/cpe.3505}, language = {eng}, }