% Agrawal, Prateek
% Encoding: utf-8
% NOTE(review): months normalized to BibTeX macros (may/feb/sep), names to
% "Last, First" form, titles to selective brace protection; obvious typos in
% abstracts fixed. Citation keys left untouched so existing \cite's still work.

@Misc{Kashanskii2021a,
  author       = {Kashanskii, Vladislav and Radchenko, Gleb and Prodan, Radu and Zabrovskiy, Anatoliy and Agrawal, Prateek},
  howpublished = {Online Publication (Abstract)},
  month        = may,
  title        = {Automated Workflows Scheduling via Two-Phase Event-based {MILP} Heuristic for {MRCPSP} Problem},
  year         = {2021},
  abstract     = {In today's reality massive amounts of data-intensive tasks are managed by utilizing a large number of heterogeneous computing and storage elements interconnected through high-speed communication networks. However, one issue that still requires research effort is to enable efficient workflows scheduling in such complex environments. As the scale of the system grows and the workloads become more heterogeneous in the inner structure and the arrival patterns, scheduling problem becomes exponentially harder, requiring problem-specific heuristics. Many techniques evolved to tackle this problem, including, but not limited to Heterogeneous Earliest Finish Time (HEFT), The Dynamic Scaling Consolidation Scheduling (DSCS), Partitioned Balanced Time Scheduling (PBTS), Deadline Constrained Critical Path (DCCP) and Partition Problem-based Dynamic Provisioning Scheduling (PPDPS). In this talk, we will discuss the two-phase heuristic for makespan-optimized assignment of tasks and computing machines on large-scale computing systems, consisting of matching phase with subsequent event-based MILP method for schedule generation. We evaluated the scalability of the heuristic using the Constraint Integer Programming (SCIP) solver with various configurations based on data sets, provided by the MACS framework. Preliminary results show that the model provides near-optimal assignments and schedules for workflows composed of up to 100 tasks with complex task I/O interactions and demonstrates variable sensitivity with respect to the scale of workflows and resource limitation policies imposed.},
  keywords     = {HPC Schedule Generation, MRCPSP Problem, Workflows Scheduling, Two-Phase Heuristic},
  url          = {https://ashpc21.si/booklet-of-abstracts/#dearflip-df_2168/},
}

@Misc{Vladislav2020,
  author       = {Prodan, Radu and Kashanskii, Vladislav and Kimovski, Dragi and Agrawal, Prateek},
  howpublished = {Online Publication (Abstract)},
  month        = feb,
  title        = {{ASPIDE} Project: Perspectives on the Scalable Monitoring and Auto-tuning},
  year         = {2020},
  abstract     = {Extreme Data is an incarnation of Big Data concept distinguished by the massive amounts of data that must be queried, communicated and analyzed in (near) real-time by using a very large number of memory/storage elements of both the converging Cloud and Pre-Exascale computing systems. Notable examples are the raw high energy physics data produced at a rate of hundreds of gigabits-per-second that must be filtered, stored and analyzed in a fault-tolerant fashion, multi-scale brain imaging data analysis and simulations, complex networks data analyses, driven by the social media systems. To handle such amounts of data multi-tier architectures are introduced, including scheduling systems and distributed storage systems, ranging from in-memory databases to tape libraries. The ASPIDE project is contributing with the definition of a new programming paradigm, APIs, runtime tools and methodologies for expressing data intensive tasks on the converging large-scale systems, which can pave the way for the exploitation of parallelism policies over the various models of the system architectures, promoting high performance and efficiency, and offering powerful operations and mechanisms for processing extreme data sources at high speed and/or real-time.},
  url          = {https://research-explorer.app.ist.ac.at/record/7474},
}

@InProceedings{Agrawal2019b,
  author    = {Kaur, Rupinder and Madaan, Vishu and Agrawal, Prateek},
  booktitle = {Proceedings of the 3rd International Conference on Advanced Informatics for Computing Research},
  title     = {Diagnosis of Arthritis Using {K-Nearest} Neighbor Approach},
  year      = {2019},
  editor    = {Luhach, Ashish Kumar and Jat, Dharm Singh and Hawari, Kamarul Bin Ghazali and Gao, Xiao-Zhi and Lingras, Pawan},
  month     = sep,
  pages     = {160--171},
  publisher = {Springer Singapore},
  series    = {Communications in Computer and Information Science},
  doi       = {10.1007/978-981-15-0108-1_16},
  url       = {https://link.springer.com/chapter/10.1007/978-981-15-0108-1_16},
}

@InProceedings{Agrawal2019a,
  author    = {Chaudhary, Deepak and Agrawal, Prateek and Madaan, Vishu},
  booktitle = {Proceedings of the 3rd International Conference on Advanced Informatics for Computing Research},
  title     = {Bank Cheque Validation Using Image Processing},
  year      = {2019},
  editor    = {Luhach, Ashish Kumar and Jat, Dharm Singh and Hawari, Kamarul Bin Ghazali and Gao, Xiao-Zhi and Lingras, Pawan},
  month     = sep,
  pages     = {148--159},
  publisher = {Springer Singapore},
  series    = {Communications in Computer and Information Science},
  doi       = {10.1007/978-981-15-0108-1_15},
  url       = {https://link.springer.com/chapter/10.1007/978-981-15-0108-1_15},
}

@InProceedings{Agrawal2019,
  author    = {Bhadwal, Neha and Agrawal, Prateek and Madaan, Vishu},
  booktitle = {Proceedings of the 3rd International Conference on Advanced Informatics for Computing Research},
  title     = {Bilingual Machine Translation System Between {Hindi} and {Sanskrit} Languages},
  year      = {2019},
  editor    = {Luhach, Ashish Kumar and Jat, Dharm Singh and Hawari, Kamarul Bin Ghazali and Gao, Xiao-Zhi and Lingras, Pawan},
  month     = sep,
  pages     = {312--321},
  publisher = {Springer Singapore},
  series    = {Communications in Computer and Information Science},
  doi       = {10.1007/978-981-15-0108-1_29},
  url       = {https://link.springer.com/chapter/10.1007/978-981-15-0108-1_29},
}