Fronk C; Yun J; Singh P; Petzold L
Bayesian polynomial neural networks and polynomial neural ordinary differential equations. Journal Article
In: PLOS Computational Biology, 2024.
Links | BibTeX | Tags: Bayesian Inference, Deep Learning, Inverse Problem, Surrogate Modeling
@article{fronk2024bayesian,
title = {{Bayesian} polynomial neural networks and polynomial neural ordinary differential equations},
author = {Colby Fronk and Jaewoong Yun and Prashant Singh and Linda Petzold},
eprint = {2308.10892},
eprinttype = {arXiv},
year = {2024},
date = {2024-08-01},
urldate = {2023-08-01},
journal = {PLOS Computational Biology},
keywords = {Bayesian Inference, Deep Learning, Inverse Problem, Surrogate Modeling},
pubstate = {published},
tppubtype = {article}
}
Karakulev A; Zachariah D; Singh P
Adaptive Parameter-Free Robust Learning using Latent Bernoulli Variables Conference
Proceedings of the 41st International Conference on Machine Learning (ICML 2024), 2024.
Abstract | Links | BibTeX | Tags: Bayesian Inference, Deep Learning, Optimization, Robust Learning
@conference{karakulev2024adaptive,
title = {Adaptive Parameter-Free Robust Learning using Latent {Bernoulli} Variables},
author = {Aleksandr Karakulev and Dave Zachariah and Prashant Singh},
url = {https://icml.cc/virtual/2024/poster/32797},
doi = {10.48550/arXiv.2312.00585},
year = {2024},
date = {2024-07-25},
urldate = {2024-07-25},
booktitle = {Proceedings of the 41st International Conference on Machine Learning (ICML 2024)},
abstract = {We present an efficient parameter-free approach for statistical learning from corrupted training sets. We identify corrupted and non-corrupted samples using latent Bernoulli variables, and therefore formulate the robust learning problem as maximization of the likelihood where latent variables are marginalized out. The resulting optimization problem is solved via variational inference using an efficient Expectation-Maximization based method. The proposed approach improves over the state-of-the-art by automatically inferring the corruption level and identifying outliers, while adding minimal computational overhead. We demonstrate our robust learning method on a wide variety of machine learning tasks including online learning and deep learning where it exhibits ability to adapt to different levels of noise and attain high prediction accuracy.},
keywords = {Bayesian Inference, Deep Learning, Optimization, Robust Learning},
pubstate = {published},
tppubtype = {conference}
}
We present an efficient parameter-free approach for statistical learning from corrupted training sets. We identify corrupted and non-corrupted samples using latent Bernoulli variables, and therefore formulate the robust learning problem as maximization of the likelihood where latent variables are marginalized out. The resulting optimization problem is solved via variational inference using an efficient Expectation-Maximization based method. The proposed approach improves over the state-of-the-art by automatically inferring the corruption level and identifying outliers, while adding minimal computational overhead. We demonstrate our robust learning method on a wide variety of machine learning tasks including online learning and deep learning where it exhibits ability to adapt to different levels of noise and attain high prediction accuracy.
Enberg R; Costa M F; Koay Y S; Moretti S; Singh P; Waltari H
Enhancing Robustness: BSM Parameter Inference with n1D-CNN and Novel Data Augmentation Conference Forthcoming
European AI for Fundamental Physics Conference (to appear), Forthcoming.
BibTeX | Tags: Deep Learning, Inverse Problem
@conference{enberg2024enhancing,
title = {Enhancing Robustness: {BSM} Parameter Inference with {n1D-CNN} and Novel Data Augmentation},
author = {Rikard Enberg and Max Fusté Costa and Yong Sheng Koay and Stefano Moretti and Prashant Singh and Harri Waltari},
year = {2024},
date = {2024-04-30},
booktitle = {European AI for Fundamental Physics Conference (to appear)},
keywords = {Deep Learning, Inverse Problem},
pubstate = {forthcoming},
tppubtype = {conference}
}
Cheng L; Singh P; Ferranti F
Transfer learning-assisted inverse modeling in nanophotonics based on mixture density networks Journal Article
In: IEEE Access, vol. 12, pp. 55218-55224, 2024.
Links | BibTeX | Tags: Deep Learning, Inverse Problem
@article{cheng2024transfer,
title = {Transfer learning-assisted inverse modeling in nanophotonics based on mixture density networks},
author = {Liang Cheng and Prashant Singh and Francesco Ferranti},
url = {https://ieeexplore.ieee.org/abstract/document/10486893},
doi = {10.1109/ACCESS.2024.3383790},
year = {2024},
date = {2024-04-02},
urldate = {2024-01-01},
journal = {IEEE Access},
volume = {12},
pages = {55218--55224},
keywords = {Deep Learning, Inverse Problem},
pubstate = {published},
tppubtype = {article}
}
Enberg R; Costa M F; Koay Y S; Moretti S; Singh P; Waltari H
BSM models and parameter inference via an n-channel 1D-CNN Conference
Sixth annual workshop of the LPCC inter-experimental machine learning working group, CERN, Geneva, 2024.
Links | BibTeX | Tags: Deep Learning, Inverse Problem
@conference{enberg2024bsm,
title = {{BSM} models and parameter inference via an n-channel {1D-CNN}},
author = {Rikard Enberg and Max Fusté Costa and Yong Sheng Koay and Stefano Moretti and Prashant Singh and Harri Waltari},
url = {https://indico.cern.ch/event/1297159/contributions/5729212/attachments/2789892/4865115/IML3.pdf},
year = {2024},
date = {2024-01-12},
urldate = {2024-01-12},
booktitle = {Sixth annual workshop of the LPCC inter-experimental machine learning working group, CERN, Geneva},
keywords = {Deep Learning, Inverse Problem},
pubstate = {published},
tppubtype = {conference}
}
Chu J; Singh P; Toor S
Efficient Resource Scheduling for Distributed Infrastructures Using Negotiation Capabilities Conference
2023 IEEE 16th International Conference on Cloud Computing (CLOUD), IEEE, 2023.
Abstract | Links | BibTeX | Tags: Deep Learning, Distributed Computing, Surrogate Modeling
@conference{chu2023efficient,
title = {Efficient Resource Scheduling for Distributed Infrastructures Using Negotiation Capabilities},
author = {Junjie Chu and Prashant Singh and Salman Toor},
url = {https://ieeexplore.ieee.org/abstract/document/10255003},
doi = {10.1109/CLOUD60044.2023.00065},
year = {2023},
date = {2023-07-02},
urldate = {2023-07-02},
booktitle = {2023 IEEE 16th International Conference on Cloud Computing (CLOUD)},
publisher = {IEEE},
organization = {IEEE},
abstract = {The information explosion drives enterprises and individuals to rent cloud computing infrastructure for their applications in the cloud. However, the agreements between cloud computing providers and clients are often inefficient. We propose an agent-based auto-negotiation system for resource scheduling using fuzzy logic. Our method completes a one-to-one auto-negotiation process and generates optimal offers for providers and clients. We compare the impact of different member functions, fuzzy rule sets, and negotiation scenarios on the offers to optimize the system. Our proposed method efficiently utilizes resources and offers interpretability, high flexibility, and customization. We successfully train machine learning models to replace the fuzzy negotiation system, improving processing speed. The article also highlights potential future improvements to the proposed system and machine learning models.},
keywords = {Deep Learning, Distributed Computing, Surrogate Modeling},
pubstate = {published},
tppubtype = {conference}
}
The information explosion drives enterprises and individuals to rent cloud computing infrastructure for their applications in the cloud. However, the agreements between cloud computing providers and clients are often inefficient. We propose an agent-based auto-negotiation system for resource scheduling using fuzzy logic. Our method completes a one-to-one auto-negotiation process and generates optimal offers for providers and clients. We compare the impact of different member functions, fuzzy rule sets, and negotiation scenarios on the offers to optimize the system. Our proposed method efficiently utilizes resources and offers interpretability, high flexibility, and customization. We successfully train machine learning models to replace the fuzzy negotiation system, improving processing speed. The article also highlights potential future improvements to the proposed system and machine learning models.
Wrede F; Eriksson R; Jiang R; Petzold L; Engblom S; Hellander A; Singh P
Robust and integrative Bayesian neural networks for likelihood-free parameter inference Proceedings Article
In: 2022 International Joint Conference on Neural Networks (IJCNN), pp. 1–10, IEEE 2022.
Links | BibTeX | Tags: Bayesian Inference, Deep Learning, Inverse Problem, Surrogate Modeling
@inproceedings{wrede2022robust,
title = {Robust and integrative {Bayesian} neural networks for likelihood-free parameter inference},
author = {Fredrik Wrede and Robin Eriksson and Richard Jiang and Linda Petzold and Stefan Engblom and Andreas Hellander and Prashant Singh},
url = {https://ieeexplore.ieee.org/abstract/document/9892800
https://arxiv.org/pdf/2102.06521},
doi = {10.1109/IJCNN55064.2022.9892800},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 International Joint Conference on Neural Networks (IJCNN)},
pages = {1--10},
organization = {IEEE},
keywords = {Bayesian Inference, Deep Learning, Inverse Problem, Surrogate Modeling},
pubstate = {published},
tppubtype = {inproceedings}
}
Akesson M; Singh P; Wrede F; Hellander A
Convolutional neural networks as summary statistics for approximate bayesian computation Journal Article
In: IEEE/ACM Transactions on Computational Biology and Bioinformatics, 2021.
Links | BibTeX | Tags: Bayesian Inference, Deep Learning, Inverse Problem
@article{akesson2021convolutional,
title = {Convolutional neural networks as summary statistics for approximate {Bayesian} computation},
author = {Mattias Akesson and Prashant Singh and Fredrik Wrede and Andreas Hellander},
url = {https://ieeexplore.ieee.org/abstract/document/9525290},
doi = {10.1109/TCBB.2021.3108695},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
journal = {IEEE/ACM Transactions on Computational Biology and Bioinformatics},
publisher = {IEEE},
keywords = {Bayesian Inference, Deep Learning, Inverse Problem},
pubstate = {published},
tppubtype = {article}
}
Singh P; Wrede F; Hellander A
Scalable machine learning-assisted model exploration and inference using Sciope Journal Article
In: Bioinformatics, vol. 37, no. 2, pp. 279–281, 2021.
Links | BibTeX | Tags: Bayesian Inference, Deep Learning, Inverse Problem, Optimization, Software, Surrogate Modeling
@article{singh2021scalable,
title = {Scalable machine learning-assisted model exploration and inference using {Sciope}},
author = {Prashant Singh and Fredrik Wrede and Andreas Hellander},
url = {https://academic.oup.com/bioinformatics/article/37/2/279/5876021},
doi = {10.1093/bioinformatics/btaa673},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
journal = {Bioinformatics},
volume = {37},
number = {2},
pages = {279--281},
publisher = {Oxford University Press},
keywords = {Bayesian Inference, Deep Learning, Inverse Problem, Optimization, Software, Surrogate Modeling},
pubstate = {published},
tppubtype = {article}
}
Singh P; Elamin M M; Toor S
Towards Smart e-Infrastructures, A Community Driven Approach Based on Real Datasets Proceedings Article
In: 2020 IEEE Green Technologies Conference (GreenTech), pp. 109–114, IEEE 2020.
Links | BibTeX | Tags: Deep Learning, Distributed Computing, Surrogate Modeling
@inproceedings{singh2020towards,
title = {Towards Smart e-Infrastructures, A Community Driven Approach Based on Real Datasets},
author = {Prashant Singh and Mona Mohamed Elamin and Salman Toor},
url = {https://ieeexplore.ieee.org/abstract/document/9289758
https://arxiv.org/pdf/2012.09579},
doi = {10.1109/GreenTech46478.2020.9289758},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {2020 IEEE Green Technologies Conference (GreenTech)},
pages = {109--114},
organization = {IEEE},
keywords = {Deep Learning, Distributed Computing, Surrogate Modeling},
pubstate = {published},
tppubtype = {inproceedings}
}
Bayesian polynomial neural networks and polynomial neural ordinary differential equations. Journal Article
In: PLOS Computational Biology, 2024.
Adaptive Parameter-Free Robust Learning using Latent Bernoulli Variables Conference
Proceedings of the 41st International Conference on Machine Learning (ICML 2024), 2024.
Enhancing Robustness: BSM Parameter Inference with n1D-CNN and Novel Data Augmentation Conference Forthcoming
European AI for Fundamental Physics Conference (to appear), Forthcoming.
Transfer learning-assisted inverse modeling in nanophotonics based on mixture density networks Journal Article
In: IEEE Access, vol. 12, pp. 55218-55224, 2024.
BSM models and parameter inference via an n-channel 1D-CNN Conference
Sixth annual workshop of the LPCC inter-experimental machine learning working group, CERN, Geneva, 2024.
Efficient Resource Scheduling for Distributed Infrastructures Using Negotiation Capabilities Conference
2023 IEEE 16th International Conference on Cloud Computing (CLOUD), IEEE, 2023.
Robust and integrative Bayesian neural networks for likelihood-free parameter inference Proceedings Article
In: 2022 International Joint Conference on Neural Networks (IJCNN), pp. 1–10, IEEE 2022.
Convolutional neural networks as summary statistics for approximate bayesian computation Journal Article
In: IEEE/ACM Transactions on Computational Biology and Bioinformatics, 2021.
Scalable machine learning-assisted model exploration and inference using Sciope Journal Article
In: Bioinformatics, vol. 37, no. 2, pp. 279–281, 2021.
Towards Smart e-Infrastructures, A Community Driven Approach Based on Real Datasets Proceedings Article
In: 2020 IEEE Green Technologies Conference (GreenTech), pp. 109–114, IEEE 2020.