Skip to content

Commit fb0188f

Browse files
author
rafael orozco
committed
add many dois
1 parent dabe5b6 commit fb0188f

File tree

2 files changed

+25
-5
lines changed

2 files changed

+25
-5
lines changed

docs/paper/paper.bib

Lines changed: 24 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ @inproceedings{louboutin2022accelerating
44
booktitle={Second International Meeting for Applied Geoscience \& Energy},
55
pages={1482--1486},
66
year={2022},
7+
doi={10.1190/image2022-3750561.1},
78
organization={Society of Exploration Geophysicists and American Association of Petroleum Geologists}
89
}
910

@@ -29,6 +30,7 @@ @article{alemohammad2023self
2930
title={Self-consuming generative models go mad},
3031
author={Alemohammad, Sina and Casco-Rodriguez, Josue and Luzi, Lorenzo and Humayun, Ahmed Imtiaz and Babaei, Hossein and LeJeune, Daniel and Siahkoohi, Ali and Baraniuk, Richard G},
3132
journal={arXiv preprint arXiv:2307.01850},
33+
doi={10.52591/lxai202312101},
3234
year={2023}
3335
}
3436

@@ -50,6 +52,7 @@ @article{bezanson2017julia
5052
number={1},
5153
pages={65--98},
5254
year={2017},
55+
doi={10.1137/141000671},
5356
publisher={SIAM},
5457
url={https://doi.org/10.1137/141000671}
5558
}
@@ -58,13 +61,15 @@ @article{peters2019symmetric
5861
title={Symmetric block-low-rank layers for fully reversible multilevel neural networks},
5962
author={Peters, Bas and Haber, Eldad and Lensink, Keegan},
6063
journal={arXiv preprint arXiv:1912.12137},
64+
doi={10.48550/arXiv.1912.12137},
6165
year={2019}
6266
}
6367

6468
@article{orozco2022memory,
6569
title={Memory Efficient Invertible Neural Networks for 3D Photoacoustic Imaging},
6670
author={Orozco, Rafael and Louboutin, Mathias and Herrmann, Felix J},
6771
journal={arXiv preprint arXiv:2204.11850},
72+
doi={10.48550/arXiv.2204.11850},
6873
year={2022}
6974
}
7075

@@ -84,6 +89,7 @@ @article{innes2019differentiable
8489
title={A differentiable programming system to bridge machine learning and scientific computing},
8590
author={Innes, Mike and Edelman, Alan and Fischer, Keno and Rackauckas, Chris and Saba, Elliot and Shah, Viral B and Tebbutt, Will},
8691
journal={arXiv preprint arXiv:1907.07587},
92+
doi={10.48550/arXiv.1907.07587},
8793
year={2019}
8894
}
8995

@@ -146,10 +152,15 @@ @software{nflows
146152
url = {https://doi.org/10.5281/zenodo.4296287}
147153
}
148154

149-
@article{paszke2017automatic,
150-
title={Automatic differentiation in pytorch},
151-
author={Paszke, Adam and Gross, Sam and Chintala, Soumith and Chanan, Gregory and Yang, Edward and DeVito, Zachary and Lin, Zeming and Desmaison, Alban and Antiga, Luca and Lerer, Adam},
152-
year={2017}
155+
156+
157+
@article{paszke2019pytorch,
158+
title={Pytorch: An imperative style, high-performance deep learning library},
159+
author={Paszke, Adam and Gross, Sam and Massa, Francisco and Lerer, Adam and Bradbury, James and Chanan, Gregory and Killeen, Trevor and Lin, Zeming and Gimelshein, Natalia and Antiga, Luca and others},
160+
journal={Advances in neural information processing systems},
161+
volume={32},
162+
doi={10.48550/arXiv.1912.01703},
163+
year={2019}
153164
}
154165

155166
@book{haar1909theorie,
@@ -164,13 +175,15 @@ @article{dinh2014nice
164175
title={Nice: Non-linear independent components estimation},
165176
author={Dinh, Laurent and Krueger, David and Bengio, Yoshua},
166177
journal={arXiv preprint arXiv:1410.8516},
178+
doi={10.48550/arXiv.1410.8516},
167179
year={2014}
168180
}
169181

170182
@article{dinh2016density,
171183
title={Density estimation using real nvp},
172184
author={Dinh, Laurent and Sohl-Dickstein, Jascha and Bengio, Samy},
173185
journal={arXiv preprint arXiv:1605.08803},
186+
doi={10.48550/arXiv.1605.08803},
174187
year={2016}
175188
}
176189

@@ -181,6 +194,7 @@ @article{lensink2022fully
181194
volume={9},
182195
number={4},
183196
pages={60},
197+
doi={10.1007/s40687-022-00343-1},
184198
year={2022},
185199
publisher={Springer}
186200
}
@@ -217,6 +231,7 @@ @inproceedings{kumar2021enabling
217231
author={Kumar, Rajiv and Kotsi, Maria and Siahkoohi, Ali and Malcolm, Alison},
218232
booktitle={First International Meeting for Applied Geoscience \& Energy},
219233
pages={1515--1519},
234+
doi={10.1190/segam2021-3583705.1},
220235
year={2021},
221236
organization={Society of Exploration Geophysicists}
222237
}
@@ -235,20 +250,23 @@ @article{siahkoohi2021preconditioned
235250
title={Preconditioned training of normalizing flows for variational inference in inverse problems},
236251
author={Siahkoohi, Ali and Rizzuti, Gabrio and Louboutin, Mathias and Witte, Philipp A and Herrmann, Felix J},
237252
journal={arXiv preprint arXiv:2101.03709},
253+
doi={10.48550/arXiv.2101.03709},
238254
year={2021}
239255
}
240256

241257
@article{siahkoohi2022wave,
242258
title={Wave-equation-based inversion with amortized variational Bayesian inference},
243259
author={Siahkoohi, Ali and Orozco, Rafael and Rizzuti, Gabrio and Herrmann, Felix J},
244260
journal={arXiv preprint arXiv:2203.15881},
261+
doi={10.48550/arXiv.2203.15881},
245262
year={2022}
246263
}
247264

248265
@article{orozco2023refining,
249266
title={Refining Amortized Posterior Approximations using Gradient-Based Summary Statistics},
250267
author={Orozco, Rafael and Siahkoohi, Ali and Louboutin, Mathias and Herrmann, Felix J},
251268
journal={arXiv preprint arXiv:2305.08733},
269+
doi={10.48550/arXiv.2305.08733},
252270
year={2023}
253271
}
254272

@@ -299,13 +317,15 @@ @article{gahlot2023inference
299317
title={Inference of CO2 flow patterns--a feasibility study},
300318
author={Gahlot, Abhinav Prakash and Erdinc, Huseyin Tuna and Orozco, Rafael and Yin, Ziyi and Herrmann, Felix J},
301319
journal={arXiv preprint arXiv:2311.00290},
320+
doi={10.48550/arXiv.2311.00290},
302321
year={2023}
303322
}
304323

305324
@article{orozco2023amortized,
306325
title={Amortized Normalizing Flows for Transcranial Ultrasound with Uncertainty Quantification},
307326
author={Orozco, Rafael and Louboutin, Mathias and Siahkoohi, Ali and Rizzuti, Gabrio and van Leeuwen, Tristan and Herrmann, Felix},
308327
journal={arXiv preprint arXiv:2303.03478},
328+
doi={10.48550/arXiv.2303.03478},
309329
year={2023}
310330
}
311331

docs/paper/paper.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ The package we present, InvertibleNetworks.jl, is a pure Julia [@bezanson2017jul
4747
# Statement of need
4848

4949

50-
This software package focuses on memory efficiency. The promise of neural networks is in learning high-dimensional distributions from examples thus normalizing flow packages should allow easy application to large dimensional inputs such as images or 3D volumes. Interestingly, the invertibility of normalizing flows naturally alleviates memory concerns since intermediate network activations can be recomputed instead of saved in memory, greatly reducing the memory needed during backpropagation. The problem is that directly implementing normalizing flows in automatic differentiation frameworks such as PyTorch [@paszke2017automatic] will not automatically exploit this invertibility. The available packages for normalizing flows such as Bijectors.jl [@fjelde2020bijectors], NormalizingFlows.jl [@NormalizingFlows.jl], nflows [@nflows], normflows [@stimper2023normflows] and FrEIA [@freia] are built depending on automatic differentiation frameworks and thus do not exploit invertibility for memory efficiently.
50+
This software package focuses on memory efficiency. The promise of neural networks is in learning high-dimensional distributions from examples, thus normalizing flow packages should allow easy application to large dimensional inputs such as images or 3D volumes. Interestingly, the invertibility of normalizing flows naturally alleviates memory concerns since intermediate network activations can be recomputed instead of saved in memory, greatly reducing the memory needed during backpropagation. The problem is that directly implementing normalizing flows in automatic differentiation frameworks such as PyTorch [@paszke2019pytorch] will not automatically exploit this invertibility. The available packages for normalizing flows such as Bijectors.jl [@fjelde2020bijectors], NormalizingFlows.jl [@NormalizingFlows.jl], nflows [@nflows], normflows [@stimper2023normflows] and FrEIA [@freia] are built depending on automatic differentiation frameworks and thus do not exploit invertibility for memory efficiency.
5151

5252
We chose to write this package in Julia since it was built with a commitment to facilitate interoperability with other packages for workflows in scientific machine learning [@louboutin2022accelerating]. The interoperability was facilitated by the multiple dispatch system of Julia. Our goal is to provide solutions for imaging problems with high degrees of freedom, where computational speed is crucial. We have found that this software significantly benefits from Julia's Just-In-Time compilation technology.
5353

0 commit comments

Comments
 (0)