Minor paper adjustments
* Affiliation
* PBDL citation
* DOIs and ArXiv format
holl- committed Feb 28, 2024
1 parent ad74206 commit 35a457e
Showing 2 changed files with 46 additions and 33 deletions.
75 changes: 44 additions & 31 deletions paper.bib
@@ -1,16 +1,18 @@
-@article{rauber2020eagerpy,
-title={{EagerPy}: Writing Code That Works Natively with {PyTorch}, {TensorFlow}, {JAX}, and {NumPy}},
-author={Rauber, Jonas and Bethge, Matthias and Brendel, Wieland},
-journal={arXiv preprint arXiv:2008.04175},

+@misc{rauber2020eagerpy,
+title={EagerPy: Writing Code That Works Natively with PyTorch, TensorFlow, JAX, and NumPy},
+author={Jonas Rauber and Matthias Bethge and Wieland Brendel},
year={2020},
url={https://eagerpy.jonasrauber.de},
+eprint={2008.04175},
+archivePrefix={arXiv},
+primaryClass={cs.LG}
}

-@article{phiflow,
-title={Learning to control pdes with differentiable physics},
-author={Holl, Philipp and Koltun, Vladlen and Thuerey, Nils},
-journal={arXiv preprint arXiv:2001.07457},
-year={2020}
+@inproceedings{phiflow,
+title={Learning to Control PDEs with Differentiable Physics},
+author={Holl, Philipp and Thuerey, Nils and Koltun, Vladlen},
+booktitle={International Conference on Learning Representations},
+year={2019}
}

@book{Python3,
@@ -244,10 +246,11 @@ @book{HFDPatterns2004

@inproceedings{TensorFlow2016,
title={Tensorflow: A system for large-scale machine learning},
-author={Abadi, Mart{\'\i}n and Barham, Paul and Chen, Jianmin and Chen, Zhifeng and Davis, Andy and Dean, Jeffrey and Devin, Matthieu and Ghemawat, Sanjay and Irving, Geoffrey and Isard, Michael and others},
+author={Abadi, Martin and Barham, Paul and Chen, Jianmin and Chen, Zhifeng and Davis, Andy and Dean, Jeffrey and Devin, Matthieu and Ghemawat, Sanjay and Irving, Geoffrey and Isard, Michael and others},
booktitle={12th $\{$USENIX$\}$ symposium on operating systems design and implementation ($\{$OSDI$\}$ 16)},
pages={265--283},
-year={2016}
+year={2016},
+doi={}
}

@article{PyTorch2019,
@@ -328,18 +331,20 @@ @article{ScaleInvariant2022
year={2022}
}

-@article{HalfInverse2022,
-title={Half-inverse gradients for physical deep learning},
+@inproceedings{HalfInverse2022,
+title={Half-Inverse Gradients for Physical Deep Learning},
author={Schnell, Patrick and Holl, Philipp and Thuerey, Nils},
-journal={arXiv preprint arXiv:2203.10131},
-year={2022}
+booktitle={International Conference on Learning Representations},
+year={2021}
}

-@article{PBDL2021,
-title={Physics-based deep learning},
-author={Thuerey, Nils and Holl, Philipp and Mueller, Maximilian and Schnell, Patrick and Trost, Felix and Um, Kiwon},
-journal={arXiv preprint arXiv:2109.05237},
-year={2021}
+@misc{PBDL2021,
+title={Physics-based Deep Learning},
+author={Nils Thuerey and Philipp Holl and Maximilian Mueller and Patrick Schnell and Felix Trost and Kiwon Um},
+year={2022},
+eprint={2109.05237},
+archivePrefix={arXiv},
+primaryClass={cs.LG}
}


@@ -352,11 +357,13 @@ @article{PDEBench
year={2022}
}

-@article{PDEArena,
+@misc{PDEArena,
title={Towards Multi-spatiotemporal-scale Generalized PDE Modeling},
-author={Gupta, Jayesh K and Brandstetter, Johannes},
-journal={arXiv preprint arXiv:2209.15616},
-year={2022}
+author={Jayesh K. Gupta and Johannes Brandstetter},
+year={2022},
+eprint={2209.15616},
+archivePrefix={arXiv},
+primaryClass={cs.LG}
}


@@ -374,13 +381,15 @@ @article{wandel2021teaching
volume={33},
number={4},
year={2021},
-publisher={AIP Publishing}
+publisher={AIP Publishing},
+doi={DOI: 10.1063/5.0047428}
}
@inproceedings{brandstetter2022clifford,
title={Clifford Neural Layers for PDE Modeling},
author={Brandstetter, Johannes and van den Berg, Rianne and Welling, Max and Gupta, Jayesh K},
booktitle={The Eleventh International Conference on Learning Representations},
-year={2022}
+year={2022},
+doi={10.48550/arXiv.2209.04934}
}
@inproceedings{wandel2020learning,
title={Learning Incompressible Fluid Dynamics from Scratch-Towards Fast, Differentiable Fluid Models that Generalize},
@@ -393,7 +402,8 @@ @inproceedings{sengar2021multi
author={Sengar, Vartika and Seemakurthy, Karthik and Gubbi, Jayavardhana and P, Balamuralidhar},
booktitle={Proceedings of the twelfth Indian conference on computer vision, graphics and image processing},
pages={1--9},
-year={2021}
+year={2021},
+doi={10.1145/3490035.3490283}
}
@article{parekh1993sex,
title={Sex differences in control of renal outer medullary circulation in rats: role of prostaglandins},
@@ -403,13 +413,15 @@ @article{parekh1993sex
number={4},
pages={F629--F636},
year={1993},
-publisher={American Physiological Society Bethesda, MD}
+publisher={American Physiological Society Bethesda, MD},
+doi={10.1152/ajprenal.1993.264.4.F629}
}
@inproceedings{ramos2022control,
title={Control of Two-way Coupled Fluid Systems with Differentiable Solvers},
author={Ramos, Brener and Trost, Felix and Thuerey, Nils},
booktitle={ICLR 2022 Workshop on Generalizable Policy Learning in Physical World},
-year={2022}
+year={2022},
+doi={10.48550/arXiv.2206.00342}
}
@inproceedings{wang2022approximately,
title={Approximately equivariant networks for imperfectly symmetric dynamics},
@@ -434,7 +446,8 @@ @inproceedings{wang2023applications
volume={12509},
pages={300--305},
year={2023},
-organization={SPIE}
+organization={SPIE},
+doi={10.1117/12.2656026}
}
@article{wu2022learning,
title={Learning to accelerate partial differential equations via latent global evolution},
4 changes: 2 additions & 2 deletions paper.md
@@ -18,7 +18,7 @@ authors:
orcid: 0000-0001-6647-8910
affiliation: 1
affiliations:
-- name: Technical University of Munich
+- name: School of Computation, Information and Technology, Technical University of Munich, Germany
index: 1
date: 01 August 2023
bibliography: paper.bib
@@ -95,7 +95,7 @@ $\Phi_\textrm{Flow}$ includes geometry, physics, and visualization modules, all

It was first used to show that differentiable PDE simulations can be used to train neural networks that steer the dynamics towards desired outcomes [@phiflow].
Differentiable PDEs, implemented against $\Phi_\textrm{ML}$'s API, were later shown to benefit learning corrections for low-resolution or incomplete physics models [@SolverInTheLoop2020].
-These findings were summarized and formalized in [@PBDL2021], along with many additional examples.
+These findings were summarized and formalized in @PBDL2021, along with many additional examples.

The library was also used in network optimization publications, such as showing that inverted simulations can be used to train networks [@ScaleInvariant2022] and that gradient inversion benefits learning the solutions to inverse problems [@HalfInverse2022].
