Creating pull request for 10.21105.joss.04101 #2946

Merged: 2 commits, merged on Feb 11, 2022
149 changes: 149 additions & 0 deletions joss.04101/10.21105.joss.04101.crossref.xml
@@ -0,0 +1,149 @@
<?xml version="1.0" encoding="UTF-8"?>
<doi_batch xmlns="http://www.crossref.org/schema/4.4.0" xmlns:ai="http://www.crossref.org/AccessIndicators.xsd" xmlns:rel="http://www.crossref.org/relations.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="4.4.0" xsi:schemaLocation="http://www.crossref.org/schema/4.4.0 http://www.crossref.org/schemas/crossref4.4.0.xsd">
  <head>
    <doi_batch_id>0cada83d463e68ecbf06a2c85c8c2aec</doi_batch_id>
    <timestamp>20220211182155</timestamp>
    <depositor>
      <depositor_name>JOSS Admin</depositor_name>
      <email_address>[email protected]</email_address>
    </depositor>
    <registrant>The Open Journal</registrant>
  </head>
  <body>
    <journal>
      <journal_metadata>
        <full_title>Journal of Open Source Software</full_title>
        <abbrev_title>JOSS</abbrev_title>
        <issn media_type="electronic">2475-9066</issn>
        <doi_data>
          <doi>10.21105/joss</doi>
          <resource>https://joss.theoj.org</resource>
        </doi_data>
      </journal_metadata>
      <journal_issue>
        <publication_date media_type="online">
          <month>02</month>
          <year>2022</year>
        </publication_date>
        <journal_volume>
          <volume>7</volume>
        </journal_volume>
        <issue>70</issue>
      </journal_issue>
      <journal_article publication_type="full_text">
        <titles>
          <title>TorchMetrics - Measuring Reproducibility in PyTorch</title>
        </titles>
        <contributors>
          <person_name sequence="first" contributor_role="author">
            <given_name>Nicki</given_name>
            <surname>Detlefsen</surname>
            <ORCID>http://orcid.org/0000-0002-8133-682X</ORCID>
          </person_name>
          <person_name sequence="additional" contributor_role="author">
            <given_name>Jiri</given_name>
            <surname>Borovec</surname>
            <ORCID>http://orcid.org/0000-0001-7437-824X</ORCID>
          </person_name>
          <person_name sequence="additional" contributor_role="author">
            <given_name>Justus</given_name>
            <surname>Schock</surname>
            <ORCID>http://orcid.org/0000-0003-0512-3053</ORCID>
          </person_name>
          <person_name sequence="additional" contributor_role="author">
            <given_name>Ananya</given_name>
            <surname>Jha</surname>
          </person_name>
          <person_name sequence="additional" contributor_role="author">
            <given_name>Teddy</given_name>
            <surname>Koker</surname>
          </person_name>
          <person_name sequence="additional" contributor_role="author">
            <given_name>Luca</given_name>
            <surname>Di Liello</surname>
          </person_name>
          <person_name sequence="additional" contributor_role="author">
            <given_name>Daniel</given_name>
            <surname>Stancl</surname>
          </person_name>
          <person_name sequence="additional" contributor_role="author">
            <given_name>Changsheng</given_name>
            <surname>Quan</surname>
          </person_name>
          <person_name sequence="additional" contributor_role="author">
            <given_name>Maxim</given_name>
            <surname>Grechkin</surname>
          </person_name>
          <person_name sequence="additional" contributor_role="author">
            <given_name>William</given_name>
            <surname>Falcon</surname>
          </person_name>
        </contributors>
        <publication_date>
          <month>02</month>
          <day>11</day>
          <year>2022</year>
        </publication_date>
        <pages>
          <first_page>4101</first_page>
        </pages>
        <publisher_item>
          <identifier id_type="doi">10.21105/joss.04101</identifier>
        </publisher_item>
        <ai:program name="AccessIndicators">
          <ai:license_ref applies_to="vor">http://creativecommons.org/licenses/by/4.0/</ai:license_ref>
          <ai:license_ref applies_to="am">http://creativecommons.org/licenses/by/4.0/</ai:license_ref>
          <ai:license_ref applies_to="tdm">http://creativecommons.org/licenses/by/4.0/</ai:license_ref>
        </ai:program>
        <rel:program>
          <rel:related_item>
            <rel:description>Software archive</rel:description>
            <rel:inter_work_relation relationship-type="references" identifier-type="doi">https://doi.org/10.5281/zenodo.6037875</rel:inter_work_relation>
          </rel:related_item>
          <rel:related_item>
            <rel:description>GitHub review issue</rel:description>
            <rel:inter_work_relation relationship-type="hasReview" identifier-type="uri">https://github.com/openjournals/joss-reviews/issues/4101</rel:inter_work_relation>
          </rel:related_item>
        </rel:program>
        <doi_data>
          <doi>10.21105/joss.04101</doi>
          <resource>https://joss.theoj.org/papers/10.21105/joss.04101</resource>
          <collection property="text-mining">
            <item>
              <resource mime_type="application/pdf">https://joss.theoj.org/papers/10.21105/joss.04101.pdf</resource>
            </item>
          </collection>
        </doi_data>
        <citation_list>
          <citation key="ref1">
            <unstructured_citation>GANs Trained by a Two Time-Scale Update Rule Converge to a Local Nash Equilibrium, 30, https://proceedings.neurips.cc/paper/2017/hash/8a1d694707eb0fefe65871369074926d-Abstract.html, Advances in Neural Information Processing Systems, Curran Associates, Inc., Heusel, Martin and Ramsauer, Hubert and Unterthiner, Thomas and Nessler, Bernhard and Hochreiter, Sepp, 6629–6640, 2017</unstructured_citation>
          </citation>
          <citation key="ref2">
            <unstructured_citation>Papers With Code, https://paperswithcode.com/, Accessed: 2021-12-01</unstructured_citation>
          </citation>
          <citation key="ref3">
            <unstructured_citation>Arxiv, https://arxiv.org/, Accessed: 2021-12-16</unstructured_citation>
          </citation>
          <citation key="ref4">
            <unstructured_citation>PyTorch: An Imperative Style, High-Performance Deep Learning Library, Paszke, Adam and Gross, Sam and Massa, Francisco and Lerer, Adam and Bradbury, James and Chanan, Gregory and Killeen, Trevor and Lin, Zeming and Gimelshein, Natalia and Antiga, Luca and Desmaison, Alban and Kopf, Andreas and Yang, Edward and DeVito, Zachary and Raison, Martin and Tejani, Alykhan and Chilamkurthy, Sasank and Steiner, Benoit and Fang, Lu and Bai, Junjie and Chintala, Soumith, Advances in Neural Information Processing Systems 32, Wallach, H. and Larochelle, H. and Beygelzimer, A. and d’ Alché-Buc, F. and Fox, E. and Garnett, R., 8024–8035, 2019, Curran Associates, Inc., http://papers.neurips.cc/paper/9015-pytorch-an-imperative-style-high-performance-deep-learning-library.pdf</unstructured_citation>
          </citation>
          <citation key="ref5">
            <unstructured_citation>Transformers: State-of-the-Art Natural Language Processing, Wolf, Thomas and Debut, Lysandre and Sanh, Victor and Chaumond, Julien and Delangue, Clement and Moi, Anthony and Cistac, Pierric and Rault, Tim and Louf, Rémi and Funtowicz, Morgan and Davison, Joe and Shleifer, Sam and von Platen, Patrick and Ma, Clara and Jernite, Yacine and Plu, Julien and Xu, Canwen and Scao, Teven Le and Gugger, Sylvain and Drame, Mariama and Lhoest, Quentin and Rush, Alexander M., Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: System Demonstrations, oct, 2020, Online, Association for Computational Linguistics, https://www.aclweb.org/anthology/2020.emnlp-demos.6, 38–45, 10</unstructured_citation>
          </citation>
          <citation key="ref6">
            <unstructured_citation>Scikit-learn: Machine Learning in Python, Pedregosa, F. and Varoquaux, G. and Gramfort, A. and Michel, V. and Thirion, B. and Grisel, O. and Blondel, M. and Prettenhofer, P. and Weiss, R. and Dubourg, V. and Vanderplas, J. and Passos, A. and Cournapeau, D. and Brucher, M. and Perrot, M. and Duchesnay, E., Journal of Machine Learning Research, 12, 2825–2830, 2011</unstructured_citation>
          </citation>
          <citation key="ref7">
            <unstructured_citation>Keras, Chollet, François and others, 2015, GitHub, https://github.com/fchollet/keras</unstructured_citation>
          </citation>
          <citation key="ref8">
            <unstructured_citation>Scaling Vision Transformers, Zhai, Xiaohua and Kolesnikov, Alexander and Houlsby, Neil and Beyer, Lucas, 2021, 2106.04560, arXiv, cs.CV</unstructured_citation>
          </citation>
          <citation key="ref9">
            <unstructured_citation>RoBERTa: A Robustly Optimized BERT Pretraining Approach, Liu, Yinhan and Ott, Myle and Goyal, Naman and Du, Jingfei and Joshi, Mandar and Chen, Danqi and Levy, Omer and Lewis, Mike and Zettlemoyer, Luke and Stoyanov, Veselin, 2019, 1907.11692, arXiv, cs.CL</unstructured_citation>
          </citation>
        </citation_list>
      </journal_article>
    </journal>
  </body>
</doi_batch>
Binary file added joss.04101/10.21105.joss.04101.pdf
Binary file not shown.
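
Since the deposit is plain XML, it can be sanity-checked before merging. Below is a minimal sketch, not part of the JOSS tooling, that parses the file with Python's standard-library `xml.etree.ElementTree` and prints the key metadata; the file path is taken from the diff header above, and the namespace URI from the `doi_batch` root element.

```python
# Minimal sanity check of the Crossref deposit (illustrative only).
import xml.etree.ElementTree as ET

# Default namespace declared on the <doi_batch> root element.
NS = {"cr": "http://www.crossref.org/schema/4.4.0"}

# File path as it appears in the diff header.
tree = ET.parse("joss.04101/10.21105.joss.04101.crossref.xml")
article = tree.getroot().find(".//cr:journal_article", NS)

title = article.findtext("cr:titles/cr:title", namespaces=NS)
doi = article.findtext("cr:doi_data/cr:doi", namespaces=NS)
authors = [
    f"{p.findtext('cr:given_name', namespaces=NS)} "
    f"{p.findtext('cr:surname', namespaces=NS)}"
    for p in article.findall("cr:contributors/cr:person_name", NS)
]

print(title)                    # TorchMetrics - Measuring Reproducibility in PyTorch
print(doi)                      # 10.21105/joss.04101
print(len(authors), "authors")  # 10 authors
```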