diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index 261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64..0000000000000000000000000000000000000000
--- a/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!) The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/README.md b/README.md
index 63571ba176703b654531f40001dc5e98889ce98e..c76af1e09d3c761c7bb46cb48351c21bf28871ed 100644
--- a/README.md
+++ b/README.md
@@ -1,24 +1,12 @@
-### Deep learning project seed
-Use this seed to start new deep learning / ML projects.
-
-- Built in setup.py
-- Built in requirements
-- Examples with MNIST
-- Badges
-- Bibtex
-
-#### Goals
-The goal of this seed is to structure ML paper-code the same so that work can easily be extended and replicated.
-
-### DELETE EVERYTHING ABOVE FOR YOUR PROJECT
-
----
-
 <div align="center">
 
-# Your Project Name
+# Deep-Inverse
+
 
-[](https://www.nature.com/articles/nature14539)
+[](https://papers.nips.cc/paper/2018/file/d903e9608cfbf08910611e4346a0ba44-Paper.pdf)
+[](https://iopscience.iop.org/article/10.1088/1361-6420/aaf14a)
+[](https://arxiv.org/pdf/2008.02839.pdf)
+[](https://www.nature.com/articles/s41597-021-00893-z)
 [](https://papers.nips.cc/book/advances-in-neural-information-processing-systems-31-2018)
 [](https://papers.nips.cc/book/advances-in-neural-information-processing-systems-31-2018)
 [](https://papers.nips.cc/book/advances-in-neural-information-processing-systems-31-2018)
@@ -34,49 +22,21 @@
 Conference
 -->
 </div>
 
-## Description
-What it does
+## Goals of the project
+We aim to build a data-driven model for CT image reconstruction and to evaluate it on the LoDoPaB challenge (https://lodopab.grand-challenge.org/).
 
 ## How to run
-First, install dependencies
 ```bash
 # clone project
-git clone https://github.com/YourGithubName/deep-learning-project-template
+git clone https://gitlab.rrz.uni-hamburg.de/BAT9096/deepinverse.git
 
 # install project
-cd deep-learning-project-template
+cd deepinverse
 pip install -e .
 pip install -r requirements.txt
- ```
-
- Next, navigate to any file and run it.
- ```bash
-# module folder
-cd project
-
-# run module (example: mnist as your main contribution)
-python lit_classifier_main.py
 ```
 
-## Imports
-This project is setup as a package which means you can now easily import any file into any other file like so:
-```python
-from project.datasets.mnist import mnist
-from project.lit_classifier_main import LitClassifier
-from pytorch_lightning import Trainer
-
-# model
-model = LitClassifier()
-
-# data
-train, val, test = mnist()
-# train
-trainer = Trainer()
-trainer.fit(model, train, val)
-
-# test using the best model!
-trainer.test(test_dataloaders=test)
-```
+## Imports
+Not yet completed
 
 ### Citation
 ```
diff --git a/project/__init__.py b/project/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/project/lit_autoencoder.py b/project/lit_autoencoder.py
deleted file mode 100644
index 3f9ff0c6c9994d0561b464c3303bd2faa614f445..0000000000000000000000000000000000000000
--- a/project/lit_autoencoder.py
+++ /dev/null
@@ -1,88 +0,0 @@
-from argparse import ArgumentParser
-import torch
-from torch import nn
-import torch.nn.functional as F
-from torch.utils.data import DataLoader
-import pytorch_lightning as pl
-from torch.utils.data import random_split
-
-from torchvision.datasets.mnist import MNIST
-from torchvision import transforms
-
-
-class LitAutoEncoder(pl.LightningModule):
-
-    def __init__(self):
-        super().__init__()
-        self.encoder = nn.Sequential(
-            nn.Linear(28 * 28, 64),
-            nn.ReLU(),
-            nn.Linear(64, 3)
-        )
-        self.decoder = nn.Sequential(
-            nn.Linear(3, 64),
-            nn.ReLU(),
-            nn.Linear(64, 28 * 28)
-        )
-
-    def forward(self, x):
-        # in lightning, forward defines the prediction/inference actions
-        embedding = self.encoder(x)
-        return embedding
-
-    def training_step(self, batch, batch_idx):
-        x, y = batch
-        x = x.view(x.size(0), -1)
-        z = self.encoder(x)
-        x_hat = self.decoder(z)
-        loss = F.mse_loss(x_hat, x)
-        return loss
-
-    def configure_optimizers(self):
-        optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
-        return optimizer
-
-
-def cli_main():
-    pl.seed_everything(1234)
-
-    # ------------
-    # args
-    # ------------
-    parser = ArgumentParser()
-    parser.add_argument('--batch_size', default=32, type=int)
-    parser.add_argument('--hidden_dim', type=int, default=128)
-    parser = pl.Trainer.add_argparse_args(parser)
-    args = parser.parse_args()
-
-    # ------------
-    # data
-    # ------------
-    dataset = MNIST('', train=True, download=True, transform=transforms.ToTensor())
-    mnist_test = MNIST('', train=False, download=True, transform=transforms.ToTensor())
-    mnist_train, mnist_val = random_split(dataset, [55000, 5000])
-
-    train_loader = DataLoader(mnist_train, batch_size=args.batch_size)
-    val_loader = DataLoader(mnist_val, batch_size=args.batch_size)
-    test_loader = DataLoader(mnist_test, batch_size=args.batch_size)
-
-    # ------------
-    # model
-    # ------------
-    model = LitAutoEncoder()
-
-    # ------------
-    # training
-    # ------------
-    trainer = pl.Trainer.from_argparse_args(args)
-    trainer.fit(model, train_loader, val_loader)
-
-    # ------------
-    # testing
-    # ------------
-    result = trainer.test(test_dataloaders=test_loader)
-    print(result)
-
-
-if __name__ == '__main__':
-    cli_main()
diff --git a/project/lit_image_classifier.py b/project/lit_image_classifier.py
deleted file mode 100644
index 1296a3f126eaf69628fc20a9ec58bf95043d6668..0000000000000000000000000000000000000000
--- a/project/lit_image_classifier.py
+++ /dev/null
@@ -1,109 +0,0 @@
-from argparse import ArgumentParser
-
-import torch
-import pytorch_lightning as pl
-from torch.nn import functional as F
-from torch.utils.data import DataLoader, random_split
-
-from torchvision.datasets.mnist import MNIST
-from torchvision import transforms
-
-
-class Backbone(torch.nn.Module):
-    def __init__(self, hidden_dim=128):
-        super().__init__()
-        self.l1 = torch.nn.Linear(28 * 28, hidden_dim)
-        self.l2 = torch.nn.Linear(hidden_dim, 10)
-
-    def forward(self, x):
-        x = x.view(x.size(0), -1)
-        x = torch.relu(self.l1(x))
-        x = torch.relu(self.l2(x))
-        return x
-
-
-class LitClassifier(pl.LightningModule):
-    def __init__(self, backbone, learning_rate=1e-3):
-        super().__init__()
-        self.save_hyperparameters()
-        self.backbone = backbone
-
-    def forward(self, x):
-        # use forward for inference/predictions
-        embedding = self.backbone(x)
-        return embedding
-
-    def training_step(self, batch, batch_idx):
-        x, y = batch
-        y_hat = self.backbone(x)
-        loss = F.cross_entropy(y_hat, y)
-        self.log('train_loss', loss, on_epoch=True)
-        return loss
-
-    def validation_step(self, batch, batch_idx):
-        x, y = batch
-        y_hat = self.backbone(x)
-        loss = F.cross_entropy(y_hat, y)
-        self.log('valid_loss', loss, on_step=True)
-
-    def test_step(self, batch, batch_idx):
-        x, y = batch
-        y_hat = self.backbone(x)
-        loss = F.cross_entropy(y_hat, y)
-        self.log('test_loss', loss)
-
-    def configure_optimizers(self):
-        # self.hparams available because we called self.save_hyperparameters()
-        return torch.optim.Adam(self.parameters(), lr=self.hparams.learning_rate)
-
-    @staticmethod
-    def add_model_specific_args(parent_parser):
-        parser = ArgumentParser(parents=[parent_parser], add_help=False)
-        parser.add_argument('--learning_rate', type=float, default=0.0001)
-        return parser
-
-
-def cli_main():
-    pl.seed_everything(1234)
-
-    # ------------
-    # args
-    # ------------
-    parser = ArgumentParser()
-    parser.add_argument('--batch_size', default=32, type=int)
-    parser.add_argument('--hidden_dim', type=int, default=128)
-    parser = pl.Trainer.add_argparse_args(parser)
-    parser = LitClassifier.add_model_specific_args(parser)
-    args = parser.parse_args()
-
-    # ------------
-    # data
-    # ------------
-    dataset = MNIST('', train=True, download=True, transform=transforms.ToTensor())
-    mnist_test = MNIST('', train=False, download=True, transform=transforms.ToTensor())
-    mnist_train, mnist_val = random_split(dataset, [55000, 5000])
-
-    train_loader = DataLoader(mnist_train, batch_size=args.batch_size)
-    val_loader = DataLoader(mnist_val, batch_size=args.batch_size)
-    test_loader = DataLoader(mnist_test, batch_size=args.batch_size)
-
-    # ------------
-    # model
-    # ------------
-    model = LitClassifier(Backbone(hidden_dim=args.hidden_dim), args.learning_rate)
-
-    # ------------
-    # training
-    # ------------
-    trainer = pl.Trainer.from_argparse_args(args)
-    trainer.fit(model, train_loader, val_loader)
-
-    # ------------
-    # testing
-    # ------------
-    result = trainer.test(test_dataloaders=test_loader)
-    print(result)
-
-
-if __name__ == '__main__':
-    cli_main()
diff --git a/project/lit_mnist.py b/project/lit_mnist.py
deleted file mode 100644
index 873337858fed5e3c816dc63984cd350ccf303c22..0000000000000000000000000000000000000000
--- a/project/lit_mnist.py
+++ /dev/null
@@ -1,96 +0,0 @@
-from argparse import ArgumentParser
-
-import torch
-import pytorch_lightning as pl
-from torch.nn import functional as F
-from torch.utils.data import DataLoader, random_split
-
-from torchvision.datasets.mnist import MNIST
-from torchvision import transforms
-
-
-class LitClassifier(pl.LightningModule):
-    def __init__(self, hidden_dim=128, learning_rate=1e-3):
-        super().__init__()
-        self.save_hyperparameters()
-
-        self.l1 = torch.nn.Linear(28 * 28, self.hparams.hidden_dim)
-        self.l2 = torch.nn.Linear(self.hparams.hidden_dim, 10)
-
-    def forward(self, x):
-        x = x.view(x.size(0), -1)
-        x = torch.relu(self.l1(x))
-        x = torch.relu(self.l2(x))
-        return x
-
-    def training_step(self, batch, batch_idx):
-        x, y = batch
-        y_hat = self(x)
-        loss = F.cross_entropy(y_hat, y)
-        return loss
-
-    def validation_step(self, batch, batch_idx):
-        x, y = batch
-        y_hat = self(x)
-        loss = F.cross_entropy(y_hat, y)
-        self.log('valid_loss', loss)
-
-    def test_step(self, batch, batch_idx):
-        x, y = batch
-        y_hat = self(x)
-        loss = F.cross_entropy(y_hat, y)
-        self.log('test_loss', loss)
-
-    def configure_optimizers(self):
-        return torch.optim.Adam(self.parameters(), lr=self.hparams.learning_rate)
-
-    @staticmethod
-    def add_model_specific_args(parent_parser):
-        parser = ArgumentParser(parents=[parent_parser], add_help=False)
-        parser.add_argument('--hidden_dim', type=int, default=128)
-        parser.add_argument('--learning_rate', type=float, default=0.0001)
-        return parser
-
-
-def cli_main():
-    pl.seed_everything(1234)
-
-    # ------------
-    # args
-    # ------------
-    parser = ArgumentParser()
-    parser.add_argument('--batch_size', default=32, type=int)
-    parser = pl.Trainer.add_argparse_args(parser)
-    parser = LitClassifier.add_model_specific_args(parser)
-    args = parser.parse_args()
-
-    # ------------
-    # data
-    # ------------
-    dataset = MNIST('', train=True, download=True, transform=transforms.ToTensor())
-    mnist_test = MNIST('', train=False, download=True, transform=transforms.ToTensor())
-    mnist_train, mnist_val = random_split(dataset, [55000, 5000])
-
-    train_loader = DataLoader(mnist_train, batch_size=args.batch_size)
-    val_loader = DataLoader(mnist_val, batch_size=args.batch_size)
-    test_loader = DataLoader(mnist_test, batch_size=args.batch_size)
-
-    # ------------
-    # model
-    # ------------
-    model = LitClassifier(args.hidden_dim, args.learning_rate)
-
-    # ------------
-    # training
-    # ------------
-    trainer = pl.Trainer.from_argparse_args(args)
-    trainer.fit(model, train_loader, val_loader)
-
-    # ------------
-    # testing
-    # ------------
-    trainer.test(test_dataloaders=test_loader)
-
-
-if __name__ == '__main__':
-    cli_main()
diff --git a/setup.py b/setup.py
index 3de44ddb7dcf7a866421b6aade7ebad55e8752f3..8b7141066c744fa3ebb2f91c61f184431c6fef11 100644
--- a/setup.py
+++ b/setup.py
@@ -3,13 +3,13 @@
 from setuptools import setup, find_packages
 
 setup(
-    name='project',
+    name='DeepInverse',
     version='0.0.0',
-    description='Describe Your Cool Project',
+    description='Deep Learning in Inverse Problems',
     author='',
     author_email='',
     # REPLACE WITH YOUR OWN GITHUB PROJECT LINK
-    url='https://github.com/PyTorchLightning/pytorch-lightning-conference-seed',
+    url='https://gitlab.rrz.uni-hamburg.de/BAT9096/deepinverse.git',
     install_requires=['pytorch-lightning'],
     packages=find_packages(),
 )