From 02b5e527ce1950027d24498715a7147832ff12cc Mon Sep 17 00:00:00 2001 From: Pierre Segonne <32778266+pierresegonne@users.noreply.github.com> Date: Tue, 20 Oct 2020 09:54:12 +0200 Subject: [PATCH] Initial commit --- .github/workflows/ci-testing.yml | 70 +++++++++++ .gitignore | 129 ++++++++++++++++++++ LICENSE | 201 +++++++++++++++++++++++++++++++ README.md | 89 ++++++++++++++ project/__init__.py | 0 project/lit_autoencoder.py | 88 ++++++++++++++ project/lit_image_classifier.py | 109 +++++++++++++++++ project/lit_mnist.py | 96 +++++++++++++++ requirements.txt | 3 + setup.cfg | 43 +++++++ setup.py | 16 +++ tests/__init__.py | 0 tests/requirements.txt | 8 ++ tests/test_classifier.py | 15 +++ 14 files changed, 867 insertions(+) create mode 100644 .github/workflows/ci-testing.yml create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 README.md create mode 100644 project/__init__.py create mode 100644 project/lit_autoencoder.py create mode 100644 project/lit_image_classifier.py create mode 100644 project/lit_mnist.py create mode 100644 requirements.txt create mode 100644 setup.cfg create mode 100644 setup.py create mode 100644 tests/__init__.py create mode 100644 tests/requirements.txt create mode 100644 tests/test_classifier.py diff --git a/.github/workflows/ci-testing.yml b/.github/workflows/ci-testing.yml new file mode 100644 index 0000000..e3da50c --- /dev/null +++ b/.github/workflows/ci-testing.yml @@ -0,0 +1,70 @@ +name: CI testing + +# see: https://help.github.com/en/actions/reference/events-that-trigger-workflows +on: + # Trigger the workflow on push or pull request, but only for the master branch + push: + branches: + - master + pull_request: + branches: + - master + +jobs: + pytest: + + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-20.04, macOS-10.15, windows-2019] + python-version: [3.7] + + # Timeout: https://stackoverflow.com/a/59076067/4521646 + timeout-minutes: 35 + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + # Github Actions: Run step on specific OS: https://stackoverflow.com/a/57948488/4521646 + - name: Setup macOS + if: runner.os == 'macOS' + run: | + brew install libomp # https://github.com/pytorch/pytorch/issues/20030 + + # Note: This uses an internal pip API and may not always work + # https://github.com/actions/cache/blob/master/examples.md#multiple-oss-in-a-workflow + - name: Get pip cache + id: pip-cache + run: | + python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)" + + - name: Cache pip + uses: actions/cache@v2 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-py${{ matrix.python-version }}-${{ hashFiles('requirements.txt') }} + restore-keys: | + ${{ runner.os }}-py${{ matrix.python-version }}- + + - name: Install dependencies + run: | + pip install --requirement requirements.txt --upgrade --quiet --find-links https://download.pytorch.org/whl/cpu/torch_stable.html --use-feature=2020-resolver + pip install --requirement tests/requirements.txt --quiet --use-feature=2020-resolver + python --version + pip --version + pip list + shell: bash + + - name: Tests + run: | + coverage run --source project -m py.test project tests -v --junitxml=junit/test-results-${{ runner.os }}-${{ matrix.python-version }}.xml + + - name: Statistics + if: success() + run: | + coverage report diff --git a/.gitignore 
b/.gitignore new file mode 100644 index 0000000..06f9346 --- /dev/null +++ b/.gitignore @@ -0,0 +1,129 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class +.github + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# Lightning /research +test_tube_exp/ +tests/tests_tt_dir/ +tests/save_dir +default/ +data/ +test_tube_logs/ +test_tube_data/ +datasets/ +model_weights/ +tests/save_dir +tests/tests_tt_dir/ +processed/ +raw/ + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +# IDEs +.idea +.vscode + +# seed project +lightning_logs/ +MNIST +.DS_Store diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md new file mode 100644 index 0000000..63571ba --- /dev/null +++ b/README.md @@ -0,0 +1,89 @@ +### Deep learning project seed +Use this seed to start new deep learning / ML projects. + +- Built in setup.py +- Built in requirements +- Examples with MNIST +- Badges +- Bibtex + +#### Goals +The goal of this seed is to structure ML paper-code the same so that work can easily be extended and replicated. + +### DELETE EVERYTHING ABOVE FOR YOUR PROJECT + +--- + +
+ +# Your Project Name + +[![Paper](http://img.shields.io/badge/paper-arxiv.1001.2234-B31B1B.svg)](https://www.nature.com/articles/nature14539) +[![Conference](http://img.shields.io/badge/NeurIPS-2019-4b44ce.svg)](https://papers.nips.cc/book/advances-in-neural-information-processing-systems-31-2018) +[![Conference](http://img.shields.io/badge/ICLR-2019-4b44ce.svg)](https://papers.nips.cc/book/advances-in-neural-information-processing-systems-31-2018) +[![Conference](http://img.shields.io/badge/AnyConference-year-4b44ce.svg)](https://papers.nips.cc/book/advances-in-neural-information-processing-systems-31-2018) + +![CI testing](https://github.com/PyTorchLightning/deep-learning-project-template/workflows/CI%20testing/badge.svg?branch=master&event=push) + + + +
+
+## Description
+What it does
+
+## How to run
+First, install the dependencies:
+```bash
+# clone project
+git clone https://github.com/YourGithubName/deep-learning-project-template
+
+# install project
+cd deep-learning-project-template
+pip install -e .
+pip install -r requirements.txt
+```
+Next, navigate to any file and run it.
+```bash
+# module folder
+cd project
+
+# run module (example: mnist as your main contribution)
+python lit_mnist.py
+```
+
+## Imports
+This project is set up as a package, which means you can easily import any file into any other file like so:
+```python
+from pytorch_lightning import Trainer
+from torch.utils.data import DataLoader
+from torchvision import transforms
+from torchvision.datasets.mnist import MNIST
+
+from project.lit_mnist import LitClassifier
+
+# model
+model = LitClassifier()
+
+# data (MNIST train/test loaders as an example)
+train = DataLoader(MNIST('', train=True, download=True, transform=transforms.ToTensor()), batch_size=32)
+test = DataLoader(MNIST('', train=False, download=True, transform=transforms.ToTensor()), batch_size=32)
+
+# train
+trainer = Trainer()
+trainer.fit(model, train)
+
+# test using the best model!
+trainer.test(test_dataloaders=test)
+```
+
+### Citation
+```
+@article{YourName,
+  title={Your Title},
+  author={Your team},
+  journal={Location},
+  year={Year}
+}
+```
diff --git a/project/__init__.py b/project/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/project/lit_autoencoder.py b/project/lit_autoencoder.py
new file mode 100644
index 0000000..3f9ff0c
--- /dev/null
+++ b/project/lit_autoencoder.py
@@ -0,0 +1,88 @@
+from argparse import ArgumentParser
+import torch
+from torch import nn
+import torch.nn.functional as F
+from torch.utils.data import DataLoader
+import pytorch_lightning as pl
+from torch.utils.data import random_split
+
+from torchvision.datasets.mnist import MNIST
+from torchvision import transforms
+
+
+class LitAutoEncoder(pl.LightningModule):
+
+    def __init__(self):
+        super().__init__()
+        self.encoder = nn.Sequential(
+            nn.Linear(28 * 28, 64),
+            nn.ReLU(),
+            nn.Linear(64, 3)
+        )
+        self.decoder = nn.Sequential(
+            nn.Linear(3, 64),
+            nn.ReLU(),
+            nn.Linear(64, 28 * 28)
+        )
+
+    def forward(self, x):
+        # in lightning, forward defines the prediction/inference actions
+        embedding = self.encoder(x)
+        return embedding
+
+    def training_step(self, batch, batch_idx):
+        x, y = batch
+        x = x.view(x.size(0), -1)
+        z = self.encoder(x)
+        x_hat = self.decoder(z)
+        loss = F.mse_loss(x_hat, x)
+        return loss
+
+    def configure_optimizers(self):
+        optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
+        return optimizer
+
+
+def cli_main():
+    pl.seed_everything(1234)
+
+    # ------------
+    # args
+    # ------------
+    parser = ArgumentParser()
+    parser.add_argument('--batch_size', default=32, type=int)
+    parser.add_argument('--hidden_dim', type=int, default=128)
+    parser = pl.Trainer.add_argparse_args(parser)
+    args = parser.parse_args()
+
+    # ------------
+    # data
+    # ------------
+    dataset = MNIST('', train=True, download=True, transform=transforms.ToTensor())
+    mnist_test = MNIST('', train=False, download=True, transform=transforms.ToTensor())
+    mnist_train, mnist_val = random_split(dataset, [55000, 5000])
+
+    train_loader = DataLoader(mnist_train, batch_size=args.batch_size)
+    val_loader = DataLoader(mnist_val, batch_size=args.batch_size)
+    test_loader = DataLoader(mnist_test, batch_size=args.batch_size)
+
+    # ------------
+    # model
+    # ------------
+    model = LitAutoEncoder()
+
+    # ------------
+    # training
+    # ------------
+    trainer = pl.Trainer.from_argparse_args(args)
+    trainer.fit(model, train_loader, val_loader)
+
+    # ------------
+    # testing
+    # ------------
+    result = trainer.test(test_dataloaders=test_loader)
+    print(result)
+
+
+if __name__ == '__main__':
+    
cli_main() diff --git a/project/lit_image_classifier.py b/project/lit_image_classifier.py new file mode 100644 index 0000000..1296a3f --- /dev/null +++ b/project/lit_image_classifier.py @@ -0,0 +1,109 @@ +from argparse import ArgumentParser + +import torch +import pytorch_lightning as pl +from torch.nn import functional as F +from torch.utils.data import DataLoader, random_split + +from torchvision.datasets.mnist import MNIST +from torchvision import transforms + + +class Backbone(torch.nn.Module): + def __init__(self, hidden_dim=128): + super().__init__() + self.l1 = torch.nn.Linear(28 * 28, hidden_dim) + self.l2 = torch.nn.Linear(hidden_dim, 10) + + def forward(self, x): + x = x.view(x.size(0), -1) + x = torch.relu(self.l1(x)) + x = torch.relu(self.l2(x)) + return x + + +class LitClassifier(pl.LightningModule): + def __init__(self, backbone, learning_rate=1e-3): + super().__init__() + self.save_hyperparameters() + self.backbone = backbone + + def forward(self, x): + # use forward for inference/predictions + embedding = self.backbone(x) + return embedding + + def training_step(self, batch, batch_idx): + x, y = batch + y_hat = self.backbone(x) + loss = F.cross_entropy(y_hat, y) + self.log('train_loss', loss, on_epoch=True) + return loss + + def validation_step(self, batch, batch_idx): + x, y = batch + y_hat = self.backbone(x) + loss = F.cross_entropy(y_hat, y) + self.log('valid_loss', loss, on_step=True) + + def test_step(self, batch, batch_idx): + x, y = batch + y_hat = self.backbone(x) + loss = F.cross_entropy(y_hat, y) + self.log('test_loss', loss) + + def configure_optimizers(self): + # self.hparams available because we called self.save_hyperparameters() + return torch.optim.Adam(self.parameters(), lr=self.hparams.learning_rate) + + @staticmethod + def add_model_specific_args(parent_parser): + parser = ArgumentParser(parents=[parent_parser], add_help=False) + parser.add_argument('--learning_rate', type=float, default=0.0001) + return parser + + +def cli_main(): + pl.seed_everything(1234) + + # ------------ + # args + # ------------ + parser = ArgumentParser() + parser.add_argument('--batch_size', default=32, type=int) + parser.add_argument('--hidden_dim', type=int, default=128) + parser = pl.Trainer.add_argparse_args(parser) + parser = LitClassifier.add_model_specific_args(parser) + args = parser.parse_args() + + # ------------ + # data + # ------------ + dataset = MNIST('', train=True, download=True, transform=transforms.ToTensor()) + mnist_test = MNIST('', train=False, download=True, transform=transforms.ToTensor()) + mnist_train, mnist_val = random_split(dataset, [55000, 5000]) + + train_loader = DataLoader(mnist_train, batch_size=args.batch_size) + val_loader = DataLoader(mnist_val, batch_size=args.batch_size) + test_loader = DataLoader(mnist_test, batch_size=args.batch_size) + + # ------------ + # model + # ------------ + model = LitClassifier(Backbone(hidden_dim=args.hidden_dim), args.learning_rate) + + # ------------ + # training + # ------------ + trainer = pl.Trainer.from_argparse_args(args) + trainer.fit(model, train_loader, val_loader) + + # ------------ + # testing + # ------------ + result = trainer.test(test_dataloaders=test_loader) + print(result) + + +if __name__ == '__main__': + cli_main() diff --git a/project/lit_mnist.py b/project/lit_mnist.py new file mode 100644 index 0000000..8733378 --- /dev/null +++ b/project/lit_mnist.py @@ -0,0 +1,96 @@ +from argparse import ArgumentParser + +import torch +import pytorch_lightning as pl +from torch.nn import functional as F 
+from torch.utils.data import DataLoader, random_split + +from torchvision.datasets.mnist import MNIST +from torchvision import transforms + + +class LitClassifier(pl.LightningModule): + def __init__(self, hidden_dim=128, learning_rate=1e-3): + super().__init__() + self.save_hyperparameters() + + self.l1 = torch.nn.Linear(28 * 28, self.hparams.hidden_dim) + self.l2 = torch.nn.Linear(self.hparams.hidden_dim, 10) + + def forward(self, x): + x = x.view(x.size(0), -1) + x = torch.relu(self.l1(x)) + x = torch.relu(self.l2(x)) + return x + + def training_step(self, batch, batch_idx): + x, y = batch + y_hat = self(x) + loss = F.cross_entropy(y_hat, y) + return loss + + def validation_step(self, batch, batch_idx): + x, y = batch + y_hat = self(x) + loss = F.cross_entropy(y_hat, y) + self.log('valid_loss', loss) + + def test_step(self, batch, batch_idx): + x, y = batch + y_hat = self(x) + loss = F.cross_entropy(y_hat, y) + self.log('test_loss', loss) + + def configure_optimizers(self): + return torch.optim.Adam(self.parameters(), lr=self.hparams.learning_rate) + + @staticmethod + def add_model_specific_args(parent_parser): + parser = ArgumentParser(parents=[parent_parser], add_help=False) + parser.add_argument('--hidden_dim', type=int, default=128) + parser.add_argument('--learning_rate', type=float, default=0.0001) + return parser + + +def cli_main(): + pl.seed_everything(1234) + + # ------------ + # args + # ------------ + parser = ArgumentParser() + parser.add_argument('--batch_size', default=32, type=int) + parser = pl.Trainer.add_argparse_args(parser) + parser = LitClassifier.add_model_specific_args(parser) + args = parser.parse_args() + + # ------------ + # data + # ------------ + dataset = MNIST('', train=True, download=True, transform=transforms.ToTensor()) + mnist_test = MNIST('', train=False, download=True, transform=transforms.ToTensor()) + mnist_train, mnist_val = random_split(dataset, [55000, 5000]) + + train_loader = DataLoader(mnist_train, batch_size=args.batch_size) + val_loader = DataLoader(mnist_val, batch_size=args.batch_size) + test_loader = DataLoader(mnist_test, batch_size=args.batch_size) + + # ------------ + # model + # ------------ + model = LitClassifier(args.hidden_dim, args.learning_rate) + + # ------------ + # training + # ------------ + trainer = pl.Trainer.from_argparse_args(args) + trainer.fit(model, train_loader, val_loader) + + # ------------ + # testing + # ------------ + trainer.test(test_dataloaders=test_loader) + + +if __name__ == '__main__': + cli_main() diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..30840f0 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,3 @@ +pytorch-lightning >= 1.0.0rc2 +torch >= 1.3.0 +torchvision >= 0.6.0 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..268bbb9 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,43 @@ +[tool:pytest] +norecursedirs = + .git + dist + build +addopts = + --strict + --doctest-modules + --durations=0 + +[coverage:report] +exclude_lines = + pragma: no-cover + pass + +[flake8] +max-line-length = 120 +exclude = .tox,*.egg,build,temp +select = E,W,F +doctests = True +verbose = 2 +# https://pep8.readthedocs.io/en/latest/intro.html#error-codes +format = pylint +# see: https://www.flake8rules.com/ +ignore = + E731 # Do not assign a lambda expression, use a def + W504 # Line break occurred after a binary operator + F401 # Module imported but unused + F841 # Local variable name is assigned to but never used + W605 # Invalid escape sequence 'x' + +# setup.cfg or tox.ini 
+[check-manifest]
+ignore =
+    *.yml
+    .github
+    .github/*
+
+[metadata]
+license_file = LICENSE
+description-file = README.md
+# long_description = file:README.md
+# long_description_content_type = text/markdown
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..3de44dd
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+
+from setuptools import setup, find_packages
+
+setup(
+    name='project',
+    version='0.0.0',
+    description='Describe Your Cool Project',
+    author='',
+    author_email='',
+    # REPLACE WITH YOUR OWN GITHUB PROJECT LINK
+    url='https://github.com/PyTorchLightning/pytorch-lightning-conference-seed',
+    install_requires=['pytorch-lightning'],
+    packages=find_packages(),
+)
+
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/requirements.txt b/tests/requirements.txt
new file mode 100644
index 0000000..09f038f
--- /dev/null
+++ b/tests/requirements.txt
@@ -0,0 +1,8 @@
+coverage
+codecov>=2.1
+pytest>=3.0.5
+pytest-cov
+pytest-flake8
+flake8
+check-manifest
+twine==1.13.0
\ No newline at end of file
diff --git a/tests/test_classifier.py b/tests/test_classifier.py
new file mode 100644
index 0000000..e173fd5
--- /dev/null
+++ b/tests/test_classifier.py
@@ -0,0 +1,21 @@
+from pytorch_lightning import Trainer, seed_everything
+from torch.utils.data import DataLoader
+from torchvision import transforms
+from torchvision.datasets.mnist import MNIST
+
+from project.lit_mnist import LitClassifier
+
+
+def test_lit_classifier():
+    seed_everything(1234)
+
+    model = LitClassifier()
+    train = DataLoader(MNIST('', train=True, download=True, transform=transforms.ToTensor()), batch_size=32)
+    test = DataLoader(MNIST('', train=False, download=True, transform=transforms.ToTensor()), batch_size=32)
+    trainer = Trainer(limit_train_batches=50, limit_val_batches=20, max_epochs=2)
+    trainer.fit(model, train)
+
+    results = trainer.test(test_dataloaders=test)
+    # LitClassifier logs test_loss; after a short fit it should sit well below
+    # chance-level cross-entropy on 10 classes (ln(10) ~ 2.30)
+    assert results[0]['test_loss'] < 2.3
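A minimal sketch for exercising this patch locally, mirroring the steps in `ci-testing.yml` above; it assumes a Python 3.7 environment and reuses the CPU wheel index already referenced by the workflow:

```bash
# runtime dependencies (CPU wheels, same index as the workflow's install step)
pip install -r requirements.txt --upgrade --find-links https://download.pytorch.org/whl/cpu/torch_stable.html

# test tooling (pytest, coverage, flake8, check-manifest, ...)
pip install -r tests/requirements.txt

# run the suite with coverage, matching the CI "Tests" and "Statistics" steps
coverage run --source project -m pytest project tests -v
coverage report
```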