From b0cec7c6b7eee7a11ad70ac8bea29a87d51f111d Mon Sep 17 00:00:00 2001
From: TomBaxter <tkb608@gmail.com>
Date: Fri, 12 May 2017 12:06:46 -0400
Subject: [PATCH 001/192] Add wb_flower service to docker-compose.yml

For Celery troubleshooting
Listens at port 5556
---
 docker-compose.yml | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/docker-compose.yml b/docker-compose.yml
index 94b3758af2e..14a7982cd3a 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -176,6 +176,16 @@ services:
       - wb_tmp_vol:/tmp
     stdin_open: true
 
+  # wb_flower:
+  #   image: quay.io/centerforopenscience/waterbutler:develop
+  #   # Install flower here, instead of in WB repo, due to tornado version conflict
+  #   command: [/bin/bash, -c, "pip install flower && celery flower -A waterbutler.tasks.app.app --port=5556 --broker=amqp://guest:guest@192.168.168.167:5672//"]
+  #   depends_on:
+  #     - rabbitmq
+  #   # Use non-default port to avoid conflict with OSF flower
+  #   ports:
+  #     - 5556:5556
+
   ##################################
   # Central Authentication Service #
   ##################################

From 07ca132c752ea7e6df4bb68c0dcaa803a30875e7 Mon Sep 17 00:00:00 2001
From: TomBaxter <tkb608@gmail.com>
Date: Mon, 15 May 2017 10:50:01 -0400
Subject: [PATCH 002/192] Add unoconv service to docker-compose.yml

---
 .docker-compose.mfr.env |  2 ++
 docker-compose.yml      | 14 ++++++++++++++
 2 files changed, 16 insertions(+)

diff --git a/.docker-compose.mfr.env b/.docker-compose.mfr.env
index 06fc0ad9a40..1610a7a56d9 100644
--- a/.docker-compose.mfr.env
+++ b/.docker-compose.mfr.env
@@ -7,4 +7,6 @@ SERVER_CONFIG_HMAC_SECRET=changeme
 SERVER_CONFIG_ADDRESS=0.0.0.0
 SERVER_CONFIG_ALLOWED_PROVIDER_DOMAINS='http://192.168.168.167:5000/ http://192.168.168.167:7777/'
 
+UNOCONV_PORT_2002_TCP_ADDR=192.168.168.167
+
 #PYTHONUNBUFFERED=0 # This when set to 0 will allow print statements to be visible in the Docker logs
diff --git a/docker-compose.yml b/docker-compose.yml
index b4fa7377c71..47e85cf406e 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -132,6 +132,20 @@ services:
     volumes:
       - mfr_requirements_vol:/usr/local/lib/python3.5
       - mfr_requirements_local_bin_vol:/usr/local/bin
+      - wb_tmp_vol:/tmp
+    stdin_open: true
+
+  unoconv:
+    image: centerforopenscience/unoconv
+    command: /bin/bash -c "
+      chmod -R 777 /tmp/mfrlocalcache &&
+      /opt/libreoffice4.4/program/python -u /usr/bin/unoconv --listener --server=0.0.0.0 --port=2002 -vvv
+      "
+    restart: unless-stopped
+    ports:
+      - 2002:2002
+    volumes:
+      - wb_tmp_vol:/tmp
     stdin_open: true
 
   ###############

From 0251ccac9d445af47d7f4f7aea560c07df404f87 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Tue, 26 Sep 2017 10:46:35 -0500
Subject: [PATCH 003/192] Remove osf/utils/auth and use similar methods from
 framework/auth/core.py

---
 addons/dataverse/models.py                    |  2 +-
 addons/osfstorage/models.py                   |  2 +-
 api_tests/nodes/filters/test_filters.py       |  2 +-
 .../registrations/filters/test_filters.py     |  2 +-
 osf/models/node.py                            |  2 +-
 osf/utils/auth.py                             | 96 -------------------
 osf_tests/test_comment.py                     |  2 +-
 osf_tests/test_node.py                        |  6 +-
 osf_tests/test_user.py                        |  2 +-
 9 files changed, 10 insertions(+), 106 deletions(-)
 delete mode 100644 osf/utils/auth.py

diff --git a/addons/dataverse/models.py b/addons/dataverse/models.py
index 103c79a95dd..31e725ae3f4 100644
--- a/addons/dataverse/models.py
+++ b/addons/dataverse/models.py
@@ -7,7 +7,7 @@
 from framework.auth.decorators import Auth
 from framework.exceptions import HTTPError
 from osf.models.files import File, Folder, FileVersion, BaseFileNode
-from osf.utils.auth import _get_current_user
+from framework.auth.core import _get_current_user
 from addons.base import exceptions
 from addons.dataverse.client import connect_from_settings_or_401
 from addons.dataverse.serializer import DataverseSerializer
diff --git a/addons/osfstorage/models.py b/addons/osfstorage/models.py
index 9dcc8c36959..9260cf364c1 100644
--- a/addons/osfstorage/models.py
+++ b/addons/osfstorage/models.py
@@ -9,7 +9,7 @@
 from addons.base.models import BaseNodeSettings, BaseStorageAddon
 from osf.exceptions import InvalidTagError, NodeStateError, TagNotFoundError
 from osf.models import File, FileVersion, Folder, TrashedFileNode, BaseFileNode
-from osf.utils.auth import Auth
+from framework.auth.core import Auth
 from website.files import exceptions
 from website.files import utils as files_utils
 from website.util import permissions
diff --git a/api_tests/nodes/filters/test_filters.py b/api_tests/nodes/filters/test_filters.py
index 4b716b8cdf7..5edb86eb9fc 100644
--- a/api_tests/nodes/filters/test_filters.py
+++ b/api_tests/nodes/filters/test_filters.py
@@ -6,7 +6,7 @@
     NodeRelationFactory,
     ProjectFactory,
 )
-from osf.utils.auth import Auth
+from framework.auth.core import Auth
 
 
 class NodesListFilteringMixin(object):
diff --git a/api_tests/registrations/filters/test_filters.py b/api_tests/registrations/filters/test_filters.py
index 45d03a4ebb5..b80be86347d 100644
--- a/api_tests/registrations/filters/test_filters.py
+++ b/api_tests/registrations/filters/test_filters.py
@@ -3,7 +3,7 @@
 import pytest
 
 from osf.models import Node
-from osf.utils.auth import Auth
+from framework.auth.core import Auth
 from osf_tests.factories import (
     AuthUserFactory,
     NodeFactory,
diff --git a/osf/models/node.py b/osf/models/node.py
index 2300d71fa0c..f50f5183148 100644
--- a/osf/models/node.py
+++ b/osf/models/node.py
@@ -41,7 +41,7 @@
 from osf.models.tag import Tag
 from osf.models.user import OSFUser
 from osf.models.validators import validate_doi, validate_title
-from osf.utils.auth import Auth, get_user
+from framework.auth.core import Auth, get_user
 from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
 from osf.utils.fields import NonNaiveDateTimeField
 from osf.utils.requests import DummyRequest, get_request_and_user_id
diff --git a/osf/utils/auth.py b/osf/utils/auth.py
deleted file mode 100644
index 4a6c90b3887..00000000000
--- a/osf/utils/auth.py
+++ /dev/null
@@ -1,96 +0,0 @@
-import logging
-
-from django.apps import apps
-from django.db.models import Q
-from django.core.exceptions import ObjectDoesNotExist
-
-from framework.sessions import session
-
-logger = logging.getLogger(__name__)
-
-def _get_current_user():
-    # avoid cirdep
-    from osf.models import OSFUser
-    uid = session._get_current_object() and session.data.get('auth_user_id')
-    return OSFUser.load(uid)
-
-# TODO: This should be a class method of User?
-def get_user(email=None, password=None, verification_key=None):
-    """Get an instance of User matching the provided params.
-
-    :return: The instance of User requested
-    :rtype: User or None
-    """
-    User = apps.get_model('osf.OSFUser')
-    # tag: database
-    if password and not email:
-        raise AssertionError('If a password is provided, an email must also '
-                             'be provided.')
-
-    query_list = []
-    if email:
-        email = email.strip().lower()
-        query_list.append(Q(emails__address=email) | Q(username=email))
-    if password:
-        password = password.strip()
-        try:
-            query = query_list[0]
-            for query_part in query_list[1:]:
-                query = query & query_part
-            user = User.objects.get(query)
-        except Exception as err:
-            logger.error(err)
-            user = None
-        if user and not user.check_password(password):
-            return False
-        return user
-    if verification_key:
-        query_list.append(Q(verification_key=verification_key))
-    try:
-        user = User.objects.get(query_list[0])
-        return user
-    except Exception as err:
-        logger.error(err)
-        return None
-
-class Auth(object):
-    def __init__(self, user=None, api_node=None,
-                 private_key=None):
-        self.user = user
-        self.api_node = api_node
-        self.private_key = private_key
-
-    def __repr__(self):
-        return ('<Auth(user="{self.user}", '
-                'private_key={self.private_key})>').format(self=self)
-
-    @property
-    def logged_in(self):
-        return self.user is not None
-
-    @property
-    def private_link(self):
-        if not self.private_key:
-            return None
-        try:
-            # Avoid circular import
-            from osf.models import PrivateLink
-
-            private_link = PrivateLink.objects.get(key=self.private_key)
-
-            if private_link.is_deleted:
-                return None
-
-        except ObjectDoesNotExist:
-            return None
-
-        return private_link
-
-    @classmethod
-    def from_kwargs(cls, request_args, kwargs):
-        user = request_args.get('user') or kwargs.get('user') or _get_current_user()
-        private_key = request_args.get('view_only')
-        return cls(
-            user=user,
-            private_key=private_key,
-        )
diff --git a/osf_tests/test_comment.py b/osf_tests/test_comment.py
index 22986162194..8e25164e45a 100644
--- a/osf_tests/test_comment.py
+++ b/osf_tests/test_comment.py
@@ -17,7 +17,7 @@
 from framework.exceptions import PermissionsError
 from tests.base import capture_signals
 from osf.models import Comment, NodeLog, Guid, BaseFileNode
-from osf.utils.auth import Auth
+from framework.auth.core import Auth
 from .factories import (
     CommentFactory,
     ProjectFactory,
diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py
index 9d32c451877..78d16170719 100644
--- a/osf_tests/test_node.py
+++ b/osf_tests/test_node.py
@@ -35,7 +35,7 @@
 from osf.models.spam import SpamStatus
 from addons.wiki.models import NodeWikiPage
 from osf.exceptions import ValidationError, ValidationValueError
-from osf.utils.auth import Auth
+from framework.auth.core import Auth
 
 from osf_tests.factories import (
     AuthUserFactory,
@@ -1949,7 +1949,7 @@ def test_add_private_link(self, node):
         link.save()
         assert link in node.private_links.all()
 
-    @mock.patch('osf.utils.auth.Auth.private_link')
+    @mock.patch('framework.auth.core.Auth.private_link')
     def test_has_anonymous_link(self, mock_property, node):
         mock_property.return_value(mock.MagicMock())
         mock_property.anonymous = True
@@ -1963,7 +1963,7 @@ def test_has_anonymous_link(self, mock_property, node):
 
         assert has_anonymous_link(node, auth2) is True
 
-    @mock.patch('osf.utils.auth.Auth.private_link')
+    @mock.patch('framework.auth.core.Auth.private_link')
     def test_has_no_anonymous_link(self, mock_property, node):
         mock_property.return_value(mock.MagicMock())
         mock_property.anonymous = False
diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py
index 8d882770e9e..323cb9e3774 100644
--- a/osf_tests/test_user.py
+++ b/osf_tests/test_user.py
@@ -24,7 +24,7 @@
 from website.views import find_bookmark_collection
 
 from osf.models import AbstractNode, OSFUser, Tag, Contributor, Session
-from osf.utils.auth import Auth
+from framework.auth.core import Auth
 from osf.utils.names import impute_names_model
 from osf.exceptions import ValidationError
 from osf.modm_compat import Q

From 554353048c5a8d9ce61f4739fef5a212e29163d7 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Tue, 26 Sep 2017 12:57:43 -0500
Subject: [PATCH 004/192] Import get_user from framework.auth.core instead of
 from obsolete osf.utils.auth.

---
 osf_tests/test_auth_utils.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/osf_tests/test_auth_utils.py b/osf_tests/test_auth_utils.py
index 441d84bf864..ff3aa9484f2 100644
--- a/osf_tests/test_auth_utils.py
+++ b/osf_tests/test_auth_utils.py
@@ -1,6 +1,6 @@
 import pytest
 
-from osf.utils import auth
+from framework.auth.core import get_user
 
 from .factories import UserFactory
 
@@ -10,12 +10,12 @@ class TestGetUser:
 
     def test_get_user_by_email(self):
         user = UserFactory()
-        assert auth.get_user(email=user.username) == user
-        assert auth.get_user(email=user.username.upper()) == user
+        assert get_user(email=user.username) == user
+        assert get_user(email=user.username.upper()) == user
 
     def test_get_user_with_wrong_password_returns_false(self):
         user = UserFactory.build()
         user.set_password('killerqueen')
         assert bool(
-            auth.get_user(email=user.username, password='wrong')
+            get_user(email=user.username, password='wrong')
         ) is False

From bac517534aa35fc1a1de99bd02295fd77661ae2e Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Wed, 6 Sep 2017 15:57:15 -0400
Subject: [PATCH 005/192] Use subqueries to prevent duped records - Reverse
 order of default_qs &. - Upgrade django-include so annotate() can be used.

---
 api/base/utils.py          | 27 ++++++++++++++-------------
 api/nodes/views.py         |  7 +++----
 api/registrations/views.py |  6 +++---
 api/users/views.py         | 13 +++++--------
 4 files changed, 25 insertions(+), 28 deletions(-)

diff --git a/api/base/utils.py b/api/base/utils.py
index 9c602dccfdc..35c7050bc8e 100644
--- a/api/base/utils.py
+++ b/api/base/utils.py
@@ -4,7 +4,7 @@
 
 import furl
 from django.core.exceptions import ObjectDoesNotExist
-from django.db.models import Q
+from django.db.models import OuterRef, Exists, Q
 from rest_framework.exceptions import NotFound
 from rest_framework.reverse import reverse
 
@@ -13,7 +13,7 @@
 from framework.auth import Auth
 from framework.auth.cas import CasResponse
 from framework.auth.oauth_scopes import ComposedScopes, normalize_scopes
-from osf.models import OSFUser, Node, Registration
+from osf.models import OSFUser, Contributor
 from osf.models.base import GuidMixin
 from osf.modm_compat import to_django_query
 from osf.utils.requests import check_select_for_update
@@ -153,21 +153,22 @@ def waterbutler_url_for(request_type, provider, path, node_id, token, obj_args=N
     url.args.update(query)
     return url.url
 
-def default_node_list_queryset():
-    return Node.objects.filter(is_deleted=False)
 
-def default_node_permission_queryset(user):
+def default_node_list_queryset(model_cls):
+    return model_cls.objects.filter(is_deleted=False)
+
+def default_node_permission_queryset(user, model_cls):
     if user.is_anonymous:
-        return Node.objects.filter(is_public=True)
-    return Node.objects.filter(Q(is_public=True) | Q(contributor__user_id=user.pk))
+        return model_cls.objects.filter(is_public=True)
+    sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user__id=user.id, read=True)
+    return model_cls.objects.annotate(contrib=Exists(sub_qs)).filter(Q(contrib=True) | Q(is_public=True))
 
-def default_registration_list_queryset():
-    return Registration.objects.filter(is_deleted=False)
+def default_node_list_permission_queryset(user, model_cls):
+    # **DO NOT** change the order of the querysets below.
+    # If get_roots() is called on default_node_list_qs & default_node_permission_qs,
+    # Django's aliasing will break and the resulting QS will be empty and you will be sad.
+    return default_node_permission_queryset(user, model_cls) & default_node_list_queryset(model_cls)
 
-def default_registration_permission_queryset(user):
-    if user.is_anonymous:
-        return Registration.objects.filter(is_public=True)
-    return Registration.objects.filter(Q(is_public=True) | Q(contributor__user_id=user.pk))
 
 def extend_querystring_params(url, params):
     scheme, netloc, path, query, _ = urlparse.urlsplit(url)
diff --git a/api/nodes/views.py b/api/nodes/views.py
index 023b44f344b..3d5914cc269 100644
--- a/api/nodes/views.py
+++ b/api/nodes/views.py
@@ -34,7 +34,7 @@
     NonCookieAuthThrottle,
     AddContributorThrottle,
 )
-from api.base.utils import default_node_list_queryset, default_node_permission_queryset
+from api.base.utils import default_node_list_queryset, default_node_list_permission_queryset
 from api.base.utils import get_object_or_error, is_bulk_request, get_user_auth, is_truthy
 from api.base.views import JSONAPIBaseView
 from api.base.views import (
@@ -276,8 +276,7 @@ class NodeList(JSONAPIBaseView, bulk_views.BulkUpdateJSONAPIView, bulk_views.Bul
 
     # overrides NodesFilterMixin
     def get_default_queryset(self):
-        user = self.request.user
-        return default_node_list_queryset() & default_node_permission_queryset(user)
+        return default_node_list_permission_queryset(user=self.request.user, model_cls=Node)
 
     # overrides ListBulkCreateJSONAPIView, BulkUpdateJSONAPIView
     def get_queryset(self):
@@ -1240,7 +1239,7 @@ class NodeChildrenList(JSONAPIBaseView, bulk_views.ListBulkCreateJSONAPIView, No
     ordering = ('-date_modified',)
 
     def get_default_queryset(self):
-        return default_node_list_queryset()
+        return default_node_list_queryset(model_cls=Node)
 
     # overrides ListBulkCreateJSONAPIView
     def get_queryset(self):
diff --git a/api/registrations/views.py b/api/registrations/views.py
index 45e3df4a4c4..495e1b101a9 100644
--- a/api/registrations/views.py
+++ b/api/registrations/views.py
@@ -12,7 +12,7 @@
 from api.base.pagination import NodeContributorPagination
 from api.base.parsers import JSONAPIRelationshipParser
 from api.base.parsers import JSONAPIRelationshipParserForRegularJSON
-from api.base.utils import get_user_auth, default_registration_list_queryset, default_registration_permission_queryset
+from api.base.utils import get_user_auth, default_node_list_permission_queryset
 from api.comments.serializers import RegistrationCommentSerializer, CommentCreateSerializer
 from api.identifiers.serializers import RegistrationIdentifierSerializer
 from api.nodes.views import NodeIdentifierList
@@ -157,7 +157,7 @@ class RegistrationList(JSONAPIBaseView, generics.ListAPIView, NodesFilterMixin):
 
     # overrides NodesFilterMixin
     def get_default_queryset(self):
-        return default_registration_list_queryset() & default_registration_permission_queryset(self.request.user)
+        return default_node_list_permission_queryset(user=self.request.user, model_cls=Registration)
 
     def is_blacklisted(self):
         query_params = self.parse_query_params(self.request.query_params)
@@ -511,7 +511,7 @@ class RegistrationChildrenList(JSONAPIBaseView, generics.ListAPIView, ListFilter
     ordering = ('-date_modified',)
 
     def get_default_queryset(self):
-        return default_registration_list_queryset() & default_registration_permission_queryset(self.request.user)
+        return default_node_list_permission_queryset(user=self.request.user, model_cls=Registration)
 
     def get_queryset(self):
         registration = self.get_node()
diff --git a/api/users/views.py b/api/users/views.py
index d4c21125838..a5cdcb0b206 100644
--- a/api/users/views.py
+++ b/api/users/views.py
@@ -8,9 +8,7 @@
                               JSONAPIRelationshipParserForRegularJSON)
 from api.base.serializers import AddonAccountSerializer
 from api.base.utils import (default_node_list_queryset,
-                            default_node_permission_queryset,
-                            default_registration_list_queryset,
-                            default_registration_permission_queryset,
+                            default_node_list_permission_queryset,
                             get_object_or_error,
                             get_user_auth)
 from api.base.views import JSONAPIBaseView, WaterButlerMixin
@@ -33,7 +31,7 @@
 from rest_framework import permissions as drf_permissions
 from rest_framework import generics
 from rest_framework.exceptions import NotAuthenticated, NotFound
-from osf.models import Contributor, ExternalAccount, QuickFilesNode, AbstractNode, PreprintService, OSFUser
+from osf.models import Contributor, ExternalAccount, QuickFilesNode, AbstractNode, PreprintService, OSFUser, Registration, Node
 
 
 class UserMixin(object):
@@ -528,10 +526,9 @@ class UserNodes(JSONAPIBaseView, generics.ListAPIView, UserMixin, NodesFilterMix
     # overrides NodesFilterMixin
     def get_default_queryset(self):
         user = self.get_user()
-        qs = default_node_list_queryset().filter(contributor__user__id=user.id)
         if user != self.request.user:
-            return qs & default_node_permission_queryset(self.request.user)
-        return qs
+            return default_node_list_permission_queryset(user=self.request.user, model_cls=Node).filter(contributor__user__id=user.id)
+        return default_node_list_queryset(model_cls=Node).filter(contributor__user__id=user.id)
 
     # overrides ListAPIView
     def get_queryset(self):
@@ -739,7 +736,7 @@ class UserRegistrations(JSONAPIBaseView, generics.ListAPIView, UserMixin, NodesF
     def get_default_queryset(self):
         user = self.get_user()
         current_user = self.request.user
-        qs = default_registration_list_queryset() & default_registration_permission_queryset(current_user)
+        qs = default_node_list_permission_queryset(user=current_user, model_cls=Registration)
         return qs.filter(contributor__user__id=user.id)
 
     # overrides ListAPIView

From 061d1d00db2c07499bd1b81e26ce66a8a4f4db2f Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Wed, 27 Sep 2017 15:04:02 -0500
Subject: [PATCH 006/192] Remove unneeded distinct usages on NodeList and
 RegistrationList views.

---
 api/nodes/views.py         | 2 +-
 api/registrations/views.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/api/nodes/views.py b/api/nodes/views.py
index 3d5914cc269..b9e867bb80a 100644
--- a/api/nodes/views.py
+++ b/api/nodes/views.py
@@ -296,7 +296,7 @@ def get_queryset(self):
                     raise PermissionDenied
             return nodes
         else:
-            return self.get_queryset_from_request().distinct('id', 'date_modified')
+            return self.get_queryset_from_request()
 
     # overrides ListBulkCreateJSONAPIView, BulkUpdateJSONAPIView, BulkDestroyJSONAPIView
     def get_serializer_class(self):
diff --git a/api/registrations/views.py b/api/registrations/views.py
index 495e1b101a9..adfc83e7042 100644
--- a/api/registrations/views.py
+++ b/api/registrations/views.py
@@ -171,7 +171,7 @@ def is_blacklisted(self):
     # overrides ListAPIView
     def get_queryset(self):
         blacklisted = self.is_blacklisted()
-        registrations = self.get_queryset_from_request().distinct('id', 'date_modified')
+        registrations = self.get_queryset_from_request()
         # If attempting to filter on a blacklisted field, exclude withdrawals.
         if blacklisted:
             return registrations.exclude(retraction__isnull=False)

From d77017b79466e0302a03fe4063cd93f7c6cfe3a3 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Thu, 28 Sep 2017 10:44:57 -0500
Subject: [PATCH 007/192] Write redundant subquery for node files list to get
 rid of distinct.

---
 api/nodes/views.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/api/nodes/views.py b/api/nodes/views.py
index b9e867bb80a..9dbf22ee0a5 100644
--- a/api/nodes/views.py
+++ b/api/nodes/views.py
@@ -1,7 +1,7 @@
 import re
 
 from django.apps import apps
-from django.db.models import Q
+from django.db.models import Q, OuterRef, Exists
 from rest_framework import generics, permissions as drf_permissions
 from rest_framework.exceptions import PermissionDenied, ValidationError, NotFound, MethodNotAllowed, NotAuthenticated
 from rest_framework.response import Response
@@ -1932,11 +1932,12 @@ def get_default_queryset(self):
             # We should not have gotten a file here
             raise NotFound
 
-        return files_list.children.prefetch_related('node__guids', 'versions', 'tags', 'guids')
+        sub_qs = type(files_list).objects.filter(_children=OuterRef('pk'), pk=files_list.pk)
+        return files_list.children.annotate(folder=Exists(sub_qs)).filter(Q(folder=True)).prefetch_related('node__guids', 'versions', 'tags', 'guids')
 
     # overrides ListAPIView
     def get_queryset(self):
-        return self.get_queryset_from_request().distinct()
+        return self.get_queryset_from_request()
 
 
 class NodeFileDetail(JSONAPIBaseView, generics.RetrieveAPIView, WaterButlerMixin, NodeMixin):

From 74f56e4685fdd34838475a646feaa087b859b2d6 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Thu, 28 Sep 2017 13:47:26 -0500
Subject: [PATCH 008/192] Use subquery instead of distinct on NodePreprintsList
 view.

---
 api/nodes/views.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/api/nodes/views.py b/api/nodes/views.py
index 9dbf22ee0a5..394b31e781c 100644
--- a/api/nodes/views.py
+++ b/api/nodes/views.py
@@ -98,7 +98,7 @@
 from framework.postcommit_tasks.handlers import enqueue_postcommit_task
 from osf.models import AbstractNode
 from osf.models import (Node, PrivateLink, Institution, Comment, DraftRegistration,)
-from osf.models import OSFUser
+from osf.models import OSFUser, Contributor
 from osf.models import NodeRelation, Guid
 from osf.models import BaseFileNode
 from osf.models.files import File, Folder
@@ -3451,8 +3451,9 @@ def get_default_queryset(self):
         # Permissions on the node are handled by the permissions_classes
         # Permissions on the list objects are handled by the query
         if auth_user:
-            return node.preprints.filter(Q(is_published=True) | Q(node__contributor__user_id=auth_user.id, node__contributor__admin=True))
+            sub_qs = Contributor.objects.filter(node_id=OuterRef('node_id'), user__id=auth_user.id, admin=True)
+            return node.preprints.annotate(contrib=Exists(sub_qs)).filter(Q(contrib=True) | Q(is_published=True))
         return node.preprints.filter(is_published=True)
 
     def get_queryset(self):
-        return self.get_queryset_from_request().distinct('id', 'date_modified')
+        return self.get_queryset_from_request()

From b61888b65a20ac42ef13567eff772fc63bb16f8d Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Thu, 28 Sep 2017 13:49:12 -0500
Subject: [PATCH 009/192] Use subquery instead of distinct on PreprintsList
 view.

---
 api/preprints/views.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/api/preprints/views.py b/api/preprints/views.py
index d81ed7156db..0ff1180413b 100644
--- a/api/preprints/views.py
+++ b/api/preprints/views.py
@@ -1,13 +1,13 @@
 import re
 
-from django.db.models import Q
+from django.db.models import Q, Exists, OuterRef
 
 from rest_framework import generics
 from rest_framework.exceptions import NotFound, PermissionDenied, NotAuthenticated
 from rest_framework import permissions as drf_permissions
 
 from framework.auth.oauth_scopes import CoreScopes
-from osf.models import PreprintService
+from osf.models import PreprintService, Contributor
 from osf.utils.requests import check_select_for_update
 
 from api.base.exceptions import Conflict
@@ -179,13 +179,13 @@ def get_default_queryset(self):
         no_user_query = Q(is_published=True, node__is_public=True)
         if auth_user:
             contrib_user_query = Q(is_published=True, node__contributor__user_id=auth_user.id, node__contributor__read=True)
-            admin_user_query = Q(node__contributor__user_id=auth_user.id, node__contributor__admin=True)
-            return default_qs.filter(no_user_query | contrib_user_query | admin_user_query)
+            sub_qs = Contributor.objects.filter(node_id=OuterRef('node_id'), user__id=auth_user.id, admin=True)
+            return default_qs.annotate(admin_user=Exists(sub_qs)).filter(Q(admin_user=True) | contrib_user_query | no_user_query)
         return default_qs.filter(no_user_query)
 
     # overrides ListAPIView
     def get_queryset(self):
-        return self.get_queryset_from_request().distinct('id', 'date_created')
+        return self.get_queryset_from_request()
 
 class PreprintDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, PreprintMixin, WaterButlerMixin):
     """Preprint Detail  *Writeable*.

From ffebf48b88ed2e6f27a3b3730e9bae199c31d75e Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Thu, 28 Sep 2017 14:03:26 -0500
Subject: [PATCH 010/192] Use subquery instead of distinct on
 PreprintProviderPreprintList view.

---
 api/preprint_providers/views.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py
index 814fffcd66c..4f9bcb99f30 100644
--- a/api/preprint_providers/views.py
+++ b/api/preprint_providers/views.py
@@ -1,11 +1,11 @@
 from rest_framework import generics
 from rest_framework import permissions as drf_permissions
 
-from django.db.models import Q
+from django.db.models import Q, Exists, OuterRef
 
 from framework.auth.oauth_scopes import CoreScopes
 
-from osf.models import AbstractNode, Subject, PreprintProvider
+from osf.models import AbstractNode, Subject, PreprintProvider, Contributor
 
 from api.base import permissions as base_permissions
 from api.base.filters import PreprintFilterMixin, ListFilterMixin
@@ -205,14 +205,13 @@ def get_default_queryset(self):
         no_user_query = Q(is_published=True, node__is_public=True)
 
         if auth_user:
-            contrib_user_query = Q(is_published=True, node__contributor__user_id=auth_user.id, node__contributor__read=True)
-            admin_user_query = Q(node__contributor__user_id=auth_user.id, node__contributor__admin=True)
-            return default_qs.filter(no_user_query | contrib_user_query | admin_user_query)
+            sub_qs = Contributor.objects.filter(node_id=OuterRef('node_id'), user__id=auth_user.id, admin=True)
+            return default_qs.annotate(admin_user=Exists(sub_qs)).filter(Q(admin_user=True) | Q(is_published=True, node__contributor__user_id=auth_user.id, node__contributor__read=True) | no_user_query)
         return default_qs.filter(no_user_query)
 
     # overrides ListAPIView
     def get_queryset(self):
-        return self.get_queryset_from_request().distinct('id', 'date_created')
+        return self.get_queryset_from_request()
 
 
 class PreprintProviderTaxonomies(JSONAPIBaseView, generics.ListAPIView):

From 17c18ed9dd82ab4851a596df7ca62bd6605ddff6 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Thu, 28 Sep 2017 14:42:27 -0500
Subject: [PATCH 011/192] Use a subquery instead of distinct for UserPreprints.

---
 api/users/views.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/api/users/views.py b/api/users/views.py
index a5cdcb0b206..bc10fb7ee4e 100644
--- a/api/users/views.py
+++ b/api/users/views.py
@@ -27,7 +27,7 @@
                                    ReadEmailUserDetailSerializer,)
 from django.contrib.auth.models import AnonymousUser
 from framework.auth.oauth_scopes import CoreScopes, normalize_scopes
-from django.db.models import Q
+from django.db.models import Q, Exists, OuterRef
 from rest_framework import permissions as drf_permissions
 from rest_framework import generics
 from rest_framework.exceptions import NotAuthenticated, NotFound
@@ -604,12 +604,12 @@ def get_default_queryset(self):
         )
         no_user_query = Q(is_published=True, node__is_public=True)
         if auth_user:
-            admin_user_query = Q(node__contributor__user_id=auth_user.id, node__contributor__admin=True)
-            return default_qs.filter(no_user_query | admin_user_query)
+            sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user__id=auth.user.id, admin=True)
+            return default_qs.annotate(admin_user=Exists(sub_qs)).filter(Q(admin_user=True) | no_user_query)
         return default_qs.filter(no_user_query)
 
     def get_queryset(self):
-        return self.get_queryset_from_request().distinct('id', 'date_created')
+        return self.get_queryset_from_request()
 
 
 class UserInstitutions(JSONAPIBaseView, generics.ListAPIView, UserMixin):

From 47d76db40a316fb559927731722b77094199d5bb Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Thu, 28 Sep 2017 14:51:24 -0500
Subject: [PATCH 012/192] Remove unneeded distinct usages now that a subquery
 is being used.

---
 api/users/views.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/api/users/views.py b/api/users/views.py
index bc10fb7ee4e..4e068f2e3d4 100644
--- a/api/users/views.py
+++ b/api/users/views.py
@@ -537,7 +537,6 @@ def get_queryset(self):
             .select_related('node_license')
             .order_by('-date_modified', )
             .include('contributor__user__guids', 'root__guids', limit_includes=10)
-            .distinct('id', 'date_modified')
         )
 
 
@@ -741,7 +740,7 @@ def get_default_queryset(self):
 
     # overrides ListAPIView
     def get_queryset(self):
-        return self.get_queryset_from_request().select_related('node_license').include('contributor__user__guids', 'root__guids', limit_includes=10).distinct('id', 'date_modified')
+        return self.get_queryset_from_request().select_related('node_license').include('contributor__user__guids', 'root__guids', limit_includes=10)
 
 
 class UserInstitutionsRelationship(JSONAPIBaseView, generics.RetrieveDestroyAPIView, UserMixin):

From 99b749dc645d86052402552644ce059aa6405f5c Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Thu, 28 Sep 2017 17:20:32 -0500
Subject: [PATCH 013/192] Remove unnecessary distincts from get_metaschemas.

---
 website/project/views/drafts.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/website/project/views/drafts.py b/website/project/views/drafts.py
index 9421a488d8e..1951271b43b 100644
--- a/website/project/views/drafts.py
+++ b/website/project/views/drafts.py
@@ -350,9 +350,9 @@ def get_metaschemas(*args, **kwargs):
     count = request.args.get('count', 100)
     include = request.args.get('include', 'latest')
 
-    meta_schemas = MetaSchema.objects.filter(active=True).distinct()
+    meta_schemas = MetaSchema.objects.filter(active=True)
     if include == 'latest':
-        meta_schemas.filter(schema_version=LATEST_SCHEMA_VERSION).distinct()
+        meta_schemas.filter(schema_version=LATEST_SCHEMA_VERSION)
 
     meta_schemas = sorted(meta_schemas, key=lambda x: METASCHEMA_ORDERING.index(x.name))
 

From 7340c42504f10eb5752434368244bd8fa0826585 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Thu, 28 Sep 2017 18:39:18 -0500
Subject: [PATCH 014/192] Fix copy/paste issues with Preprint Provider Preprint
 List default queryset.

---
 api/preprint_providers/views.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py
index 4f9bcb99f30..09c2521e9c5 100644
--- a/api/preprint_providers/views.py
+++ b/api/preprint_providers/views.py
@@ -205,8 +205,9 @@ def get_default_queryset(self):
         no_user_query = Q(is_published=True, node__is_public=True)
 
         if auth_user:
+            contrib_user_query = Q(is_published=True, node__contributor__user_id=auth_user.id, node__contributor__read=True)
             sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user__id=auth.user.id, admin=True)
-            return model_cls.objects.annotate(admin_user=Exists(sub_qs)).filter(Q(admin_user=True) | contrib_user_query | no_user_query )
+            return default_qs.annotate(admin_user=Exists(sub_qs)).filter(Q(admin_user=True) | contrib_user_query | no_user_query )
         return default_qs.filter(no_user_query)
 
     # overrides ListAPIView

From 1164ed9e8b676d632b9e7ae141ec0a48673bb0a0 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Thu, 28 Sep 2017 18:44:28 -0500
Subject: [PATCH 015/192] Remove unneeded double underscores, and use id from
 auth_user.

---
 api/preprint_providers/views.py | 2 +-
 api/preprints/views.py          | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py
index 09c2521e9c5..00cebb8ae51 100644
--- a/api/preprint_providers/views.py
+++ b/api/preprint_providers/views.py
@@ -206,7 +206,7 @@ def get_default_queryset(self):
 
         if auth_user:
             contrib_user_query = Q(is_published=True, node__contributor__user_id=auth_user.id, node__contributor__read=True)
-            sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user__id=auth.user.id, admin=True)
+            sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user_id=auth_user.id, admin=True)
             return default_qs.annotate(admin_user=Exists(sub_qs)).filter(Q(admin_user=True) | contrib_user_query | no_user_query )
         return default_qs.filter(no_user_query)
 
diff --git a/api/preprints/views.py b/api/preprints/views.py
index 0ff1180413b..c337f9ce64d 100644
--- a/api/preprints/views.py
+++ b/api/preprints/views.py
@@ -179,7 +179,7 @@ def get_default_queryset(self):
         no_user_query = Q(is_published=True, node__is_public=True)
         if auth_user:
             contrib_user_query = Q(is_published=True, node__contributor__user_id=auth_user.id, node__contributor__read=True)
-            sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user__id=auth.user.id, admin=True)
+            sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user_id=auth_user.id, admin=True)
             return default_qs.annotate(admin_user=Exists(sub_qs)).filter(Q(admin_user=True) | contrib_user_query | no_user_query)
         return default_qs.filter(no_user_query)
 

From 0684d4bf041e1657edce34495d2f9929e9df3a47 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Thu, 28 Sep 2017 18:48:44 -0500
Subject: [PATCH 016/192] Remove double underscore and use auth_user since
 already fetched.

---
 api/users/views.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/api/users/views.py b/api/users/views.py
index 4e068f2e3d4..31db1267aed 100644
--- a/api/users/views.py
+++ b/api/users/views.py
@@ -603,7 +603,7 @@ def get_default_queryset(self):
         )
         no_user_query = Q(is_published=True, node__is_public=True)
         if auth_user:
-            sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user__id=auth.user.id, admin=True)
+            sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user_id=auth_user.id, admin=True)
             return default_qs.annotate(admin_user=Exists(sub_qs)).filter(Q(admin_user=True) | no_user_query)
         return default_qs.filter(no_user_query)
 

From e38374729c8e98b837ff371dedbc718db8c5d807 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Thu, 28 Sep 2017 19:19:11 -0500
Subject: [PATCH 017/192] Remove extra space before parenthesis.

---
 api/preprint_providers/views.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py
index 00cebb8ae51..328aea333f2 100644
--- a/api/preprint_providers/views.py
+++ b/api/preprint_providers/views.py
@@ -207,7 +207,7 @@ def get_default_queryset(self):
         if auth_user:
             contrib_user_query = Q(is_published=True, node__contributor__user_id=auth_user.id, node__contributor__read=True)
             sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user_id=auth_user.id, admin=True)
-            return default_qs.annotate(admin_user=Exists(sub_qs)).filter(Q(admin_user=True) | contrib_user_query | no_user_query )
+            return default_qs.annotate(admin_user=Exists(sub_qs)).filter(Q(admin_user=True) | contrib_user_query | no_user_query)
         return default_qs.filter(no_user_query)
 
     # overrides ListAPIView

From 25db1e4a490a1d43e2f62de9e89de9b0b1554a10 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Fri, 29 Sep 2017 15:34:17 -0500
Subject: [PATCH 018/192] Add another level of subquerying from preprints ->
 nodes -> contributors.

---
 api/nodes/views.py              | 6 ++++--
 api/preprint_providers/views.py | 5 +++--
 api/preprints/views.py          | 8 +++++---
 api/users/views.py              | 6 ++++--
 4 files changed, 16 insertions(+), 9 deletions(-)

diff --git a/api/nodes/views.py b/api/nodes/views.py
index 394b31e781c..ce9978512c9 100644
--- a/api/nodes/views.py
+++ b/api/nodes/views.py
@@ -3451,8 +3451,10 @@ def get_default_queryset(self):
         # Permissions on the node are handled by the permissions_classes
         # Permissions on the list objects are handled by the query
         if auth_user:
-            sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user__id=auth.user.id, admin=True)
-            return node.preprints.annotate(contrib=Exists(sub_qs)).filter(Q(contrib=True) | Q(is_published=True))
+            admin_subquery = Contributor.objects.filter(node=OuterRef('pk'), user__id=auth_user.id, admin=True)
+            node_subquery = AbstractNode.objects.annotate(contrib=Exists(admin_subquery)).filter(preprints=OuterRef('pk'), contrib=True)
+            return node.preprints.annotate(node_present=Exists(node_subquery)).filter(Q(node_present=True) | Q(is_published=True))
+
         return node.preprints.filter(is_published=True)
 
     def get_queryset(self):
diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py
index 328aea333f2..8b8e7491f04 100644
--- a/api/preprint_providers/views.py
+++ b/api/preprint_providers/views.py
@@ -206,8 +206,9 @@ def get_default_queryset(self):
 
         if auth_user:
             contrib_user_query = Q(is_published=True, node__contributor__user_id=auth_user.id, node__contributor__read=True)
-            sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user_id=auth_user.id, admin=True)
-            return default_qs.annotate(admin_user=Exists(sub_qs)).filter(Q(admin_user=True) | contrib_user_query | no_user_query)
+            admin_subquery = Contributor.objects.filter(node=OuterRef('pk'), user_id=auth_user.id, admin=True)
+            node_subquery = AbstractNode.objects.annotate(contrib=Exists(admin_subquery)).filter(preprints=OuterRef('pk'), contrib=True)
+            return default_qs.annotate(node_present=Exists(node_subquery)).filter(Q(node_present=True) | contrib_user_query | no_user_query)
         return default_qs.filter(no_user_query)
 
     # overrides ListAPIView
diff --git a/api/preprints/views.py b/api/preprints/views.py
index c337f9ce64d..224fc998b59 100644
--- a/api/preprints/views.py
+++ b/api/preprints/views.py
@@ -7,7 +7,7 @@
 from rest_framework import permissions as drf_permissions
 
 from framework.auth.oauth_scopes import CoreScopes
-from osf.models import PreprintService, Contributor
+from osf.models import PreprintService, Contributor, AbstractNode
 from osf.utils.requests import check_select_for_update
 
 from api.base.exceptions import Conflict
@@ -179,8 +179,10 @@ def get_default_queryset(self):
         no_user_query = Q(is_published=True, node__is_public=True)
         if auth_user:
             contrib_user_query = Q(is_published=True, node__contributor__user_id=auth_user.id, node__contributor__read=True)
-            sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user_id=auth_user.id, admin=True)
-            return default_qs.annotate(admin_user=Exists(sub_qs)).filter(Q(admin_user=True) | contrib_user_query | no_user_query)
+            admin_subquery = Contributor.objects.filter(node=OuterRef('pk'), user_id=auth_user.id, admin=True)
+            node_subquery = AbstractNode.objects.annotate(contrib=Exists(admin_subquery)).filter(preprints=OuterRef('pk'), contrib=True)
+            return default_qs.annotate(node_present=Exists(node_subquery)).filter(Q(node_present=True) | contrib_user_query | no_user_query)
+
         return default_qs.filter(no_user_query)
 
     # overrides ListAPIView
diff --git a/api/users/views.py b/api/users/views.py
index 31db1267aed..c675c115274 100644
--- a/api/users/views.py
+++ b/api/users/views.py
@@ -603,8 +603,10 @@ def get_default_queryset(self):
         )
         no_user_query = Q(is_published=True, node__is_public=True)
         if auth_user:
-            sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user_id=auth_user.id, admin=True)
-            return default_qs.annotate(admin_user=Exists(sub_qs)).filter(Q(admin_user=True) | no_user_query)
+            admin_subquery = Contributor.objects.filter(node=OuterRef('pk'), user_id=auth_user.id, admin=True)
+            node_subquery = AbstractNode.objects.annotate(contrib=Exists(admin_subquery)).filter(preprints=OuterRef('pk'), contrib=True)
+            return default_qs.annotate(node_present=Exists(node_subquery)).filter(Q(node_present=True) | no_user_query)
+
         return default_qs.filter(no_user_query)
 
     def get_queryset(self):

From eed513afdd4305715780d5ef00ff8c4627aeff90 Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Tue, 26 Sep 2017 14:33:27 -0400
Subject: [PATCH 019/192] Optimize _get_children and node_child_tree views.

---
 tests/test_views.py                           |  11 +-
 website/project/views/node.py                 | 159 +++++++++++-------
 .../static/js/projectSettingsTreebeardBase.js |  11 +-
 3 files changed, 113 insertions(+), 68 deletions(-)

diff --git a/tests/test_views.py b/tests/test_views.py
index 12634bb015f..7c90467c056 100644
--- a/tests/test_views.py
+++ b/tests/test_views.py
@@ -962,13 +962,12 @@ def test_get_node_with_children(self):
         res = self.app.get(url, auth=self.user.auth)
         tree = res.json[0]
         parent_node_id = tree['node']['id']
-        child1_id = tree['children'][0]['node']['id']
-        child2_id = tree['children'][1]['node']['id']
-        child3_id = tree['children'][2]['node']['id']
+        child_ids = [child['node']['id'] for child in tree['children']]
+
         assert_equal(parent_node_id, project._primary_key)
-        assert_equal(child1_id, child1._primary_key)
-        assert_equal(child2_id, child2._primary_key)
-        assert_equal(child3_id, child3._primary_key)
+        assert_in(child1._primary_key, child_ids)
+        assert_in(child2._primary_key, child_ids)
+        assert_in(child3._primary_key, child_ids)
 
     def test_get_node_with_child_linked_to_parent(self):
         project = ProjectFactory(creator=self.user)
diff --git a/website/project/views/node.py b/website/project/views/node.py
index bfeba53c08f..f8bb068904c 100644
--- a/website/project/views/node.py
+++ b/website/project/views/node.py
@@ -2,13 +2,14 @@
 import logging
 import httplib as http
 import math
+from collections import defaultdict
 from itertools import islice
 
 from bs4 import BeautifulSoup
 from flask import request
 from django.apps import apps
 from django.core.exceptions import ValidationError
-from django.db.models import Count, Q
+from django.db.models import Count, Q, OuterRef, Exists, Subquery
 
 from framework import status
 from framework.utils import iso8601format
@@ -35,7 +36,7 @@
 from website.project.model import has_anonymous_link, NodeUpdateError, validate_title
 from website.project.forms import NewNodeForm
 from website.project.metadata.utils import serialize_meta_schemas
-from osf.models import AbstractNode, PrivateLink, Contributor
+from osf.models import AbstractNode, PrivateLink, Contributor, Node, NodeRelation
 from osf.models.contributor import get_contributor_permissions
 from osf.models.licenses import serialize_node_license_record
 from website import settings
@@ -812,22 +813,34 @@ def get_affiliated_institutions(obj):
         })
     return ret
 
-def _get_children(node, auth, indent=0):
+def serialize_children(child_list, nested, indent=0):
+    results = []
+    for child in child_list:
+        results.append({
+            'id': child._id,
+            'title': child.title,
+            'is_public': child.is_public,
+            'parent_id': child.parentnode_id,
+            'indent': indent
+        })
+        if child._id in nested.keys():
+            results.extend(serialize_children(nested.get(child._id), nested, indent + 1))
+    return results
 
-    children = []
+def _get_children(node, auth):
+    is_admin = Contributor.objects.filter(node=OuterRef('pk'), admin=True, user=auth.user)
+    parent_node_sqs = NodeRelation.objects.filter(child=OuterRef('pk'), is_node_link=False).values('parent__guids___id')
+    children = (Node.objects.get_children(node)
+                .filter(is_deleted=False)
+                .annotate(parentnode_id=Subquery(parent_node_sqs[:1]))
+                .annotate(has_admin_perm=Exists(is_admin))
+                .filter(has_admin_perm=True))
 
-    for child in node.nodes_primary:
-        if not child.is_deleted and child.has_permission(auth.user, ADMIN):
-            children.append({
-                'id': child._primary_key,
-                'title': child.title,
-                'indent': indent,
-                'is_public': child.is_public,
-                'parent_id': child.parent_id,
-            })
-            children.extend(_get_children(child, auth, indent + 1))
+    nested = defaultdict(list)
+    for child in children:
+        nested[child.parentnode_id].append(child)
 
-    return children
+    return serialize_children(nested[node._id], nested)
 
 
 @must_be_valid_project
@@ -885,73 +898,101 @@ def _get_readable_descendants(auth, node, permission=None):
                 descendants.append(descendant)
     return descendants, all_readable
 
-def node_child_tree(user, nodes):
-    """ Format data to test for node privacy settings for use in treebeard.
-    :param user: modular odm User object
-    :param nodes: list of parent project node objects
-    :return: treebeard-formatted data
-    """
-    items = []
-
-    for node in nodes:
-        assert node, '{} is not a valid Node.'.format(node._id)
-
-        can_read = node.has_permission(user, READ)
-        can_read_children = node.has_permission_on_children(user, 'read')
-        if not can_read and not can_read_children:
-            continue
-
-        contributors = []
-        for contributor in node.contributors:
-            contributors.append({
-                'id': contributor._id,
-                'is_admin': node.has_permission(contributor, ADMIN),
-                'is_confirmed': contributor.is_confirmed
+def serialize_child_tree(child_list, user, nested):
+    serialized_children = []
+    for child in child_list:
+        if child.has_read_perm or child.has_permission_on_children(user, READ):
+            contributors = [{
+                'id': contributor.user._id,
+                'is_admin': contributor.admin,
+                'is_confirmed': contributor.user.is_confirmed,
+                'visible': contributor.visible
+            } for contributor in child.contributor_set.all()]
+
+            serialized_children.append({
+                'node': {
+                    'id': child._id,
+                    'url': child.url if child.has_read_perm else '',
+                    'title': child.title if child.has_read_perm else 'Private Project',
+                    'is_public': child.is_public,
+                    'contributors': contributors,
+                    'is_admin': child.has_admin_perm,
+                },
+                'user_id': user._id,
+                'children': serialize_child_tree(nested.get(child._id), user, nested) if child._id in nested.keys() else [],
+                'nodeType': 'project' if not child.parentnode_id else 'component',
+                'category': child.category,
+                'permissions': {
+                    'view': child.has_read_perm,
+                    'is_admin': child.has_admin_perm
+                }
             })
 
-        affiliated_institutions = [{
-            'id': affiliated_institution.pk,
-            'name': affiliated_institution.name
-        } for affiliated_institution in node.affiliated_institutions.all()]
-
-        children = node.get_nodes(**{'is_deleted': False, 'is_node_link': False})
-        children_tree = []
-        # List project/node if user has at least 'read' permissions (contributor or admin viewer) or if
-        # user is contributor on a component of the project/node
-        children_tree.extend(node_child_tree(user, children))
+    return sorted(serialized_children, key=lambda k: len(k['children']), reverse=True)
 
-        item = {
+def node_child_tree(user, node):
+    """ Format data to test for node privacy settings for use in treebeard.
+    :param user: user object
+    :param node: parent project node object
+    :return: treebeard-formatted data
+    """
+    serialized_nodes = []
+
+    assert node, '{} is not a valid Node.'.format(node._id)
+
+    is_admin_sqs = Contributor.objects.filter(node=OuterRef('pk'), admin=True, user=user)
+    can_read_sqs = Contributor.objects.filter(node=OuterRef('pk'), read=True, user=user)
+    parent_node_sqs = NodeRelation.objects.filter(child=OuterRef('pk'), is_node_link=False).values('parent__guids___id')
+    children = (Node.objects.get_children(node)
+                .filter(is_deleted=False)
+                .annotate(parentnode_id=Subquery(parent_node_sqs[:1]))
+                .annotate(has_admin_perm=Exists(is_admin_sqs))
+                .annotate(has_read_perm=Exists(can_read_sqs))
+                .include('contributor__user__guids')
+                )
+
+    nested = defaultdict(list)
+    for child in children:
+        nested[child.parentnode_id].append(child)
+
+    contributors = [{
+        'id': contributor.user._id,
+        'is_admin': node.has_permission(contributor.user, ADMIN),
+        'is_confirmed': contributor.user.is_confirmed,
+        'visible': contributor.visible
+    } for contributor in node.contributor_set.all().include('user__guids')]
+
+    can_read = node.has_permission(user, READ)
+    is_admin = node.has_permission(user, ADMIN)
+
+    if can_read or node.has_permission_on_children(user, READ):
+        serialized_nodes.append({
             'node': {
                 'id': node._id,
                 'url': node.url if can_read else '',
                 'title': node.title if can_read else 'Private Project',
                 'is_public': node.is_public,
                 'contributors': contributors,
-                'visible_contributors': list(node.visible_contributor_ids),
-                'is_admin': node.has_permission(user, ADMIN),
-                'affiliated_institutions': affiliated_institutions
+                'is_admin': is_admin
             },
             'user_id': user._id,
-            'children': children_tree,
+            'children': serialize_child_tree(nested.get(node._id), user, nested) if node._id in nested.keys() else [],
             'kind': 'folder' if not node.parent_node or not node.parent_node.has_permission(user, 'read') else 'node',
             'nodeType': node.project_or_component,
             'category': node.category,
             'permissions': {
                 'view': can_read,
-                'is_admin': node.has_permission(user, 'read')
+                'is_admin': is_admin
             }
-        }
-
-        items.append(item)
-
-    return items
+        })
 
+    return serialized_nodes
 
 @must_be_logged_in
 @must_be_valid_project
 def get_node_tree(auth, **kwargs):
     node = kwargs.get('node') or kwargs['project']
-    tree = node_child_tree(auth.user, [node])
+    tree = node_child_tree(auth.user, node)
     return tree
 
 @must_be_valid_project
diff --git a/website/static/js/projectSettingsTreebeardBase.js b/website/static/js/projectSettingsTreebeardBase.js
index 7b3df134e43..7df60699548 100644
--- a/website/static/js/projectSettingsTreebeardBase.js
+++ b/website/static/js/projectSettingsTreebeardBase.js
@@ -7,6 +7,7 @@
 
 var m = require('mithril');
 var Fangorn = require('js/fangorn').Fangorn;
+var lodashGet = require('lodash.get');
 
 
 function resolveToggle(item) {
@@ -32,6 +33,7 @@ function getNodesOriginal(nodeTree, nodesOriginal) {
     var j;
     var adminContributors = [];
     var registeredContributors = [];
+    var visibleContributors = [];
     var nodeId = nodeTree.node.id;
     for (i=0; i < nodeTree.node.contributors.length; i++) {
         if (nodeTree.node.contributors[i].is_admin) {
@@ -40,10 +42,13 @@ function getNodesOriginal(nodeTree, nodesOriginal) {
         if (nodeTree.node.contributors[i].is_confirmed) {
             registeredContributors.push(nodeTree.node.contributors[i].id);
         }
+        if (nodeTree.node.contributors[i].visible) {
+            visibleContributors.push(nodeTree.node.contributors[i].id);
+        }
     }
-    var nodeInstitutions = [];
+    var nodeInstitutions = lodashGet(nodeTree.node, 'affiliated_institutions', []);
 
-    nodeInstitutions = nodeTree.node.affiliated_institutions.map(function(item) {
+    nodeInstitutions = nodeInstitutions.map(function(item) {
         return item.id;
     });
 
@@ -53,7 +58,7 @@ function getNodesOriginal(nodeTree, nodesOriginal) {
         title: nodeTree.node.title,
         contributors: nodeTree.node.contributors,
         isAdmin: nodeTree.node.is_admin,
-        visibleContributors: nodeTree.node.visible_contributors,
+        visibleContributors: visibleContributors,
         adminContributors: adminContributors,
         registeredContributors: registeredContributors,
         institutions: nodeInstitutions,

From 84b54e100414e3876f3ecbd16b08a5bad6585faa Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Wed, 4 Oct 2017 11:35:59 -0400
Subject: [PATCH 020/192] Add appropriate error for accessing old SHARE API v1
 search

[#OSF-8552]
---
 website/routes.py |  1 +
 website/views.py  | 11 +++++++++++
 2 files changed, 12 insertions(+)

diff --git a/website/routes.py b/website/routes.py
index aa867cec304..0d1eb2008a4 100644
--- a/website/routes.py
+++ b/website/routes.py
@@ -943,6 +943,7 @@ def make_url_map(app):
 
         Rule(['/search/', '/search/<type>/'], ['get', 'post'], search_views.search_search, json_renderer),
         Rule('/search/projects/', 'get', search_views.search_projects_by_title, json_renderer),
+        Rule('/share/search/', 'get', website_views.legacy_share_v1_search, json_renderer),
 
     ], prefix='/api/v1')
 
diff --git a/website/views.py b/website/views.py
index 551a6e2b2cf..a56ccaae975 100644
--- a/website/views.py
+++ b/website/views.py
@@ -20,6 +20,7 @@
 from framework.forms import utils as form_utils
 from framework.routing import proxy_url
 from framework.auth.core import get_current_user_id
+from website import settings
 from website.institutions.views import serialize_institution
 
 from osf.models import BaseFileNode, Guid, Institution, PreprintService, AbstractNode
@@ -347,3 +348,13 @@ def redirect_to_home():
 def redirect_to_cos_news(**kwargs):
     # Redirect to COS News page
     return redirect('https://cos.io/news/')
+
+
+# Return error for legacy SHARE v1 search route
+def legacy_share_v1_search(**kwargs):
+    return HTTPError(
+        http.BAD_REQUEST,
+        data=dict(
+            message_long='Please use v2 of the SHARE search API available at {}api/v2/share/search/creativeworks/_search.'.format(settings.SHARE_URL)
+        )
+    )

From ae6a1203f4a81546f5ce680b0ac25ec8d1cf8343 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Fri, 6 Oct 2017 08:58:58 -0500
Subject: [PATCH 021/192] Add optimize_subject_query method.

---
 api/taxonomies/utils.py | 7 +++++++
 1 file changed, 7 insertions(+)
 create mode 100644 api/taxonomies/utils.py

diff --git a/api/taxonomies/utils.py b/api/taxonomies/utils.py
new file mode 100644
index 00000000000..8ba347fbc4d
--- /dev/null
+++ b/api/taxonomies/utils.py
@@ -0,0 +1,7 @@
+from django.db.models import Count
+
+def optimize_subject_query(subject_queryset):
+    """
+    Optimize subject queryset for TaxonomySerializer
+    """
+    return subject_queryset.prefetch_related('parent', 'provider').annotate(children_count=Count('children'))

From e6b5c03360d0e93b8314cd90b0bdb9120f3f5111 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Fri, 6 Oct 2017 09:00:06 -0500
Subject: [PATCH 022/192] Turn child_count field into SerializerMethodField on
 TaxonomySerializer.

- If queryset has been annotated with children_count, show this field, otherwise calculate obj.child_count.
---
 api/taxonomies/serializers.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/api/taxonomies/serializers.py b/api/taxonomies/serializers.py
index 016fe0c1822..5bde6510f4f 100644
--- a/api/taxonomies/serializers.py
+++ b/api/taxonomies/serializers.py
@@ -30,7 +30,7 @@ class TaxonomySerializer(JSONAPISerializer):
         max_version='2.3',
     )
     parent = TaxonomyField()
-    child_count = ser.IntegerField()
+    child_count = ser.SerializerMethodField()
     share_title = ser.CharField(source='provider.share_title', read_only=True)
     path = ser.CharField(read_only=True)
 
@@ -39,6 +39,10 @@ class TaxonomySerializer(JSONAPISerializer):
         'self': 'get_absolute_url',
     })
 
+    def get_child_count(self, obj):
+        children_count = getattr(obj, 'children_count', None)
+        return children_count if children_count is not None else obj.child_count
+
     def get_parents(self, obj):
         if not obj.parent:
             return []

From e96bd8ade647ad9aad2a95dbcf6f9ad441ec90d0 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Fri, 6 Oct 2017 09:02:24 -0500
Subject: [PATCH 023/192] Optimize TaxonomyList endpoint.

---
 api/taxonomies/views.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/api/taxonomies/views.py b/api/taxonomies/views.py
index 27107a738a4..d7ee511efcd 100644
--- a/api/taxonomies/views.py
+++ b/api/taxonomies/views.py
@@ -7,6 +7,7 @@
 from api.base import permissions as base_permissions
 from api.base.versioning import DeprecatedEndpointMixin
 from api.taxonomies.serializers import TaxonomySerializer
+from api.taxonomies.utils import optimize_subject_query
 from osf.models import Subject
 from framework.auth.oauth_scopes import CoreScopes
 
@@ -53,7 +54,7 @@ class TaxonomyList(DeprecatedEndpointMixin, JSONAPIBaseView, generics.ListAPIVie
     ordering = ('-id',)
 
     def get_default_queryset(self):
-        return Subject.objects.all()
+        return optimize_subject_query(Subject.objects.all())
 
     def get_queryset(self):
         return self.get_queryset_from_request()

From 2bea16185337ec8d78f711a0d756906ccaf98969 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Fri, 6 Oct 2017 09:22:02 -0500
Subject: [PATCH 024/192] Optimize preprint provider taxonomy list when no
 filter.

---
 api/preprint_providers/views.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py
index 814fffcd66c..e5fe99dfaaf 100644
--- a/api/preprint_providers/views.py
+++ b/api/preprint_providers/views.py
@@ -14,6 +14,7 @@
 from api.base.utils import get_object_or_error, get_user_auth
 from api.licenses.views import LicenseList
 from api.taxonomies.serializers import TaxonomySerializer
+from api.taxonomies.utils import optimize_subject_query
 from api.preprint_providers.serializers import PreprintProviderSerializer
 from api.preprints.serializers import PreprintSerializer
 
@@ -257,7 +258,7 @@ def get_queryset(self):
                 allowed_parents = [id_ for sublist in provider.subjects_acceptable for id_ in sublist[0]]
                 allows_children = [subs[0][-1] for subs in provider.subjects_acceptable if subs[1]]
                 return [sub for sub in Subject.objects.filter(parent___id=parent) if provider.subjects_acceptable == [] or self.is_valid_subject(allows_children=allows_children, allowed_parents=allowed_parents, sub=sub)]
-        return provider.all_subjects
+        return optimize_subject_query(provider.all_subjects)
 
 
 class PreprintProviderHighlightedSubjectList(JSONAPIBaseView, generics.ListAPIView):

From af515f28d1f80f3df5cd565b387000f6775a3364 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Fri, 6 Oct 2017 10:00:09 -0500
Subject: [PATCH 025/192] Optimize query for when filtering preprint provider
 taxonomies on parent and not provider.subjects.exists.

---
 api/preprint_providers/views.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py
index e5fe99dfaaf..02520755974 100644
--- a/api/preprint_providers/views.py
+++ b/api/preprint_providers/views.py
@@ -257,7 +257,7 @@ def get_queryset(self):
                 #  Calculate this here to only have to do it once.
                 allowed_parents = [id_ for sublist in provider.subjects_acceptable for id_ in sublist[0]]
                 allows_children = [subs[0][-1] for subs in provider.subjects_acceptable if subs[1]]
-                return [sub for sub in Subject.objects.filter(parent___id=parent) if provider.subjects_acceptable == [] or self.is_valid_subject(allows_children=allows_children, allowed_parents=allowed_parents, sub=sub)]
+                return [sub for sub in optimize_subject_query(Subject.objects.filter(parent___id=parent)) if provider.subjects_acceptable == [] or self.is_valid_subject(allows_children=allows_children, allowed_parents=allowed_parents, sub=sub)]
         return optimize_subject_query(provider.all_subjects)
 
 

From 0c34036f4ed7e4c0acd8acac948515a927388b3f Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Fri, 6 Oct 2017 10:22:44 -0500
Subject: [PATCH 026/192] Improve PreprintProviderHighlightedSubjectList query.

---
 api/preprint_providers/views.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py
index 02520755974..c70f200779f 100644
--- a/api/preprint_providers/views.py
+++ b/api/preprint_providers/views.py
@@ -277,7 +277,7 @@ class PreprintProviderHighlightedSubjectList(JSONAPIBaseView, generics.ListAPIVi
 
     def get_queryset(self):
         provider = get_object_or_error(PreprintProvider, self.kwargs['provider_id'], self.request, display_name='PreprintProvider')
-        return Subject.objects.filter(id__in=[s.id for s in provider.highlighted_subjects]).order_by('text')
+        return optimize_subject_query(Subject.objects.filter(id__in=[s.id for s in provider.highlighted_subjects]).order_by('text'))
 
 
 class PreprintProviderLicenseList(LicenseList):

From c9de6651b4d2678e79c097131ddef2356638867e Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Fri, 6 Oct 2017 11:08:22 -0500
Subject: [PATCH 027/192] Add optimizations for when top_level_subjects is
 called.

---
 osf/models/preprint_provider.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/osf/models/preprint_provider.py b/osf/models/preprint_provider.py
index c892ed67cb5..eb59a0843bd 100644
--- a/osf/models/preprint_provider.py
+++ b/osf/models/preprint_provider.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 from django.db import models
 from django.contrib.postgres import fields
+from api.taxonomies.utils import optimize_subject_query
 
 from osf.models.base import BaseModel, ObjectIDMixin
 from osf.models.licenses import NodeLicense
@@ -71,11 +72,11 @@ def highlighted_subjects(self):
     @property
     def top_level_subjects(self):
         if self.subjects.exists():
-            return self.subjects.filter(parent__isnull=True)
+            return optimize_subject_query(self.subjects.filter(parent__isnull=True))
         else:
             # TODO: Delet this when all PreprintProviders have a mapping
             if len(self.subjects_acceptable) == 0:
-                return Subject.objects.filter(parent__isnull=True, provider___id='osf')
+                return optimize_subject_query(Subject.objects.filter(parent__isnull=True, provider___id='osf'))
             tops = set([sub[0][0] for sub in self.subjects_acceptable])
             return [Subject.load(sub) for sub in tops]
 

From f401ee1b2f66a23725fae780108048638046dc59 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Fri, 6 Oct 2017 13:18:29 -0500
Subject: [PATCH 028/192] Optimize taxonomy detail endpoint.

---
 api/taxonomies/views.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/api/taxonomies/views.py b/api/taxonomies/views.py
index d7ee511efcd..10143ca78e3 100644
--- a/api/taxonomies/views.py
+++ b/api/taxonomies/views.py
@@ -1,4 +1,6 @@
 from rest_framework import generics, permissions as drf_permissions
+from rest_framework.exceptions import NotFound
+from django.core.exceptions import ObjectDoesNotExist
 
 from api.base.views import JSONAPIBaseView
 from api.base.utils import get_object_or_error
@@ -100,4 +102,7 @@ class TaxonomyDetail(JSONAPIBaseView, generics.RetrieveAPIView):
     view_name = 'taxonomy-detail'
 
     def get_object(self):
-        return get_object_or_error(Subject, self.kwargs['taxonomy_id'], self.request)
+        try:
+             return optimize_subject_query(Subject.objects).get(_id=self.kwargs['taxonomy_id'])
+        except ObjectDoesNotExist:
+            raise NotFound

From db99de7402bcbb0490954be77db6692fc75ef5a7 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Fri, 6 Oct 2017 13:29:16 -0500
Subject: [PATCH 029/192] Add optimization for another conditional branch of
 PreprintProviderTaxonomy view

---
 api/preprint_providers/views.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py
index c70f200779f..239cdcb0582 100644
--- a/api/preprint_providers/views.py
+++ b/api/preprint_providers/views.py
@@ -251,7 +251,7 @@ def get_queryset(self):
             if parent == 'null':
                 return provider.top_level_subjects
             if provider.subjects.exists():
-                return provider.subjects.filter(parent___id=parent)
+                return optimize_subject_query(provider.subjects.filter(parent___id=parent))
             else:
                 # TODO: Delet this when all PreprintProviders have a mapping
                 #  Calculate this here to only have to do it once.

From e5d7ae9b957ef7480cfca4c2b2dc8267610492fe Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Fri, 6 Oct 2017 13:56:07 -0500
Subject: [PATCH 030/192] Fix indent and remove unused import.

---
 api/taxonomies/views.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/api/taxonomies/views.py b/api/taxonomies/views.py
index 10143ca78e3..34359df2eca 100644
--- a/api/taxonomies/views.py
+++ b/api/taxonomies/views.py
@@ -3,7 +3,6 @@
 from django.core.exceptions import ObjectDoesNotExist
 
 from api.base.views import JSONAPIBaseView
-from api.base.utils import get_object_or_error
 from api.base.filters import ListFilterMixin
 from api.base.pagination import NoMaxPageSizePagination
 from api.base import permissions as base_permissions
@@ -103,6 +102,6 @@ class TaxonomyDetail(JSONAPIBaseView, generics.RetrieveAPIView):
 
     def get_object(self):
         try:
-             return optimize_subject_query(Subject.objects).get(_id=self.kwargs['taxonomy_id'])
+            return optimize_subject_query(Subject.objects).get(_id=self.kwargs['taxonomy_id'])
         except ObjectDoesNotExist:
             raise NotFound

From 04342c06b43219b161561e6aaa62b6a231c4d247 Mon Sep 17 00:00:00 2001
From: TomBaxter <tkb608@gmail.com>
Date: Wed, 11 Oct 2017 14:42:34 -0400
Subject: [PATCH 031/192] Update addons/googledrive/README.md with rate limit
 tip

No ticket
[skip ci]
---
 addons/googledrive/README.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/addons/googledrive/README.md b/addons/googledrive/README.md
index dd9bacfd4ad..3ed19d6471b 100644
--- a/addons/googledrive/README.md
+++ b/addons/googledrive/README.md
@@ -11,6 +11,7 @@
 3. Click on the "Google Drive API" link, and enable it
 4. Click on "Credentials", and "create credentials". Select "Oath Client ID", with "web application" and set the redirect uri to `http://localhost:5000/oauth/callback/googledrive/`
 5. Submit your new client ID and make a note of your new ID and secret
+6. (Optional) You may find that the default 10 "QPS per User" rate limit is too restrictive. This can result in unexpected 403 "User Rate Limit Exceeded" messages. You may find it useful to request this limit be raised to 100. To do so, in the Google API console, from the dashboard of your project, click on "Google Drive API" in the list of APIs. Then click the "quotas" tab. Then click any of the pencils in the quotas table. Click the "apply for higher quota" link. Request that your "QPS per User" be raised to 100.
 
 ### Enable for OSF
 1. Create a local googledrive settings file with `cp addons/googledrive/settings/local-dist.py addons/googledrive/settings/local.py`
@@ -18,4 +19,4 @@
 3. Ensure `"googledrive"` exists in the addons list in `"addons.json"`
 4. Restart your server
 5. Connect googledrive as a provider
-6. Import and configure your new provider
\ No newline at end of file
+6. Import and configure your new provider

From 4fa4c76e9a5647d9dbe327bd98d9abe1492487cd Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Wed, 18 Oct 2017 16:57:50 -0400
Subject: [PATCH 032/192] Implement lazy loading on the files page.

- Use subqueries and annotations to reduce # of initial queries.
- Replace logo spinner with ball-pulse loading indicator.
---
 website/static/js/fangorn.js           | 53 +++++++++++++-------
 website/static/js/fileViewTreebeard.js |  8 ---
 website/templates/project/files.mako   |  6 +--
 website/util/rubeus.py                 | 68 ++++++++++++--------------
 4 files changed, 66 insertions(+), 69 deletions(-)

diff --git a/website/static/js/fangorn.js b/website/static/js/fangorn.js
index c8232b096f3..35c6f7b772f 100644
--- a/website/static/js/fangorn.js
+++ b/website/static/js/fangorn.js
@@ -1590,31 +1590,46 @@ function _loadTopLevelChildren() {
  * @this Treebeard.controller
  * @private
  */
-var NO_AUTO_EXPAND_PROJECTS = ['ezcuj', 'ecmz4', 'w4wvg', 'sn64d'];
 function expandStateLoad(item) {
     var tb = this,
+        icon = $('.tb-row[data-id="' + item.id + '"]').find('.tb-toggle-icon'),
+        toggleIcon = tbOptions.resolveToggle(item),
+        addonList = [],
         i;
-    if (item.children.length > 0 && item.depth === 1) {
-        // NOTE: On the RPP *only*: Load the top-level project's OSF Storage
-        // but do NOT lazy-load children in order to save hundreds of requests.
-        // TODO: We might want to do this for every project, but that's TBD.
-        // /sloria
-        if (window.contextVars && window.contextVars.node && NO_AUTO_EXPAND_PROJECTS.indexOf(window.contextVars.node.id) > -1) {
-            tb.updateFolder(null, item.children[0]);
-        } else {
-            for (i = 0; i < item.children.length; i++) {
-                tb.updateFolder(null, item.children[i]);
+
+    if (item.depth > 1 && !item.data.isAddonRoot && item.children.length == 0 && item.open) {
+        m.render(icon.get(0), tbOptions.resolveRefreshIcon());
+        $osf.ajaxJSON(
+            'GET',
+            '/api/v1/project/' + item.data.nodeID + '/files/grid/'
+        ).done(function(xhr) {
+            var data = xhr.data[0].children;
+            for (i = 0; i < data.length; i++) {
+                var child = tb.buildTree(data[i], item);
+                if (child.data.isAddonRoot) {
+                    addonList.push(child);
+                }
+                item.add(child);
             }
-        }
-    }
-    if (item.children.length > 0 && item.depth === 2) {
-        for (i = 0; i < item.children.length; i++) {
-            if (item.children[i].data.isAddonRoot || item.children[i].data.addonFullName === 'OSF Storage' ) {
-                tb.updateFolder(null, item.children[i]);
+
+            item.open = true;
+            tb.redraw();
+
+            for (i=0; i < addonList.length; ++i) {
+                tb.toggleFolder(tb.returnIndex(addonList[i].id));
             }
-        }
+
+            m.render(icon.get(0), tbOptions.resolveToggle(item));
+            tb.redraw();
+
+        }).fail(function(xhr) {
+            item.notify.update('Unable to retrieve components.', 'danger', undefined, 3000);
+            item.open = false;
+        });
     }
-        $('.fangorn-toolbar-icon').tooltip();
+
+    $('.fangorn-toolbar-icon').tooltip();
+
 }
 
 /**
diff --git a/website/static/js/fileViewTreebeard.js b/website/static/js/fileViewTreebeard.js
index 4f7d3005d62..ae0906c2b8a 100644
--- a/website/static/js/fileViewTreebeard.js
+++ b/website/static/js/fileViewTreebeard.js
@@ -61,14 +61,6 @@ function FileViewTreebeard(data) {
                 width: '100%'
             }];
         },
-        ontogglefolder : function (tree) {
-            Fangorn.DefaultOptions.ontogglefolder.call(this, tree);
-            var containerHeight = this.select('#tb-tbody').height();
-            if (!this.options.naturalScrollLimit){
-                this.options.showTotal = Math.floor(containerHeight / this.options.rowHeight) + 1;
-            }
-            this.redraw();
-        },
         lazyLoadOnLoad: function(tree, event) {
             var tb = this;
             Fangorn.DefaultOptions.lazyLoadOnLoad.call(tb, tree, event);
diff --git a/website/templates/project/files.mako b/website/templates/project/files.mako
index 1fe153fcdf3..f7a7816b058 100644
--- a/website/templates/project/files.mako
+++ b/website/templates/project/files.mako
@@ -9,13 +9,9 @@
 %endif
 
 <div id="treeGrid">
-	<div class="spinner-loading-wrapper">
-		<div class="logo-spin logo-lg"></div>
-		<p class="m-t-sm fg-load-message"> Loading files...  </p>
-	</div>
+    <div class="ball-scale ball-scale-blue text-center m-v-xl"><div></div></div>
 </div>
 
-
 <%def name="stylesheets()">
     ${parent.stylesheets()}
     % for stylesheet in tree_css:
diff --git a/website/util/rubeus.py b/website/util/rubeus.py
index 49bef702ca3..6e342111c83 100644
--- a/website/util/rubeus.py
+++ b/website/util/rubeus.py
@@ -11,6 +11,8 @@
 from framework.auth.decorators import Auth
 
 from django.apps import apps
+from django.db import connection
+from django.db.models import Exists, OuterRef
 
 from website import settings
 from website.util import paths
@@ -163,32 +165,13 @@ def to_hgrid(self):
         """Return the Rubeus.JS representation of the node's file data, including
         addons and components
         """
-        root = self._serialize_node(self.node)
+        print(len(connection.queries))
+        root = self._get_nodes(self.node)
         return [root]
 
-    def _collect_components(self, node, visited):
-        rv = []
-        if not node.can_view(self.auth):
-            return rv
-        for child in node.get_nodes(is_deleted=False):
-            if not child.can_view(self.auth):
-                if child.primary:
-                    for desc in child.find_readable_descendants(self.auth):
-                        visited.append(desc.resolve()._id)
-                        rv.append(self._serialize_node(desc, visited=visited, parent=node))
-            elif child.resolve()._id not in visited:
-                visited.append(child.resolve()._id)
-                rv.append(self._serialize_node(child, visited=visited, parent=node))
-        return rv
-
-    def _get_node_name(self, node):
+    def _get_node_name(self, node, can_view):
         """Input node object, return the project name to be display.
         """
-        NodeRelation = apps.get_model('osf.NodeRelation')
-        is_node_relation = isinstance(node, NodeRelation)
-        node = node.child if is_node_relation else node
-        can_view = node.can_view(auth=self.auth)
-
         if can_view:
             node_name = sanitize.unescape_entities(node.title)
         elif node.is_registration:
@@ -202,26 +185,18 @@ def _get_node_name(self, node):
 
         return node_name
 
-    def _serialize_node(self, node, visited=None, parent=None):
-        """Returns the rubeus representation of a node folder.
-        """
-        visited = visited or []
-        visited.append(node.resolve()._id)
+    def _serialize_node(self, node, parent=None, children=[]):
+        is_pointer = parent and node.linked_node
         can_view = node.can_view(auth=self.auth)
-        if can_view:
-            children = self._collect_addons(node) + self._collect_components(node, visited)
-        else:
-            children = []
-
-        is_pointer = parent and parent.has_node_link_to(node)
+        can_edit = node.has_write_perm if hasattr(node, 'has_write_perm') else node.can_edit(auth=self.auth)
 
         return {
             # TODO: Remove safe_unescape_html when mako html safe comes in
-            'name': self._get_node_name(node),
+            'name': self._get_node_name(node, can_view),
             'category': node.category,
             'kind': FOLDER,
             'permissions': {
-                'edit': node.can_edit(self.auth) and not node.is_registration,
+                'edit': can_edit and not node.is_registration,
                 'view': can_view,
             },
             'urls': {
@@ -231,10 +206,29 @@ def _serialize_node(self, node, visited=None, parent=None):
             'children': children,
             'isPointer': is_pointer,
             'isSmartFolder': False,
-            'nodeType': node.project_or_component,
-            'nodeID': node.resolve()._id,
+            'nodeType': 'component' if parent else 'project',
+            'nodeID': node._id,
         }
 
+    def _get_nodes(self, node):
+        AbstractNode = apps.get_model('osf.AbstractNode')
+        Contributor = apps.get_model('osf.Contributor')
+        NodeRelation = apps.get_model('osf.NodeRelation')
+
+        data = []
+        if node.can_view(auth=self.auth):
+            serialized_addons = self._collect_addons(node)
+            linked_node_sqs = NodeRelation.objects.filter(parent=node, is_node_link=True)
+            has_write_perm_sqs = Contributor.objects.filter(node=OuterRef('pk'), write=True, user=self.auth.user)
+            children = (AbstractNode.objects
+                        .filter(is_deleted=False, _parents__parent=node)
+                        .annotate(linked_node=Exists(linked_node_sqs))
+                        .annotate(has_write_perm=Exists(has_write_perm_sqs))
+                        )
+            serialized_children = [self._serialize_node(child, parent=node) for child in children]
+            data = serialized_addons + serialized_children
+        return self._serialize_node(node, children=data)
+
     def _collect_addons(self, node):
         rv = []
         for addon in node.get_addons():

From e957a9f6af4a52d0c2514daff15ca341380574fb Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 5 Oct 2017 11:51:13 -0400
Subject: [PATCH 033/192] Remove find and find_one methods from BaseModel

[#OSF-8295]
---
 osf/models/base.py | 26 +-------------------------
 1 file changed, 1 insertion(+), 25 deletions(-)

diff --git a/osf/models/base.py b/osf/models/base.py
index 7d6e7d2caa4..ad24a4bcc9c 100644
--- a/osf/models/base.py
+++ b/osf/models/base.py
@@ -2,7 +2,6 @@
 import random
 
 import bson
-import modularodm.exceptions
 from django.contrib.contenttypes.fields import (GenericForeignKey,
                                                 GenericRelation)
 from django.contrib.contenttypes.models import ContentType
@@ -15,7 +14,6 @@
 from include import IncludeQuerySet
 from osf.utils.caching import cached_property
 from osf.exceptions import ValidationError
-from osf.modm_compat import to_django_query
 from osf.utils.fields import LowercaseCharField, NonNaiveDateTimeField
 
 ALPHABET = '23456789abcdefghjkmnpqrstuvwxyz'
@@ -44,10 +42,6 @@ def generate_object_id():
 
 
 class BaseModel(models.Model):
-    """Base model that acts makes subclasses mostly compatible with the
-    modular-odm ``StoredObject`` interface.
-    """
-
     migration_page_size = 50000
 
     objects = models.QuerySet.as_manager()
@@ -84,27 +78,9 @@ def load(cls, data, select_for_update=False):
         except cls.DoesNotExist:
             return None
 
-    @classmethod
-    def find_one(cls, query, select_for_update=False):
-        try:
-            if select_for_update:
-                return cls.objects.filter(to_django_query(query, model_cls=cls)).select_for_update().get()
-            return cls.objects.get(to_django_query(query, model_cls=cls))
-        except cls.DoesNotExist:
-            raise modularodm.exceptions.NoResultsFound()
-        except cls.MultipleObjectsReturned as e:
-            raise modularodm.exceptions.MultipleResultsFound(*e.args)
-
-    @classmethod
-    def find(cls, query=None):
-        if not query:
-            return cls.objects.all()
-        else:
-            return cls.objects.filter(to_django_query(query, model_cls=cls))
-
     @classmethod
     def remove(cls, query=None):
-        return cls.find(query).delete()
+        return cls.objects.filter(query).delete() if query else cls.objects.all().delete()
 
     @classmethod
     def remove_one(cls, obj):

From 05184077832545bdb899e352653d7a1f8ea6fe37 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 5 Oct 2017 11:54:13 -0400
Subject: [PATCH 034/192] Remove ODMOrderingFilter and tests

---
 api/base/filters.py            | 21 --------------
 api/base/settings/defaults.py  |  2 +-
 api_tests/base/test_filters.py | 53 ----------------------------------
 3 files changed, 1 insertion(+), 75 deletions(-)

diff --git a/api/base/filters.py b/api/base/filters.py
index dbac3cb80a2..80dcb2785ff 100644
--- a/api/base/filters.py
+++ b/api/base/filters.py
@@ -14,7 +14,6 @@
 from django.core.exceptions import ValidationError
 from django.db.models import QuerySet as DjangoQuerySet
 from django.db.models import Q
-from modularodm.query import queryset as modularodm_queryset
 from rest_framework import serializers as ser
 from rest_framework.filters import OrderingFilter
 from osf.models import Subject
@@ -44,26 +43,6 @@ def sort_fn(a, b):
         return 0
     return sort_fn
 
-class ODMOrderingFilter(OrderingFilter):
-    """Adaptation of rest_framework.filters.OrderingFilter to work with modular-odm."""
-    # override
-    def filter_queryset(self, request, queryset, view):
-        ordering = self.get_ordering(request, queryset, view)
-        if isinstance(queryset, DjangoQuerySet):
-            if queryset.ordered:
-                return queryset
-            elif ordering and getattr(queryset.query, 'distinct_fields', None):
-                order_fields = tuple([field.lstrip('-') for field in ordering])
-                distinct_fields = queryset.query.distinct_fields
-                queryset.query.distinct_fields = tuple(set(distinct_fields + order_fields))
-            return super(ODMOrderingFilter, self).filter_queryset(request, queryset, view)
-        if ordering:
-            if not isinstance(queryset, modularodm_queryset.BaseQuerySet) and isinstance(ordering, (list, tuple)):
-                sorted_list = sorted(queryset, cmp=sort_multiple(ordering))
-                return sorted_list
-            return queryset.sort(*ordering)
-        return queryset
-
 
 class FilterMixin(object):
     """ View mixin with helper functions for filtering. """
diff --git a/api/base/settings/defaults.py b/api/base/settings/defaults.py
index 8f89797e9ff..accfce2a7de 100644
--- a/api/base/settings/defaults.py
+++ b/api/base/settings/defaults.py
@@ -152,7 +152,7 @@
         '2.5',
         '2.6',
     ),
-    'DEFAULT_FILTER_BACKENDS': ('api.base.filters.ODMOrderingFilter',),
+    'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.OrderingFilter',),
     'DEFAULT_PAGINATION_CLASS': 'api.base.pagination.JSONAPIPagination',
     'ORDERING_PARAM': 'sort',
     'DEFAULT_AUTHENTICATION_CLASSES': (
diff --git a/api_tests/base/test_filters.py b/api_tests/base/test_filters.py
index 257e729180c..b77abe01ced 100644
--- a/api_tests/base/test_filters.py
+++ b/api_tests/base/test_filters.py
@@ -354,59 +354,6 @@ def test_parse_query_params_uses_field_source_attribute(self):
         assert_equal(parsed_field ['op'], 'eq')
 
 
-class TestODMOrderingFilter(ApiTestCase):
-    class query:
-        title = ' '
-        def __init__(self, title):
-            self.title = title
-        def __str__(self):
-            return self.title
-
-    class query_with_num:
-        title = ' '
-        number = 0
-        def __init__(self, title, number):
-            self.title = title
-            self.number = number
-        def __str__(self):
-            return self.title
-
-
-    def test_filter_queryset_forward(self):
-        query_to_be_sorted = [self.query(x) for x in 'NewProj Zip Proj Activity'.split()]
-        sorted_query = sorted(query_to_be_sorted, cmp=filters.sort_multiple(['title']))
-        sorted_output = [str(i) for i in sorted_query]
-        assert_equal(sorted_output, ['Activity', 'NewProj', 'Proj', 'Zip'])
-
-
-    def test_filter_queryset_forward_duplicate(self):
-        query_to_be_sorted = [self.query(x) for x in 'NewProj Activity Zip Activity'.split()]
-        sorted_query = sorted(query_to_be_sorted, cmp=filters.sort_multiple(['title']))
-        sorted_output = [str(i) for i in sorted_query]
-        assert_equal(sorted_output, ['Activity', 'Activity', 'NewProj', 'Zip'])
-
-
-    def test_filter_queryset_reverse(self):
-        query_to_be_sorted = [self.query(x) for x in 'NewProj Zip Proj Activity'.split()]
-        sorted_query = sorted(query_to_be_sorted, cmp=filters.sort_multiple(['-title']))
-        sorted_output = [str(i) for i in sorted_query]
-        assert_equal(sorted_output, ['Zip', 'Proj', 'NewProj', 'Activity'])
-
-    def test_filter_queryset_reverse_duplicate(self):
-        query_to_be_sorted = [self.query(x) for x in 'NewProj Activity Zip Activity'.split()]
-        sorted_query = sorted(query_to_be_sorted, cmp=filters.sort_multiple(['-title']))
-        sorted_output = [str(i) for i in sorted_query]
-        assert_equal(sorted_output, ['Zip', 'NewProj', 'Activity', 'Activity'])
-
-    def test_filter_queryset_handles_multiple_fields(self):
-        objs = [self.query_with_num(title='NewProj', number=10),
-                self.query_with_num(title='Zip', number=20),
-                self.query_with_num(title='Activity', number=30),
-                self.query_with_num(title='Activity', number=40)]
-        actual = [x.number for x in sorted(objs, cmp=filters.sort_multiple(['title', '-number']))]
-        assert_equal(actual, [40, 30, 10, 20])
-
-
 class TestQueryPatternRegex(TestCase):
 
     def setUp(self):

From 6f7f1c59189689971cb3d6a644e031eb09227fef Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 5 Oct 2017 13:57:50 -0400
Subject: [PATCH 035/192] Remove modm from osf_tests/ and tests/

---
 addons/wiki/tests/test_wiki.py                |  2 +-
 osf_tests/factories.py                        |  5 +---
 osf_tests/test_archiver.py                    |  5 +---
 osf_tests/test_guid.py                        |  7 -----
 osf_tests/test_node.py                        | 11 ++++----
 osf_tests/test_private_link.py                |  2 +-
 osf_tests/test_registrations.py               | 27 +++++++++----------
 osf_tests/test_sanctions.py                   | 21 +++------------
 osf_tests/test_session.py                     | 19 +++++++------
 osf_tests/test_user.py                        | 19 +++++--------
 osf_tests/test_validators.py                  |  2 +-
 scripts/tests/test_add_preprint_providers.py  |  8 +++---
 tests/base.py                                 |  2 +-
 tests/test_metadata.py                        |  4 +--
 tests/test_node_licenses.py                   |  6 ++---
 tests/test_notifications.py                   |  1 -
 tests/test_oauth.py                           |  8 +++---
 tests/test_registrations/test_embargoes.py    | 12 ++++-----
 .../test_registration_approvals.py            |  4 +--
 tests/test_registrations/test_retractions.py  |  4 +--
 tests/test_registrations/test_views.py        | 18 ++++++-------
 tests/test_subjects.py                        |  2 +-
 22 files changed, 75 insertions(+), 114 deletions(-)

diff --git a/addons/wiki/tests/test_wiki.py b/addons/wiki/tests/test_wiki.py
index 0581e83d5c5..c1943b556e3 100644
--- a/addons/wiki/tests/test_wiki.py
+++ b/addons/wiki/tests/test_wiki.py
@@ -196,7 +196,7 @@ def test_project_wiki_edit_post_with_new_wname_and_content(self):
         res = self.app.post(url, {'content': page_content}, auth=self.user.auth).follow()
         assert_equal(res.status_code, 200)
 
-        new_wiki_page_count = NodeWikiPage.find().count()
+        new_wiki_page_count = NodeWikiPage.objects.all().count()
         # A new wiki page was created in the db
         assert_equal(new_wiki_page_count, old_wiki_page_count + 1)
 
diff --git a/osf_tests/factories.py b/osf_tests/factories.py
index 10c08641b70..19acb1bf533 100644
--- a/osf_tests/factories.py
+++ b/osf_tests/factories.py
@@ -26,7 +26,6 @@
 from osf import models
 from osf.models.sanctions import Sanction
 from osf.utils.names import impute_names_model
-from osf.modm_compat import Q
 from addons.osfstorage.models import OsfStorageFile
 
 fake = Factory.create()
@@ -246,9 +245,7 @@ class Meta:
     def _create(cls, *args, **kwargs):
         kwargs['node_license'] = kwargs.get(
             'node_license',
-            models.NodeLicense.find_one(
-                Q('name', 'eq', 'No license')
-            )
+            models.NodeLicense.objects.get(name='No license')
         )
         return super(NodeLicenseRecordFactory, cls)._create(*args, **kwargs)
 
diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py
index 105cd590de4..dc52fec7929 100644
--- a/osf_tests/test_archiver.py
+++ b/osf_tests/test_archiver.py
@@ -287,10 +287,7 @@ def from_question(qid, question):
         # reason. Update the doc currently in the db rather than saving a new
         # one.
 
-        schema = MetaSchema.find_one(
-            Q('name', 'eq', _schema['name']) &
-            Q('schema_version', 'eq', _schema['version'])
-        )
+        schema = MetaSchema.objects.get(name=_schema['name'], schema_version=_schema['version'])
         schema.schema = _schema
         schema.save()
 
diff --git a/osf_tests/test_guid.py b/osf_tests/test_guid.py
index 7becfa57e14..1131aa255b4 100644
--- a/osf_tests/test_guid.py
+++ b/osf_tests/test_guid.py
@@ -4,7 +4,6 @@
 from django.core.exceptions import MultipleObjectsReturned
 
 from osf.models import Guid, NodeLicenseRecord, OSFUser
-from osf.modm_compat import Q
 from osf_tests.factories import AuthUserFactory, UserFactory, NodeFactory, NodeLicenseRecordFactory, \
     RegistrationFactory, PreprintFactory, PreprintProviderFactory
 from tests.base import OsfTestCase
@@ -51,12 +50,6 @@ def test_referent(self, Factory):
         guid = Guid.objects.get(_id=obj._id)
         assert guid.referent == obj
 
-    def test_querying_on_referent(self):
-        user = UserFactory()
-
-        guids = Guid.find(Q('referent', 'eq', user))
-        assert user._id in guids.values_list('_id', flat=True)
-
     @pytest.mark.parametrize('Factory',
     [
         UserFactory,
diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py
index 215f5d15d0e..c530d87f219 100644
--- a/osf_tests/test_node.py
+++ b/osf_tests/test_node.py
@@ -484,8 +484,7 @@ def test_basic_querying(self):
         node_1 = ProjectFactory(is_public=False)
         node_2 = ProjectFactory(is_public=True)
 
-        results = Node.find()
-        assert len(results) == 2
+        assert Node.objects.all().count() == 2
 
         private = Node.objects.filter(is_public=False)
         assert node_1 in private
@@ -512,10 +511,10 @@ def test_title_validation(self):
     def test_remove_one(self):
         node = ProjectFactory()
         node2 = ProjectFactory()
-        assert len(Node.find()) == 2  # sanity check
+        assert Node.objects.all().count() == 2  # sanity check
         Node.remove_one(node)
-        assert len(Node.find()) == 1
-        assert node2 in Node.find()
+        assert Node.objects.all().count() == 1
+        assert node2 in Node.objects.all()
 
     def test_querying_on_guid_id(self):
         node = NodeFactory()
@@ -2012,7 +2011,7 @@ def test_node_scale_with_deleted_parent(self):
     def test_create_from_node(self):
         proj = ProjectFactory()
         user = proj.creator
-        schema = MetaSchema.find()[0]
+        schema = MetaSchema.objects.first()
         data = {'some': 'data'}
         draft = DraftRegistration.create_from_node(
             proj,
diff --git a/osf_tests/test_private_link.py b/osf_tests/test_private_link.py
index d8a479a821f..720b0622999 100644
--- a/osf_tests/test_private_link.py
+++ b/osf_tests/test_private_link.py
@@ -50,7 +50,7 @@ def test_node_scale_with_deleted_parent(self):
     def test_create_from_node(self):
         proj = NodeFactory()
         user = proj.creator
-        schema = MetaSchema.find()[0]
+        schema = MetaSchema.objects.first()
         data = {'some': 'data'}
         draft = DraftRegistration.create_from_node(
             proj,
diff --git a/osf_tests/test_registrations.py b/osf_tests/test_registrations.py
index 489ffb01458..589c60a1c77 100644
--- a/osf_tests/test_registrations.py
+++ b/osf_tests/test_registrations.py
@@ -6,7 +6,6 @@
 from framework.auth.core import Auth
 from osf.models import Node, Registration, Sanction, MetaSchema, NodeLog
 from addons.wiki.models import NodeWikiPage
-from osf.modm_compat import Q
 
 from website import settings
 from website.util.permissions import READ, WRITE, ADMIN
@@ -308,22 +307,22 @@ def test_sanction_none(self):
 
     def test_sanction_embargo_termination_first(self):
         embargo_termination_approval = factories.EmbargoTerminationApprovalFactory()
-        registration = Registration.find_one(Q('embargo_termination_approval', 'eq', embargo_termination_approval))
+        registration = Registration.objects.get(embargo_termination_approval=embargo_termination_approval)
         assert registration.sanction == embargo_termination_approval
 
     def test_sanction_retraction(self):
         retraction = factories.RetractionFactory()
-        registration = Registration.find_one(Q('retraction', 'eq', retraction))
+        registration = Registration.objects.get(retraction=retraction)
         assert registration.sanction == retraction
 
     def test_sanction_embargo(self):
         embargo = factories.EmbargoFactory()
-        registration = Registration.find_one(Q('embargo', 'eq', embargo))
+        registration = Registration.objects.get(embargo=embargo)
         assert registration.sanction == embargo
 
     def test_sanction_registration_approval(self):
         registration_approval = factories.RegistrationApprovalFactory()
-        registration = Registration.find_one(Q('registration_approval', 'eq', registration_approval))
+        registration = Registration.objects.get(registration_approval=registration_approval)
         assert registration.sanction == registration_approval
 
     def test_sanction_searches_parents(self):
@@ -338,7 +337,7 @@ def test_sanction_searches_parents(self):
 
     def test_is_pending_registration(self):
         registration_approval = factories.RegistrationApprovalFactory()
-        registration = Registration.find_one(Q('registration_approval', 'eq', registration_approval))
+        registration = Registration.objects.get(registration_approval=registration_approval)
         assert registration_approval.is_pending_approval
         assert registration.is_pending_registration
 
@@ -353,7 +352,7 @@ def test_is_pending_registration_searches_parents(self):
 
     def test_is_registration_approved(self):
         registration_approval = factories.RegistrationApprovalFactory(state=Sanction.APPROVED, approve=True)
-        registration = Registration.find_one(Q('registration_approval', 'eq', registration_approval))
+        registration = Registration.objects.get(registration_approval=registration_approval)
         assert registration.is_registration_approved
 
     def test_is_registration_approved_searches_parents(self):
@@ -369,7 +368,7 @@ def test_is_registration_approved_searches_parents(self):
 
     def test_is_retracted(self):
         retraction = factories.RetractionFactory(state=Sanction.APPROVED, approve=True)
-        registration = Registration.find_one(Q('retraction', 'eq', retraction))
+        registration = Registration.objects.get(retraction=retraction)
         assert registration.is_retracted
 
     @mock.patch('website.project.tasks.send_share_node_data')
@@ -385,7 +384,7 @@ def test_is_retracted_searches_parents(self, mock_registration_updated, mock_upd
 
     def test_is_pending_retraction(self):
         retraction = factories.RetractionFactory()
-        registration = Registration.find_one(Q('retraction', 'eq', retraction))
+        registration = Registration.objects.get(retraction=retraction)
         assert retraction.is_pending_approval is True
         assert registration.is_pending_retraction is True
 
@@ -401,7 +400,7 @@ def test_is_pending_retraction_searches_parents(self, mock_update_search):
 
     def test_embargo_end_date(self):
         embargo = factories.EmbargoFactory()
-        registration = Registration.find_one(Q('embargo', 'eq', embargo))
+        registration = Registration.objects.get(embargo=embargo)
         assert registration.embargo_end_date == embargo.embargo_end_date
 
     def test_embargo_end_date_searches_parents(self):
@@ -415,7 +414,7 @@ def test_embargo_end_date_searches_parents(self):
 
     def test_is_pending_embargo(self):
         embargo = factories.EmbargoFactory()
-        registration = Registration.find_one(Q('embargo', 'eq', embargo))
+        registration = Registration.objects.get(embargo=embargo)
         assert embargo.is_pending_approval
         assert registration.is_pending_embargo
 
@@ -430,7 +429,7 @@ def test_is_pending_embargo_searches_parents(self):
 
     def test_is_embargoed(self):
         embargo = factories.EmbargoFactory()
-        registration = Registration.find_one(Q('embargo', 'eq', embargo))
+        registration = Registration.objects.get(embargo=embargo)
         registration.embargo.state = Sanction.APPROVED
         registration.embargo.save()
         assert registration.is_embargoed
@@ -464,9 +463,7 @@ def test_factory(self):
         assert draft.initiator == node.creator
 
         # Pick an arbitrary v2 schema
-        schema = MetaSchema.find(
-            Q('schema_version', 'eq', 2)
-        )[0]
+        schema = MetaSchema.objects.filter(schema_version=2).first()
         data = {'some': 'data'}
         draft = factories.DraftRegistrationFactory(registration_schema=schema, registration_metadata=data)
         assert draft.registration_schema == schema
diff --git a/osf_tests/test_sanctions.py b/osf_tests/test_sanctions.py
index 071587feec5..1f3bb8eb839 100644
--- a/osf_tests/test_sanctions.py
+++ b/osf_tests/test_sanctions.py
@@ -6,7 +6,6 @@
 
 from django.utils import timezone
 
-from osf.modm_compat import Q
 from osf.models import DraftRegistrationApproval, MetaSchema, NodeLog
 from osf_tests import factories
 from osf_tests.utils import mock_archive
@@ -81,10 +80,7 @@ class TestDraftRegistrationApprovals:
     def test_on_complete_immediate_creates_registration_for_draft_initiator(self, mock_enquque):
         user = factories.UserFactory()
         project = factories.ProjectFactory(creator=user)
-        registration_schema = MetaSchema.find_one(
-            Q('name', 'eq', 'Prereg Challenge') &
-            Q('schema_version', 'eq', 2)
-        )
+        registration_schema = MetaSchema.objects.get(name='Prereg Challenge', schema_version=2)
         draft = factories.DraftRegistrationFactory(
             branched_from=project,
             registration_schema=registration_schema,
@@ -114,10 +110,7 @@ def test_approval_after_initiator_is_merged_into_another_user(self, mock_enqueue
         mergee = factories.UserFactory(fullname='Manny Mergee')
         merger = factories.UserFactory(fullname='Merve Merger')
         project = factories.ProjectFactory(creator=mergee)
-        registration_schema = MetaSchema.find_one(
-            Q('name', 'eq', 'Prereg Challenge') &
-            Q('schema_version', 'eq', 2)
-        )
+        registration_schema = MetaSchema.objects.get(name='Prereg Challenge', schema_version=2)
         draft = factories.DraftRegistrationFactory(
             branched_from=project,
             registration_schema=registration_schema,
@@ -149,10 +142,7 @@ def test_on_complete_embargo_creates_registration_for_draft_initiator(self, mock
         )
         approval.save()
         project = factories.ProjectFactory(creator=user)
-        registration_schema = MetaSchema.find_one(
-            Q('name', 'eq', 'Prereg Challenge') &
-            Q('schema_version', 'eq', 2)
-        )
+        registration_schema = MetaSchema.objects.get(name='Prereg Challenge', schema_version=2)
         draft = factories.DraftRegistrationFactory(
             branched_from=project,
             registration_schema=registration_schema,
@@ -191,10 +181,7 @@ def test_on_reject(self, mock_send_mail):
         )
         approval.save()
         project = factories.ProjectFactory(creator=user)
-        registration_schema = MetaSchema.find_one(
-            Q('name', 'eq', 'Prereg Challenge') &
-            Q('schema_version', 'eq', 2)
-        )
+        registration_schema = MetaSchema.objects.get(name='Prereg Challenge', schema_version=2)
         draft = factories.DraftRegistrationFactory(
             branched_from=project,
             registration_schema=registration_schema,
diff --git a/osf_tests/test_session.py b/osf_tests/test_session.py
index 760f1a1286f..ff8ddd804ea 100644
--- a/osf_tests/test_session.py
+++ b/osf_tests/test_session.py
@@ -4,7 +4,6 @@
 from tests.base import DbTestCase
 from osf_tests.factories import SessionFactory, UserFactory
 from osf.models import OSFUser, Session
-from osf.modm_compat import Q
 
 @pytest.mark.django_db
 class TestSession:
@@ -28,7 +27,7 @@ def test_remove(self):
         session2.save()
 
         assert Session.objects.count() == 2  # sanity check
-        Session.remove(Q('data.auth_user_id', 'eq', '123ab'))
+        Session.objects.filter(data__auth_user_id='123ab').delete()
         assert Session.objects.count() == 1
 
 
@@ -48,30 +47,30 @@ def test_remove_session_for_user(self):
         SessionFactory(user=self.user)
 
         # sanity check
-        assert Session.find().count() == 1
+        assert Session.objects.all().count() == 1
 
         utils.remove_sessions_for_user(self.user)
-        assert Session.find().count() == 0
+        assert Session.objects.all().count() == 0
 
         SessionFactory()
         SessionFactory(user=self.user)
 
         # sanity check
-        assert Session.find().count() == 2
+        assert Session.objects.all().count() == 2
 
         utils.remove_sessions_for_user(self.user)
-        assert Session.find().count() == 1
+        assert Session.objects.all().count() == 1
 
     def test_password_change_clears_sessions(self):
         SessionFactory(user=self.user)
         SessionFactory(user=self.user)
         SessionFactory(user=self.user)
-        assert Session.find().count() == 3
+        assert Session.objects.all().count() == 3
         self.user.set_password('killerqueen')
-        assert Session.find().count() == 0
+        assert Session.objects.all().count() == 0
 
     def test_remove_session(self):
         session = SessionFactory(user=self.user)
-        assert Session.find().count() == 1
+        assert Session.objects.all().count() == 1
         utils.remove_session(session)
-        assert Session.find().count() == 0
+        assert Session.objects.all().count() == 0
diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py
index 2af23eec3b2..071f335e73b 100644
--- a/osf_tests/test_user.py
+++ b/osf_tests/test_user.py
@@ -29,7 +29,6 @@
 from osf.utils.auth import Auth
 from osf.utils.names import impute_names_model
 from osf.exceptions import ValidationError
-from osf.modm_compat import Q
 
 from .utils import capture_signals
 from .factories import (
@@ -627,12 +626,12 @@ def test_user_get_cookie_no_session(self):
         super_secret_key = 'children need maps'
         signer = itsdangerous.Signer(super_secret_key)
         assert(
-            Session.find(Q('data.auth_user_id', 'eq', user._id)).count() == 0
+            Session.objects.filter(data__auth_user_id=user._id).count() == 0
         )
 
         cookie = user.get_or_create_cookie(super_secret_key)
 
-        session = Session.find(Q('data.auth_user_id', 'eq', user._id))[0]
+        session = Session.objects.filter(data__auth_user_id=user._id).first()
 
         assert session._id == signer.unsign(cookie)
         assert session.data['auth_user_id'] == user._id
@@ -653,7 +652,7 @@ def test_get_user_by_cookie_bad_cookie(self):
     def test_get_user_by_cookie_no_user_id(self):
         user = UserFactory()
         cookie = user.get_or_create_cookie()
-        session = Session.find_one(Q('data.auth_user_id', 'eq', user._id))
+        session = Session.objects.get(data__auth_user_id=user._id)
         del session.data['auth_user_id']
         session.save()
         assert OSFUser.from_cookie(cookie) is None
@@ -1301,12 +1300,6 @@ def setUp(self):
         super(TestUser, self).setUp()
         self.user = AuthUserFactory()
 
-    def tearDown(self):
-        AbstractNode.remove()
-        OSFUser.remove()
-        Session.remove()
-        super(TestUser, self).tearDown()
-
     # Regression test for https://github.com/CenterForOpenScience/osf.io/issues/2454
     def test_add_unconfirmed_email_when_email_verifications_is_empty(self):
         self.user.email_verifications = []
@@ -1395,7 +1388,7 @@ def test_add_same_unconfirmed_email_twice(self):
             self.user.get_unconfirmed_email_for_token(token1)
 
     def test_contributed_property(self):
-        projects_contributed_to = AbstractNode.find(Q('contributors', 'eq', self.user))
+        projects_contributed_to = AbstractNode.objects.filter(_contributors=self.user)
         assert list(self.user.contributed.all()) == list(projects_contributed_to)
 
     def test_contributor_to_property(self):
@@ -1438,7 +1431,7 @@ def test_visible_contributor_to_property(self):
     def test_created_property(self):
         # make sure there's at least one project
         ProjectFactory(creator=self.user)
-        projects_created_by_user = AbstractNode.find(Q('creator', 'eq', self.user))
+        projects_created_by_user = AbstractNode.objects.filter(creator=self.user)
         assert list(self.user.created.all()) == list(projects_created_by_user)
 
 
@@ -1635,7 +1628,7 @@ def is_mrm_field(value):
         # check fields set on merged user
         assert other_user.merged_by == self.user
 
-        assert Session.find(Q('data.auth_user_id', 'eq', other_user._id)).count() == 0
+        assert Session.objects.filter(data__auth_user_id=other_user._id).count() == 0
 
     def test_merge_unconfirmed(self):
         self._add_unconfirmed_user()
diff --git a/osf_tests/test_validators.py b/osf_tests/test_validators.py
index a422496b920..4732877a464 100644
--- a/osf_tests/test_validators.py
+++ b/osf_tests/test_validators.py
@@ -1,5 +1,5 @@
 import pytest
-from modularodm.exceptions import ValidationValueError
+from osf.exceptions import ValidationValueError
 
 from osf.models import validators
 
diff --git a/scripts/tests/test_add_preprint_providers.py b/scripts/tests/test_add_preprint_providers.py
index 7ffe1bfe897..e107a45baa8 100644
--- a/scripts/tests/test_add_preprint_providers.py
+++ b/scripts/tests/test_add_preprint_providers.py
@@ -19,7 +19,7 @@ def tearDown(self):
 
     def test_add_prod_providers(self):
         populate_main('prod')
-        providers = PreprintProvider.find()
+        providers = PreprintProvider.objects.all()
         assert_equal(providers.count(), len(PROD_PREPRINT_PROVIDERS))
         ids = [provider._id for provider in providers]
         for id in PROD_PREPRINT_PROVIDERS:
@@ -29,7 +29,7 @@ def test_add_prod_providers(self):
 
     def test_add_default_providers(self):
         populate_main(None)
-        providers = PreprintProvider.find()
+        providers = PreprintProvider.objects.all()
         assert_equal(providers.count(), len(PROD_PREPRINT_PROVIDERS))
         ids = [provider._id for provider in providers]
         for id in PROD_PREPRINT_PROVIDERS:
@@ -39,8 +39,8 @@ def test_add_default_providers(self):
 
     def test_add_staging_providers(self):
         populate_main('stage')
-        providers = PreprintProvider.find()
-        assert_equal(PreprintProvider.find().count(), len(STAGING_PREPRINT_PROVIDERS))
+        providers = PreprintProvider.objects.all()
+        assert_equal(PreprintProvider.objects.all().count(), len(STAGING_PREPRINT_PROVIDERS))
         ids = [provider._id for provider in providers]
         for id in STAGING_PREPRINT_PROVIDERS:
             assert_in(id, ids)
diff --git a/tests/base.py b/tests/base.py
index 042b6b4980e..33425b92151 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -39,7 +39,7 @@
 
 def get_default_metaschema():
     """This needs to be a method so it gets called after the test database is set up"""
-    return MetaSchema.find()[0]
+    return MetaSchema.objects.first()
 
 try:
     test_app = init_app(routes=True, set_backends=False)
diff --git a/tests/test_metadata.py b/tests/test_metadata.py
index b050f67b800..f092d6e732c 100644
--- a/tests/test_metadata.py
+++ b/tests/test_metadata.py
@@ -21,14 +21,14 @@ def test_ensure_schemas(self):
         # Should be zero MetaSchema records to begin with
         MetaSchema.remove()
         assert_equal(
-            MetaSchema.find().count(),
+            MetaSchema.objects.all().count(),
             0
         )
 
         ensure_schemas()
 
         assert_equal(
-            MetaSchema.find().count(),
+            MetaSchema.objects.all().count(),
             len(OSF_META_SCHEMAS)
         )
 
diff --git a/tests/test_node_licenses.py b/tests/test_node_licenses.py
index 9802bced57c..ef457ffb667 100644
--- a/tests/test_node_licenses.py
+++ b/tests/test_node_licenses.py
@@ -85,12 +85,12 @@ def test_ensure_licenses_updates_existing_licenses(self):
         assert_equal(ensure_licenses(), (0, 16))
 
     def test_ensure_licenses_no_licenses(self):
-        before_count = NodeLicense.find().count()
+        before_count = NodeLicense.objects.all().count()
         NodeLicense.remove()
-        assert_false(NodeLicense.find().count())
+        assert_false(NodeLicense.objects.all().count())
 
         ensure_licenses()
-        assert_equal(before_count, NodeLicense.find().count())
+        assert_equal(before_count, NodeLicense.objects.all().count())
 
     def test_ensure_licenses_some_missing(self):
         NodeLicense.objects.get(license_id='LGPL3').delete()
diff --git a/tests/test_notifications.py b/tests/test_notifications.py
index b51b16dae89..3b6294bfc7d 100644
--- a/tests/test_notifications.py
+++ b/tests/test_notifications.py
@@ -4,7 +4,6 @@
 from schema import Schema, And, Use, Or
 from django.utils import timezone
 
-from osf.modm_compat import Q
 from nose.tools import *  # noqa PEP8 asserts
 
 from framework.auth import Auth
diff --git a/tests/test_oauth.py b/tests/test_oauth.py
index 3abe567af3a..b21f3c1ab9e 100644
--- a/tests/test_oauth.py
+++ b/tests/test_oauth.py
@@ -105,7 +105,7 @@ def test_disconnect(self):
         self.user.save()
 
         # If the external account isn't attached, this test has no meaning
-        assert_equal(ExternalAccount.find().count(), 1)
+        assert_equal(ExternalAccount.objects.all().count(), 1)
         assert_in(
             external_account,
             self.user.external_accounts.all(),
@@ -133,7 +133,7 @@ def test_disconnect(self):
         )
 
         # External account is still in the database
-        assert_equal(ExternalAccount.find().count(), 1)
+        assert_equal(ExternalAccount.objects.all().count(), 1)
 
     def test_disconnect_with_multiple_connected(self):
         # Disconnect an account connected to multiple users from one user
@@ -170,7 +170,7 @@ def test_disconnect_with_multiple_connected(self):
         )
 
         # External account is still in the database
-        assert_equal(ExternalAccount.find().count(), 1)
+        assert_equal(ExternalAccount.objects.all().count(), 1)
 
         other_user.reload()
 
@@ -511,7 +511,7 @@ def test_multiple_users_associated(self):
         )
 
         assert_equal(
-            ExternalAccount.find().count(),
+            ExternalAccount.objects.all().count(),
             1
         )
 
diff --git a/tests/test_registrations/test_embargoes.py b/tests/test_registrations/test_embargoes.py
index 83b5735bda6..bb43ccb64fe 100644
--- a/tests/test_registrations/test_embargoes.py
+++ b/tests/test_registrations/test_embargoes.py
@@ -44,13 +44,13 @@ def setUp(self):
 
     # Node#_initiate_embargo tests
     def test__initiate_embargo_saves_embargo(self):
-        initial_count = Embargo.find().count()
+        initial_count = Embargo.objects.all().count()
         self.registration._initiate_embargo(
             self.user,
             self.valid_embargo_end_date,
             for_existing_registration=True
         )
-        assert_equal(Embargo.find().count(), initial_count + 1)
+        assert_equal(Embargo.objects.all().count(), initial_count + 1)
 
     def test_state_can_be_set_to_complete(self):
         embargo = EmbargoFactory()
@@ -102,13 +102,13 @@ def test__initiate_embargo_adds_admins_on_child_nodes(self):
         assert_not_in(child_non_admin._id, embargo.approval_state)
 
     def test__initiate_embargo_with_save_does_save_embargo(self):
-        initial_count = Embargo.find().count()
+        initial_count = Embargo.objects.all().count()
         self.registration._initiate_embargo(
             self.user,
             self.valid_embargo_end_date,
             for_existing_registration=True,
         )
-        assert_equal(Embargo.find().count(), initial_count + 1)
+        assert_equal(Embargo.objects.all().count(), initial_count + 1)
 
     # Node#embargo_registration tests
     def test_embargo_from_non_admin_raises_PermissionsError(self):
@@ -815,7 +815,7 @@ def test_register_draft_without_embargo_creates_registration_approval(self, mock
         )
         assert_equal(res.status_code, 202)
 
-        registration = Registration.find().order_by('-registered_date').first()
+        registration = Registration.objects.all().order_by('-registered_date').first()
         assert_not_equal(registration.registration_approval, None)
 
     # Regression test for https://openscience.atlassian.net/browse/OSF-5039
@@ -903,7 +903,7 @@ def test_POST_register_embargo_is_not_public(self, mock_enqueue):
 
         assert_equal(res.status_code, 202)
 
-        registration = Registration.find().order_by('-registered_date').first()
+        registration = Registration.objects.all().order_by('-registered_date').first()
 
         assert_false(registration.is_public)
         assert_true(registration.is_pending_embargo_for_existing_registration)
diff --git a/tests/test_registrations/test_registration_approvals.py b/tests/test_registrations/test_registration_approvals.py
index fb949f4930a..18413c880cf 100644
--- a/tests/test_registrations/test_registration_approvals.py
+++ b/tests/test_registrations/test_registration_approvals.py
@@ -38,11 +38,11 @@ def setUp(self):
         self.valid_embargo_end_date = timezone.now() + datetime.timedelta(days=3)
 
     def test__require_approval_saves_approval(self):
-        initial_count = RegistrationApproval.find().count()
+        initial_count = RegistrationApproval.objects.all().count()
         self.registration._initiate_approval(
             self.user
         )
-        assert_equal(RegistrationApproval.find().count(), initial_count + 1)
+        assert_equal(RegistrationApproval.objects.all().count(), initial_count + 1)
 
     def test__initiate_approval_does_not_create_tokens_for_unregistered_admin(self):
         unconfirmed_user = UnconfirmedUserFactory()
diff --git a/tests/test_registrations/test_retractions.py b/tests/test_registrations/test_retractions.py
index 20d92a088b7..d03d01ed56d 100644
--- a/tests/test_registrations/test_retractions.py
+++ b/tests/test_registrations/test_retractions.py
@@ -42,9 +42,9 @@ def test_set_public_registration_to_private_raises_NodeStateException(self):
         assert_true(self.registration.is_public)
 
     def test_initiate_retraction_saves_retraction(self):
-        initial_count = Retraction.find().count()
+        initial_count = Retraction.objects.all().count()
         self.registration._initiate_retraction(self.user)
-        assert_equal(Retraction.find().count(), initial_count + 1)
+        assert_equal(Retraction.objects.all().count(), initial_count + 1)
 
     def test__initiate_retraction_does_not_create_tokens_for_unregistered_admin(self):
         unconfirmed_user = UnconfirmedUserFactory()
diff --git a/tests/test_registrations/test_views.py b/tests/test_registrations/test_views.py
index 61dbb151bcc..b8843cd189f 100644
--- a/tests/test_registrations/test_views.py
+++ b/tests/test_registrations/test_views.py
@@ -229,7 +229,7 @@ def test_register_draft_registration_with_embargo_is_not_public(self, mock_enque
 
         assert_equal(res.status_code, http.ACCEPTED)
 
-        registration = Registration.find().order_by('-registered_date').first()
+        registration = Registration.objects.all().order_by('-registered_date').first()
 
         assert_false(registration.is_public)
         assert_true(registration.is_pending_embargo)
@@ -401,20 +401,20 @@ def test_update_draft_registration_non_admin(self):
         assert_equal(res.status_code, http.FORBIDDEN)
 
     def test_delete_draft_registration(self):
-        assert_equal(1, DraftRegistration.find().count())
+        assert_equal(1, DraftRegistration.objects.all().count())
         url = self.node.api_url_for('delete_draft_registration', draft_id=self.draft._id)
 
         res = self.app.delete(url, auth=self.user.auth)
         assert_equal(res.status_code, http.NO_CONTENT)
-        assert_equal(0, DraftRegistration.find().count())
+        assert_equal(0, DraftRegistration.objects.all().count())
 
     def test_delete_draft_registration_non_admin(self):
-        assert_equal(1, DraftRegistration.find().count())
+        assert_equal(1, DraftRegistration.objects.all().count())
         url = self.node.api_url_for('delete_draft_registration', draft_id=self.draft._id)
 
         res = self.app.delete(url, auth=self.non_admin.auth, expect_errors=True)
         assert_equal(res.status_code, http.FORBIDDEN)
-        assert_equal(1, DraftRegistration.find().count())
+        assert_equal(1, DraftRegistration.objects.all().count())
 
     @mock.patch('website.archiver.tasks.archive')
     def test_delete_draft_registration_registered(self, mock_register_draft):
@@ -430,21 +430,21 @@ def test_delete_draft_registration_approved_and_registration_deleted(self, mock_
         self.draft.registered_node.is_deleted = True
         self.draft.registered_node.save()
 
-        assert_equal(1, DraftRegistration.find().count())
+        assert_equal(1, DraftRegistration.objects.all().count())
         url = self.node.api_url_for('delete_draft_registration', draft_id=self.draft._id)
 
         res = self.app.delete(url, auth=self.user.auth)
         assert_equal(res.status_code, http.NO_CONTENT)
-        assert_equal(0, DraftRegistration.find().count())
+        assert_equal(0, DraftRegistration.objects.all().count())
 
     def test_only_admin_can_delete_registration(self):
         non_admin = AuthUserFactory()
-        assert_equal(1, DraftRegistration.find().count())
+        assert_equal(1, DraftRegistration.objects.all().count())
         url = self.node.api_url_for('delete_draft_registration', draft_id=self.draft._id)
 
         res = self.app.delete(url, auth=non_admin.auth, expect_errors=True)
         assert_equal(res.status_code, http.FORBIDDEN)
-        assert_equal(1, DraftRegistration.find().count())
+        assert_equal(1, DraftRegistration.objects.all().count())
 
     def test_get_metaschemas(self):
         url = api_url_for('get_metaschemas')
diff --git a/tests/test_subjects.py b/tests/test_subjects.py
index 6449a75d8bc..5d6f1becf8d 100644
--- a/tests/test_subjects.py
+++ b/tests/test_subjects.py
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 from django.core.exceptions import ValidationError
 from nose.tools import *  # flake8: noqa (PEP8 asserts)
-from modularodm.exceptions import ValidationValueError
+from osf.exceptions import ValidationValueError
 
 from tests.base import OsfTestCase
 from osf_tests.factories import SubjectFactory, PreprintFactory, PreprintProviderFactory

From 11637bd098fa4fdbf96bbcffcb24bbe82514aef5 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Fri, 6 Oct 2017 09:08:10 -0400
Subject: [PATCH 036/192] Removal of modm_compat, its tests, and uses of
 to_django_query

---
 api/base/utils.py             |  10 +-
 osf/modm_compat.py            | 174 ----------------------------------
 osf_tests/test_modm_compat.py |  26 -----
 3 files changed, 4 insertions(+), 206 deletions(-)
 delete mode 100644 osf/modm_compat.py
 delete mode 100644 osf_tests/test_modm_compat.py

diff --git a/api/base/utils.py b/api/base/utils.py
index 9c602dccfdc..b2e27aa21ca 100644
--- a/api/base/utils.py
+++ b/api/base/utils.py
@@ -15,7 +15,6 @@
 from framework.auth.oauth_scopes import ComposedScopes, normalize_scopes
 from osf.models import OSFUser, Node, Registration
 from osf.models.base import GuidMixin
-from osf.modm_compat import to_django_query
 from osf.utils.requests import check_select_for_update
 from website import settings as website_settings
 from website import util as website_util  # noqa
@@ -93,11 +92,10 @@ def get_object_or_error(model_cls, query_or_pk, request, display_name=None):
                 obj = model_cls.load(query_or_pk, select_for_update=select_for_update)
     else:
         # they passed a query
-        if hasattr(model_cls, 'primary_identifier_name'):
-            query = to_django_query(query_or_pk, model_cls=model_cls)
-        else:
-            # fall back to modmcompatibility's find_one
-            obj = model_cls.find_one(query_or_pk, select_for_update=select_for_update)
+        try:
+            obj = model_cls.objects.get(query_or_pk, select_for_update=select_for_update)
+        except model_cls.DoesNotExist:
+            obj = None
 
     if not obj:
         if not query:
diff --git a/osf/modm_compat.py b/osf/modm_compat.py
deleted file mode 100644
index 6e457ec77ed..00000000000
--- a/osf/modm_compat.py
+++ /dev/null
@@ -1,174 +0,0 @@
-# -*- coding: utf-8 -*-
-from operator import and_, or_
-
-from django.db.models import Q as DjangoQ
-from django.db.models import FieldDoesNotExist
-
-from modularodm import Q as MODMQ
-from modularodm.query import query, QueryGroup
-
-
-class BaseQ(object):
-    def __or__(self, other):
-        return OrQ(self, other)
-
-    def __and__(self, other):
-        return AndQ(self, other)
-
-
-class CompoundQ(BaseQ, query.QueryGroup):
-    @property
-    def nodes(self):
-        return self.__queries
-
-    def __init__(self, *queries):
-        self.__queries = queries
-
-    def __repr__(self):
-        return '<{0}({1})>'.format(
-            self.__class__.__name__,
-            ', '.join(repr(node) for node in self.nodes)
-        )
-
-    @classmethod
-    def from_modm_query(cls, query, model_cls=None):
-        op_function = and_ if query.operator == 'and' else or_
-        return reduce(op_function, (Q.from_modm_query(node, model_cls) for node in query.nodes))
-
-
-class AndQ(CompoundQ):
-    operator = 'and'
-
-    def __and__(self, other):
-        return AndQ(other, *self.nodes)
-
-    def to_django_query(self):
-        return reduce(lambda acc, val: acc & val, (q.to_django_query() for q in self.nodes))
-
-
-class OrQ(CompoundQ):
-    operator = 'or'
-
-    def __or__(self, other):
-        return OrQ(other, *self.nodes)
-
-    def to_django_query(self):
-        return reduce(lambda acc, val: acc | val, (q.to_django_query() for q in self.nodes))
-
-
-class Q(BaseQ, query.RawQuery):
-    QUERY_MAP = {'eq': 'exact'}
-
-    @classmethod
-    def from_modm_query(cls, query, model_cls=None):
-        from django.contrib.contenttypes.models import ContentType
-
-        if isinstance(query, QueryGroup):
-            compound_cls = AndQ if query.operator == 'and' else OrQ
-            return compound_cls.from_modm_query(query, model_cls=model_cls)
-        elif isinstance(query, MODMQ):
-            attribute = query.attribute
-            if attribute == 'referent':
-                # if it's a referent they must have passed an instance
-                return cls('object_id', 'eq', query.argument.id) & cls('content_type', 'eq',
-                                                                       ContentType.objects.get_for_model(
-                                                                           query.argument))
-            if model_cls:
-                field_aliases = getattr(model_cls, 'FIELD_ALIASES', {})
-                attribute = field_aliases.get(attribute, attribute)
-                field = _get_field(model_cls, attribute)
-                internal_type = _get_internal_type(field)
-                # Mongo compatibility fix: an 'eq' query on array fields
-                # behaves like 'contains' for postgres ArrayFields
-                if (
-                    internal_type == 'ArrayField' and
-                    query.operator == 'eq'
-                ):
-                    return cls(attribute, 'contains', [query.argument])
-                # Queries like Q('tags', 'eq', []) should be translated to
-                # Q('tags', 'isnull', True)
-                elif internal_type == 'ManyToManyField' and query.argument in (list(), tuple()):
-                    is_null = query.operator == 'eq'
-                    return cls(attribute, 'isnull', is_null)
-            return cls(attribute, query.operator, query.argument)
-        elif isinstance(query, cls):
-            if query.attribute == 'referent':
-                # if it's a referent they must have passed an instance
-                return cls('object_id', 'eq', query.argument.pk) & cls('content_type', 'eq',
-                                                                       ContentType.objects.get_for_model(
-                                                                           query.argument))
-            if model_cls:
-                field_aliases = getattr(model_cls, 'FIELD_ALIASES', {})
-                if query.attribute in field_aliases:
-                    return cls(field_aliases[query.attribute], query.operator, query.argument)
-            return query
-        else:
-            raise ValueError(
-                'from_modm_query must receive either a modularodm.Q, modularodm.query.QueryGroup, '
-                'or osf.modm_compat.Q object'
-            )
-
-    @property
-    def operator(self):
-        return self.__op
-
-    @property
-    def attribute(self):
-        return self.__key
-
-    @property
-    def argument(self):
-        return self.__val
-
-    @property
-    def op(self):
-        if self.__val is None:
-            return 'isnull'
-        return self.QUERY_MAP.get(self.__op, self.__op)
-
-    @property
-    def key(self):
-        return self.__key
-
-    @property
-    def val(self):
-        if self.__val is None:
-            return True if self.__op == 'eq' else False
-        return self.__val
-
-    def __init__(self, key, op, val):
-        self.__op = op
-        self.__key = key
-        self.__val = val
-
-    def to_django_query(self):
-        if self.op == 'ne':
-            return ~DjangoQ(**{'__'.join(self.key.split('.')): self.val})
-        return DjangoQ(**{'__'.join(self.key.split('.') + [self.op]): self.val})
-
-    def __repr__(self):
-        return '<Q({}, {}, {})>'.format(self.key, self.op, self.val)
-
-
-def _get_field(model_cls, field_name):
-    try:
-        return model_cls._meta.get_field(field_name)
-    except FieldDoesNotExist:
-        return None
-
-
-def _get_internal_type(field):
-    # NOTE: GenericForeignKey does not implement get_internal_type
-    if hasattr(field, 'get_internal_type'):
-        return field.get_internal_type()
-    else:
-        return None
-
-
-def to_django_query(query, model_cls=None):
-    """Translate a modular-odm Q or QueryGroup to a Django query.
-    """
-    # temporary measure as queries are converted away from modm
-    if isinstance(query, DjangoQ):
-        return query
-    return Q.from_modm_query(query, model_cls=model_cls).to_django_query()
diff --git a/osf_tests/test_modm_compat.py b/osf_tests/test_modm_compat.py
deleted file mode 100644
index 72e89f9f465..00000000000
--- a/osf_tests/test_modm_compat.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from modularodm import Q
-from django.db.models import Q as DjangoQ
-from osf.modm_compat import to_django_query
-
-class TestToDjangoQuery:
-
-    def test_returns_a_django_q(self):
-        q = Q('foo', 'eq', 42)
-        django_q = to_django_query(q)
-        assert type(django_q) is DjangoQ
-
-    def test_handles_or_queries(self):
-        q = Q('foo', 'eq', 42) | Q('bar', 'eq', 24)
-        django_q = to_django_query(q)
-        assert type(django_q) is DjangoQ
-        assert django_q.connector == 'OR'
-        assert len(django_q.children) == 2
-        assert django_q.children == [('foo__exact', 42), ('bar__exact', 24)]
-
-    def test_handles_and_queries(self):
-        q = Q('foo', 'eq', 42) & Q('bar', 'eq', 24)
-        django_q = to_django_query(q)
-        assert type(django_q) is DjangoQ
-        assert django_q.connector == 'AND'
-        assert len(django_q.children) == 2
-        assert django_q.children == [('foo__exact', 42), ('bar__exact', 24)]

From 5573d0493502988fc4b49d86f265b3bdaf2e0cca Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Fri, 6 Oct 2017 09:45:34 -0400
Subject: [PATCH 037/192] Remove modm from scripts; remove populate_conferences script

---
 scripts/create_fakes.py                |    8 +-
 scripts/populate_conferences.py        | 1034 ------------------------
 scripts/populate_preprint_providers.py |    6 +-
 3 files changed, 5 insertions(+), 1043 deletions(-)
 delete mode 100644 scripts/populate_conferences.py

diff --git a/scripts/create_fakes.py b/scripts/create_fakes.py
index 83af3943302..7be0ba3fc97 100644
--- a/scripts/create_fakes.py
+++ b/scripts/create_fakes.py
@@ -44,8 +44,6 @@
 import pytz
 from faker import Factory
 from faker.providers import BaseProvider
-from modularodm.exceptions import NoResultsFound
-from modularodm.query.querydialect import DefaultQueryDialect as Q
 django.setup()
 
 from framework.auth import Auth
@@ -308,8 +306,8 @@ def create_fake_project(creator, n_users, privacy, n_components, name, n_tags, p
         provider = None
         if preprint_provider:
             try:
-                provider = models.PreprintProvider.find_one(Q('_id', 'eq', provider))
-            except NoResultsFound:
+                provider = models.PreprintProvider.objects.get(_id=provider)
+            except models.PreprintProvider.DoesNotExist:
                 pass
         if not provider:
             provider = PreprintProviderFactory(name=fake.science_word())
@@ -372,7 +370,7 @@ def render_generations_from_node_structure_list(parent, creator, node_structure_
 
 def main():
     args = parse_args()
-    creator = models.OSFUser.find(Q('username', 'eq', args.user))[0]
+    creator = models.OSFUser.objects.filter(username=args.user).first()
     for i in range(args.n_projects):
         name = args.name + str(i) if args.name else ''
         create_fake_project(creator, args.n_users, args.privacy, args.n_components, name, args.n_tags,
diff --git a/scripts/populate_conferences.py b/scripts/populate_conferences.py
deleted file mode 100644
index e4c063bfa0d..00000000000
--- a/scripts/populate_conferences.py
+++ /dev/null
@@ -1,1034 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-
-import os
-import sys
-
-import django
-from modularodm import Q
-from modularodm.exceptions import ModularOdmException
-django.setup()
-
-from osf.models import Conference, OSFUser
-
-from website import settings
-from website.app import init_app
-from datetime import datetime
-
-
-def main():
-    init_app(set_backends=True, routes=False)
-    dev = 'dev' in sys.argv
-    populate_conferences(dev=dev)
-
-
-MEETING_DATA = {
-    'spsp2014': {
-        'name': 'Society for Personality and Social Psychology 2014',
-        'info_url': None,
-        'logo_url': None,
-        'location': 'Austin, TX',
-        'start_date': 'Feb 13 2014',
-        'end_date': 'Feb 15 2014',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'asb2014': {
-        'name': 'Association of Southeastern Biologists 2014',
-        'info_url': 'http://www.sebiologists.org/meetings/talks_posters.html',
-        'logo_url': None,
-        'location': 'Spartanburg, SC',
-        'start_date': 'Apr 2 2014',
-        'end_date': 'Apr 4 2014',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'aps2014': {
-        'name': 'Association for Psychological Science 2014',
-        'info_url': 'https://cos.io/aps/',
-        'logo_url': '/static/img/2014_Convention_banner-with-APS_700px.jpg',
-        'location': 'San Franscisco, CA',
-        'start_date': 'May 22 2014',
-        'end_date': 'May 25 2014',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'annopeer2014': {
-        'name': '#annopeer',
-        'info_url': None,
-        'logo_url': None,
-        'location': None,
-        'start_date': None,
-        'end_date': None,
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'cpa2014': {
-        'name': 'Canadian Psychological Association 2014',
-        'info_url': None,
-        'logo_url': None,
-        'location': 'Vancouver, BC',
-        'start_date': 'Jun 05 2014',
-        'end_date': 'Jun 07 2014',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'filaments2014': {
-        'name': 'National Radio Astronomy Observatory Filaments 2014',
-        'info_url': None,
-        'logo_url': 'https://science.nrao.edu/science/meetings/2014/'
-                    'filamentary-structure/images/filaments2014_660x178.png',
-        'location': 'Charlottesville, VA',
-        'start_date': 'Oct 10 2014',
-        'end_date': 'Oct 11 2014',
-        'active': False,
-        'admins': [
-            'lvonschi@nrao.edu',
-            # 'Dkim@nrao.edu',
-        ],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'bitss2014': {
-        'name': 'Berkeley Initiative for Transparency in the Social Sciences Research Transparency Forum 2014',
-        'info_url': None,
-        'logo_url': os.path.join(
-            settings.STATIC_URL_PATH,
-            'img',
-            'conferences',
-            'bitss.jpg',
-        ),
-        'location': 'Berkeley, CA',
-        'start_date': 'Dec 11 2014',
-        'end_date': 'Dec 12 2014',
-        'active': False,
-        'admins': [
-            'gkroll@berkeley.edu',
-            'awais@berkeley.edu',
-        ],
-        'public_projects': True,
-        'poster': False,
-        'talk': True,
-        'is_meeting': True
-    },
-    'spsp2015': {
-        'name': 'Society for Personality and Social Psychology 2015',
-        'info_url': None,
-        'logo_url': None,
-        'location': 'Long Beach, CA',
-        'start_date': 'Feb 26 2015',
-        'end_date': 'Feb 28 2015',
-        'active': False,
-        'admins': [
-            'meetings@spsp.org',
-        ],
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'aps2015': {
-        'name': 'Association for Psychological Science 2015',
-        'info_url': None,
-        'logo_url': 'http://www.psychologicalscience.org/images/APS_2015_Banner_990x157.jpg',
-        'location': 'New York, NY',
-        'start_date': 'May 21 2015',
-        'end_date': 'May 24 2015',
-        'admins': [],
-        'active': False,
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'icps2015': {
-        'name': 'International Convention of Psychological Science 2015',
-        'info_url': None,
-        'logo_url': 'http://icps.psychologicalscience.org/wp-content/themes/deepblue/images/ICPS_Website-header_990px.jpg',
-        'location': 'Amsterdam, The Netherlands',
-        'start_date': 'Mar 12 2015',
-        'end_date': 'Mar 14 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'mpa2015': {
-        'name': 'Midwestern Psychological Association 2015',
-        'info_url': None,
-        'logo_url': 'http://www.midwesternpsych.org/resources/Pictures/MPA%20logo.jpg',
-        'location': 'Chicago, IL',
-        'start_date': 'Apr 30 2015',
-        'end_date': 'May 02 2015',
-        'active': False,
-        'admins': [
-            'mpa@kent.edu',
-        ],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'NCCC2015': {
-        'name': 'North Carolina Cognition Conference 2015',
-        'info_url': None,
-        'logo_url': None,
-        'location': 'Elon, NC',
-        'start_date': 'Feb 21 2015',
-        'end_date': 'Feb 21 2015',
-        'active': False,
-        'admins': [
-            'aoverman@elon.edu',
-        ],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'VPRSF2015': {
-        'name': 'Virginia Piedmont Regional Science Fair 2015',
-        'info_url': None,
-        'logo_url': 'http://vprsf.org/wp-content/themes/VPRSF/images/logo.png',
-        'location': 'Charlottesville, VA',
-        'start_date': 'Mar 17 2015',
-        'end_date': 'Mar 17 2015',
-        'active': False,
-        'admins': [
-            'director@vprsf.org',
-        ],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'APRS2015': {
-        'name': 'UVA Annual Postdoctoral Research Symposium 2015',
-        'info_url': None,
-        'logo_url': 'http://s1.postimg.org/50qj9u6i7/GPA_Logo.jpg',
-        'location': 'Charlottesville, VA',
-        'start_date': None,
-        'end_date': None,
-        'active': False,
-        'admins': [
-            'mhurst@virginia.edu',
-        ],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'ASB2015': {
-        'name': 'Association of Southeastern Biologists 2015',
-        'info_url': None,
-        'logo_url': 'http://www.sebiologists.org/wp/wp-content/uploads/2014/09/banner_image_Large.png',
-        'location': 'Chattanooga, TN',
-        'start_date': 'Apr 01 2015',
-        'end_date': 'Apr 04 2015',
-        'active': False,
-        'admins': [
-            'amorris.mtsu@gmail.com',
-        ],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'TeaP2015': {
-        'name': 'Tagung experimentell arbeitender Psychologen 2015',
-        'info_url': None,
-        'logo_url': None,
-        'location': 'Hildesheim, Germany',
-        'start_date': 'Mar 08 2015',
-        'end_date': 'Mar 11 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'VSSEF2015': {
-        'name': 'Virginia State Science and Engineering Fair 2015',
-        'info_url': 'http://www.vmi.edu/conferences/vssef/vssef_home/',
-        'logo_url': 'http://www.vmi.edu/uploadedImages/Images/Headers/vssef4.jpg',
-        'location': 'Lexington, VA',
-        'start_date': 'Mar 27 2015',
-        'end_date': 'Mar 28 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'RMPA2015': {
-        'name': 'Rocky Mountain Psychological Association 2015',
-        'info_url': 'http://www.rockymountainpsych.org/uploads/7/4/2/6/7426961/85th_annual_rmpa_conference_program_hr.pdf',
-        'logo_url': 'http://www.rockymountainpsych.org/uploads/7/4/2/6/7426961/header_images/1397234084.jpg',
-        'location': 'Boise, Idaho',
-        'start_date': 'Apr 09 2015',
-        'end_date': 'Apr 11 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'ARP2015': {
-        'name': 'Association for Research in Personality 2015',
-        'info_url': 'http://www.personality-arp.org/conference/',
-        'logo_url': 'http://www.personality-arp.org/wp-content/uploads/conference/st-louis-arp.jpg',
-        'location': 'St. Louis, MO',
-        'start_date': 'Jun 11 2015',
-        'end_date': 'Jun 13 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'SEP2015': {
-        'name': 'Society of Experimental Psychologists Meeting 2015',
-        'info_url': 'http://faculty.virginia.edu/Society_of_Experimental_Psychologists/',
-        'logo_url': 'http://www.sepsych.org/nav/images/SEP-header.gif',
-        'location': 'Charlottesville, VA',
-        'start_date': 'Apr 17 2015',
-        'end_date': 'Apr 18 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'Reid2015': {
-        'name': 'L. Starling Reid Undergraduate Psychology Conference 2015',
-        'info_url': 'http://avillage.web.virginia.edu/Psych/Conference',
-        'location': 'Charlottesville, VA',
-        'start_date': 'Apr 17 2015',
-        'end_date': 'Apr 17 2015',
-        'logo_url': None,
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'NEEPS2015': {
-        'name': 'Northeastern Evolutionary Psychology Conference 2015',
-        'info_url': 'http://neeps2015.weebly.com/',
-        'location': 'Boston, MA',
-        'start_date': 'Apr 09 2015',
-        'end_date': 'Apr 11 2015',
-        'logo_url': None,
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'VaACS2015': {
-        'name': 'Virginia Section American Chemical Society Student Poster Session 2015',
-        'info_url': 'http://virginia.sites.acs.org/',
-        'logo_url': 'http://virginia.sites.acs.org/Bulletin/15/UVA.jpg',
-        'location': 'Charlottesville, VA',
-        'start_date': 'Apr 17 2015',
-        'end_date': 'Apr 17 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'MADSSCi2015': {
-        'name': 'Mid-Atlantic Directors and Staff of Scientific Cores & Southeastern Association of Shared Services 2015',
-        'info_url': 'http://madssci.abrf.org',
-        'logo_url': 'http://s24.postimg.org/qtc3baefp/2015madssci_seasr.png',
-        'location': 'Charlottesville, VA',
-        'start_date': 'Jun 03 2015',
-        'end_date': 'Jun 5 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'NRAO2015': {
-        'name': 'National Radio Astronomy Observatory Accretion 2015',
-        'info_url': 'https://science.nrao.edu/science/meetings/2015/accretion2015/posters',
-        'location': 'Charlottesville, VA',
-        'start_date': 'Oct 09 2015',
-        'end_date': 'Oct 10 2015',
-        'logo_url': None,
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'ARCS2015': {
-        'name': 'Advancing Research Communication and Scholarship 2015',
-        'info_url': 'http://commons.pacificu.edu/arcs/',
-        'logo_url': 'http://commons.pacificu.edu/assets/md5images/4dfd167454e9f4745360a9550e189323.png',
-        'location': 'Philadelphia, PA',
-        'start_date': 'Apr 26 2015',
-        'end_date': 'Apr 28 2015',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'singlecasedesigns2015': {
-        'name': 'Single Case Designs in Clinical Psychology: Uniting Research and Practice',
-        'info_url': 'https://www.royalholloway.ac.uk/psychology/events/eventsarticles/singlecasedesignsinclinicalpsychologyunitingresearchandpractice.aspx',
-        'logo_url': None,
-        'location': 'London, UK',
-        'start_date': 'Apr 17 2015',
-        'end_date': 'Apr 17 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'OSFM2015': {
-        'name': 'OSF for Meetings 2015',
-        'info_url': None,
-        'logo_url': None,
-        'location': 'Charlottesville, VA',
-        'start_date': None,
-        'end_date': None,
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'JSSP2015': {
-        'name': 'Japanese Society of Social Psychology 2015',
-        'info_url': 'http://www.socialpsychology.jp/conf2015/index.html',
-        'logo_url': None,
-        'location': 'Tokyo, Japan',
-        'start_date': 'Oct 31 2015',
-        'end_date': 'Nov 01 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    '4S2015': {
-        'name': 'Society for Social Studies of Science 2015',
-        'info_url': 'http://www.4sonline.org/meeting',
-        'logo_url': 'http://www.4sonline.org/ee/denver-skyline.jpg',
-        'location': 'Denver, CO',
-        'start_date': 'Nov 11 2015',
-        'end_date': 'Nov 14 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'IARR2016': {
-        'name': 'International Association for Relationship Research 2016',
-        'info_url': 'http://iarr.psych.utoronto.ca/',
-        'logo_url': None,
-        'location': 'Toronto, Canada',
-        'start_date': 'Jul 20 2016',
-        'end_date': 'Jul 24 2016',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'IA2015': {
-        'name': 'Inclusive Astronomy 2015',
-        'info_url': 'https://vanderbilt.irisregistration.com/Home/Site?code=InclusiveAstronomy2015',
-        'logo_url': 'https://vanderbilt.blob.core.windows.net/images/Inclusive%20Astronomy.jpg',
-        'location': 'Nashville, TN',
-        'start_date': 'Jun 17 2015',
-        'end_date': 'Jun 19 2015',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'R2RC': {
-        'name': 'Right to Research Coalition',
-        'info_url': None,
-        'logo_url': None,
-        'location': None,
-        'start_date': None,
-        'end_date': None,
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'OpenCon2015': {
-        'name': 'OpenCon2015',
-        'info_url': 'http://opencon2015.org/',
-        'logo_url': 'http://s8.postimg.org/w9b30pxyd/Open_Con2015_new_logo.png',
-        'location': 'Brussels, Belgium',
-        'start_date': 'Nov 14 2015',
-        'end_date': 'Nov 16 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'ESIP2015': {
-        'name': 'Earth Science Information Partners 2015',
-        'info_url': 'http://esipfed.org/',
-        'logo_url': 'http://s30.postimg.org/m2uz2g4pt/ESIP.png',
-        'location': None,
-        'start_date': None,
-        'end_date': None,
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'SPSP2016': {
-        'name': 'Society for Personality and Social Psychology 2016 ',
-        'info_url': 'http://meeting.spsp.org',
-        'logo_url': None,
-        'location': 'San Diego, CA',
-        'start_date': 'Jan 28 2016',
-        'end_date': 'Jan 30 2016',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'NACIII': {
-        'name': '2015 National Astronomy Consortium (NAC) III Workshop',
-        'info_url': 'https://info.nrao.edu/do/odi/meetings/2015/nac111/',
-        'logo_url': None,
-        'location': 'Washington, DC',
-        'start_date': 'Aug 29 2015',
-        'end_date': 'Aug 30 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'CDS2015': {
-        'name': 'Cognitive Development Society 2015',
-        'info_url': 'http://meetings.cogdevsoc.org/',
-        'logo_url': None,
-        'location': 'Columbus, OH',
-        'start_date': 'Oct 09 2015',
-        'end_date': 'Oct 10 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'SEASR2016': {
-        'name': 'Southeastern Association of Shared Resources 2016',
-        'info_url': 'http://seasr.abrf.org',
-        'logo_url': None,
-        'location': 'Atlanta, GA',
-        'start_date': 'Jun 22 2016',
-        'end_date': 'Jun 24 2016',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'Accretion2015': {
-        'name': 'Observational Evidence of Gas Accretion onto Galaxies?',
-        'info_url': 'https://science.nrao.edu/science/meetings/2015/accretion2015',
-        'logo_url': None,
-        'location':'Charlottesville, VA',
-        'start_date':'Oct 09 2015',
-        'end_date':'Oct 10 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    '2020Futures': {
-        'name': 'U.S. Radio/Millimeter/Submillimeter Science Futures in the 2020s',
-        'info_url': 'https://science.nrao.edu/science/meetings/2015/2020futures/home',
-        'logo_url': None,
-        'location':'Chicago, IL',
-        'start_date':'Dec 15 2015',
-        'end_date':'Dec 17 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'RMPA2016': {
-        'name': 'Rocky Mountain Psychological Association 2016',
-        'info_url': 'http://www.rockymountainpsych.org/convention-info.html',
-        'logo_url': 'http://www.rockymountainpsych.org/uploads/7/4/2/6/7426961/header_images/1397234084.jpg',
-        'location':'Denver, CO',
-        'start_date':'Apr 14 2016',
-        'end_date':'Apr 16 2016',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'CNI2015': {
-        'name': 'Coalition for Networked Information (CNI) Fall Membership Meeting 2015',
-        'info_url': 'https://wp.me/P1LncT-64s',
-        'logo_url': None,
-        'location':'Washington, DC',
-        'start_date':'Dec 14 2015',
-        'end_date':'Dec 16 2015',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': False,
-        'talk': True,
-        'is_meeting': True
-    },
-    'SWPA2016': {
-        'name': 'Southwestern Psychological Association Convention 2016',
-        'info_url': 'https://www.swpsych.org/conv_dates.php',
-        'logo_url': 'http://s28.postimg.org/xbwyqqvx9/SWPAlogo4.jpg',
-        'location':'Dallas, TX',
-        'start_date':'Apr 08 2016',
-        'end_date':'Apr 10 2016',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'ESIP2016W': {
-        'name': 'Earth Science Information Partners Winter Meeting 2016',
-        'info_url': 'http://commons.esipfed.org/2016WinterMeeting',
-        'logo_url': 'http://s30.postimg.org/m2uz2g4pt/ESIP.png',
-        'location':'Washington, DC',
-        'start_date':'Jan 06 2016',
-        'end_date':'Jan 08 2016',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'MiamiBrainhack15': {
-        'name': 'University of Miami Brainhack 2015',
-        'info_url': 'http://brainhack.org/americas/',
-        'logo_url': None,
-        'location': None,
-        'start_date': 'Oct 23 2015',
-        'end_date': 'Oct 25 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'PsiChiRepository': {
-        'name': 'Psi Chi',
-        'location': None,
-        'start_date': None,
-        'end_date': None,
-        'info_url': 'http://www.psichi.org/?ResearchAdvisory#.VmBpeOMrI1g',
-        'logo_url': 'http://s11.postimg.org/4g2451vcz/Psi_Chi_Logo.png',
-        'active': True,
-        'admins': [
-            'research.director@psichi.org',
-        ],
-        'field_names': {
-            'submission1': 'measures',
-            'submission2': 'materials',
-            'submission1_plural': 'measures/scales',
-            'submission2_plural': 'study materials',
-            'meeting_title_type': 'Repository',
-            'add_submission': 'materials',
-            'mail_subject': 'Title',
-            'mail_message_body': 'Measure or material short description',
-            'mail_attachment': 'Your measure/scale or material file(s)'
-        },
-        'is_meeting': False
-    },
-    'GI2015': {
-        'name': 'Genome Informatics 2015',
-        'info_url': 'https://meetings.cshl.edu/meetings.aspx?meet=info&year=15',
-        'logo_url': None,
-        'location':'Cold Spring Harbor, NY' ,
-        'start_date': 'Oct 28 2015',
-        'end_date': 'Oct 31 2015',
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'MADSSCi2016': {
-        'name': 'Mid-Atlantic Directors and Staff of Scientific Cores & Southeastern Association of Shared Services 2016',
-        'info_url': 'http://madssci.abrf.org',
-        'logo_url': 'http://madssci.abrf.org/sites/default/files/madssci-logo-bk.png',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'SMM2015': {
-        'name': 'The Society for Marine Mammalogy',
-        'info_url': 'https://www.marinemammalscience.org/conference/',
-        'logo_url': None,
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'TESS': {
-        'name': 'Time-sharing Experiments for the Social Sciences',
-        'info_url': 'http://www.tessexperiments.org',
-        'logo_url': None,
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': False,
-        'talk': True,
-        'field_names': {
-            'submission1': 'poster',
-            'submission2': 'study',
-            'submission1_plural': 'posters',
-            'submission2_plural': 'studies',
-            'meeting_title_type': 'Studies',
-            'add_submission': 'studies',
-        },
-        'is_meeting': False
-    },
-    'ASCERM2016': {
-        'name': 'ASCE Rocky Mountain Student Conference 2016',
-        'info_url': 'http://luninuxos.com/asce/',
-        'logo_url': 'http://s2.postimg.org/eaduh2ovt/2016_ASCE_Rocky_Mtn_banner.png',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': False,
-        'talk': True,
-        'is_meeting': True
-    },
-    'ARCA2016': {
-        'name': '5th Applied Research Conference in Africa',
-        'info_url': 'http://www.arcaconference.org/',
-        'logo_url': 'http://www.arcaconference.org/images/ARCA_LOGO_NEW.JPG',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': False,
-        'talk': True,
-        'is_meeting': True
-    },
-    'CURCONF2016': {
-        'name': 'CUR Biennial Conference 2016',
-        'info_url': 'http://www.cur.org/conferences_and_events/biennial2016/',
-        'logo_url': 'http://s11.postimg.org/v8feuna4y/Conference_logo_eps.jpg',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'CATALISE2016': {
-        'name': 'Criteria and Terminology Applied to Language Impairments: Synthesising the Evidence (CATALISE) 2016',
-        'info_url': None,
-        'logo_url': None,
-        'active': False,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'Emergy2016': {
-        'name': '9th Biennial Emergy Research Conference',
-        'info_url': 'http://www.cep.ees.ufl.edu/emergy/conferences/ERC09_2016/index.shtml',
-        'logo_url': 'http://s12.postimg.org/uf9ioqmct/emergy.jpg',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'aps2016': {
-        'name': 'Association for Psychological Science 2016',
-        'info_url': 'http://www.psychologicalscience.org/convention',
-        'logo_url': 'http://www.psychologicalscience.org/redesign/wp-content/uploads/2015/03/APS_2016_Banner_990x157.jpg',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'jssp2016': {
-        'name': 'Japanese Society of Social Psychology 2016',
-        'info_url': 'http://www.socialpsychology.jp/conf2016/',
-        'logo_url': None,
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'sepech2016': {
-        'name': 'XI SEPECH - Research Seminar in Human Sciences (Seminário de Pesquisa em Ciências Humanas)',
-        'info_url': 'http://www.uel.br/eventos/sepech/sepech2016/',
-        'logo_url': None,
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'etmaal2016': {
-        'name': 'Etmaal van de Communicatiewetenschap 2016 - Media Psychology',
-        'info_url': 'https://etmaal2016.wordpress.com',
-        'logo_url': None,
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'WSAN2016': {
-        'name': 'WSAN2016 Erasmus University Rotterdam',
-        'info_url': 'http://www.humane.eu/wsan/',
-        'logo_url': None,
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'ContainerStrategies': {
-        'name': 'Container Strategies for Data & Software Preservation',
-        'info_url': 'https://daspos.crc.nd.edu/index.php/workshops/container-strategies-for-data-software-preservation-that-promote-open-science',
-        'logo_url': 'http://s17.postimg.org/8nl1v5mxb/Screen_Shot_2016_03_02_at_9_05_24_PM.png',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'is_meeting': True
-    },
-    'CNI2016': {
-        'name': 'Coalition for Networked Information (CNI) Spring Membership Meeting 2016',
-        'info_url': 'https://wp.me/P1LncT-6fd',
-        'logo_url': None,
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': False,
-        'talk': True,
-        'is_meeting': True
-    },
-    'XGAL2016': {
-        'name': 'Molecular Gas in Galactic Environments 2016',
-        'info_url': 'https://science.nrao.edu/science/meetings/2016/molecular-gas-in-galactic-environments/home',
-        'logo_url': None,
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'DLF2016': {
-        'name': 'Digital Library Federation 2016 DLF Forum',
-        'info_url': 'https://www.diglib.org/forums/2016forum/',
-        'logo_url': 'https://www.diglib.org/wp-content/themes/construct/lib/scripts/timthumb/thumb.php?src=https://www.diglib.org/wp-content/uploads/2016/02/DLF-Forum-2016-Slider-Website-1.png&w=580&h=252&zc=1&q=100',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'start_date': 'Nov 7 2016',
-        'end_date': 'Nov 9 2016',
-        'locztion': 'Milwaukee, Wisconsin',
-        'is_meeting': True
-    },
-    'ESCAN2016': {
-        'name': 'European Society for Cognitive and Affective Neuroscience (ESCAN) 2016',
-        'info_url': 'http://congressos.abreu.pt/escan2016/',
-        'logo_url': 'http://congressos.abreu.pt/escan2016/images/escan-logo.png',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'Reid2016': {
-        'name': 'L. Starling Reid Undergraduate Psychology Conference 2016',
-        'info_url': 'http://cacsprd.web.virginia.edu/Psych/Conference',
-        'logo_url': None,
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'CNS2016': {
-        'name': 'The Cognitive Neuroscience Society (CNS) 2016',
-        'info_url': 'http://www.cogneurosociety.org/annual-meeting/',
-        'logo_url': None,
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'HEPA2016': {
-        'name': 'HEPA Europe Annual Meeting 2016',
-        'info_url': 'http://www.hepaeurope2016.eu/',
-        'logo_url': None,
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-    'OGH': {
-        'name': 'Open Global Health',
-        'info_url': None,
-        'logo_url': 'http://s33.postimg.org/7tjjpvg4f/Drawing.png',
-        'active': True,
-        'admins': [],
-        'public_projects': True,
-        'poster': True,
-        'talk': True,
-        'is_meeting': True
-    },
-}
-
-
-def populate_conferences(dev=False):
-    if dev:
-        Conference.remove()
-    date_format = '%b %d %Y'
-    for meeting, attrs in MEETING_DATA.iteritems():
-        meeting = meeting.strip()
-        admin_emails = attrs.pop('admins', [])
-        admin_objs = []
-        if not dev:
-            for email in admin_emails:
-                try:
-                    user = OSFUser.find_one(Q('username', 'iexact', email))
-                    admin_objs.append(user)
-                except ModularOdmException:
-                    raise RuntimeError('Username {0!r} is not registered.'.format(email))
-
-        # Convert string into datetime object
-        try:
-            attrs['end_date'] = datetime.strptime(attrs.get('end_date'), date_format)
-            attrs['start_date'] = datetime.strptime(attrs.get('start_date'), date_format)
-        except TypeError:
-            print '** Meeting {} does not have a start or end date. **'.format(meeting)
-        custom_fields = attrs.pop('field_names', {})
-
-        conf = Conference(
-            endpoint=meeting, admins=admin_objs, **attrs
-        )
-        conf.field_names.update(custom_fields)
-        try:
-            conf.save()
-        except ModularOdmException:
-            conf = Conference.find_one(Q('endpoint', 'eq', meeting))
-            for key, value in attrs.items():
-                if isinstance(value, dict):
-                    current = getattr(conf, key)
-                    current.update(value)
-                    setattr(conf, key, current)
-                else:
-                    setattr(conf, key, value)
-            conf.admins = admin_objs
-            changed_fields = conf.save()
-            if changed_fields:
-                print('Updated {}: {}'.format(meeting, changed_fields))
-        else:
-            print('Added new Conference: {}'.format(meeting))
-
-
-if __name__ == '__main__':
-    main()
diff --git a/scripts/populate_preprint_providers.py b/scripts/populate_preprint_providers.py
index a1c6bcb9220..0c46d829004 100644
--- a/scripts/populate_preprint_providers.py
+++ b/scripts/populate_preprint_providers.py
@@ -6,8 +6,6 @@
 import sys
 
 from django.db import transaction
-from modularodm import Q
-from modularodm.exceptions import NoResultsFound
 from website.app import init_app
 from website.settings import PREPRINT_PROVIDER_DOMAINS, DOMAIN, PROTOCOL
 import django
@@ -39,8 +37,8 @@ def get_subject_id(name):
 
 def get_license(name):
     try:
-        license = NodeLicense.find_one(Q('name', 'eq', name))
-    except NoResultsFound:
+        license = NodeLicense.objects.get(name=name)
+    except NodeLicense.DoesNotExist:
         raise Exception('License: "{}" not found'.format(name))
     return license
 

From 6cd326a0753b5692e43fc44c0322f318736f9fee Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Fri, 6 Oct 2017 10:32:14 -0400
Subject: [PATCH 038/192] Remove references to modm in docstrings

---
 website/notifications/utils.py | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/website/notifications/utils.py b/website/notifications/utils.py
index 55e1f53b5a8..1cae44fb84f 100644
--- a/website/notifications/utils.py
+++ b/website/notifications/utils.py
@@ -176,7 +176,7 @@ def get_configured_projects(user):
     """Filter all user subscriptions for ones that are on parent projects
      and return the node objects.
 
-    :param user: modular odm User object
+    :param user: OSFUser object
     :return: list of node objects for projects with no parent
     """
     configured_projects = set()
@@ -226,8 +226,8 @@ def get_all_user_subscriptions(user, extra=None):
 def get_all_node_subscriptions(user, node, user_subscriptions=None):
     """ Get all Subscription objects for a node that the user is subscribed to
 
-    :param user: modular odm User object
-    :param node: modular odm Node object
+    :param user: OSFUser object
+    :param node: Node object
     :param user_subscriptions: all Subscription objects that the user is subscribed to
     :return: list of Subscription objects for a node that the user is subscribed to
     """
@@ -238,7 +238,7 @@ def get_all_node_subscriptions(user, node, user_subscriptions=None):
 
 def format_data(user, nodes):
     """ Format subscriptions data for project settings page
-    :param user: modular odm User object
+    :param user: OSFUser object
     :param nodes: list of parent project node objects
     :return: treebeard-formatted data
     """
@@ -324,9 +324,9 @@ def format_file_subscription(user, node_id, path, provider):
 
 def serialize_event(user, subscription=None, node=None, event_description=None):
     """
-    :param user: modular odm User object
-    :param subscription: modular odm Subscription object, use if parsing particular subscription
-    :param node: modular odm Node object, use if node is known
+    :param user: OSFUser object
+    :param subscription: Subscription object, use if parsing particular subscription
+    :param node: Node object, use if node is known
     :param event_description: use if specific subscription is known
     :return: treebeard-formatted subscription event
     """
@@ -366,7 +366,7 @@ def get_parent_notification_type(node, event, user):
     type on the parent project for the same event.
     :param obj node: event owner (Node or User object)
     :param str event: notification event (e.g. 'comment_replies')
-    :param obj user: modular odm User object
+    :param obj user: OSFUser object
     :return: str notification type (e.g. 'email_transactional')
     """
     AbstractNode = apps.get_model('osf.AbstractNode')
@@ -393,8 +393,8 @@ def get_global_notification_type(global_subscription, user):
     """
     Given a global subscription (e.g. NotificationSubscription object with event_type
     'global_file_updated'), find the user's notification type.
-    :param obj global_subscription: modular odm NotificationSubscription object
-    :param obj user: modular odm User object
+    :param obj global_subscription: NotificationSubscription object
+    :param obj user: OSFUser object
     :return: str notification type (e.g. 'email_transactional')
     """
     for notification_type in constants.NOTIFICATION_TYPES:

From 5e3b30bd6ff46c00ac12ec79acdc4f9f87abccd2 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Fri, 6 Oct 2017 11:06:29 -0400
Subject: [PATCH 039/192] Remove modm from framework auth, database,
 encryptions and sessions

---
 framework/auth/core.py           | 19 ++++++++-----------
 framework/auth/utils.py          |  7 ++++---
 framework/auth/views.py          | 13 +++++--------
 framework/database/__init__.py   |  2 +-
 framework/encryption/__init__.py | 16 ----------------
 framework/sessions/utils.py      |  4 ++--
 6 files changed, 20 insertions(+), 41 deletions(-)

diff --git a/framework/auth/core.py b/framework/auth/core.py
index cbc36c1ca98..a9d67ea8100 100644
--- a/framework/auth/core.py
+++ b/framework/auth/core.py
@@ -5,14 +5,13 @@
 import logging
 
 from django.utils import timezone
-from django.db.models import Q as DQ
+from django.db.models import Q
 from django.db.models import Subquery
+from django.core.validators import URLValidator
 from flask import request
 from framework.sessions import session
-from modularodm import Q
 
-from modularodm.exceptions import QueryException, ValidationError, ValidationValueError
-from modularodm.validators import URLValidator
+from osf.exceptions import ValidationValueError, ValidationError
 from osf.utils.requests import check_select_for_update
 from website import security, settings
 
@@ -116,7 +115,7 @@ def get_user(email=None, password=None, token=None, external_id_provider=None, e
 
     if email:
         email = email.strip().lower()
-        qs = qs.filter(DQ(DQ(username=email) | DQ(id=Subquery(Email.objects.filter(address=email).values('user_id')))))
+        qs = qs.filter(Q(Q(username=email) | Q(id=Subquery(Email.objects.filter(address=email).values_list('user_id', flat=True)))))
 
     if password:
         password = password.strip()
@@ -163,17 +162,15 @@ def logged_in(self):
     def private_link(self):
         if not self.private_key:
             return None
+        # Avoid circular import
+        from osf.models import PrivateLink
         try:
-            # Avoid circular import
-            from osf.models import PrivateLink
-            private_link = PrivateLink.find_one(
-                Q('key', 'eq', self.private_key)
-            )
+            private_link = PrivateLink.objects.get(key=self.private_key)
 
             if private_link.is_deleted:
                 return None
 
-        except QueryException:
+        except PrivateLink.DoesNotExist:
             return None
 
         return private_link
diff --git a/framework/auth/utils.py b/framework/auth/utils.py
index 7c8bbe919a0..77235165e4b 100644
--- a/framework/auth/utils.py
+++ b/framework/auth/utils.py
@@ -4,8 +4,8 @@
 from nameparser.parser import HumanName
 import requests
 
+from django.db.models import Q
 from django.core.exceptions import ValidationError
-from modularodm import Q
 
 from website import settings
 
@@ -89,8 +89,9 @@ def privacy_info_handle(info, anonymous, name=False):
 
 def ensure_external_identity_uniqueness(provider, identity, user=None):
     from osf.models import OSFUser
-
-    users_with_identity = OSFUser.find(Q('external_identity.{}.{}'.format(provider, identity), 'ne', None))
+    users_with_identity = OSFUser.objects.filter(
+        Q(**{'external_identity__{}__{}__isnull'.format(provider, identity): False})
+    )
     for existing_user in users_with_identity:
         if user and user._id == existing_user._id:
             continue
diff --git a/framework/auth/views.py b/framework/auth/views.py
index b8b114b702d..5332d169238 100644
--- a/framework/auth/views.py
+++ b/framework/auth/views.py
@@ -8,10 +8,6 @@
 from django.utils import timezone
 from flask import request
 
-from modularodm import Q
-from modularodm.exceptions import NoResultsFound
-from modularodm.exceptions import ValidationValueError
-
 from framework import forms, sentry, status
 from framework import auth as framework_auth
 from framework.auth import exceptions
@@ -32,6 +28,7 @@
 from website.util import web_url_for
 from website.util.time import throttle_period_expired
 from website.util.sanitize import strip_html
+from osf.exceptions import ValidationValueError
 from osf.models.preprint_provider import PreprintProvider
 from osf.utils.requests import check_select_for_update
 
@@ -456,8 +453,8 @@ def auth_email_logout(token, user):
             'message_long': 'The private link you used is expired.'
         })
     try:
-        user_merge = OSFUser.find_one(Q('emails__address', 'eq', unconfirmed_email))
-    except NoResultsFound:
+        user_merge = OSFUser.objects.get(emails__address=unconfirmed_email)
+    except OSFUser.DoesNotExist:
         user_merge = False
     if user_merge:
         remove_sessions_for_user(user_merge)
@@ -724,8 +721,8 @@ def send_confirm_email(user, email, renew=False, external_id_provider=None, exte
     )
 
     try:
-        merge_target = OSFUser.find_one(Q('emails__address', 'eq', email))
-    except NoResultsFound:
+        merge_target = OSFUser.objects.get(emails__address=email)
+    except OSFUser.DoesNotExist:
         merge_target = None
 
     campaign = campaigns.campaign_for_user(user)
diff --git a/framework/database/__init__.py b/framework/database/__init__.py
index 2ad8279e5a3..ca46565110b 100644
--- a/framework/database/__init__.py
+++ b/framework/database/__init__.py
@@ -12,7 +12,7 @@
 
 
 def get_or_http_error(Model, pk_or_query, allow_deleted=False, display_name=None):
-    """Load an instance of Model by primary key or modularodm.Q query. Raise an appropriate
+    """Load an instance of Model by primary key or query. Raise an appropriate
     HTTPError if no record is found or if the query fails to find a unique record
     :param type Model: StoredObject subclass to query
     :param pk_or_query:
diff --git a/framework/encryption/__init__.py b/framework/encryption/__init__.py
index 291c2e1a24b..b3786d312f7 100644
--- a/framework/encryption/__init__.py
+++ b/framework/encryption/__init__.py
@@ -1,7 +1,5 @@
 import jwe
 
-from modularodm.fields import StringField
-
 from website import settings
 
 SENSITIVE_DATA_KEY = jwe.kdf(settings.SENSITIVE_DATA_SECRET.encode('utf-8'), settings.SENSITIVE_DATA_SALT.encode('utf-8'))
@@ -26,17 +24,3 @@ def decrypt(value):
         value = ensure_bytes(value)
         return jwe.decrypt(bytes(value), SENSITIVE_DATA_KEY)
     return None
-
-
-class EncryptedStringField(StringField):
-
-    def to_storage(self, value, translator=None):
-        if not settings.RUNNING_MIGRATION:
-            value = encrypt(value)
-        return super(EncryptedStringField, self).to_storage(value, translator=translator)
-
-    def from_storage(self, value, translator=None):
-        value = super(EncryptedStringField, self).from_storage(value, translator=translator)
-        if settings.RUNNING_MIGRATION:
-            return value
-        return decrypt(value)
diff --git a/framework/sessions/utils.py b/framework/sessions/utils.py
index c76f51a4141..927cf789a2a 100644
--- a/framework/sessions/utils.py
+++ b/framework/sessions/utils.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-from modularodm import Q
 
 
 def remove_sessions_for_user(user):
@@ -11,7 +10,8 @@ def remove_sessions_for_user(user):
     """
     from osf.models import Session
 
-    Session.remove(Q('data.auth_user_id', 'eq', user._id))
+    if user._id:
+        Session.objects.filter(data__auth_user_id=user._id).delete()
 
 
 def remove_session(session):

From eb3b93b077144169e9f66475d337f6cecc94e1d0 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Fri, 6 Oct 2017 11:22:55 -0400
Subject: [PATCH 040/192] Remove old factories and their uses

---
 api/caching/tests/test_caching.py           |   2 +-
 scripts/tests/test_embargo_registrations.py |   3 +-
 scripts/tests/test_retract_registrations.py |   3 +-
 scripts/tests/test_triggered_mails.py       |   2 +-
 tests/factories.py                          | 931 --------------------
 tests/mock_addons.py                        | 841 ------------------
 6 files changed, 4 insertions(+), 1778 deletions(-)
 delete mode 100644 tests/factories.py
 delete mode 100644 tests/mock_addons.py

diff --git a/api/caching/tests/test_caching.py b/api/caching/tests/test_caching.py
index d37735b5950..74a6c38aef1 100644
--- a/api/caching/tests/test_caching.py
+++ b/api/caching/tests/test_caching.py
@@ -11,7 +11,7 @@
 from requests.auth import HTTPBasicAuth
 
 from osf.models import OSFUser
-from tests.factories import create_fake_project
+from scripts.create_fakes import create_fake_project
 from tests.base import DbTestCase
 
 
diff --git a/scripts/tests/test_embargo_registrations.py b/scripts/tests/test_embargo_registrations.py
index d11e64044dd..7f746a0c432 100644
--- a/scripts/tests/test_embargo_registrations.py
+++ b/scripts/tests/test_embargo_registrations.py
@@ -6,8 +6,7 @@
 from nose.tools import *  # noqa
 
 from tests.base import OsfTestCase
-from tests.factories import RegistrationFactory
-from tests.factories import UserFactory
+from osf_tests.factories import RegistrationFactory, UserFactory
 
 from scripts.embargo_registrations import main
 
diff --git a/scripts/tests/test_retract_registrations.py b/scripts/tests/test_retract_registrations.py
index 3f47d575a45..585e95a6069 100644
--- a/scripts/tests/test_retract_registrations.py
+++ b/scripts/tests/test_retract_registrations.py
@@ -6,8 +6,7 @@
 from nose.tools import *  # noqa
 
 from tests.base import OsfTestCase
-from tests.factories import RegistrationFactory
-from tests.factories import UserFactory
+from osf_tests.factories import RegistrationFactory, UserFactory
 
 from scripts.retract_registrations import main
 
diff --git a/scripts/tests/test_triggered_mails.py b/scripts/tests/test_triggered_mails.py
index a0d2beed64b..38b0522faa2 100644
--- a/scripts/tests/test_triggered_mails.py
+++ b/scripts/tests/test_triggered_mails.py
@@ -5,7 +5,7 @@
 from nose.tools import * # noqa
 
 from tests.base import OsfTestCase
-from tests.factories import UserFactory
+from osf_tests.factories import UserFactory
 
 from scripts.triggered_mails import main, find_inactive_users_with_no_inactivity_email_sent_or_queued
 from website import mails
diff --git a/tests/factories.py b/tests/factories.py
deleted file mode 100644
index 94313434805..00000000000
--- a/tests/factories.py
+++ /dev/null
@@ -1,931 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Factories for the OSF models, including an abstract ModularOdmFactory.
-
-Example usage: ::
-
-    >>> from tests.factories import UserFactory
-    >>> user1 = UserFactory()
-    >>> user1.username
-    fred0@example.com
-    >>> user2 = UserFactory()
-    fred1@example.com
-
-Factory boy docs: http://factoryboy.readthedocs.org/
-
-"""
-from django.utils import timezone
-from factory import base, Sequence, SubFactory, post_generation, LazyAttribute
-import mock
-from mock import patch, Mock
-
-from framework.auth import Auth
-from framework.auth.utils import impute_names_model, impute_names
-from framework.mongo import StoredObject
-from tests.base import fake
-from tests.base import get_default_metaschema
-from tests import mock_addons as addons_base
-from addons.wiki.models import NodeWikiPage
-from addons.osfstorage.models import OsfStorageFile
-from osf.models import (Subject, NotificationSubscription, NotificationDigest,
-                        ArchiveJob, ArchiveTarget, Identifier, NodeLicense,
-                        NodeLicenseRecord, Embargo, RegistrationApproval,
-                        Retraction, Sanction, Comment, DraftRegistration,
-                        MetaSchema, AbstractNode, NodeLog,
-                        PrivateLink, Tag, Institution,
-                        ApiOAuth2PersonalToken, ApiOAuth2Application, ExternalAccount,
-                        ExternalProvider, OSFUser, PreprintService,
-                        PreprintProvider, Session, Guid)
-from osf_tests.factories import fake_email
-from website.archiver import ARCHIVER_SUCCESS
-from website.util import permissions
-from website.exceptions import InvalidSanctionApprovalToken
-
-
-# TODO: This is a hack. Check whether FactoryBoy can do this better
-def save_kwargs(**kwargs):
-    for value in kwargs.itervalues():
-        if isinstance(value, StoredObject) and not value._is_loaded:
-            value.save()
-
-
-def FakerAttribute(provider, **kwargs):
-    """Attribute that lazily generates a value using the Faker library.
-    Example: ::
-
-        class UserFactory(ModularOdmFactory):
-            name = FakerAttribute('name')
-    """
-    fake_gen = getattr(fake, provider)
-    if not fake_gen:
-        raise ValueError('{0!r} is not a valid faker provider.'.format(provider))
-    return LazyAttribute(lambda x: fake_gen(**kwargs))
-
-
-class ModularOdmFactory(base.Factory):
-    """Base factory for modular-odm objects.
-    """
-    class Meta:
-        abstract = True
-
-    @classmethod
-    def _build(cls, target_class, *args, **kwargs):
-        """Build an object without saving it."""
-        save_kwargs(**kwargs)
-        return target_class(*args, **kwargs)
-
-    @classmethod
-    def _create(cls, target_class, *args, **kwargs):
-        save_kwargs(**kwargs)
-        instance = target_class(*args, **kwargs)
-        instance.save()
-        return instance
-
-
-class PreprintProviderFactory(ModularOdmFactory):
-    class Meta:
-        model = PreprintProvider
-        abstract = False
-
-    def __init__(self, provider_id, provider_name):
-        super(PreprintProviderFactory, self).__init()
-        self._id = provider_id
-        self.name = provider_name
-        self.save()
-
-
-class UserFactory(ModularOdmFactory):
-    class Meta:
-        model = OSFUser
-        abstract = False
-
-    username = Sequence(lambda n: 'fred{0}@mail.com'.format(n))
-    # Don't use post generation call to set_password because
-    # It slows down the tests dramatically
-    password = 'password'
-    fullname = Sequence(lambda n: 'Freddie Mercury{0}'.format(n))
-    is_registered = True
-    is_claimed = True
-    date_confirmed = timezone.now()
-    merged_by = None
-    email_verifications = {}
-    verification_key = None
-    verification_key_v2 = {}
-
-    @post_generation
-    def set_names(self, create, extracted):
-        parsed = impute_names_model(self.fullname)
-        for key, value in parsed.items():
-            setattr(self, key, value)
-        if create:
-            self.save()
-
-    @post_generation
-    def set_emails(self, create, extracted):
-        if not self.emails.filter(address=self.username).exists():
-            if not self.id:
-                # Perform implicit save to populate M2M
-                self.save()
-            self.emails.create(address=self.username)
-            self.save()
-
-
-class AuthUserFactory(UserFactory):
-    """A user that automatically has an api key, for quick authentication.
-
-    Example: ::
-        user = AuthUserFactory()
-        res = self.app.get(url, auth=user.auth)  # user is "logged in"
-    """
-
-    @post_generation
-    def add_auth(self, create, extracted):
-        self.set_password('password', notify=False)
-        self.save()
-        self.auth = (self.username, 'password')
-
-
-class TagFactory(ModularOdmFactory):
-    class Meta:
-        model = Tag
-
-    _id = Sequence(lambda n: 'scientastic-{}'.format(n))
-
-
-class ApiOAuth2ApplicationFactory(ModularOdmFactory):
-    class Meta:
-        model = ApiOAuth2Application
-
-    owner = SubFactory(UserFactory)
-
-    name = Sequence(lambda n: 'Example OAuth2 Application #{}'.format(n))
-
-    home_url = 'ftp://ftp.ncbi.nlm.nimh.gov/'
-    callback_url = 'http://example.uk'
-
-
-class ApiOAuth2PersonalTokenFactory(ModularOdmFactory):
-    class Meta:
-        model = ApiOAuth2PersonalToken
-
-    owner = SubFactory(UserFactory)
-
-    scopes = 'osf.full_write osf.full_read'
-
-    name = Sequence(lambda n: 'Example OAuth2 Personal Token #{}'.format(n))
-
-
-class PrivateLinkFactory(ModularOdmFactory):
-    class Meta:
-        model = PrivateLink
-
-    name = "link"
-    key = Sequence(lambda n: 'foobar{}'.format(n))
-    anonymous = False
-    creator = SubFactory(AuthUserFactory)
-
-
-class AbstractNodeFactory(ModularOdmFactory):
-    class Meta:
-        model = AbstractNode
-
-    title = 'The meaning of life'
-    description = 'The meaning of life is 42.'
-    creator = SubFactory(AuthUserFactory)
-
-
-class ProjectFactory(AbstractNodeFactory):
-    type = 'osf.node'
-    category = 'project'
-
-
-class CollectionFactory(ProjectFactory):
-    is_collection = True
-
-
-class BookmarkCollectionFactory(CollectionFactory):
-    is_bookmark_collection = True
-
-
-class NodeFactory(AbstractNodeFactory):
-    category = 'hypothesis'
-    parent = SubFactory(ProjectFactory)
-
-
-class PreprintProviderFactory(ModularOdmFactory):
-    name = 'OSFArxiv'
-    description = 'Preprint service for the OSF'
-
-    class Meta:
-        model = PreprintProvider
-
-    @classmethod
-    def _create(cls, target_class, name=None, description=None, *args, **kwargs):
-        provider = target_class(*args, **kwargs)
-        provider.name = name
-        provider.description = description
-        provider.save()
-
-        return provider
-
-
-class PreprintFactory(ModularOdmFactory):
-    creator = None
-    category = 'project'
-    doi = Sequence(lambda n: '10.12345/0{}'.format(n))
-    provider = SubFactory(PreprintProviderFactory)
-    external_url = 'http://hello.org'
-
-    class Meta:
-        model = PreprintService
-
-    @classmethod
-    def _create(cls, target_class, project=None, is_public=True, filename='preprint_file.txt', provider=None, 
-                doi=None, external_url=None, is_published=True, subjects=None, finish=True, *args, **kwargs):
-        save_kwargs(**kwargs)
-        user = None
-        if project:
-            user = project.creator
-        user = kwargs.get('user') or kwargs.get('creator') or user or UserFactory()
-        kwargs['creator'] = user
-        # Original project to be converted to a preprint
-        project = project or AbstractNodeFactory(*args, **kwargs)
-        if user._id not in project.permissions:
-            project.add_contributor(
-                contributor=user,
-                permissions=permissions.CREATOR_PERMISSIONS,
-                log=False,
-                save=False
-            )
-        project.save()
-        project.reload()
-
-        file = OsfStorageFile.create(
-            node=project,
-            path='/{}'.format(filename),
-            name=filename,
-            materialized_path='/{}'.format(filename))
-        file.save()
-
-        preprint = target_class(node=project, provider=provider)
-
-        auth = Auth(project.creator)
-
-        if finish:
-            preprint.set_primary_file(file, auth=auth)
-            subjects = subjects or [[SubjectFactory()._id]]
-            preprint.set_subjects(subjects, auth=auth)
-            preprint.set_published(is_published, auth=auth)
-        
-        if not preprint.is_published:
-            project._has_abandoned_preprint = True
-
-        project.preprint_article_doi = doi
-        project.save()
-        preprint.save()
-
-        return preprint
-
-
-class SubjectFactory(ModularOdmFactory):
-
-    text = Sequence(lambda n: 'Example Subject #{}'.format(n))
-    class Meta:
-        model = Subject
-
-    @classmethod
-    def _create(cls, target_class, text=None, parents=[], *args, **kwargs):
-        try:
-            subject = Subject.objects.get(text=text)
-        except Subject.DoesNotExist:
-            subject = target_class(*args, **kwargs)
-            subject.text = text
-            subject.save()
-            subject.parents.add(*parents)
-            subject.save()
-        return subject
-
-
-class RegistrationFactory(AbstractNodeFactory):
-
-    creator = None
-    # Default project is created if not provided
-    category = 'project'
-
-    @classmethod
-    def _build(cls, target_class, *args, **kwargs):
-        raise Exception('Cannot build registration without saving.')
-
-    @classmethod
-    def _create(cls, target_class, project=None, is_public=False,
-                schema=None, data=None,
-                archive=False, embargo=None, registration_approval=None, retraction=None,
-                *args, **kwargs):
-        save_kwargs(**kwargs)
-        user = None
-        if project:
-            user = project.creator
-        user = kwargs.get('user') or kwargs.get('creator') or user or UserFactory()
-        kwargs['creator'] = user
-        # Original project to be registered
-        project = project or target_class(*args, **kwargs)
-        if user._id not in project.permissions:
-            project.add_contributor(
-                contributor=user,
-                permissions=permissions.CREATOR_PERMISSIONS,
-                log=False,
-                save=False
-            )
-        project.save()
-
-        # Default registration parameters
-        schema = schema or get_default_metaschema()
-        data = data or {'some': 'data'}
-        auth = Auth(user=user)
-        register = lambda: project.register_node(
-            schema=schema,
-            auth=auth,
-            data=data
-        )
-
-        def add_approval_step(reg):
-            if embargo:
-                reg.embargo = embargo
-            elif registration_approval:
-                reg.registration_approval = registration_approval
-            elif retraction:
-                reg.retraction = retraction
-            else:
-                reg.require_approval(reg.creator)
-            reg.save()
-            reg.sanction.add_authorizer(reg.creator, reg)
-            reg.sanction.save()
-
-        with patch('framework.celery_tasks.handlers.enqueue_task'):
-            reg = register()
-            add_approval_step(reg)
-        if not archive:
-            with patch.object(reg.archive_job, 'archive_tree_finished', Mock(return_value=True)):
-                reg.archive_job.status = ARCHIVER_SUCCESS
-                reg.archive_job.save()
-                reg.sanction.state = Sanction.APPROVED
-                reg.sanction.save()
-        ArchiveJob(
-            src_node=project,
-            dst_node=reg,
-            initiator=user,
-        )
-        if is_public:
-            reg.is_public = True
-        reg.save()
-        return reg
-
-
-class WithdrawnRegistrationFactory(AbstractNodeFactory):
-
-    @classmethod
-    def _create(cls, *args, **kwargs):
-
-        registration = kwargs.pop('registration', None)
-        registration.is_public = True
-        user = kwargs.pop('user', registration.creator)
-
-        registration.retract_registration(user)
-        withdrawal = registration.retraction
-
-        for token in withdrawal.approval_state.values():
-            try:
-                withdrawal.approve_retraction(user, token['approval_token'])
-                withdrawal.save()
-
-                return withdrawal
-            except InvalidSanctionApprovalToken:
-                continue
-
-
-class ForkFactory(ModularOdmFactory):
-    class Meta:
-        model = AbstractNode
-
-    @classmethod
-    def _create(cls, *args, **kwargs):
-
-        project = kwargs.pop('project', None)
-        user = kwargs.pop('user', project.creator)
-        title = kwargs.pop('title', None)
-
-        fork = project.fork_node(auth=Auth(user), title=title)
-        fork.save()
-        return fork
-
-
-class NodeLogFactory(ModularOdmFactory):
-    class Meta:
-        model = NodeLog
-    action = 'file_added'
-    user = SubFactory(UserFactory)
-
-
-class SanctionFactory(ModularOdmFactory):
-    class Meta:
-        abstract = True
-
-    @classmethod
-    def _create(cls, target_class, initiated_by=None, approve=False, *args, **kwargs):
-        user = kwargs.get('user') or UserFactory()
-        kwargs['initiated_by'] = initiated_by or user
-        sanction = ModularOdmFactory._create(target_class, *args, **kwargs)
-        reg_kwargs = {
-            'creator': user,
-            'user': user,
-            sanction.SHORT_NAME: sanction
-        }
-        RegistrationFactory(**reg_kwargs)
-        if not approve:
-            sanction.state = Sanction.UNAPPROVED
-            sanction.save()
-        return sanction
-
-class RetractionFactory(SanctionFactory):
-    class Meta:
-        model = Retraction
-    user = SubFactory(UserFactory)
-
-class EmbargoFactory(SanctionFactory):
-    class Meta:
-        model = Embargo
-    user = SubFactory(UserFactory)
-
-class RegistrationApprovalFactory(SanctionFactory):
-    class Meta:
-        model = RegistrationApproval
-    user = SubFactory(UserFactory)
-
-class EmbargoTerminationApprovalFactory(ModularOdmFactory):
-
-    FACTORY_STRATEGY = base.CREATE_STRATEGY
-
-    @classmethod
-    def create(cls, registration=None, user=None, embargo=None, *args, **kwargs):
-        if registration:
-            if not user:
-                user = registration.creator
-        else:
-            user = user or AuthUserFactory()
-            if not embargo:
-                embargo = EmbargoFactory(initiated_by=user)
-                registration = embargo._get_registration()
-            else:
-                registration = RegistrationFactory(creator=user, user=user, embargo=embargo)
-        with mock.patch('website.project.sanctions.Sanction.is_approved', mock.Mock(return_value=True)):
-            with mock.patch('website.project.sanctions.TokenApprovableSanction.ask', mock.Mock()):
-                approval = registration.request_embargo_termination(Auth(user))
-                return approval
-
-
-class NodeWikiFactory(ModularOdmFactory):
-    class Meta:
-        model = NodeWikiPage
-
-    page_name = 'home'
-    content = 'Some content'
-    version = 1
-    user = SubFactory(UserFactory)
-    node = SubFactory(NodeFactory)
-
-    @post_generation
-    def set_node_keys(self, create, extracted):
-        self.node.wiki_pages_current[self.page_name] = self._id
-        if self.node.wiki_pages_versions.get(self.page_name, None):
-            self.node.wiki_pages_versions[self.page_name].append(self._id)
-        else:
-            self.node.wiki_pages_versions[self.page_name] = [self._id]
-        self.node.save()
-
-
-class UnregUserFactory(ModularOdmFactory):
-    """Factory for an unregistered user. Uses User.create_unregistered()
-    to create an instance.
-
-    """
-    class Meta:
-        model = OSFUser
-        abstract = False
-    email = Sequence(lambda n: "brian{0}@queen.com".format(n))
-    fullname = Sequence(lambda n: "Brian May{0}".format(n))
-
-    @classmethod
-    def _build(cls, target_class, *args, **kwargs):
-        '''Build an object without saving it.'''
-        return target_class.create_unregistered(*args, **kwargs)
-
-    @classmethod
-    def _create(cls, target_class, *args, **kwargs):
-        instance = target_class.create_unregistered(*args, **kwargs)
-        instance.save()
-        return instance
-
-class UnconfirmedUserFactory(ModularOdmFactory):
-    """Factory for a user that has not yet confirmed their primary email
-    address (username).
-    """
-    class Meta:
-        model = OSFUser
-    username = Sequence(lambda n: 'roger{0}@queen.com'.format(n))
-    fullname = Sequence(lambda n: 'Roger Taylor{0}'.format(n))
-    password = 'killerqueen'
-
-    @classmethod
-    def _build(cls, target_class, username, password, fullname):
-        '''Build an object without saving it.'''
-        return target_class.create_unconfirmed(
-            username=username, password=password, fullname=fullname
-        )
-
-    @classmethod
-    def _create(cls, target_class, username, password, fullname):
-        instance = target_class.create_unconfirmed(
-            username=username, password=password, fullname=fullname
-        )
-        instance.save()
-        return instance
-
-
-class AuthFactory(base.Factory):
-    class Meta:
-        model = Auth
-    user = SubFactory(UserFactory)
-
-
-class ProjectWithAddonFactory(ProjectFactory):
-    """Factory for a project that has an addon. The addon will be added to
-    both the Node and the creator records. ::
-
-        p = ProjectWithAddonFactory(addon='github')
-        p.get_addon('github') # => github node settings object
-        p.creator.get_addon('github') # => github user settings object
-
-    """
-
-    # TODO: Should use mock addon objects
-    @classmethod
-    def _build(cls, target_class, addon='s3', *args, **kwargs):
-        '''Build an object without saving it.'''
-        instance = ProjectFactory._build(target_class, *args, **kwargs)
-        auth = Auth(user=instance.creator)
-        instance.add_addon(addon, auth)
-        instance.creator.add_addon(addon)
-        return instance
-
-    @classmethod
-    def _create(cls, target_class, addon='s3', *args, **kwargs):
-        instance = ProjectFactory._create(target_class, *args, **kwargs)
-        auth = Auth(user=instance.creator)
-        instance.add_addon(addon, auth)
-        instance.creator.add_addon(addon)
-        instance.save()
-        return instance
-
-# Deprecated unregistered user factory, used mainly for testing migration
-
-class DeprecatedUnregUser(object):
-    '''A dummy "model" for an unregistered user.'''
-    def __init__(self, nr_name, nr_email):
-        self.nr_name = nr_name
-        self.nr_email = nr_email
-
-    def to_dict(self):
-        return {"nr_name": self.nr_name, "nr_email": self.nr_email}
-
-
-class DeprecatedUnregUserFactory(base.Factory):
-    """Generates a dictonary represenation of an unregistered user, in the
-    format expected by the OSF.
-    ::
-
-        >>> from tests.factories import UnregUserFactory
-        >>> UnregUserFactory()
-        {'nr_name': 'Tom Jones0', 'nr_email': 'tom0@example.com'}
-        >>> UnregUserFactory()
-        {'nr_name': 'Tom Jones1', 'nr_email': 'tom1@example.com'}
-    """
-    class Meta:
-        model = DeprecatedUnregUser
-
-    nr_name = Sequence(lambda n: "Tom Jones{0}".format(n))
-    nr_email = Sequence(lambda n: "tom{0}@mail.com".format(n))
-
-    @classmethod
-    def _create(cls, target_class, *args, **kwargs):
-        return target_class(*args, **kwargs).to_dict()
-
-    _build = _create
-
-
-class CommentFactory(ModularOdmFactory):
-    class Meta:
-        model = Comment
-    content = Sequence(lambda n: 'Comment {0}'.format(n))
-    is_public = True
-
-    @classmethod
-    def _build(cls, target_class, *args, **kwargs):
-        node = kwargs.pop('node', None) or NodeFactory()
-        user = kwargs.pop('user', None) or node.creator
-        target = kwargs.pop('target', None) or Guid.load(node._id)
-        content = kwargs.pop('content', None) or 'Test comment.'
-        instance = target_class(
-            node=node,
-            user=user,
-            target=target,
-            content=content,
-            *args, **kwargs
-        )
-        if isinstance(target.referent, target_class):
-            instance.root_target = target.referent.root_target
-        else:
-            instance.root_target = target
-        return instance
-
-    @classmethod
-    def _create(cls, target_class, *args, **kwargs):
-        node = kwargs.pop('node', None) or NodeFactory()
-        user = kwargs.pop('user', None) or node.creator
-        target = kwargs.pop('target', None) or Guid.load(node._id)
-        content = kwargs.pop('content', None) or 'Test comment.'
-        instance = target_class(
-            node=node,
-            user=user,
-            target=target,
-            content=content,
-            *args, **kwargs
-        )
-        if isinstance(target.referent, target_class):
-            instance.root_target = target.referent.root_target
-        else:
-            instance.root_target = target
-        instance.save()
-        return instance
-
-
-class InstitutionFactory(ProjectFactory):
-
-    default_institution_attributes = {
-        '_id': fake.md5,
-        'name': fake.company,
-        'logo_name': fake.file_name,
-        'auth_url': fake.url,
-        'domains': lambda: [fake.url()],
-        'email_domains': lambda: [fake.domain_name()],
-    }
-
-    def _build(cls, target_class, *args, **kwargs):
-        inst = ProjectFactory._build(target_class)
-        for inst_attr, node_attr in Institution.attribute_map.items():
-            default = cls.default_institution_attributes.get(inst_attr)
-            if callable(default):
-                default = default()
-            setattr(inst, node_attr, kwargs.pop(inst_attr, default))
-        for key, val in kwargs.items():
-            setattr(inst, key, val)
-        return Institution(inst)
-
-    @classmethod
-    def _create(cls, target_class, *args, **kwargs):
-        inst = ProjectFactory._build(target_class)
-        for inst_attr, node_attr in Institution.attribute_map.items():
-            default = cls.default_institution_attributes.get(inst_attr)
-            if callable(default):
-                default = default()
-            setattr(inst, node_attr, kwargs.pop(inst_attr, default))
-        for key, val in kwargs.items():
-            setattr(inst, key, val)
-        inst.save()
-        return Institution(inst)
-
-class NotificationSubscriptionFactory(ModularOdmFactory):
-    class Meta:
-        model = NotificationSubscription
-
-
-class NotificationDigestFactory(ModularOdmFactory):
-    class Meta:
-        model = NotificationDigest
-
-
-class ExternalAccountFactory(ModularOdmFactory):
-    class Meta:
-        model = ExternalAccount
-
-    provider = 'mock2'
-    provider_id = Sequence(lambda n: 'user-{0}'.format(n))
-    provider_name = 'Fake Provider'
-    display_name = Sequence(lambda n: 'user-{0}'.format(n))
-
-
-class SessionFactory(ModularOdmFactory):
-    class Meta:
-        model = Session
-
-    @classmethod
-    def _build(cls, target_class, *args, **kwargs):
-        user = kwargs.pop('user', None)
-        instance = target_class(*args, **kwargs)
-
-        if user:
-            instance.data['auth_user_username'] = user.username
-            instance.data['auth_user_id'] = user._primary_key
-            instance.data['auth_user_fullname'] = user.fullname
-
-        return instance
-
-    @classmethod
-    def _create(cls, target_class, *args, **kwargs):
-        instance = cls._build(target_class, *args, **kwargs)
-        instance.save()
-        return instance
-
-
-class MockOAuth2Provider(ExternalProvider):
-    name = "Mock OAuth 2.0 Provider"
-    short_name = "mock2"
-
-    client_id = "mock2_client_id"
-    client_secret = "mock2_client_secret"
-
-    auth_url_base = "https://mock2.com/auth"
-    callback_url = "https://mock2.com/callback"
-    auto_refresh_url = "https://mock2.com/callback"
-    refresh_time = 300
-    expiry_time = 9001
-
-    def handle_callback(self, response):
-        return {
-            'provider_id': 'mock_provider_id'
-        }
-
-
-class MockAddonNodeSettings(addons_base.AddonNodeSettingsBase):
-    pass
-
-
-class MockAddonUserSettings(addons_base.AddonUserSettingsBase):
-    pass
-
-
-class MockAddonUserSettingsMergeable(addons_base.AddonUserSettingsBase):
-    def merge(self):
-        pass
-
-
-class MockOAuthAddonUserSettings(addons_base.AddonOAuthUserSettingsBase):
-    oauth_provider = MockOAuth2Provider
-
-
-class MockOAuthAddonNodeSettings(addons_base.AddonOAuthNodeSettingsBase):
-    oauth_provider = MockOAuth2Provider
-
-    folder_id = 'foo'
-    folder_name = 'Foo'
-    folder_path = '/Foo'
-
-
-class ArchiveTargetFactory(ModularOdmFactory):
-    class Meta:
-        model = ArchiveTarget
-
-
-class ArchiveJobFactory(ModularOdmFactory):
-    class Meta:
-        model = ArchiveJob
-
-class DraftRegistrationFactory(ModularOdmFactory):
-    class Meta:
-        model = DraftRegistration
-
-    @classmethod
-    def _create(cls, *args, **kwargs):
-        branched_from = kwargs.get('branched_from')
-        initiator = kwargs.get('initiator')
-        registration_schema = kwargs.get('registration_schema')
-        registration_metadata = kwargs.get('registration_metadata')
-        if not branched_from:
-            project_params = {}
-            if initiator:
-                project_params['creator'] = initiator
-            branched_from = ProjectFactory(**project_params)
-        initiator = branched_from.creator
-        registration_schema = registration_schema or MetaSchema.find()[0]
-        registration_metadata = registration_metadata or {}
-        draft = DraftRegistration.create_from_node(
-            branched_from,
-            user=initiator,
-            schema=registration_schema,
-            data=registration_metadata,
-        )
-        return draft
-
-class NodeLicenseRecordFactory(ModularOdmFactory):
-    class Meta:
-        model = NodeLicenseRecord
-
-    @classmethod
-    def _create(cls, *args, **kwargs):
-        kwargs['node_license'] = kwargs.get(
-            'node_license',
-            NodeLicense.objects.get(name='No license')
-        )
-        return super(NodeLicenseRecordFactory, cls)._create(*args, **kwargs)
-
-
-class IdentifierFactory(ModularOdmFactory):
-    class Meta:
-        model = Identifier
-
-    referent = SubFactory(RegistrationFactory)
-    value = Sequence(lambda n: 'carp:/2460{}'.format(n))
-
-    @classmethod
-    def _create(cls, *args, **kwargs):
-        kwargs['category'] = kwargs.get('category', 'carpid')
-
-        return super(IdentifierFactory, cls)._create(*args, **kwargs)
-
-
-def render_generations_from_parent(parent, creator, num_generations):
-    current_gen = parent
-    for generation in xrange(0, num_generations):
-        next_gen = NodeFactory(
-            parent=current_gen,
-            creator=creator,
-            title=fake.sentence(),
-            description=fake.paragraph()
-        )
-        current_gen = next_gen
-    return current_gen
-
-
-def render_generations_from_node_structure_list(parent, creator, node_structure_list):
-    new_parent = None
-    for node_number in node_structure_list:
-        if isinstance(node_number, list):
-            render_generations_from_node_structure_list(new_parent or parent, creator, node_number)
-        else:
-            new_parent = render_generations_from_parent(parent, creator, node_number)
-    return new_parent
-
-
-def create_fake_user():
-    email = fake_email()
-    name = fake.name()
-    parsed = impute_names(name)
-    user = UserFactory(
-        username=email,
-        fullname=name,
-        is_registered=True,
-        is_claimed=True,
-        date_registered=fake.date_time(),
-        emails=[email],
-        **parsed
-    )
-    user.set_password('faker123')
-    user.save()
-    return user
-
-
-def create_fake_project(creator, n_users, privacy, n_components, name, n_tags, presentation_name, is_registration):
-    auth = Auth(user=creator)
-    project_title = name if name else fake.sentence()
-    if not is_registration:
-        project = ProjectFactory(
-            title=project_title,
-            description=fake.paragraph(),
-            creator=creator
-        )
-    else:
-        project = RegistrationFactory(
-            title=project_title,
-            description=fake.paragraph(),
-            creator=creator
-        )
-    project.set_privacy(privacy)
-    for _ in range(n_users):
-        contrib = create_fake_user()
-        project.add_contributor(contrib, auth=auth)
-    if isinstance(n_components, int):
-        for _ in range(n_components):
-            NodeFactory(
-                project=project,
-                title=fake.sentence(),
-                description=fake.paragraph(),
-                creator=creator
-            )
-    elif isinstance(n_components, list):
-        render_generations_from_node_structure_list(project, creator, n_components)
-    for _ in range(n_tags):
-        project.add_tag(fake.word(), auth=auth)
-    if presentation_name is not None:
-        project.add_tag(presentation_name, auth=auth)
-        project.add_tag('poster', auth=auth)
-
-    project.save()
-    return project
diff --git a/tests/mock_addons.py b/tests/mock_addons.py
deleted file mode 100644
index a8aab29f15c..00000000000
--- a/tests/mock_addons.py
+++ /dev/null
@@ -1,841 +0,0 @@
-# -*- coding: utf-8 -*-
-import os
-from time import sleep
-
-from bson import ObjectId
-import markupsafe
-import requests
-
-from modularodm import fields
-
-from framework.auth import Auth
-from framework.auth.decorators import must_be_logged_in
-from framework.exceptions import (
-    PermissionsError,
-    HTTPError,
-)
-from framework.mongo import StoredObject
-
-from website import settings
-from addons.base import serializer, logger
-from osf.models import OSFUser, AbstractNode
-from website.util import waterbutler_url_for
-
-from website.oauth.signals import oauth_complete
-
-
-class AddonSettingsBase(StoredObject):
-
-    _id = fields.StringField(default=lambda: str(ObjectId()))
-    deleted = fields.BooleanField(default=False)
-
-    _meta = {
-        'abstract': True,
-    }
-
-    def delete(self, save=True):
-        self.deleted = True
-        self.on_delete()
-        if save:
-            self.save()
-
-    def undelete(self, save=True):
-        self.deleted = False
-        self.on_add()
-        if save:
-            self.save()
-
-    def to_json(self, user):
-        return {
-            'addon_short_name': self.config.short_name,
-            'addon_full_name': self.config.full_name,
-        }
-
-    #############
-    # Callbacks #
-    #############
-
-    def on_add(self):
-        """Called when the addon is added (or re-added) to the owner (User or Node)."""
-        pass
-
-    def on_delete(self):
-        """Called when the addon is deleted from the owner (User or Node)."""
-        pass
-
-
-class AddonUserSettingsBase(AddonSettingsBase):
-
-    owner = fields.ForeignField('user', index=True)
-
-    _meta = {
-        'abstract': True,
-    }
-
-    def __repr__(self):
-        if self.owner:
-            return '<{cls} owned by user {uid}>'.format(cls=self.__class__.__name__, uid=self.owner._id)
-        else:
-            return '<{cls} with no owner>'.format(cls=self.__class__.__name__)
-
-    @property
-    def public_id(self):
-        return None
-
-    @property
-    def has_auth(self):
-        """Whether the user has added credentials for this addon."""
-        return False
-
-    # TODO: Test me @asmacdo
-    @property
-    def nodes_authorized(self):
-        """Get authorized, non-deleted nodes. Returns an empty list if the
-        attached add-on does not include a node model.
-
-        """
-        try:
-            schema = self.config.settings_models['node']
-        except KeyError:
-            return []
-        return [
-            node_addon.owner
-            for node_addon in schema.objects.filter(user_settings=self)
-            if node_addon.owner and not node_addon.owner.is_deleted
-        ]
-
-    @property
-    def can_be_merged(self):
-        return hasattr(self, 'merge')
-
-    def to_json(self, user):
-        ret = super(AddonUserSettingsBase, self).to_json(user)
-        ret['has_auth'] = self.has_auth
-        ret.update({
-            'nodes': [
-                {
-                    '_id': node._id,
-                    'url': node.url,
-                    'title': node.title,
-                    'registered': node.is_registration,
-                    'api_url': node.api_url
-                }
-                for node in self.nodes_authorized
-            ]
-        })
-        return ret
-
-
-@oauth_complete.connect
-def oauth_complete(provider, account, user):
-    if not user or not account:
-        return
-    user.add_addon(account.provider, Auth(user))
-    user.save()
-
-
-class AddonOAuthUserSettingsBase(AddonUserSettingsBase):
-    _meta = {
-        'abstract': True,
-    }
-
-    # Keeps track of what nodes have been given permission to use external
-    #   accounts belonging to the user.
-    oauth_grants = fields.DictionaryField()
-    # example:
-    # {
-    #     '<Node._id>': {
-    #         '<ExternalAccount._id>': {
-    #             <metadata>
-    #         },
-    #     }
-    # }
-    #
-    # metadata here is the specific to each addon.
-
-    # The existence of this property is used to determine whether or not
-    #   an addon instance is an "OAuth addon" in
-    #   AddonModelMixin.get_oauth_addons().
-    oauth_provider = None
-
-    serializer = serializer.OAuthAddonSerializer
-
-    @property
-    def has_auth(self):
-        return bool(self.external_accounts)
-
-    @property
-    def external_accounts(self):
-        """The user's list of ``ExternalAccount`` instances for this provider"""
-        return [
-            x for x in self.owner.external_accounts.all()
-            if x.provider == self.oauth_provider.short_name
-        ]
-
-    def delete(self, save=True):
-        for account in self.external_accounts.all():
-            self.revoke_oauth_access(account, save=False)
-        super(AddonOAuthUserSettingsBase, self).delete(save=save)
-
-    def grant_oauth_access(self, node, external_account, metadata=None):
-        """Give a node permission to use an ``ExternalAccount`` instance."""
-        # ensure the user owns the external_account
-        if not self.owner.external_accounts.filter(id=external_account.id).exists():
-            raise PermissionsError()
-
-        metadata = metadata or {}
-
-        # create an entry for the node, if necessary
-        if node._id not in self.oauth_grants:
-            self.oauth_grants[node._id] = {}
-
-        # create an entry for the external account on the node, if necessary
-        if external_account._id not in self.oauth_grants[node._id]:
-            self.oauth_grants[node._id][external_account._id] = {}
-
-        # update the metadata with the supplied values
-        for key, value in metadata.iteritems():
-            self.oauth_grants[node._id][external_account._id][key] = value
-
-        self.save()
-
-    @must_be_logged_in
-    def revoke_oauth_access(self, external_account, auth, save=True):
-        """Revoke all access to an ``ExternalAccount``.
-
-        TODO: This should accept node and metadata params in the future, to
-            allow fine-grained revocation of grants. That's not yet been needed,
-            so it's not yet been implemented.
-        """
-        for node in self.get_nodes_with_oauth_grants(external_account):
-            try:
-                node.get_addon(external_account.provider, deleted=True).deauthorize(auth=auth)
-            except AttributeError:
-                # No associated addon settings despite oauth grant
-                # Remove grant in `for` loop below
-                pass
-
-        if OSFUser.objects.filter(external_accounts=external_account._id).count() == 1:
-            # Only this user is using the account, so revoke remote access as well.
-            self.revoke_remote_oauth_access(external_account)
-
-        for key in self.oauth_grants:
-            self.oauth_grants[key].pop(external_account._id, None)
-        if save:
-            self.save()
-
-    def revoke_remote_oauth_access(self, external_account):
-        """ Makes outgoing request to remove the remote oauth grant
-        stored by third-party provider.
-
-        Individual addons must override this method, as it is addon-specific behavior.
-        Not all addon providers support this through their API, but those that do
-        should also handle the case where this is called with an external_account
-        with invalid credentials, to prevent a user from being unable to disconnect
-        an account.
-        """
-        pass
-
-    def verify_oauth_access(self, node, external_account, metadata=None):
-        """Verify that access has been previously granted.
-
-        If metadata is not provided, this checks only if the node can access the
-        account. This is suitable to check to see if the node's addon settings
-        is still connected to an external account (i.e., the user hasn't revoked
-        it in their user settings pane).
-
-        If metadata is provided, this checks to see that all key/value pairs
-        have been granted. This is suitable for checking access to a particular
-        folder or other resource on an external provider.
-        """
-
-        metadata = metadata or {}
-
-        # ensure the grant exists
-        try:
-            grants = self.oauth_grants[node._id][external_account._id]
-        except KeyError:
-            return False
-
-        # Verify every key/value pair is in the grants dict
-        for key, value in metadata.iteritems():
-            if key not in grants or grants[key] != value:
-                return False
-
-        return True
-
-    def get_nodes_with_oauth_grants(self, external_account):
-        # Generator of nodes which have grants for this external account
-        for node_id, grants in self.oauth_grants.iteritems():
-            node = AbstractNode.load(node_id)
-            if external_account._id in grants.keys() and not node.is_deleted:
-                yield node
-
-    def get_attached_nodes(self, external_account):
-        for node in self.get_nodes_with_oauth_grants(external_account):
-            if node is None:
-                continue
-            node_settings = node.get_addon(self.oauth_provider.short_name)
-
-            if node_settings is None:
-                continue
-
-            if node_settings.external_account == external_account:
-                yield node
-
-    def merge(self, user_settings):
-        """Merge `user_settings` into this instance"""
-        if user_settings.__class__ is not self.__class__:
-            raise TypeError('Cannot merge different addons')
-
-        for node_id, data in user_settings.oauth_grants.iteritems():
-            if node_id not in self.oauth_grants:
-                self.oauth_grants[node_id] = data
-            else:
-                node_grants = user_settings.oauth_grants[node_id].iteritems()
-                for ext_acct, meta in node_grants:
-                    if ext_acct not in self.oauth_grants[node_id]:
-                        self.oauth_grants[node_id][ext_acct] = meta
-                    else:
-                        for k, v in meta:
-                            if k not in self.oauth_grants[node_id][ext_acct]:
-                                self.oauth_grants[node_id][ext_acct][k] = v
-
-        user_settings.oauth_grants = {}
-        user_settings.save()
-
-        try:
-            config = settings.ADDONS_AVAILABLE_DICT[
-                self.oauth_provider.short_name
-            ]
-            Model = config.settings_models['node']
-        except KeyError:
-            pass
-        else:
-            connected = Model.objects.filter(user_settings=user_settings)
-            for node_settings in connected:
-                node_settings.user_settings = self
-                node_settings.save()
-
-        self.save()
-
-    def to_json(self, user):
-        ret = super(AddonOAuthUserSettingsBase, self).to_json(user)
-
-        ret['accounts'] = self.serializer(
-            user_settings=self
-        ).serialized_accounts
-
-        return ret
-
-    #############
-    # Callbacks #
-    #############
-
-    def on_delete(self):
-        """When the user deactivates the addon, clear auth for connected nodes.
-        """
-        super(AddonOAuthUserSettingsBase, self).on_delete()
-        nodes = [AbstractNode.load(node_id) for node_id in self.oauth_grants.keys()]
-        for node in nodes:
-            node_addon = node.get_addon(self.oauth_provider.short_name)
-            if node_addon and node_addon.user_settings == self:
-                node_addon.clear_auth()
-
-class AddonNodeSettingsBase(AddonSettingsBase):
-
-    owner = fields.ForeignField('node', index=True)
-
-    _meta = {
-        'abstract': True,
-    }
-
-    @property
-    def complete(self):
-        """Whether or not this addon is properly configured
-        :rtype bool:
-        """
-        raise NotImplementedError()
-
-    @property
-    def configured(self):
-        """Whether or not this addon has had a folder connected.
-        :rtype bool:
-        """
-        return self.complete
-
-    @property
-    def has_auth(self):
-        """Whether the node has added credentials for this addon."""
-        return False
-
-    def to_json(self, user):
-        ret = super(AddonNodeSettingsBase, self).to_json(user)
-        ret.update({
-            'user': {
-                'permissions': self.owner.get_permissions(user)
-            },
-            'node': {
-                'id': self.owner._id,
-                'api_url': self.owner.api_url,
-                'url': self.owner.url,
-                'is_registration': self.owner.is_registration,
-            },
-            'node_settings_template': os.path.basename(self.config.node_settings_template),
-        })
-        return ret
-
-    def render_config_error(self, data):
-        pass
-
-    #############
-    # Callbacks #
-    #############
-
-    def before_page_load(self, node, user):
-        """
-
-        :param User user:
-        :param Node node:
-
-        """
-        pass
-
-    def before_remove_contributor(self, node, removed):
-        """
-        :param Node node:
-        :param User removed:
-        """
-        pass
-
-    def after_remove_contributor(self, node, removed, auth=None):
-        """
-        :param Node node:
-        :param User removed:
-        """
-        pass
-
-    def before_make_public(self, node):
-        """
-
-        :param Node node:
-        :returns: Alert message or None
-
-        """
-        pass
-
-    def before_make_private(self, node):
-        """
-
-        :param Node node:
-        :returns: Alert message or None
-
-        """
-        pass
-
-    def after_set_privacy(self, node, permissions):
-        """
-
-        :param Node node:
-        :param str permissions:
-
-        """
-        pass
-
-    def before_fork(self, node, user):
-        """Return warning text to display if user auth will be copied to a
-        fork.
-        :param Node node:
-        :param Uder user
-        :returns Alert message
-        """
-
-        if hasattr(self, 'user_settings'):
-            if self.user_settings is None:
-                return (
-                    u'Because you have not configured the authorization for this {addon} add-on, this '
-                    u'{category} will not transfer your authentication to '
-                    u'the forked {category}.'
-                ).format(
-                    addon=self.config.full_name,
-                    category=node.project_or_component,
-                )
-
-            elif self.user_settings and self.user_settings.owner == user:
-                return (
-                    u'Because you have authorized the {addon} add-on for this '
-                    u'{category}, forking it will also transfer your authentication to '
-                    u'the forked {category}.'
-                ).format(
-                    addon=self.config.full_name,
-                    category=node.project_or_component,
-                )
-            else:
-                return (
-                    u'Because the {addon} add-on has been authorized by a different '
-                    u'user, forking it will not transfer authentication to the forked '
-                    u'{category}.'
-                ).format(
-                    addon=self.config.full_name,
-                    category=node.project_or_component,
-                )
-
-    def after_fork(self, node, fork, user, save=True):
-        """
-
-        :param Node node:
-        :param Node fork:
-        :param User user:
-        :param bool save:
-        :returns: Tuple of cloned settings and alert message
-
-        """
-        clone = self.clone()
-        clone.owner = fork
-
-        if save:
-            clone.save()
-
-        return clone, None
-
-    def before_register(self, node, user):
-        """
-
-        :param Node node:
-        :param User user:
-        :returns: Alert message
-
-        """
-        pass
-
-    def after_register(self, node, registration, user, save=True):
-        """
-
-        :param Node node:
-        :param Node registration:
-        :param User user:
-        :param bool save:
-        :returns: Tuple of cloned settings and alert message
-
-        """
-        return None, None
-
-    def after_delete(self, node, user):
-        """
-
-        :param Node node:
-        :param User user:
-
-        """
-        pass
-
-############
-# Archiver #
-############
-class GenericRootNode(object):
-    path = '/'
-    name = ''
-
-class StorageAddonBase(object):
-    """
-    Mixin class for traversing file trees of addons with files
-    """
-
-    root_node = GenericRootNode()
-
-    @property
-    def archive_folder_name(self):
-        name = 'Archive of {addon}'.format(addon=self.config.full_name)
-        folder_name = getattr(self, 'folder_name', '').lstrip('/').strip()
-        if folder_name:
-            name = name + ': {folder}'.format(folder=folder_name)
-        return name
-
-    def _get_fileobj_child_metadata(self, filenode, user, cookie=None, version=None):
-        kwargs = dict(
-            provider=self.config.short_name,
-            path=filenode.get('path', ''),
-            node=self.owner,
-            user=user,
-            view_only=True,
-        )
-        if cookie:
-            kwargs['cookie'] = cookie
-        if version:
-            kwargs['version'] = version
-        metadata_url = waterbutler_url_for(
-            'metadata',
-            _internal=True,
-            **kwargs
-        )
-        res = requests.get(metadata_url)
-        if res.status_code != 200:
-            raise HTTPError(res.status_code, data={
-                'error': res.json(),
-            })
-        # TODO: better throttling?
-        sleep(1.0 / 5.0)
-        return res.json().get('data', [])
-
-    def _get_file_tree(self, filenode=None, user=None, cookie=None, version=None):
-        """
-        Recursively get file metadata
-        """
-        filenode = filenode or {
-            'path': '/',
-            'kind': 'folder',
-            'name': self.root_node.name,
-        }
-        if filenode.get('kind') == 'file':
-            return filenode
-        elif 'size' in filenode:
-            return filenode
-        kwargs = {
-            'version': version,
-            'cookie': cookie,
-        }
-        filenode['children'] = [
-            self._get_file_tree(child, user, cookie=cookie)
-            for child in self._get_fileobj_child_metadata(filenode, user, **kwargs)
-        ]
-        return filenode
-
-class AddonOAuthNodeSettingsBase(AddonNodeSettingsBase):
-    _meta = {
-        'abstract': True,
-    }
-
-    # TODO: Validate this field to be sure it matches the provider's short_name
-    # NOTE: Do not set this field directly. Use ``set_auth()``
-    external_account = fields.ForeignField('externalaccount')
-
-    # NOTE: Do not set this field directly. Use ``set_auth()``
-    user_settings = fields.AbstractForeignField()
-
-    # The existence of this property is used to determine whether or not
-    #   an addon instance is an "OAuth addon" in
-    #   AddonModelMixin.get_oauth_addons().
-    oauth_provider = None
-
-    @property
-    def folder_id(self):
-        raise NotImplementedError(
-            "AddonOAuthNodeSettingsBase subclasses must expose a 'folder_id' property."
-        )
-
-    @property
-    def folder_name(self):
-        raise NotImplementedError(
-            "AddonOAuthNodeSettingsBase subclasses must expose a 'folder_name' property."
-        )
-
-    @property
-    def folder_path(self):
-        raise NotImplementedError(
-            "AddonOAuthNodeSettingsBase subclasses must expose a 'folder_path' property."
-        )
-
-    @property
-    def nodelogger(self):
-        auth = None
-        if self.user_settings:
-            auth = Auth(self.user_settings.owner)
-        self._logger_class = getattr(
-            self,
-            '_logger_class',
-            type(
-                '{0}NodeLogger'.format(self.config.short_name.capitalize()),
-                (logger.AddonNodeLogger, ),
-                {'addon_short_name': self.config.short_name}
-            )
-        )
-        return self._logger_class(
-            node=self.owner,
-            auth=auth
-        )
-
-    @property
-    def complete(self):
-        return bool(
-            self.has_auth and
-            self.external_account and
-            self.user_settings.verify_oauth_access(
-                node=self.owner,
-                external_account=self.external_account,
-            )
-        )
-
-    @property
-    def configured(self):
-        return bool(
-            self.complete and
-            (self.folder_id or self.folder_name or self.folder_path)
-        )
-
-    @property
-    def has_auth(self):
-        """Instance has an external account and *active* permission to use it"""
-        return bool(
-            self.user_settings and self.user_settings.has_auth
-        ) and bool(
-            self.external_account and self.user_settings.verify_oauth_access(
-                node=self.owner,
-                external_account=self.external_account
-            )
-        )
-
-    def clear_settings(self):
-        raise NotImplementedError(
-            "AddonOAuthNodeSettingsBase subclasses must expose a 'clear_settings' method."
-        )
-
-    def set_auth(self, external_account, user, metadata=None, log=True):
-        """Connect the node addon to a user's external account.
-
-        This method also adds the permission to use the account in the user's
-        addon settings.
-        """
-        # tell the user's addon settings that this node is connected to it
-        user_settings = user.get_or_add_addon(self.oauth_provider.short_name)
-        user_settings.grant_oauth_access(
-            node=self.owner,
-            external_account=external_account,
-            metadata=metadata  # metadata can be passed in when forking
-        )
-        user_settings.save()
-
-        # update this instance
-        self.user_settings = user_settings
-        self.external_account = external_account
-
-        if log:
-            self.nodelogger.log(action='node_authorized', save=True)
-        self.save()
-
-    def deauthorize(self, auth=None, add_log=False):
-        """Remove authorization from this node.
-
-        This method should be overridden for addon-specific behavior,
-        such as logging and clearing non-generalizable settings.
-        """
-        self.clear_auth()
-
-    def clear_auth(self):
-        """Disconnect the node settings from the user settings.
-
-        This method does not remove the node's permission in the user's addon
-        settings.
-        """
-        self.external_account = None
-        self.user_settings = None
-        self.save()
-
-    def before_remove_contributor_message(self, node, removed):
-        """If contributor to be removed authorized this addon, warn that removing
-        will remove addon authorization.
-        """
-        if self.has_auth and self.user_settings.owner == removed:
-            return (
-                u'The {addon} add-on for this {category} is authenticated by {name}. '
-                u'Removing this user will also remove write access to {addon} '
-                u'unless another contributor re-authenticates the add-on.'
-            ).format(
-                addon=self.config.full_name,
-                category=node.project_or_component,
-                name=removed.fullname,
-            )
-
-    # backwards compatibility
-    before_remove_contributor = before_remove_contributor_message
-
-    def after_remove_contributor(self, node, removed, auth=None):
-        """If removed contributor authorized this addon, remove addon authorization
-        from owner.
-        """
-        if self.user_settings and self.user_settings.owner == removed:
-
-            # Delete OAuth tokens
-            self.user_settings.oauth_grants[self.owner._id].pop(self.external_account._id)
-            self.clear_auth()
-            message = (
-                u'Because the {addon} add-on for {category} "{title}" was authenticated '
-                u'by {user}, authentication information has been deleted.'
-            ).format(
-                addon=self.config.full_name,
-                category=markupsafe.escape(node.category_display),
-                title=markupsafe.escape(node.title),
-                user=markupsafe.escape(removed.fullname)
-            )
-
-            if not auth or auth.user != removed:
-                url = node.web_url_for('node_setting')
-                message += (
-                    u' You can re-authenticate on the <u><a href="{url}">Settings</a></u> page.'
-                ).format(url=url)
-            #
-            return message
-
-    def after_fork(self, node, fork, user, save=True):
-        """After forking, copy user settings if the user is the one who authorized
-        the addon.
-
-        :return: A tuple of the form (cloned_settings, message)
-        """
-        clone, _ = super(AddonOAuthNodeSettingsBase, self).after_fork(
-            node=node,
-            fork=fork,
-            user=user,
-            save=False,
-        )
-        if self.has_auth and self.user_settings.owner == user:
-            metadata = None
-            if self.complete:
-                try:
-                    metadata = self.user_settings.oauth_grants[node._id][self.external_account._id]
-                except (KeyError, AttributeError):
-                    pass
-            clone.set_auth(self.external_account, user, metadata=metadata, log=False)
-            message = '{addon} authorization copied to forked {category}.'.format(
-                addon=self.config.full_name,
-                category=fork.project_or_component,
-            )
-        else:
-            message = (
-                u'{addon} authorization not copied to forked {category}. You may '
-                u'authorize this fork on the <u><a href="{url}">Settings</a></u> '
-                u'page.'
-            ).format(
-                addon=self.config.full_name,
-                url=fork.web_url_for('node_setting'),
-                category=fork.project_or_component,
-            )
-        if save:
-            clone.save()
-        return clone, message
-
-    def before_register_message(self, node, user):
-        """Return warning text to display if user auth will be copied to a
-        registration.
-        """
-        if self.has_auth:
-            return (
-                u'The contents of {addon} add-ons cannot be registered at this time; '
-                u'the {addon} add-on linked to this {category} will not be included '
-                u'as part of this registration.'
-            ).format(
-                addon=self.config.full_name,
-                category=node.project_or_component,
-            )
-
-    # backwards compatibility
-    before_register = before_register_message
-
-    def serialize_waterbutler_credentials(self):
-        raise NotImplementedError(
-            "AddonOAuthNodeSettingsBase subclasses must implement a 'serialize_waterbutler_credentials' method."
-        )
-
-    def serialize_waterbutler_settings(self):
-        raise NotImplementedError(
-            "AddonOAuthNodeSettingsBase subclasses must implement a 'serialize_waterbutler_settings' method."
-        )

From 33548a38ea8a8824ad0f4c4cbcc4813736b3d01f Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Fri, 6 Oct 2017 11:54:49 -0400
Subject: [PATCH 041/192] Remove modm from api base and caching

---
 api/base/filters.py            | 2 --
 api/base/generic_bulk_views.py | 4 ++--
 api/base/utils.py              | 2 +-
 api/caching/listeners.py       | 9 ++++++---
 4 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/api/base/filters.py b/api/base/filters.py
index 80dcb2785ff..3c479842cce 100644
--- a/api/base/filters.py
+++ b/api/base/filters.py
@@ -12,10 +12,8 @@
 from api.base.serializers import RelationshipField, ShowIfVersion, TargetField
 from dateutil import parser as date_parser
 from django.core.exceptions import ValidationError
-from django.db.models import QuerySet as DjangoQuerySet
 from django.db.models import Q
 from rest_framework import serializers as ser
-from rest_framework.filters import OrderingFilter
 from osf.models import Subject
 from osf.models.base import GuidMixin
 
diff --git a/api/base/generic_bulk_views.py b/api/base/generic_bulk_views.py
index 4a892bcdf65..ba9247e5a2d 100644
--- a/api/base/generic_bulk_views.py
+++ b/api/base/generic_bulk_views.py
@@ -1,8 +1,8 @@
-from modularodm import Q
 from rest_framework import status
 from rest_framework.response import Response
 from rest_framework_bulk import generics as bulk_generics
 from rest_framework.exceptions import PermissionDenied, ValidationError
+from django.db.models import Q
 
 from api.base.settings import BULK_SETTINGS
 from api.base.exceptions import Conflict, JSONAPIException, Gone
@@ -78,7 +78,7 @@ def get_requested_resources(self, request, request_data):
         model_cls = request.parser_context['view'].model_class
         requested_ids = [data['id'] for data in request_data]
         column_name = 'guids___id' if issubclass(model_cls, GuidMixin) else '_id'
-        resource_object_list = model_cls.find(Q(column_name, 'in', requested_ids))
+        resource_object_list = model_cls.objects.filter(Q(**{'{}__in'.format(column_name): requested_ids}))
 
         for resource in resource_object_list:
             if getattr(resource, 'is_deleted', None):
diff --git a/api/base/utils.py b/api/base/utils.py
index b2e27aa21ca..f2daf0c56f8 100644
--- a/api/base/utils.py
+++ b/api/base/utils.py
@@ -93,7 +93,7 @@ def get_object_or_error(model_cls, query_or_pk, request, display_name=None):
     else:
         # they passed a query
         try:
-            obj = model_cls.objects.get(query_or_pk, select_for_update=select_for_update)
+            obj = model_cls.objects.filter(query_or_pk).select_for_update().get() if select_for_update else model_cls.objects.get(query_or_pk)
         except model_cls.DoesNotExist:
             obj = None
 
diff --git a/api/caching/listeners.py b/api/caching/listeners.py
index a8374554fd3..792d82a414a 100644
--- a/api/caching/listeners.py
+++ b/api/caching/listeners.py
@@ -1,8 +1,11 @@
+from django.dispatch import receiver
+from django.db.models.signals import post_save
+
 from api.caching.tasks import ban_url
 from framework.postcommit_tasks.handlers import enqueue_postcommit_task
-from modularodm import signals
 
-@signals.save.connect
-def ban_object_from_cache(sender, instance, fields_changed, cached_data):
+
+@receiver(post_save)
+def ban_object_from_cache(sender, instance, created, **kwargs):
     if hasattr(instance, 'absolute_api_v2_url'):
         enqueue_postcommit_task(ban_url, (instance, ), {}, celery=False, once_per_request=True)

From c1ad11f6edfbf44104f3a0edc1f72b67b3ab828e Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Fri, 6 Oct 2017 15:33:29 -0400
Subject: [PATCH 042/192] Fix references to find in addons

---
 addons/osfstorage/tests/test_models.py | 8 ++++----
 addons/wiki/tests/test_wiki.py         | 6 +++---
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/addons/osfstorage/tests/test_models.py b/addons/osfstorage/tests/test_models.py
index f0ee8e83d79..dc219098830 100644
--- a/addons/osfstorage/tests/test_models.py
+++ b/addons/osfstorage/tests/test_models.py
@@ -205,14 +205,14 @@ def test_delete_folder(self):
             kid = parent.append_file(str(x))
             kid.save()
             kids.append(kid)
-        count = OsfStorageFileNode.find().count()
-        tcount = models.TrashedFileNode.find().count()
+        count = OsfStorageFileNode.objects.all().count()
+        tcount = models.TrashedFileNode.objects.all().count()
 
         parent.delete()
 
         assert_is(OsfStorageFileNode.load(parent._id), None)
-        assert_equals(count - 11, OsfStorageFileNode.find().count())
-        assert_equals(tcount + 11, models.TrashedFileNode.find().count())
+        assert_equals(count - 11, OsfStorageFileNode.objects.all().count())
+        assert_equals(tcount + 11, models.TrashedFileNode.objects.all().count())
 
         for kid in kids:
             assert_is(
diff --git a/addons/wiki/tests/test_wiki.py b/addons/wiki/tests/test_wiki.py
index c1943b556e3..1e17e5f10c2 100644
--- a/addons/wiki/tests/test_wiki.py
+++ b/addons/wiki/tests/test_wiki.py
@@ -170,13 +170,13 @@ def test_project_wiki_edit_post_with_new_wname_and_no_content(self):
         # note: forward slashes not allowed in page_name
         page_name = fake.catch_phrase().replace('/', ' ')
 
-        old_wiki_page_count = NodeWikiPage.find().count()
+        old_wiki_page_count = NodeWikiPage.objects.all().count()
         url = self.project.web_url_for('project_wiki_edit_post', wname=page_name)
         # User submits to edit form with no content
         res = self.app.post(url, {'content': ''}, auth=self.user.auth).follow()
         assert_equal(res.status_code, 200)
 
-        new_wiki_page_count = NodeWikiPage.find().count()
+        new_wiki_page_count = NodeWikiPage.objects.all().count()
         # A new wiki page was created in the db
         assert_equal(new_wiki_page_count, old_wiki_page_count + 1)
 
@@ -190,7 +190,7 @@ def test_project_wiki_edit_post_with_new_wname_and_content(self):
         page_name = fake.catch_phrase().replace('/', ' ')
         page_content = fake.bs()
 
-        old_wiki_page_count = NodeWikiPage.find().count()
+        old_wiki_page_count = NodeWikiPage.objects.all().count()
         url = self.project.web_url_for('project_wiki_edit_post', wname=page_name)
         # User submits to edit form with no content
         res = self.app.post(url, {'content': page_content}, auth=self.user.auth).follow()

From a0fe6fe358d7d9bd12abb6742fb6798d149b34f9 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Fri, 6 Oct 2017 15:33:51 -0400
Subject: [PATCH 043/192] Change conferences query in message.py

---
 website/conferences/message.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/website/conferences/message.py b/website/conferences/message.py
index 762a7ab3d4f..77ca1ba18a4 100644
--- a/website/conferences/message.py
+++ b/website/conferences/message.py
@@ -177,7 +177,7 @@ def attachments(self):
     def allowed_types(self):
         Conference = apps.get_model('osf.Conference')
         allowed_types = []
-        for conf in Conference.find():
+        for conf in Conference.objects.all():
             allowed_types.extend([conf.field_names['submission1'], conf.field_names['submission2']])
         regex_types_allowed = '|'.join(set(allowed_types))
         return regex_types_allowed

From ff429ac77f55aec109d44f02ab9be8f1d20579d7 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Mon, 9 Oct 2017 16:04:30 -0400
Subject: [PATCH 044/192] Correct query in conference views

---
 website/conferences/views.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/website/conferences/views.py b/website/conferences/views.py
index f5f99d6fb33..831eb23c20b 100644
--- a/website/conferences/views.py
+++ b/website/conferences/views.py
@@ -258,7 +258,7 @@ def conference_submissions(**kwargs):
 
 def conference_view(**kwargs):
     meetings = []
-    for conf in Conference.find():
+    for conf in Conference.objects.all():
         if conf.num_submissions < settings.CONFERENCE_MIN_COUNT:
             continue
         if (hasattr(conf, 'is_meeting') and (conf.is_meeting is False)):

From 90cfea44d3c31d8f5b141f960ba79e983b78b0d3 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Tue, 10 Oct 2017 12:11:50 -0400
Subject: [PATCH 045/192] Remove find from Institutions origins whitelist
 method

---
 api/base/settings/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/api/base/settings/__init__.py b/api/base/settings/__init__.py
index 22c90bf6725..9249eb797c4 100644
--- a/api/base/settings/__init__.py
+++ b/api/base/settings/__init__.py
@@ -31,7 +31,7 @@ def load_origins_whitelist():
 
     institution_origins = tuple(domain.lower() for domain in itertools.chain(*[
         institution.domains
-        for institution in Institution.find()
+        for institution in Institution.objects.all()
     ]))
 
     preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain=''))

From 698bb9dff4822ed67ea5d68f1c2a47ef37522594 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Tue, 10 Oct 2017 16:34:00 -0400
Subject: [PATCH 046/192] Revert "Remove ODMOrderingFilter and tests" and
 instead rename

This reverts commit b4561ff8cd5b505fe1186e4dc37fa81292fe481c.
Instead, change the name so it no longer references MODM. A benefit of keeping
it is that it can order views that return lists rather than querysets.
---
 api/base/filters.py            | 22 ++++++++++++++
 api/base/settings/defaults.py  |  2 +-
 api_tests/base/test_filters.py | 53 ++++++++++++++++++++++++++++++++++
 3 files changed, 76 insertions(+), 1 deletion(-)

diff --git a/api/base/filters.py b/api/base/filters.py
index 3c479842cce..d1eb319afa0 100644
--- a/api/base/filters.py
+++ b/api/base/filters.py
@@ -12,8 +12,10 @@
 from api.base.serializers import RelationshipField, ShowIfVersion, TargetField
 from dateutil import parser as date_parser
 from django.core.exceptions import ValidationError
+from django.db.models import QuerySet as DjangoQuerySet
 from django.db.models import Q
 from rest_framework import serializers as ser
+from rest_framework.filters import OrderingFilter
 from osf.models import Subject
 from osf.models.base import GuidMixin
 
@@ -41,6 +43,26 @@ def sort_fn(a, b):
         return 0
     return sort_fn
 
+class OSFOrderingFilter(OrderingFilter):
+    """Adaptation of rest_framework.filters.OrderingFilter to work with modular-odm."""
+    # override
+    def filter_queryset(self, request, queryset, view):
+        ordering = self.get_ordering(request, queryset, view)
+        if isinstance(queryset, DjangoQuerySet):
+            if queryset.ordered:
+                return queryset
+            elif ordering and getattr(queryset.query, 'distinct_fields', None):
+                order_fields = tuple([field.lstrip('-') for field in ordering])
+                distinct_fields = queryset.query.distinct_fields
+                queryset.query.distinct_fields = tuple(set(distinct_fields + order_fields))
+            return super(OSFOrderingFilter, self).filter_queryset(request, queryset, view)
+        if ordering:
+            if isinstance(ordering, (list, tuple)):
+                sorted_list = sorted(queryset, cmp=sort_multiple(ordering))
+                return sorted_list
+            return queryset.sort(*ordering)
+        return queryset
+
 
 class FilterMixin(object):
     """ View mixin with helper functions for filtering. """
diff --git a/api/base/settings/defaults.py b/api/base/settings/defaults.py
index accfce2a7de..0a2cff710e0 100644
--- a/api/base/settings/defaults.py
+++ b/api/base/settings/defaults.py
@@ -152,7 +152,7 @@
         '2.5',
         '2.6',
     ),
-    'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.OrderingFilter',),
+    'DEFAULT_FILTER_BACKENDS': ('api.base.filters.OSFOrderingFilter',),
     'DEFAULT_PAGINATION_CLASS': 'api.base.pagination.JSONAPIPagination',
     'ORDERING_PARAM': 'sort',
     'DEFAULT_AUTHENTICATION_CLASSES': (
diff --git a/api_tests/base/test_filters.py b/api_tests/base/test_filters.py
index b77abe01ced..46236fba9f0 100644
--- a/api_tests/base/test_filters.py
+++ b/api_tests/base/test_filters.py
@@ -354,6 +354,59 @@ def test_parse_query_params_uses_field_source_attribute(self):
         assert_equal(parsed_field ['op'], 'eq')
 
 
+class TestOSFOrderingFilter(ApiTestCase):
+    class query:
+        title = ' '
+        def __init__(self, title):
+            self.title = title
+        def __str__(self):
+            return self.title
+
+    class query_with_num:
+        title = ' '
+        number = 0
+        def __init__(self, title, number):
+            self.title = title
+            self.number = number
+        def __str__(self):
+            return self.title
+
+
+    def test_filter_queryset_forward(self):
+        query_to_be_sorted = [self.query(x) for x in 'NewProj Zip Proj Activity'.split()]
+        sorted_query = sorted(query_to_be_sorted, cmp=filters.sort_multiple(['title']))
+        sorted_output = [str(i) for i in sorted_query]
+        assert_equal(sorted_output, ['Activity', 'NewProj', 'Proj', 'Zip'])
+
+
+    def test_filter_queryset_forward_duplicate(self):
+        query_to_be_sorted = [self.query(x) for x in 'NewProj Activity Zip Activity'.split()]
+        sorted_query = sorted(query_to_be_sorted, cmp=filters.sort_multiple(['title']))
+        sorted_output = [str(i) for i in sorted_query]
+        assert_equal(sorted_output, ['Activity', 'Activity', 'NewProj', 'Zip'])
+
+
+    def test_filter_queryset_reverse(self):
+        query_to_be_sorted = [self.query(x) for x in 'NewProj Zip Proj Activity'.split()]
+        sorted_query = sorted(query_to_be_sorted, cmp=filters.sort_multiple(['-title']))
+        sorted_output = [str(i) for i in sorted_query]
+        assert_equal(sorted_output, ['Zip', 'Proj', 'NewProj', 'Activity'])
+
+    def test_filter_queryset_reverse_duplicate(self):
+        query_to_be_sorted = [self.query(x) for x in 'NewProj Activity Zip Activity'.split()]
+        sorted_query = sorted(query_to_be_sorted, cmp=filters.sort_multiple(['-title']))
+        sorted_output = [str(i) for i in sorted_query]
+        assert_equal(sorted_output, ['Zip', 'NewProj', 'Activity', 'Activity'])
+
+    def test_filter_queryset_handles_multiple_fields(self):
+        objs = [self.query_with_num(title='NewProj', number=10),
+                self.query_with_num(title='Zip', number=20),
+                self.query_with_num(title='Activity', number=30),
+                self.query_with_num(title='Activity', number=40)]
+        actual = [x.number for x in sorted(objs, cmp=filters.sort_multiple(['title', '-number']))]
+        assert_equal(actual, [40, 30, 10, 20])
+
+
 class TestQueryPatternRegex(TestCase):
 
     def setUp(self):

From 3cc0ab5ed7d3f4252f12fe5d583ce36586dbf5d1 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Wed, 11 Oct 2017 09:41:25 -0400
Subject: [PATCH 047/192] Fix unicode call on PreprintService if node is
 missing

---
 osf/models/preprint_service.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/osf/models/preprint_service.py b/osf/models/preprint_service.py
index 7d969fd533e..73f19d06f13 100644
--- a/osf/models/preprint_service.py
+++ b/osf/models/preprint_service.py
@@ -47,7 +47,7 @@ class Meta:
         )
 
     def __unicode__(self):
-        return '{} preprint (guid={}) of {}'.format('published' if self.is_published else 'unpublished', self._id, self.node.__unicode__())
+        return '{} preprint (guid={}) of {}'.format('published' if self.is_published else 'unpublished', self._id, self.node.__unicode__() if self.node else None)
 
     @property
     def primary_file(self):

From 0d6a8dcecde42e62bbd560bb325b6b60885e8efd Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 19 Oct 2017 16:25:43 -0400
Subject: [PATCH 048/192] Query optimizations and fixes from code review

---
 addons/osfstorage/tests/test_models.py     | 8 ++++----
 api/base/settings/__init__.py              | 5 +----
 api/base/utils.py                          | 2 +-
 framework/auth/core.py                     | 2 +-
 framework/auth/utils.py                    | 3 +--
 osf_tests/test_user.py                     | 4 ++--
 scripts/create_fakes.py                    | 2 +-
 tests/test_registrations/test_embargoes.py | 2 +-
 website/conferences/message.py             | 4 ++--
 9 files changed, 14 insertions(+), 18 deletions(-)

diff --git a/addons/osfstorage/tests/test_models.py b/addons/osfstorage/tests/test_models.py
index dc219098830..ab9d5da88f8 100644
--- a/addons/osfstorage/tests/test_models.py
+++ b/addons/osfstorage/tests/test_models.py
@@ -205,14 +205,14 @@ def test_delete_folder(self):
             kid = parent.append_file(str(x))
             kid.save()
             kids.append(kid)
-        count = OsfStorageFileNode.objects.all().count()
-        tcount = models.TrashedFileNode.objects.all().count()
+        count = OsfStorageFileNode.objects.count()
+        tcount = models.TrashedFileNode.objects.count()
 
         parent.delete()
 
         assert_is(OsfStorageFileNode.load(parent._id), None)
-        assert_equals(count - 11, OsfStorageFileNode.objects.all().count())
-        assert_equals(tcount + 11, models.TrashedFileNode.objects.all().count())
+        assert_equals(count - 11, OsfStorageFileNode.objects.count())
+        assert_equals(tcount + 11, models.TrashedFileNode.objects.count())
 
         for kid in kids:
             assert_is(
diff --git a/api/base/settings/__init__.py b/api/base/settings/__init__.py
index 9249eb797c4..e12046b0c54 100644
--- a/api/base/settings/__init__.py
+++ b/api/base/settings/__init__.py
@@ -29,10 +29,7 @@ def load_origins_whitelist():
     global ORIGINS_WHITELIST
     from osf.models import Institution, PreprintProvider
 
-    institution_origins = tuple(domain.lower() for domain in itertools.chain(*[
-        institution.domains
-        for institution in Institution.objects.all()
-    ]))
+    institution_origins = tuple(domain.lower() for domain in itertools.chain(*Institution.objects.values_list('domains', flat=True)))
 
     preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain=''))
 
diff --git a/api/base/utils.py b/api/base/utils.py
index f2daf0c56f8..8ecf96b05fc 100644
--- a/api/base/utils.py
+++ b/api/base/utils.py
@@ -95,7 +95,7 @@ def get_object_or_error(model_cls, query_or_pk, request, display_name=None):
         try:
             obj = model_cls.objects.filter(query_or_pk).select_for_update().get() if select_for_update else model_cls.objects.get(query_or_pk)
         except model_cls.DoesNotExist:
-            obj = None
+            raise NotFound
 
     if not obj:
         if not query:
diff --git a/framework/auth/core.py b/framework/auth/core.py
index a9d67ea8100..7c7f212e32a 100644
--- a/framework/auth/core.py
+++ b/framework/auth/core.py
@@ -115,7 +115,7 @@ def get_user(email=None, password=None, token=None, external_id_provider=None, e
 
     if email:
         email = email.strip().lower()
-        qs = qs.filter(Q(Q(username=email) | Q(id=Subquery(Email.objects.filter(address=email).values_list('user_id', flat=True)))))
+        qs = qs.filter(Q(Q(username=email) | Q(id=Subquery(Email.objects.filter(address=email).values('user_id')))))
 
     if password:
         password = password.strip()
diff --git a/framework/auth/utils.py b/framework/auth/utils.py
index 77235165e4b..15ef8d31afb 100644
--- a/framework/auth/utils.py
+++ b/framework/auth/utils.py
@@ -4,7 +4,6 @@
 from nameparser.parser import HumanName
 import requests
 
-from django.db.models import Q
 from django.core.exceptions import ValidationError
 
 from website import settings
@@ -90,7 +89,7 @@ def privacy_info_handle(info, anonymous, name=False):
 def ensure_external_identity_uniqueness(provider, identity, user=None):
     from osf.models import OSFUser
     users_with_identity = OSFUser.objects.filter(
-        Q(**{'external_identity__{}__{}__isnull'.format(provider, identity): False})
+        **{'external_identity__{}__{}__isnull'.format(provider, identity): False}
     )
     for existing_user in users_with_identity:
         if user and user._id == existing_user._id:
diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py
index 071f335e73b..0ff8711b775 100644
--- a/osf_tests/test_user.py
+++ b/osf_tests/test_user.py
@@ -1388,7 +1388,7 @@ def test_add_same_unconfirmed_email_twice(self):
             self.user.get_unconfirmed_email_for_token(token1)
 
     def test_contributed_property(self):
-        projects_contributed_to = AbstractNode.objects.filter(_contributors=self.user)
+        projects_contributed_to = self.user.nodes.all()
         assert list(self.user.contributed.all()) == list(projects_contributed_to)
 
     def test_contributor_to_property(self):
@@ -1628,7 +1628,7 @@ def is_mrm_field(value):
         # check fields set on merged user
         assert other_user.merged_by == self.user
 
-        assert Session.objects.filter(data__auth_user_id=other_user._id).count() == 0
+        assert not Session.objects.filter(data__auth_user_id=other_user._id).exists()
 
     def test_merge_unconfirmed(self):
         self._add_unconfirmed_user()
diff --git a/scripts/create_fakes.py b/scripts/create_fakes.py
index 7be0ba3fc97..3b69942cec1 100644
--- a/scripts/create_fakes.py
+++ b/scripts/create_fakes.py
@@ -370,7 +370,7 @@ def render_generations_from_node_structure_list(parent, creator, node_structure_
 
 def main():
     args = parse_args()
-    creator = models.OSFUser.objects.filter(username=args.user).first()
+    creator = models.OSFUser.objects.get(username=args.user)
     for i in range(args.n_projects):
         name = args.name + str(i) if args.name else ''
         create_fake_project(creator, args.n_users, args.privacy, args.n_components, name, args.n_tags,
diff --git a/tests/test_registrations/test_embargoes.py b/tests/test_registrations/test_embargoes.py
index bb43ccb64fe..d3436c0dc1f 100644
--- a/tests/test_registrations/test_embargoes.py
+++ b/tests/test_registrations/test_embargoes.py
@@ -903,7 +903,7 @@ def test_POST_register_embargo_is_not_public(self, mock_enqueue):
 
         assert_equal(res.status_code, 202)
 
-        registration = Registration.objects.all().order_by('-registered_date').first()
+        registration = Registration.objects.order_by('-registered_date').first()
 
         assert_false(registration.is_public)
         assert_true(registration.is_pending_embargo_for_existing_registration)
diff --git a/website/conferences/message.py b/website/conferences/message.py
index 77ca1ba18a4..b1c4d68fda4 100644
--- a/website/conferences/message.py
+++ b/website/conferences/message.py
@@ -177,7 +177,7 @@ def attachments(self):
     def allowed_types(self):
         Conference = apps.get_model('osf.Conference')
         allowed_types = []
-        for conf in Conference.objects.all():
-            allowed_types.extend([conf.field_names['submission1'], conf.field_names['submission2']])
+        for field_names in Conference.objects.values_list('field_names', flat=True):
+            allowed_types.extend([field_names['submission1'], field_names['submission2']])
         regex_types_allowed = '|'.join(set(allowed_types))
         return regex_types_allowed

From 2739ecea73b760a8869944676e7e198107ddf487 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 23 Oct 2017 10:33:06 -0500
Subject: [PATCH 049/192] Construct doi ourselves to return in preprint
 serializer if doi hasn't been built yet for preprints.

---
 api/preprints/serializers.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py
index ab15fba6df7..be6ea7ff17f 100644
--- a/api/preprints/serializers.py
+++ b/api/preprints/serializers.py
@@ -17,6 +17,7 @@
 )
 from framework.exceptions import PermissionsError
 from website.util import permissions
+from website.identifiers import utils
 from website.exceptions import NodeStateError
 from website.project import signals as project_signals
 from osf.models import BaseFileNode, PreprintService, PreprintProvider, Node, NodeLicense
@@ -154,7 +155,11 @@ def get_article_doi_url(self, obj):
 
     def get_preprint_doi_url(self, obj):
         doi_identifier = obj.get_identifier('doi')
-        return 'https://dx.doi.org/{}'.format(doi_identifier.value) if doi_identifier else None
+        if doi_identifier:
+            return 'https://dx.doi.org/{}'.format(doi_identifier.value)
+        else:
+            built_identifier = utils.get_or_create_identifiers(obj).get('doi', None)
+            return 'https://dx.doi.org/{}'.format(built_identifier) if built_identifier else None
 
     def run_validation(self, *args, **kwargs):
         # Overrides construtor for validated_data to allow writes to a SerializerMethodField

From 81d3d57a9f7d8d2c74278dffaeee0241e664e740 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 23 Oct 2017 11:34:45 -0500
Subject: [PATCH 050/192] Modify building identifier in preprint serializer so
 we just return what the DOI should look like, rather than actually creating it
 on DataCite.  Only return the DOI if the preprint is published.

---
 api/preprints/serializers.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py
index be6ea7ff17f..bae196da89a 100644
--- a/api/preprints/serializers.py
+++ b/api/preprints/serializers.py
@@ -17,7 +17,7 @@
 )
 from framework.exceptions import PermissionsError
 from website.util import permissions
-from website.identifiers import utils
+from website import settings
 from website.exceptions import NodeStateError
 from website.project import signals as project_signals
 from osf.models import BaseFileNode, PreprintService, PreprintProvider, Node, NodeLicense
@@ -158,8 +158,8 @@ def get_preprint_doi_url(self, obj):
         if doi_identifier:
             return 'https://dx.doi.org/{}'.format(doi_identifier.value)
         else:
-            built_identifier = utils.get_or_create_identifiers(obj).get('doi', None)
-            return 'https://dx.doi.org/{}'.format(built_identifier) if built_identifier else None
+            built_identifier = settings.EZID_FORMAT.format(namespace=settings.DOI_NAMESPACE, guid=obj._id).replace('doi:', '').upper()
+            return 'https://dx.doi.org/{}'.format(built_identifier) if built_identifier and obj.is_published else None
 
     def run_validation(self, *args, **kwargs):
         # Overrides construtor for validated_data to allow writes to a SerializerMethodField

From 840bc57e7120ae67e84c1c7bca94cfef34c8d2a8 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 23 Oct 2017 12:32:08 -0500
Subject: [PATCH 051/192] Copy old script from @erinspace which added
 identifiers to existing preprints.

---
 .../add_missing_identifiers_to_preprints.py   | 65 +++++++++++++++++++
 1 file changed, 65 insertions(+)
 create mode 100644 scripts/add_missing_identifiers_to_preprints.py

diff --git a/scripts/add_missing_identifiers_to_preprints.py b/scripts/add_missing_identifiers_to_preprints.py
new file mode 100644
index 00000000000..0a304a0e2a2
--- /dev/null
+++ b/scripts/add_missing_identifiers_to_preprints.py
@@ -0,0 +1,65 @@
+import sys
+import time
+import logging
+from scripts import utils as script_utils
+from django.db import transaction
+
+from website.app import setup_django
+from website.identifiers.utils import request_identifiers_from_ezid, parse_identifiers
+
+setup_django()
+logger = logging.getLogger(__name__)
+
+
+def add_identifiers_to_preprints(dry=True):
+    from osf.models import PreprintService
+
+    preprints_without_identifiers = PreprintService.objects.filter(identifiers__isnull=True)
+    logger.info('About to add identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
+
+    for preprint in preprints_without_identifiers:
+        logger.info('Saving identifier for preprint {} from source {}'.format(preprint._id, preprint.provider.name))
+
+        if not dry:
+            ezid_response = request_identifiers_from_ezid(preprint)
+            id_dict = parse_identifiers(ezid_response)
+            preprint.set_identifier_values(doi=id_dict['doi'], ark=id_dict['ark'])
+            preprint.save()
+
+            doi = preprint.get_identifier('doi')
+            assert preprint._id.upper() in doi.value
+
+            logger.info('Created DOI {} for Preprint with guid {} from service {}'.format(doi.value, preprint._id, preprint.provider.name))
+            time.sleep(1)
+        else:
+            logger.info('Dry run - would have created identifier for preprint {} from service {}'.format(preprint._id, preprint.provider.name))
+
+    logger.info('Finished Adding identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
+
+
+def main(dry=True):
+    # Start a transaction that will be rolled back if any exceptions are un
+    add_identifiers_to_preprints(dry)
+    if dry:
+        # When running in dry mode force the transaction to rollback
+        raise Exception('Dry Run complete -- not actually saved')
+
+
+if __name__ == '__main__':
+    dry = '--dry' in sys.argv
+    if not dry:
+        # If we're not running in dry mode log everything to a file
+        script_utils.add_file_logger(logger, __file__)
+
+    # Allow setting the log level just by appending the level to the command
+    if '--debug' in sys.argv:
+        logger.setLevel(logging.DEBUG)
+    elif '--warning' in sys.argv:
+        logger.setLevel(logging.WARNING)
+    elif '--info' in sys.argv:
+        logger.setLevel(logging.INFO)
+    elif '--error' in sys.argv:
+        logger.setLevel(logging.ERROR)
+
+    # Finally run the migration
+    main(dry=dry)

From d08348a7ad52273ed3a97f241df64bcfd8dce292 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 23 Oct 2017 12:36:58 -0500
Subject: [PATCH 052/192] Modify how the count of identifiers added is
 calculated.

---
 scripts/add_missing_identifiers_to_preprints.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/scripts/add_missing_identifiers_to_preprints.py b/scripts/add_missing_identifiers_to_preprints.py
index 0a304a0e2a2..931e6cb69e0 100644
--- a/scripts/add_missing_identifiers_to_preprints.py
+++ b/scripts/add_missing_identifiers_to_preprints.py
@@ -16,6 +16,7 @@ def add_identifiers_to_preprints(dry=True):
 
     preprints_without_identifiers = PreprintService.objects.filter(identifiers__isnull=True)
     logger.info('About to add identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
+    identifiers_added = 0
 
     for preprint in preprints_without_identifiers:
         logger.info('Saving identifier for preprint {} from source {}'.format(preprint._id, preprint.provider.name))
@@ -28,13 +29,14 @@ def add_identifiers_to_preprints(dry=True):
 
             doi = preprint.get_identifier('doi')
             assert preprint._id.upper() in doi.value
+            identifiers_added += 1
 
             logger.info('Created DOI {} for Preprint with guid {} from service {}'.format(doi.value, preprint._id, preprint.provider.name))
             time.sleep(1)
         else:
             logger.info('Dry run - would have created identifier for preprint {} from service {}'.format(preprint._id, preprint.provider.name))
 
-    logger.info('Finished Adding identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
+    logger.info('Finished Adding identifiers to {} preprints.'.format(identifiers_added))
 
 
 def main(dry=True):

From e9d0793c6f14b72071d2b368cf88c63279fe6f1f Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 23 Oct 2017 14:49:37 -0500
Subject: [PATCH 053/192] Modify add_missing_identifiers_to_preprints script so
 it is a celery task run nightly with celery beat.

---
 .../add_missing_identifiers_to_preprints.py   | 44 ++++++++-----------
 website/settings/defaults.py                  |  7 +++
 2 files changed, 25 insertions(+), 26 deletions(-)

diff --git a/scripts/add_missing_identifiers_to_preprints.py b/scripts/add_missing_identifiers_to_preprints.py
index 931e6cb69e0..702a01e7583 100644
--- a/scripts/add_missing_identifiers_to_preprints.py
+++ b/scripts/add_missing_identifiers_to_preprints.py
@@ -1,17 +1,19 @@
-import sys
 import time
 import logging
+import django
 from scripts import utils as script_utils
 from django.db import transaction
 
-from website.app import setup_django
+from framework.celery_tasks import app as celery_app
+
+from website.app import init_app
 from website.identifiers.utils import request_identifiers_from_ezid, parse_identifiers
 
-setup_django()
+django.setup()
 logger = logging.getLogger(__name__)
 
 
-def add_identifiers_to_preprints(dry=True):
+def add_identifiers_to_preprints(dry_run=True):
     from osf.models import PreprintService
 
     preprints_without_identifiers = PreprintService.objects.filter(identifiers__isnull=True)
@@ -21,7 +23,7 @@ def add_identifiers_to_preprints(dry=True):
     for preprint in preprints_without_identifiers:
         logger.info('Saving identifier for preprint {} from source {}'.format(preprint._id, preprint.provider.name))
 
-        if not dry:
+        if not dry_run:
             ezid_response = request_identifiers_from_ezid(preprint)
             id_dict = parse_identifiers(ezid_response)
             preprint.set_identifier_values(doi=id_dict['doi'], ark=id_dict['ark'])
@@ -39,29 +41,19 @@ def add_identifiers_to_preprints(dry=True):
     logger.info('Finished Adding identifiers to {} preprints.'.format(identifiers_added))
 
 
-def main(dry=True):
-    # Start a transaction that will be rolled back if any exceptions are un
-    add_identifiers_to_preprints(dry)
-    if dry:
-        # When running in dry mode force the transaction to rollback
+def main(dry_run=True):
+    add_identifiers_to_preprints(dry_run)
+    if dry_run:
+        # When running in dry_run mode force the transaction to rollback
         raise Exception('Dry Run complete -- not actually saved')
 
-
-if __name__ == '__main__':
-    dry = '--dry' in sys.argv
-    if not dry:
+@celery_app.task(name='scripts.add_missing_identifiers_to_preprints')
+def run_main(dry_run=True):
+    init_app(routes=False)
+    if not dry_run:
         # If we're not running in dry mode log everything to a file
         script_utils.add_file_logger(logger, __file__)
-
-    # Allow setting the log level just by appending the level to the command
-    if '--debug' in sys.argv:
-        logger.setLevel(logging.DEBUG)
-    elif '--warning' in sys.argv:
-        logger.setLevel(logging.WARNING)
-    elif '--info' in sys.argv:
-        logger.setLevel(logging.INFO)
-    elif '--error' in sys.argv:
-        logger.setLevel(logging.ERROR)
-
+    
     # Finally run the migration
-    main(dry=dry)
+    with transaction.atomic():
+        main(dry_run=dry_run)
diff --git a/website/settings/defaults.py b/website/settings/defaults.py
index 9d69eae4b3e..571097b30f4 100644
--- a/website/settings/defaults.py
+++ b/website/settings/defaults.py
@@ -383,6 +383,7 @@ def parent_dir(path):
     'scripts.refresh_addon_tokens',
     'scripts.retract_registrations',
     'website.archiver.tasks',
+    'scripts.add_missing_identifiers_to_preprints'
 }
 
 try:
@@ -440,6 +441,7 @@ def parent_dir(path):
     'scripts.analytics.run_keen_snapshots',
     'scripts.analytics.run_keen_events',
     'scripts.generate_sitemap',
+    'scripts.add_missing_identifiers_to_preprints',
 )
 
 # Modules that need metrics and release requirements
@@ -495,6 +497,11 @@ def parent_dir(path):
             'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
             'kwargs': {'dry_run': False},
         },
+        'add_missing_identifiers_to_preprints': {
+            'task': 'scripts.add_missing_identifiers_to_preprints',
+            'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
+            'kwargs': {'dry_run': False},
+        },
         'approve_embargo_terminations': {
             'task': 'scripts.approve_embargo_terminations',
             'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m

From d8fc61ad0ab9f1dd6d76162366038b2ee3844a47 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 23 Oct 2017 15:55:52 -0500
Subject: [PATCH 054/192] Add tests for presence of preprint_doi_link.

- If unpublished, no doi link generated.
- If preprint is published, doi link is returned, regardless of whether EZID request has been sent.
---
 .../preprints/views/test_preprint_detail.py   | 37 +++++++++++++++++--
 1 file changed, 33 insertions(+), 4 deletions(-)

diff --git a/api_tests/preprints/views/test_preprint_detail.py b/api_tests/preprints/views/test_preprint_detail.py
index 37ebb90e512..80d3d9f6876 100644
--- a/api_tests/preprints/views/test_preprint_detail.py
+++ b/api_tests/preprints/views/test_preprint_detail.py
@@ -8,16 +8,17 @@
 from framework.auth.core import Auth
 from osf.models import PreprintService, NodeLicense
 from osf_tests.factories import (
-    PreprintFactory, 
-    AuthUserFactory, 
-    ProjectFactory, 
-    SubjectFactory, 
+    PreprintFactory,
+    AuthUserFactory,
+    ProjectFactory,
+    SubjectFactory,
     PreprintProviderFactory,
 )
 from rest_framework import exceptions
 from tests.base import fake, capture_signals
 from website.project.signals import contributor_added
 from website.identifiers.utils import build_ezid_metadata
+from website.settings import EZID_FORMAT, DOI_NAMESPACE
 
 
 def build_preprint_update_payload(node_id, attributes=None, relationships=None):
@@ -41,10 +42,18 @@ class TestPreprintDetail:
     def preprint(self, user):
         return PreprintFactory(creator=user)
 
+    @pytest.fixture()
+    def unpublished_preprint(self, user):
+        return PreprintFactory(creator=user, is_published=False)
+
     @pytest.fixture()
     def url(self, preprint):
         return '/{}preprints/{}/'.format(API_BASE, preprint._id)
 
+    @pytest.fixture()
+    def unpublished_url(self, unpublished_preprint):
+        return '/{}preprints/{}/'.format(API_BASE, unpublished_preprint._id)
+
     @pytest.fixture()
     def res(self, app, url):
         return app.get(url)
@@ -88,6 +97,26 @@ def test_embed_contributors(self, app, user, preprint):
         for contrib in embeds['contributors']['data']:
             assert contrib['id'] in ids
 
+    def test_preprint_doi_link_absent_in_unpublished_preprints(self, app, user, unpublished_preprint, unpublished_url):
+        res = app.get(unpublished_url, auth=user.auth)
+        assert res.json['data']['id'] == unpublished_preprint._id
+        assert res.json['data']['attributes']['is_published'] == False
+        assert 'self' in res.json['data']['links'].keys()
+        assert 'html' in res.json['data']['links'].keys()
+        assert 'preprint_doi' not in res.json['data']['links'].keys()
+
+    def test_preprint_doi_link_present_after_preprint_published(self, app, user, unpublished_preprint, unpublished_url):
+        unpublished_preprint.is_published = True
+        unpublished_preprint.save()
+        res = app.get(unpublished_url, auth=user.auth)
+        assert res.json['data']['id'] == unpublished_preprint._id
+        assert res.json['data']['attributes']['is_published'] == True
+        assert 'self' in res.json['data']['links'].keys()
+        assert 'html' in res.json['data']['links'].keys()
+        assert 'preprint_doi' in res.json['data']['links'].keys()
+        expected_doi = EZID_FORMAT.format(namespace=DOI_NAMESPACE, guid=unpublished_preprint._id).replace('doi:', '').upper()
+        assert res.json['data']['links']['preprint_doi'] == 'https://dx.doi.org/{}'.format(expected_doi)
+
 
 @pytest.mark.django_db
 class TestPreprintDelete:

From 8c4d1920ea7326645c0b11e6f139a45fb9d9ba2f Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 23 Oct 2017 16:15:56 -0500
Subject: [PATCH 055/192] Add preprint_doi_on_datacite for whether preprint doi
 has been created properly on datacite.   Add tests for this.

---
 api/preprints/serializers.py                      |  5 +++++
 api/preprints/views.py                            |  3 +++
 api_tests/preprints/views/test_preprint_detail.py | 15 ++++++++++++++-
 3 files changed, 22 insertions(+), 1 deletion(-)

diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py
index bae196da89a..9a82854ecfd 100644
--- a/api/preprints/serializers.py
+++ b/api/preprints/serializers.py
@@ -79,6 +79,7 @@ class PreprintSerializer(JSONAPISerializer):
     title = ser.CharField(source='node.title', required=False)
     description = ser.CharField(required=False, allow_blank=True, allow_null=True, source='node.description')
     tags = JSONAPIListField(child=NodeTagField(), required=False, source='node.tags')
+    preprint_doi_on_datacite = ser.SerializerMethodField(read_only=True)
 
     contributors = RelationshipField(
         related_view='nodes:node-contributors',
@@ -137,6 +138,10 @@ class PreprintSerializer(JSONAPISerializer):
     class Meta:
         type_ = 'preprints'
 
+    def get_preprint_doi_on_datacite(self, obj):
+        doi_identifier = obj.get_identifier('doi')
+        return doi_identifier is not None
+
     def get_subjects(self, obj):
         return [
             [
diff --git a/api/preprints/views.py b/api/preprints/views.py
index d81ed7156db..8609fe75f40 100644
--- a/api/preprints/views.py
+++ b/api/preprints/views.py
@@ -76,6 +76,7 @@ class PreprintList(JSONAPIBaseView, generics.ListCreateAPIView, PreprintFilterMi
         is_preprint_orphan              boolean                             whether or not this preprint is orphaned
         subjects                        list of lists of dictionaries       ids of Subject in the BePress taxonomy. Dictionary, containing the subject text and subject ID
         doi                             string                              bare DOI for the manuscript, as entered by the user
+        preprint_doi_on_datacite        boolean                             whether or not the preprint doi has been added on datacite
 
     ##Relationships
 
@@ -203,6 +204,8 @@ class PreprintDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, Pre
         is_preprint_orphan              boolean                             whether or not this preprint is orphaned
         subjects                        array of tuples of dictionaries     ids of Subject in the BePress taxonomy. Dictionary, containing the subject text and subject ID
         doi                             string                              bare DOI for the manuscript, as entered by the user
+        preprint_doi_on_datacite        boolean                             whether or not the preprint doi has been added on datacite
+
 
     ##Relationships
 
diff --git a/api_tests/preprints/views/test_preprint_detail.py b/api_tests/preprints/views/test_preprint_detail.py
index 80d3d9f6876..0f0601f132e 100644
--- a/api_tests/preprints/views/test_preprint_detail.py
+++ b/api_tests/preprints/views/test_preprint_detail.py
@@ -104,8 +104,9 @@ def test_preprint_doi_link_absent_in_unpublished_preprints(self, app, user, unpu
         assert 'self' in res.json['data']['links'].keys()
         assert 'html' in res.json['data']['links'].keys()
         assert 'preprint_doi' not in res.json['data']['links'].keys()
+        assert res.json['data']['attributes']['preprint_doi_on_datacite'] == False
 
-    def test_preprint_doi_link_present_after_preprint_published(self, app, user, unpublished_preprint, unpublished_url):
+    def test_published_preprint_doi_link_returned_before_datacite_request(self, app, user, unpublished_preprint, unpublished_url):
         unpublished_preprint.is_published = True
         unpublished_preprint.save()
         res = app.get(unpublished_url, auth=user.auth)
@@ -116,6 +117,18 @@ def test_preprint_doi_link_present_after_preprint_published(self, app, user, unp
         assert 'preprint_doi' in res.json['data']['links'].keys()
         expected_doi = EZID_FORMAT.format(namespace=DOI_NAMESPACE, guid=unpublished_preprint._id).replace('doi:', '').upper()
         assert res.json['data']['links']['preprint_doi'] == 'https://dx.doi.org/{}'.format(expected_doi)
+        assert res.json['data']['attributes']['preprint_doi_on_datacite'] == False
+
+    def test_published_preprint_doi_link_returned_after_datacite_request(self, app, user, preprint, url):
+        res = app.get(url, auth=user.auth)
+        assert res.json['data']['id'] == preprint._id
+        assert res.json['data']['attributes']['is_published'] == True
+        assert 'self' in res.json['data']['links'].keys()
+        assert 'html' in res.json['data']['links'].keys()
+        assert 'preprint_doi' in res.json['data']['links'].keys()
+        expected_doi = EZID_FORMAT.format(namespace=DOI_NAMESPACE, guid=preprint._id).replace('doi:', '')
+        assert res.json['data']['links']['preprint_doi'] == 'https://dx.doi.org/{}'.format(expected_doi)
+        assert res.json['data']['attributes']['preprint_doi_on_datacite'] == True
 
 
 @pytest.mark.django_db

From 757aa59a49f363b6b4cccbfdba032dbbb2785e2e Mon Sep 17 00:00:00 2001
From: Daniel Neis Araujo <danielneis@gmail.com>
Date: Mon, 3 Jul 2017 13:55:47 -0400
Subject: [PATCH 056/192] Implement initial GitLab Addon     [1/3]

---
 addons.json                                   |   6 +
 api/logs/serializers.py                       |   2 +
 website/addons/gitlab/__init__.py             |  36 ++
 website/addons/gitlab/model.py                | 459 ++++++++++++++++++
 website/addons/gitlab/serializer.py           |  50 ++
 website/addons/gitlab/static/comicon.png      | Bin 0 -> 750 bytes
 .../gitlab/static/gitlabFangornConfig.js      | 269 ++++++++++
 website/addons/gitlab/static/node-cfg.js      |   7 +
 .../templates/gitlab_node_settings.mako       |  99 ++++
 website/addons/gitlab/tests/factories.py      |  28 ++
 website/addons/gitlab/tests/test_utils.py     |  62 +++
 website/addons/gitlab/utils.py                | 116 +++++
 website/files/models/gitlab.py                |  20 +
 website/static/js/fileViewTreebeard.js        |   4 +-
 website/static/js/osfLanguage.js              |   4 +
 website/util/rubeus.py                        |   2 +-
 16 files changed, 1161 insertions(+), 3 deletions(-)
 create mode 100644 website/addons/gitlab/__init__.py
 create mode 100644 website/addons/gitlab/model.py
 create mode 100644 website/addons/gitlab/serializer.py
 create mode 100644 website/addons/gitlab/static/comicon.png
 create mode 100644 website/addons/gitlab/static/gitlabFangornConfig.js
 create mode 100644 website/addons/gitlab/static/node-cfg.js
 create mode 100644 website/addons/gitlab/templates/gitlab_node_settings.mako
 create mode 100644 website/addons/gitlab/tests/factories.py
 create mode 100644 website/addons/gitlab/tests/test_utils.py
 create mode 100644 website/addons/gitlab/utils.py
 create mode 100644 website/files/models/gitlab.py

diff --git a/addons.json b/addons.json
index 15120a1d5e2..043d0f08a96 100644
--- a/addons.json
+++ b/addons.json
@@ -6,6 +6,7 @@
         "figshare",
         "forward",
         "github",
+        "gitlab",
         "mendeley",
         "zotero",
         "osfstorage",
@@ -25,7 +26,8 @@
         "forward": "full",
         "googledrive": "partial",
         "github": "partial",
+        "gitlab": "partial",
         "owncloud": "partial",
         "s3": "partial",
         "wiki": "full",
         "bitbucket": "partial"
@@ -37,6 +39,7 @@
         "dropbox",
         "figshare",
         "github",
+        "gitlab",
         "googledrive",
         "owncloud",
         "s3",
@@ -53,6 +56,7 @@
         "figshare": "Figshare is an online digital repository. Connect your figshare account to share your figshare files along with other materials in your OSF project.",
         "forward": "The External Link add-on allows you to provide a link to a website outside of the OSF.",
         "github": "GitHub is a web-based Git repository hosting service. Connect your GitHub repo to your OSF project to share your code alongside other materials in your OSF project.",
+        "gitlab": "GitLab is an open-source web-based Git repository hosting tool and service. Connect your GitLab repo to your OSF project to share your code alongside other materials in your OSF project.",
         "mendeley": "Mendeley is a reference management tool. Connecting Mendeley folders to OSF projects allows you and others to view, copy, and download citations that are relevant to your project from the Project Overview page.",
         "zotero": "Zotero is a reference management tool. Connecting Zotero folders to OSF projects allows you and others to view, copy, and download citations that are relevant to your project from the Project Overview page.",
         "osfstorage": "OSF Storage is the default storage provider for OSF projects.",
@@ -69,6 +73,7 @@
         "dropbox": "http://www.dropbox.com",
         "figshare": "http://www.figshare.com",
         "github": "http://www.github.com",
+        "gitlab": "https://www.gitlab.com",
         "mendeley": "http://www.mendeley.com",
         "owncloud": "https://owncloud.org/",
         "zotero": "http://www.zotero.org",
diff --git a/api/logs/serializers.py b/api/logs/serializers.py
index 199a266cd98..95c27f26679 100644
--- a/api/logs/serializers.py
+++ b/api/logs/serializers.py
@@ -56,6 +56,8 @@ class NodeLogParamsSerializer(RestrictedDictSerializer):
     github_repo = ser.CharField(read_only=True, source='github.repo')
     bitbucket_user = ser.CharField(read_only=True, source='bitbucket.user')
     bitbucket_repo = ser.CharField(read_only=True, source='bitbucket.repo')
+    gitlab_user = ser.CharField(read_only=True, source='gitlab.user')
+    gitlab_repo = ser.CharField(read_only=True, source='gitlab.repo')
     file = ser.DictField(read_only=True)
     filename = ser.CharField(read_only=True)
     kind = ser.CharField(read_only=True)
diff --git a/website/addons/gitlab/__init__.py b/website/addons/gitlab/__init__.py
new file mode 100644
index 00000000000..8e1a79447d6
--- /dev/null
+++ b/website/addons/gitlab/__init__.py
@@ -0,0 +1,36 @@
+import os
+
+from website.addons.gitlab import routes, views, model
+
+MODELS = [
+    model.GitLabUserSettings,
+    model.GitLabNodeSettings,
+]
+USER_SETTINGS_MODEL = model.GitLabUserSettings
+NODE_SETTINGS_MODEL = model.GitLabNodeSettings
+
+ROUTES = [routes.api_routes]
+
+SHORT_NAME = 'gitlab'
+FULL_NAME = 'GitLab'
+
+OWNERS = ['user', 'node']
+
+ADDED_DEFAULT = []
+ADDED_MANDATORY = []
+
+VIEWS = []
+CONFIGS = ['accounts', 'node']
+
+CATEGORIES = ['storage']
+
+INCLUDE_JS = {}
+
+INCLUDE_CSS = {}
+
+HAS_HGRID_FILES = True
+GET_HGRID_DATA = views.gitlab_hgrid_data
+
+HERE = os.path.dirname(os.path.abspath(__file__))
+NODE_SETTINGS_TEMPLATE = os.path.join(HERE, 'templates', 'gitlab_node_settings.mako')
+USER_SETTINGS_TEMPLATE = os.path.join(HERE, 'templates', 'gitlab_user_settings.mako')
diff --git a/website/addons/gitlab/model.py b/website/addons/gitlab/model.py
new file mode 100644
index 00000000000..e18024a83fb
--- /dev/null
+++ b/website/addons/gitlab/model.py
@@ -0,0 +1,459 @@
+# -*- coding: utf-8 -*-
+
+import os
+import urlparse
+
+import markupsafe
+from modularodm import fields
+
+from framework.auth import Auth
+
+from website import settings
+from website.util import web_url_for
+from website.addons.base import exceptions
+from website.addons.base import AddonOAuthUserSettingsBase, AddonOAuthNodeSettingsBase
+from website.addons.base import StorageAddonBase
+
+from website.addons.gitlab import utils
+from website.addons.gitlab.api import GitLabClient
+from website.addons.gitlab.serializer import GitLabSerializer
+from website.addons.gitlab import settings as gitlab_settings
+from website.addons.gitlab.exceptions import ApiError, NotFoundError, GitLabError
+from website.oauth.models import ExternalProvider
+
+
+hook_domain = gitlab_settings.HOOK_DOMAIN or settings.DOMAIN
+
+
+class GitLabProvider(ExternalProvider):
+    name = 'GitLab'
+    short_name = 'gitlab'
+
+    @property
+    def auth_url_base(self):
+        return 'https://{0}{1}'.format('gitlab.com', '/oauth/authorize')
+
+    @property
+    def callback_url(self):
+        return 'https://{0}{1}'.format('gitlab.com', '/oauth/token')
+
+    @property
+    def client_secret(self):
+        return ''
+
+    @property
+    def client_id(self):
+        return ''
+
+    def handle_callback(self, response):
+        """View called when the OAuth flow is completed. Adds a new GitLabUserSettings
+        record to the user and saves the account info.
+        """
+        client = GitLabClient(
+            access_token=response['access_token']
+        )
+
+        user_info = client.user()
+
+        return {
+            'provider_id': client.host,
+            'profile_url': user_info['web_url'],
+            'oauth_key': response['access_token'],
+            'display_name': client.host
+        }
+
+
+class GitLabUserSettings(AddonOAuthUserSettingsBase):
+    oauth_provider = GitLabProvider
+    serializer = GitLabSerializer
+
+
+class GitLabNodeSettings(StorageAddonBase, AddonOAuthNodeSettingsBase):
+    oauth_provider = GitLabProvider
+    serializer = GitLabSerializer
+
+    user = fields.StringField()
+    repo = fields.StringField()
+    repo_id = fields.StringField()
+    hook_id = fields.StringField()
+    hook_secret = fields.StringField()
+
+    @property
+    def folder_id(self):
+        return self.repo or None
+
+    @property
+    def folder_name(self):
+        if self.complete:
+            return '{}/{}'.format(self.user, self.repo)
+        return None
+
+    @property
+    def folder_path(self):
+        return self.repo or None
+
+    @property
+    def has_auth(self):
+        return bool(self.user_settings and self.user_settings.has_auth)
+
+    @property
+    def complete(self):
+        return self.has_auth and self.repo is not None and self.user is not None
+
+    def authorize(self, user_settings, save=False):
+        self.user_settings = user_settings
+        self.owner.add_log(
+            action='gitlab_node_authorized',
+            params={
+                'project': self.owner.parent_id,
+                'node': self.owner._id,
+            },
+            auth=Auth(user_settings.owner),
+        )
+        if save:
+            self.save()
+
+    def clear_settings(self):
+        self.user = None
+        self.repo = None
+        self.repo_id = None
+        self.hook_id = None
+        self.hook_secret = None
+
+    def deauthorize(self, auth=None, log=True):
+        self.delete_hook(save=False)
+        self.clear_settings()
+        if log:
+            self.owner.add_log(
+                action='gitlab_node_deauthorized',
+                params={
+                    'project': self.owner.parent_id,
+                    'node': self.owner._id,
+                },
+                auth=auth,
+            )
+
+        self.clear_auth()
+
+    def delete(self, save=False):
+        super(GitLabNodeSettings, self).delete(save=False)
+        self.deauthorize(log=False)
+        if save:
+            self.save()
+
+    @property
+    def repo_url(self):
+        if self.repo:
+            return 'https://{0}/{1}'.format(self.external_account.display_name, self.repo)
+
+    @property
+    def short_url(self):
+        if self.repo:
+            return self.repo
+
+    @property
+    def is_private(self):
+        connection = GitLabClient(external_account=self.external_account)
+        return not connection.repo(repo_id=self.repo_id)['public']
+
+    def to_json(self, user):
+
+        ret = super(GitLabNodeSettings, self).to_json(user)
+        user_settings = user.get_addon('gitlab')
+        ret.update({
+            'user_has_auth': user_settings and user_settings.has_auth,
+            'is_registration': self.owner.is_registration,
+        })
+
+        if self.user_settings and self.user_settings.has_auth:
+
+            valid_credentials = True
+            owner = self.user_settings.owner
+            connection = GitLabClient(external_account=self.external_account)
+
+            repos = []
+            try:
+                repos = connection.repos()
+
+            except GitLabError:
+                valid_credentials = False
+
+            if owner == user:
+                ret.update({'repos': repos})
+
+            ret.update({
+                'node_has_auth': True,
+                'gitlab_user': self.user or '',
+                'gitlab_repo': self.repo or '',
+                'gitlab_repo_id': self.repo_id if self.repo_id is not None else '0',
+                'gitlab_repo_full_name': '{0} / {1}'.format(self.user, self.repo) if (self.user and self.repo) else '',
+                'auth_osf_name': owner.fullname,
+                'auth_osf_url': owner.url,
+                'auth_osf_id': owner._id,
+                'gitlab_host': self.external_account.display_name,
+                'gitlab_user_name': self.external_account.display_name,
+                'gitlab_user_url': self.external_account.profile_url,
+                'is_owner': owner == user,
+                'valid_credentials': valid_credentials,
+                'addons_url': web_url_for('user_addons'),
+                'files_url': self.owner.web_url_for('collect_file_trees')
+            })
+        return ret
+
+    def serialize_waterbutler_credentials(self):
+        if not self.complete or not self.repo:
+            raise exceptions.AddonError('Addon is not authorized')
+        return {'token': self.external_account.provider_id}
+
+    def serialize_waterbutler_settings(self):
+        if not self.complete:
+            raise exceptions.AddonError('Repo is not configured')
+        return {
+            'host': 'https://{}'.format(self.external_account.display_name),
+            'owner': self.user,
+            'repo': self.repo,
+            'repo_id': self.repo_id
+        }
+
+    def create_waterbutler_log(self, auth, action, metadata):
+        path = metadata['path']
+
+        url = self.owner.web_url_for('addon_view_or_download_file', path=path, provider='gitlab')
+
+        if not metadata.get('extra'):
+            sha = None
+            urls = {}
+        else:
+            sha = metadata['extra']['fileSha']
+            urls = {
+                'view': '{0}?ref={1}'.format(url, sha),
+                'download': '{0}?action=download&ref={1}'.format(url, sha)
+            }
+
+        self.owner.add_log(
+            'gitlab_{0}'.format(action),
+            auth=auth,
+            params={
+                'project': self.owner.parent_id,
+                'node': self.owner._id,
+                'path': path,
+                'urls': urls,
+                'gitlab': {
+                    'host': 'https://{0}'.format(self.external_account.display_name),
+                    'user': self.user,
+                    'repo': self.repo,
+                    'sha': sha,
+                },
+            },
+        )
+
+    #############
+    # Callbacks #
+    #############
+
+    def before_page_load(self, node, user):
+        """
+
+        :param Node node:
+        :param User user:
+        :return str: Alert message
+        """
+        messages = []
+
+        # Quit if not contributor
+        if not node.is_contributor(user):
+            return messages
+
+        # Quit if not configured
+        if self.user is None or self.repo is None:
+            return messages
+
+        # Quit if no user authorization
+        if self.user_settings is None:
+            return messages
+
+        connect = GitLabClient(external_account=self.external_account)
+
+        try:
+            repo = connect.repo(self.repo_id)
+        except (ApiError, GitLabError):
+            return messages
+
+        node_permissions = 'public' if node.is_public else 'private'
+        repo_permissions = 'private' if not repo['public'] else 'public'
+        if repo_permissions != node_permissions:
+            message = (
+                'Warning: This OSF {category} is {node_perm}, but the GitLab '
+                'repo {user} / {repo} is {repo_perm}.'.format(
+                    category=markupsafe.escape(node.project_or_component),
+                    node_perm=markupsafe.escape(node_permissions),
+                    repo_perm=markupsafe.escape(repo_permissions),
+                    user=markupsafe.escape(self.user),
+                    repo=markupsafe.escape(self.repo),
+                )
+            )
+            if repo_permissions == 'private':
+                message += (
+                    ' Users can view the contents of this private GitLab '
+                    'repository through this public project.'
+                )
+            else:
+                message += (
+                    ' The files in this GitLab repo can be viewed on GitLab '
+                    '<u><a href="{url}">here</a></u>.'
+                ).format(url = repo['http_url_to_repo'])
+            messages.append(message)
+            return messages
+
+    def before_remove_contributor_message(self, node, removed):
+        """
+
+        :param Node node:
+        :param User removed:
+        :return str: Alert message
+
+        """
+        try:
+            message = (super(GitLabNodeSettings, self).before_remove_contributor_message(node, removed) +
+            'You can download the contents of this repository before removing '
+            'this contributor <u><a href="{url}">here</a></u>.'.format(
+                url=node.api_url + 'gitlab/tarball/'
+            ))
+        except TypeError:
+            # super call returned None due to lack of user auth
+            return None
+        else:
+            return message
+
+    # backwards compatibility -- TODO: is this necessary?
+    before_remove_contributor = before_remove_contributor_message
+
+    def after_remove_contributor(self, node, removed, auth=None):
+        """
+        :param Node node:
+        :param User removed:
+        :return str: Alert message
+        """
+        if self.user_settings and self.user_settings.owner == removed:
+
+            # Delete OAuth tokens
+            self.user_settings = None
+            self.save()
+            message = (
+                u'Because the GitLab add-on for {category} "{title}" was authenticated '
+                u'by {user}, authentication information has been deleted.'
+            ).format(
+                category=markupsafe.escape(node.category_display),
+                title=markupsafe.escape(node.title),
+                user=markupsafe.escape(removed.fullname)
+            )
+
+            if not auth or auth.user != removed:
+                url = node.web_url_for('node_setting')
+                message += (
+                    u' You can re-authenticate on the <u><a href="{url}">Settings</a></u> page.'
+                ).format(url=url)
+            #
+            return message
+
+    def after_fork(self, node, fork, user, save=True):
+        """
+        :param Node node: Original node
+        :param Node fork: Forked node
+        :param User user: User creating fork
+        :param bool save: Save settings after callback
+        :return tuple: Tuple of cloned settings and alert message
+        """
+        clone, _ = super(GitLabNodeSettings, self).after_fork(
+            node, fork, user, save=False
+        )
+
+        # Copy authentication if authenticated by forking user
+        if self.user_settings and self.user_settings.owner == user:
+            clone.user_settings = self.user_settings
+            message = (
+                'GitLab authorization copied to forked {cat}.'
+            ).format(
+                cat=markupsafe.escape(fork.project_or_component),
+            )
+        else:
+            message = (
+                'GitLab authorization not copied to forked {cat}. You may '
+                'authorize this fork on the <u><a href={url}>Settings</a></u> '
+                'page.'
+            ).format(
+                cat=markupsafe.escape(fork.project_or_component),
+                url=fork.url + 'settings/'
+            )
+
+        if save:
+            clone.save()
+
+        return clone, message
+
+    def before_make_public(self, node):
+        try:
+            is_private = self.is_private
+        except NotFoundError:
+            return None
+        if is_private:
+            return (
+                'This {cat} is connected to a private GitLab repository. Users '
+                '(other than contributors) will not be able to see the '
+                'contents of this repo unless it is made public on GitLab.'
+            ).format(
+                cat=node.project_or_component,
+            )
+
+    def after_delete(self, node, user):
+        self.deauthorize(Auth(user=user), log=True)
+
+    #########
+    # Hooks #
+    #########
+
+    # TODO: Should Events be added here?
+    # TODO: Move hook logic to service
+    def add_hook(self, save=True):
+
+        if self.user_settings:
+            connect = GitLabClient(external_account=self.external_account)
+            secret = utils.make_hook_secret()
+            hook = connect.add_hook(
+                self.user, self.repo,
+                'web',
+                {
+                    'url': urlparse.urljoin(
+                        hook_domain,
+                        os.path.join(
+                            self.owner.api_url, 'gitlab', 'hook/'
+                        )
+                    ),
+                    'content_type': gitlab_settings.HOOK_CONTENT_TYPE,
+                    'secret': secret,
+                },
+                events=gitlab_settings.HOOK_EVENTS,
+            )
+
+            if hook:
+                self.hook_id = hook.id
+                self.hook_secret = secret
+                if save:
+                    self.save()
+
+    def delete_hook(self, save=True):
+        """
+        :return bool: Hook was deleted
+        """
+        if self.user_settings and self.hook_id:
+            connection = GitLabClient(external_account=self.external_account)
+            try:
+                response = connection.delete_hook(self.user, self.repo, self.hook_id)
+            except (GitLabError, NotFoundError):
+                return False
+            if response:
+                self.hook_id = None
+                if save:
+                    self.save()
+                return True
+        return False
diff --git a/website/addons/gitlab/serializer.py b/website/addons/gitlab/serializer.py
new file mode 100644
index 00000000000..a94b4cd4e99
--- /dev/null
+++ b/website/addons/gitlab/serializer.py
@@ -0,0 +1,50 @@
+from website.addons.base.serializer import StorageAddonSerializer
+
+from website.util import api_url_for
+
+from website.addons.gitlab.api import GitLabClient
+from website.addons.gitlab.exceptions import GitLabError
+
+class GitLabSerializer(StorageAddonSerializer):
+
+    addon_short_name = 'gitlab'
+
+    # Include host information with more informative labels / formatting
+    def serialize_account(self, external_account):
+        ret = super(GitLabSerializer, self).serialize_account(external_account)
+        host = external_account.display_name
+        ret.update({
+            'host': host,
+            'host_url': 'https://{0}'.format(host),
+        })
+
+        return ret
+
+    def credentials_are_valid(self, user_settings, client):
+        if user_settings:
+            client = client or GitLabClient(external_account=user_settings.external_accounts[0])
+            try:
+                client.user()
+            except (GitLabError, IndexError):
+                return False
+        return True
+
+    def serialized_folder(self, node_settings):
+        return {
+            'path': node_settings.repo,
+            'name': '{0} / {1}'.format(node_settings.user, node_settings.repo),
+        }
+
+    @property
+    def addon_serialized_urls(self):
+        node = self.node_settings.owner
+
+        return {
+            'auth': api_url_for('oauth_connect', service_name='GitLab'),
+            'importAuth': node.api_url_for('gitlab_import_auth'),
+            'files': node.web_url_for('collect_file_trees'),
+            'folders': node.api_url_for('gitlab_root_folder'),
+            'config': node.api_url_for('gitlab_set_config'),
+            'deauthorize': node.api_url_for('gitlab_deauthorize_node'),
+            'accounts': node.api_url_for('gitlab_account_list'),
+        }
diff --git a/website/addons/gitlab/static/comicon.png b/website/addons/gitlab/static/comicon.png
new file mode 100644
index 0000000000000000000000000000000000000000..1ca416ac6626be3ac0ffc3a82df481ae97746692
GIT binary patch
literal 750
zcmV<K0ulX*P)<h;3K|Lk000e1NJLTq000yK000sQ1^@s6ThRUx00006VoOIv0RI60
z0RN!9r;`8x010qNS#tmY3ljhU3ljkVnw%H_000McNliru;06{FEGa~T$Q=Lx0((hB
zK~y-)rISrdRACgyfA{gujE<v?HkPLLqIt)gErJkx-N-m&qlAm7h|;OA;I36c;kU35
zf_5#UP}><;AD~b!Xyj-{E}{lW#yihxapx5^g$_LMoOA#8f6o7$^FW!c#O%41nBB_(
zY&G6ZI%fB!WA>}^xGDk$>{f(cgxl-M=)Qz)vx^8ML>O5eu$y)eYR!2d0Q4^{*0YxA
z-jggHw+q)>09uGIU+VD)(}{|Y25|1w=}f3~W^qr#_Spr}3A+X13;;s1WBxPWhAdS3
z%t8Tor*O$><Kcdi#pQ9CdmSK~@q3cfG5c;Mn(=>;ttV|?L2lkc>6o23IrqsU)H%jQ
zpzQcp`t82G2(w%Ch)`4UkIPd4!+mKrt2Gw*O1Ly4`1Rc&v!Qm;3ZaM)KBg;Tbl~P%
zXj5c~>Z62p3+P<wRM!b2yYLAF8>9lOkAQBZ%Glz90#XRj3e@Ja0sbIV`F0YT)!?s@
zQe`3FIOI1ZsW2ERx@Q4jNk-jaU0J0-T|}b`Y6Fm8oPu3b2!p`yV#X+-OVW(8HW~^7
zwc_D4U{#Q1fEz}^bEm9n3=$~d2e9PDH@+ygw>+2`fToTdh5>W|jrk&kH|D4_44~X+
zUK+i@Ai@wDhycxA8v-FI4x>6G9$6F600@F6&QFE`+(`>%-gmq=7y)zCc}tE?;3Frz
zAqDFN(OxCBjoKSRCjcMNbeUt$I}YUs_P{A^o;l{=PFk2LXBo{!geTbG4}<9!d^Lg@
z%J(hD1lwFT$DAbqGv$=}yxU7~pIewI2Y1T4YHadi^Sx?E3BfC}MlCnZd(J%nW7!F7
g`p={2>w{qb0>5_ZjhJ+FH~;_u07*qoM6N<$f>uUP7XSbN

literal 0
HcmV?d00001

diff --git a/website/addons/gitlab/static/gitlabFangornConfig.js b/website/addons/gitlab/static/gitlabFangornConfig.js
new file mode 100644
index 00000000000..facb41a7bcb
--- /dev/null
+++ b/website/addons/gitlab/static/gitlabFangornConfig.js
@@ -0,0 +1,269 @@
+'use strict';
+/**
+ * GitLab FileBrowser configuration module.
+ */
+
+var m = require('mithril');
+var $ = require('jquery');
+var URI = require('URIjs');
+var Fangorn = require('js/fangorn').Fangorn;
+var waterbutler = require('js/waterbutler');
+var $osf = require('js/osfHelpers');
+
+// Cross browser key codes for the Command key
+var commandKeys = [224, 17, 91, 93];
+
+function _formatRepoUrl(item, branch) {
+    return item.data.urls.repo.substring(0, item.data.urls.repo.indexOf('/tree/') + 6) + branch;
+}
+
+function _formatZipUrl(item, branch) {
+    return item.data.urls.zip.substring(0, item.data.urls.zip.indexOf('?ref=') + 5) + branch;
+}
+
+function _getCurrentBranch(item) {
+    var branch;
+    if (item.data.branch === undefined) {
+        if (item.data.isAddonRoot) {
+            branch = item.data.default_branch;
+        } else {
+            branch = item.data.extra.branch;
+        }
+    } else {
+        branch = item.data.branch;
+    }
+    return branch;
+}
+
+// Define Fangorn Button Actions
+var _gitlabItemButtons = {
+    view: function (ctrl, args, children) {
+        var tb = args.treebeard;
+        var item = args.item;
+        var buttons = [];
+        function _downloadEvent(event, item, col) {
+            event.stopPropagation();
+            var branch = _getCurrentBranch(item);
+            window.location = waterbutler.buildTreeBeardDownload(item, {branch: branch});
+        }
+        // Download Zip File
+        if (item.kind === 'folder') {
+            var branchArray = [];
+            if (item.data.branches) {
+                item.data.branch = _getCurrentBranch(item);
+                for (var i = 0; i < item.data.branches.length; i++) {
+                    var selected = item.data.branches[i] === item.data.branch ? 'selected' : '';
+                    branchArray.push(m('option', {
+                        selected: selected,
+                        value: item.data.branches[i]
+                    }, item.data.branches[i]));
+                }
+            }
+            if (item.data.addonFullname) {
+                buttons.push(
+                    m.component(Fangorn.Components.dropdown, {
+                        'label': 'Branch: ',
+                        onchange: function (event) {
+                            changeBranch.call(tb, item, event.target.value);
+                        },
+                        icon: 'fa fa-external-link',
+                        className: 'text-info'
+                    }, branchArray)
+                );
+            }
+            if (tb.options.placement !== 'fileview') {
+                if (item.data.addonFullname) {
+                    var branch = _getCurrentBranch(item);
+
+                    buttons.push(
+                        m.component(Fangorn.Components.button, {
+                            onclick: function (event) {
+                                window.location = _formatZipUrl(item, branch);
+                            },
+                            icon: 'fa fa-download',
+                            className: 'text-primary'
+                        }, 'Download'),
+                        m.component(Fangorn.Components.button, {
+                            onclick: function (event) {
+                                window.open(_formatRepoUrl(item, branch), '_blank');
+                            },
+                            icon: 'fa fa-external-link',
+                            className: 'text-info'
+                        }, 'Open')
+                    );
+                }
+            }
+        } else if (item.kind === 'file' && tb.options.placement !== 'fileview') {
+            buttons.push(
+                m.component(Fangorn.Components.button, {
+                    onclick: function (event) {
+                        _downloadEvent.call(tb, event, item);
+                    },
+                    icon: 'fa fa-download',
+                    className: 'text-primary'
+                }, 'Download')
+            );
+            if (item.data.permissions && item.data.permissions.view) {
+                buttons.push(
+                    m.component(Fangorn.Components.button, {
+                        onclick: function(event) {
+                            gotoFile.call(tb, item);
+                        },
+                        icon: 'fa fa-file-o',
+                        className : 'text-info'
+                    }, 'View'));
+            }
+            if (item.data.permissions && item.data.permissions.view && !item.data.permissions.private) {
+                buttons.push(
+                    m('a.text-info.fangorn-toolbar-icon', {href: item.data.extra.webView}, [
+                        m('i.fa.fa-external-link'),
+                        m('span', 'View on GitLab')
+                    ])
+                );
+            }
+        }
+
+        return m('span', buttons); // Tell fangorn this function is used.
+    }
+};
+
+function changeBranch(item, ref){
+    item.data.branch = ref;
+    this.updateFolder(null, item);
+}
+
+function _resolveLazyLoad(item) {
+    var branch = _getCurrentBranch(item);
+    return waterbutler.buildTreeBeardMetadata(item, {ref: branch});
+}
+
+function _fangornLazyLoadOnLoad (tree, event) {
+    var tb = this;
+    tree.children.forEach(function(item) {
+        Fangorn.Utils.inheritFromParent(item, tree, ['branch']);
+    });
+    Fangorn.Utils.setCurrentFileID.call(tb, tree, window.contextVars.node.id, window.contextVars.file);
+    if(!event && tb.isMultiselected(tb.currentFileID)){
+        Fangorn.Utils.scrollToFile.call(tb, tb.currentFileID);
+    }
+}
+
+function gotoFile (item) {
+    var tb = this;
+    var branch = _getCurrentBranch(item);
+    var fileurl = new URI(item.data.nodeUrl)
+        .segment('files')
+        .segment(item.data.provider)
+        .segment(item.data.path.substring(1))
+        .search({branch: branch})
+        .toString();
+    if(commandKeys.indexOf(tb.pressedKey) !== -1) {
+        window.open(fileurl, '_blank');
+    } else {
+        window.open(fileurl, '_self');
+    }
+}
+
+function _fangornGitLabTitle(item, col)  {
+    var tb = this;
+    if (item.data.isAddonRoot && item.connected === false) { // as opposed to undefined, avoids unnecessary setting of this value
+        return Fangorn.Utils.connectCheckTemplate.call(this, item);
+    }
+
+    if (item.data.addonFullname) {
+        var urlParams = $osf.urlParams();
+
+        if (!item.data.branch && urlParams.branch) {
+            item.data.branch = urlParams.branch;
+        }
+        var branch = _getCurrentBranch(item);
+
+        return m('span',[
+            m('gitlab-name', item.data.name + ' (' + branch + ')')
+        ]);
+    } else {
+        if (item.kind === 'file' && item.data.permissions.view) {
+            return m('span',[
+                m('gitlab-name.fg-file-links', {
+                    onclick: function() {
+                        gotoFile.call(tb, item);
+                    }
+                }, item.data.name)]);
+        } else {
+            return m('span', item.data.name);
+        }
+    }
+}
+
+function _fangornColumns (item) {
+    var tb = this;
+    var node = item.parent().parent();
+    var columns = [];
+    columns.push({
+        data : 'name',
+        folderIcons : true,
+        filter: true,
+        custom : _fangornGitLabTitle
+    });
+
+    if(tb.options.placement === 'project-files') {
+        columns.push(
+        {
+            data  : 'size',
+            sortInclude : false,
+            filter : false,
+            custom : function() {return item.data.size ? $osf.humanFileSize(item.data.size, true) : '';}
+        });
+        columns.push(
+        {
+            data  : 'downloads',
+            sortInclude : false,
+            filter : false,
+            custom : function() {return m('');}
+        });
+        columns.push({
+            data: 'version',
+            filter: false,
+            sortInclude : false,
+            custom: function() {return m('');}
+        });
+    }
+    if(tb.options.placement !== 'fileview') {
+        columns.push({
+            data : 'modified',
+            filter: false,
+            custom : function() {return m('');}
+        });
+    }
+    return columns;
+}
+
+
+function _fangornFolderIcons(item){
+    if(item.data.iconUrl){
+        return m('img',{src:item.data.iconUrl, style:{width:'16px', height:'auto'}}, ' ');
+    }
+    return undefined;
+}
+
+function _fangornUploadComplete(item){
+    var index = this.returnIndex(item.id);
+}
+
+function _fangornUploadSuccess(file, item, response) {
+    if (response) {
+        response.branch = item.parent().data.branch;
+    }
+}
+
+// Register configuration
+Fangorn.config.gitlab = {
+    // Handle changing the branch select
+    lazyload: _resolveLazyLoad,
+    resolveRows: _fangornColumns,
+    folderIcon: _fangornFolderIcons,
+    onUploadComplete: _fangornUploadComplete,
+    lazyLoadOnLoad: _fangornLazyLoadOnLoad,
+    uploadSuccess: _fangornUploadSuccess,
+    itemButtons: _gitlabItemButtons,
+};
diff --git a/website/addons/gitlab/static/node-cfg.js b/website/addons/gitlab/static/node-cfg.js
new file mode 100644
index 00000000000..3256d572eed
--- /dev/null
+++ b/website/addons/gitlab/static/node-cfg.js
@@ -0,0 +1,7 @@
+'use strict';
+
+var $ = require('jquery');
+require('./gitlab-node-cfg.js');
+var AddonHelper = require('js/addonHelper');
+
+$(window.contextVars.gitlabSettingsSelector).on('submit', AddonHelper.onSubmitSettings);
diff --git a/website/addons/gitlab/templates/gitlab_node_settings.mako b/website/addons/gitlab/templates/gitlab_node_settings.mako
new file mode 100644
index 00000000000..b4c548b9220
--- /dev/null
+++ b/website/addons/gitlab/templates/gitlab_node_settings.mako
@@ -0,0 +1,99 @@
+<form role="form" id="addonSettingsGitLab" data-addon="${addon_short_name}">
+
+    <div>
+        <h4 class="addon-title">
+            <img class="addon-icon" src="${addon_icon_url}">
+            GitLab
+            <small class="authorized-by">
+                % if node_has_auth:
+                        authorized by
+                        <a href="${auth_osf_url}" target="_blank">
+                            ${auth_osf_name}
+                        </a>
+                    % if not is_registration:
+                        <a id="gitlabRemoveToken" class="text-danger pull-right addon-auth" >
+                          Disconnect Account
+                        </a>
+                    % endif
+                % else:
+                    % if user_has_auth:
+                        <a id="gitlabImportToken" class="text-primary pull-right addon-auth">
+                           Import Account from Profile
+                        </a>
+                    % else:
+                        <a id="gitlabCreateToken" class="text-primary pull-right addon-auth">
+                           Connect Account
+                        </a>
+                    % endif
+                % endif
+            </small>
+        </h4>
+    </div>
+
+    % if node_has_auth and valid_credentials:
+
+        <input type="hidden" id="gitlabUser" name="gitlab_user" value="${gitlab_user}" />
+        <input type="hidden" id="gitlabRepo" name="gitlab_repo" value="${gitlab_repo}" />
+        <input type="hidden" id="gitlabRepoId" name="gitlab_repo_id" value="${gitlab_repo_id}" />
+
+        <p><strong>Current Repo: </strong>
+
+        % if is_owner and not is_registration:
+        </p>
+        <div class="row">
+            <div class="col-md-6 m-b-sm">
+                <select id="gitlabSelectRepo" class="form-control" ${'disabled' if not is_owner or is_registration else ''}>
+                    <option>-----</option>
+                        % if is_owner:
+                            % if repos:
+                              % for repo in repos:
+                                  <option value="${repo['id']}" ${'selected' if repo['id'] == int(gitlab_repo_id) else ''}>${repo['path_with_namespace']}</option>
+                              % endfor
+                            % endif
+                        % else:
+                            <option selected>${gitlab_repo_full_name}</option>
+                        % endif
+                </select>
+            </div>
+
+            <div class="col-md-6 m-b-sm">
+                <button class="btn btn-success addon-settings-submit">
+                    Save
+                </button>
+                <a id="gitlabCreateRepo" class="btn btn-success pull-right">Create Repo</a>
+            </div>
+        </div>
+        % elif gitlab_repo_full_name:
+            <a href="${files_url}">${gitlab_repo_full_name}</a></p>
+        % else:
+            <span>None</span></p>
+        % endif
+    % endif
+
+    ${self.on_submit()}
+
+    % if node_has_auth and not valid_credentials:
+        <div class="addon-settings-message text-danger p-t-sm">
+            % if is_owner:
+                Could not retrieve GitLab settings at this time. The GitLab addon credentials
+                may no longer be valid. Try deauthorizing and reauthorizing GitLab on your
+                <a href="${addons_url}">account settings page</a>.
+            % else:
+                Could not retrieve GitLab settings at this time. The GitLab addon credentials
+                may no longer be valid. Contact ${auth_osf_name} to verify.
+            % endif
+        </div>
+    % else:
+        <div class="addon-settings-message p-t-sm" style="display: none"></div>
+    % endif
+
+</form>
+
+<%def name="on_submit()">
+    <script type="text/javascript">
+        window.contextVars = $.extend({}, window.contextVars, {
+            ## Short name never changes
+            'gitlabSettingsSelector': '#addonSettingsGitLab'
+        });
+    </script>
+</%def>
diff --git a/website/addons/gitlab/tests/factories.py b/website/addons/gitlab/tests/factories.py
new file mode 100644
index 00000000000..eab2edcdbdb
--- /dev/null
+++ b/website/addons/gitlab/tests/factories.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+
+from factory import Sequence, SubFactory
+from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory
+
+from website.addons.gitlab.model import GitLabNodeSettings, GitLabUserSettings
+
+
+class GitLabAccountFactory(ExternalAccountFactory):
+    provider = 'gitlab'
+    provider_id = Sequence(lambda n: 'id-{0}'.format(n))
+    oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
+    display_name = 'abc'
+
+
+class GitLabUserSettingsFactory(ModularOdmFactory):
+    class Meta:
+        model = GitLabUserSettings
+
+    owner = SubFactory(UserFactory)
+
+
+class GitLabNodeSettingsFactory(ModularOdmFactory):
+    class Meta:
+        model = GitLabNodeSettings
+
+    owner = SubFactory(ProjectFactory)
+    user_settings = SubFactory(GitLabUserSettingsFactory)
diff --git a/website/addons/gitlab/tests/test_utils.py b/website/addons/gitlab/tests/test_utils.py
new file mode 100644
index 00000000000..dedff482614
--- /dev/null
+++ b/website/addons/gitlab/tests/test_utils.py
@@ -0,0 +1,62 @@
+import json
+import hmac
+import hashlib
+
+import nose
+from nose.tools import *  # noqa
+
+from tests.base import OsfTestCase
+
+from website.addons.gitlab import utils
+from website.addons.base.exceptions import HookError
+from website.addons.gitlab.model import GitLabNodeSettings
+
+
+def make_signature(secret, data):
+    return hmac.new(secret, data, hashlib.sha1).hexdigest()
+
+HOOK_PAYLOAD = json.dumps({
+    'files': [],
+    'message': 'fake commit',
+})
+
+
+class TestHookVerify(OsfTestCase):
+
+    def setUp(self):
+        super(TestHookVerify, self).setUp()
+        self.node_settings = GitLabNodeSettings(
+            hook_secret='speakfriend',
+        )
+
+    def test_verify_no_secret(self):
+        self.node_settings.hook_secret = None
+        with assert_raises(HookError):
+            utils.verify_hook_signature(self.node_settings, {}, {})
+
+    def test_verify_valid(self):
+        try:
+            utils.verify_hook_signature(
+                self.node_settings,
+                HOOK_PAYLOAD,
+                {
+                    'X-Hub-Signature': make_signature(
+                        self.node_settings.hook_secret,
+                        HOOK_PAYLOAD,
+                    )
+                }
+            )
+        except HookError:
+            assert 0
+
+    def test_verify_invalid(self):
+        with assert_raises(HookError):
+            utils.verify_hook_signature(
+                self.node_settings,
+                HOOK_PAYLOAD,
+                {'X-Hub-Signature': 'invalid'}
+            )
+
+
+if __name__ == '__main__':
+    nose.run()
diff --git a/website/addons/gitlab/utils.py b/website/addons/gitlab/utils.py
new file mode 100644
index 00000000000..a33ecea1d48
--- /dev/null
+++ b/website/addons/gitlab/utils.py
@@ -0,0 +1,116 @@
+import hmac
+import uuid
+import urllib
+import hashlib
+import httplib as http
+
+from framework.exceptions import HTTPError
+from website.addons.base.exceptions import HookError
+
+from website.addons.gitlab.api import GitLabClient
+
+MESSAGE_BASE = 'via the Open Science Framework'
+MESSAGES = {
+    'add': 'Added {0}'.format(MESSAGE_BASE),
+    'move': 'Moved {0}'.format(MESSAGE_BASE),
+    'copy': 'Copied {0}'.format(MESSAGE_BASE),
+    'update': 'Updated {0}'.format(MESSAGE_BASE),
+    'delete': 'Deleted {0}'.format(MESSAGE_BASE),
+}
+
+
+def make_hook_secret():
+    return str(uuid.uuid4()).replace('-', '')
+
+
+HOOK_SIGNATURE_KEY = 'X-Hub-Signature'
+def verify_hook_signature(node_settings, data, headers):
+    """Verify hook signature.
+    :param GitLabNodeSettings node_settings:
+    :param dict data: JSON response body
+    :param dict headers: Request headers
+    :raises: HookError if signature is missing or invalid
+    """
+    if node_settings.hook_secret is None:
+        raise HookError('No secret key')
+    digest = hmac.new(
+        str(node_settings.hook_secret),
+        data,
+        digestmod=hashlib.sha1
+    ).hexdigest()
+    signature = headers.get(HOOK_SIGNATURE_KEY, '').replace('sha1=', '')
+    if digest != signature:
+        raise HookError('Invalid signature')
+
+
+def get_path(kwargs, required=True):
+    path = kwargs.get('path')
+    if path:
+        return urllib.unquote_plus(path)
+    elif required:
+        raise HTTPError(http.BAD_REQUEST)
+
+
+def get_refs(addon, branch=None, sha=None, connection=None):
+    """Get the appropriate branch name and sha given the addon settings object,
+    and optionally the branch and sha from the request arguments.
+    :param str branch: Branch name. If None, return the default branch from the
+        repo settings.
+    :param str sha: The SHA.
+    :param GitLab connection: GitLab API object. If None, one will be created
+        from the addon's user settings.
+    """
+    connection = connection or GitLabClient(external_account=addon.external_account)
+
+    if sha and not branch:
+        raise HTTPError(http.BAD_REQUEST)
+
+    # Get default branch if not provided
+    if not branch:
+        repo = connection.repo(addon.repo_id)
+        if repo is None:
+            return None, None, None
+        branch = repo['default_branch']
+
+    # Get data from GitLab API if not registered
+    branches = connection.branches(addon.repo_id)
+
+    # Use registered SHA if provided
+    for each in branches:
+        if branch == each['name']:
+            sha = each['commit']['id']
+            break
+
+    return branch, sha, branches
+
+
+def check_permissions(node_settings, auth, connection, branch, sha=None, repo=None):
+
+    user_settings = node_settings.user_settings
+    has_access = False
+
+    has_auth = bool(user_settings and user_settings.has_auth)
+    if has_auth:
+        repo = repo or connection.repo(node_settings.repo_id)
+
+        has_access = (
+            repo is not None and (
+                repo['permissions']['project_access']['access_level'] >= 30
+            )
+        )
+
+    if sha:
+        current_branch = connection.branches(node_settings.repo_id, branch)
+        # TODO Will I ever return false?
+        is_head = sha == current_branch['commit']['id']
+    else:
+        is_head = True
+
+    can_edit = (
+        node_settings.owner.can_edit(auth) and
+        not node_settings.owner.is_registration and
+        has_access and
+        is_head
+    )
+
+    return can_edit
diff --git a/website/files/models/gitlab.py b/website/files/models/gitlab.py
new file mode 100644
index 00000000000..cfa7a373706
--- /dev/null
+++ b/website/files/models/gitlab.py
@@ -0,0 +1,20 @@
+from website.files.models.base import File, Folder, FileNode
+
+
+__all__ = ('GitLabFile', 'GitLabFolder', 'GitLabFileNode')
+
+
+class GitLabFileNode(FileNode):
+    provider = 'gitlab'
+
+
+class GitLabFolder(GitLabFileNode, Folder):
+    pass
+
+
+class GitLabFile(GitLabFileNode, File):
+    version_identifier = 'ref'
+
+    def touch(self, auth_header, revision=None, ref=None, branch=None, **kwargs):
+        revision = revision or ref or branch
+        return super(GitLabFile, self).touch(auth_header, revision=revision, **kwargs)
diff --git a/website/static/js/fileViewTreebeard.js b/website/static/js/fileViewTreebeard.js
index 4f7d3005d62..862cc8cabbe 100644
--- a/website/static/js/fileViewTreebeard.js
+++ b/website/static/js/fileViewTreebeard.js
@@ -7,11 +7,11 @@ function FileViewTreebeard(data) {
     // Set item.branch to show the branch of the rendered GitHub / Bitbucket file instead of the default branch
     var addonRootFolders = data.data[0].children;
 
-    if (window.contextVars.file.provider === 'github' || window.contextVars.file.provider === 'bitbucket') {
+    if (window.contextVars.file.provider === 'github' || window.contextVars.file.provider === 'bitbucket' || window.contextVars.file.provider === 'gitlab') {
         for (var i = 0; i < addonRootFolders.length; i++) {
             var item = addonRootFolders[i];
             if (
-                (item.provider === 'github' || item.provider === 'bitbucket') &&
+                (item.provider === 'github' || item.provider === 'bitbucket' || item.provider === 'gitlab') &&
                     item.isAddonRoot && window.contextVars.file.extra.branch
             ) {
                 item.branch = window.contextVars.file.extra.branch;
diff --git a/website/static/js/osfLanguage.js b/website/static/js/osfLanguage.js
index 5ad69368472..cba16d0ddab 100644
--- a/website/static/js/osfLanguage.js
+++ b/website/static/js/osfLanguage.js
@@ -75,6 +75,10 @@ module.exports = {
             confirmDeauth: 'Are you sure you want to disconnect the Bitbucket account? ' +
                 'This will revoke access to Bitbucket for all projects you have ' +
                 'associated with this account.',
+        gitlab: {
+            confirmDeauth: 'Are you sure you want to disconnect the GitLab account? ' +
+                'This will revoke access to GitLab for all projects you have ' +
+                'associated with this account.',
         },
         s3:{
             authError: 'Could not connect to Amazon S3 at this time. Please try again later.',
diff --git a/website/util/rubeus.py b/website/util/rubeus.py
index 49bef702ca3..73bc0592680 100644
--- a/website/util/rubeus.py
+++ b/website/util/rubeus.py
@@ -64,7 +64,7 @@ def build_addon_root(node_settings, name, permissions=None,
     :param dict or Auth permissions: Dictionary of permissions for the addon's content or Auth for use in node.can_X methods
     :param dict urls: Hgrid related urls
     :param String extra: Html to be appened to the addon folder name
-        eg. Branch switcher for github/bitbucket
+        eg. Branch switcher for github/bitbucket/gitlab
     :param list of dicts buttons: List of buttons to appear in HGrid row. Each
         dict must have 'text', a string that will appear on the button, and
         'action', the name of a function in

From 6e1cd71f25f78f0f2ae5713d1773ddb5485e657d Mon Sep 17 00:00:00 2001
From: Luis Henrique Mulinari <luis.mulinari@gmail.com>
Date: Mon, 3 Jul 2017 13:56:39 -0400
Subject: [PATCH 057/192] Implement initial GitLab Addon     [2/3]

---
 api/base/views.py                             |   1 +
 website/addons/gitlab/README.md               |  10 +
 website/addons/gitlab/exceptions.py           |   8 +
 website/addons/gitlab/routes.py               | 127 ++++++++++
 website/addons/gitlab/settings/defaults.py    |  11 +
 .../addons/gitlab/static/gitlab-node-cfg.js   | 190 ++++++++++++++
 .../addons/gitlab/static/gitlabUserConfig.js  | 236 ++++++++++++++++++
 .../templates/gitlab_credentials_modal.mako   |  63 +++++
 website/addons/gitlab/tests/__init__.py       |   0
 .../addons/gitlab/tests/test_serializer.py    |  34 +++
 website/addons/gitlab/tests/utils.py          | 146 +++++++++++
 website/files/models/__init__.py              |  12 +
 website/static/js/fangorn.js                  |  10 +-
 website/static/js/logTextParser.js            |  13 +-
 website/static/js/osfLanguage.js              |   1 +
 website/static/storageAddons.json             |   4 +
 16 files changed, 860 insertions(+), 6 deletions(-)
 create mode 100644 website/addons/gitlab/README.md
 create mode 100644 website/addons/gitlab/exceptions.py
 create mode 100644 website/addons/gitlab/routes.py
 create mode 100644 website/addons/gitlab/settings/defaults.py
 create mode 100644 website/addons/gitlab/static/gitlab-node-cfg.js
 create mode 100644 website/addons/gitlab/static/gitlabUserConfig.js
 create mode 100644 website/addons/gitlab/templates/gitlab_credentials_modal.mako
 create mode 100644 website/addons/gitlab/tests/__init__.py
 create mode 100644 website/addons/gitlab/tests/test_serializer.py
 create mode 100644 website/addons/gitlab/tests/utils.py
 create mode 100644 website/files/models/__init__.py

diff --git a/api/base/views.py b/api/base/views.py
index cef98c3bfca..7672e050cfd 100644
--- a/api/base/views.py
+++ b/api/base/views.py
@@ -736,6 +736,7 @@ def root(request, format=None, **kwargs):
         dropbox      Dropbox
         figshare     figshare
         github       GitHub
+        gitlab       GitLab
         googledrive  Google Drive
         osfstorage   OSF Storage
         s3           Amazon S3
diff --git a/website/addons/gitlab/README.md b/website/addons/gitlab/README.md
new file mode 100644
index 00000000000..22282849c70
--- /dev/null
+++ b/website/addons/gitlab/README.md
@@ -0,0 +1,10 @@
+# OSF GitLab Addon
+
+1. On your GitLab profile settings, go to “Applications”
+2. In the Name field, enter your application name, e.g. “OSF GitLab Addon (local)”
+3. In the Redirect URI field, enter the full URL for your OSF instance + "/oauth/callback/gitlab/",
+ e.g. "http://localhost:5000/oauth/callback/gitlab/". Make sure you include the trailing slash
+4. Click on the 'Save application' button to submit the form
+5. Go to your project settings on the OSF, click on the Add-ons menu and then on GitLab
+6. Copy your Application ID and Secret from GitLab into the form
+7. Save the form and you are ready to go.
diff --git a/website/addons/gitlab/exceptions.py b/website/addons/gitlab/exceptions.py
new file mode 100644
index 00000000000..3aa57bfcd66
--- /dev/null
+++ b/website/addons/gitlab/exceptions.py
@@ -0,0 +1,8 @@
+class ApiError(Exception):
+    pass
+
+class NotFoundError(ApiError):
+    pass
+
+class GitLabError(Exception):
+    pass
diff --git a/website/addons/gitlab/routes.py b/website/addons/gitlab/routes.py
new file mode 100644
index 00000000000..17a24919ca4
--- /dev/null
+++ b/website/addons/gitlab/routes.py
@@ -0,0 +1,127 @@
+# -*- coding: utf-8 -*-
+
+from framework.routing import Rule, json_renderer
+
+from website.addons.gitlab import views
+
+api_routes = {
+    'rules': [
+
+        Rule(
+            '/settings/gitlab/',
+            'get',
+            views.gitlab_user_config_get,
+            json_renderer,
+        ),
+        Rule(
+            '/settings/gitlab/accounts/',
+            'post',
+            views.gitlab_add_user_account,
+            json_renderer,
+        ),
+        Rule(
+            [
+                '/settings/gitlab/accounts/',
+            ],
+            'get',
+            views.gitlab_account_list,
+            json_renderer,
+        ),
+
+        Rule(
+            [
+                '/project/<pid>/gitlab/settings/',
+                '/project/<pid>/node/<nid>/gitlab/settings/'
+            ],
+            'get',
+            views.gitlab_get_config,
+            json_renderer,
+        ),
+
+        Rule(
+            [
+                '/project/<pid>/gitlab/settings/',
+                '/project/<pid>/node/<nid>/gitlab/settings/',
+            ],
+            'post',
+            views.gitlab_set_config,
+            json_renderer,
+        ),
+
+        Rule(
+            [
+                '/project/<pid>/gitlab/user_auth/',
+                '/project/<pid>/node/<nid>/gitlab/user_auth/'
+            ],
+            'put',
+            views.gitlab_import_auth,
+            json_renderer,
+        ),
+
+        Rule(
+            [
+                '/project/<pid>/gitlab/user_auth/',
+                '/project/<pid>/node/<nid>/gitlab/user_auth/'
+            ],
+            'delete',
+            views.gitlab_deauthorize_node,
+            json_renderer,
+        ),
+
+        Rule(
+            [
+                '/project/<pid>/gitlab/tarball/',
+                '/project/<pid>/node/<nid>/gitlab/tarball/',
+            ],
+            'get',
+            views.gitlab_download_starball,
+            json_renderer,
+            {'archive': 'tar'},
+            endpoint_suffix='__tar',
+        ),
+        Rule(
+            [
+                '/project/<pid>/gitlab/zipball/',
+                '/project/<pid>/node/<nid>/gitlab/zipball/',
+            ],
+            'get',
+            views.gitlab_download_starball,
+            json_renderer,
+            {'archive': 'zip'},
+            endpoint_suffix='__zip',
+        ),
+
+        Rule(
+            [
+                '/project/<pid>/gitlab/hook/',
+                '/project/<pid>/node/<nid>/gitlab/hook/',
+            ],
+            'post',
+            views.gitlab_hook_callback,
+            json_renderer,
+        ),
+
+        Rule(
+            [
+                '/project/<pid>/gitlab/repo/create/',
+                '/project/<pid>/node/<nid>/gitlab/repo/create/',
+
+            ],
+            'post',
+            views.gitlab_create_repo,
+            json_renderer,
+        ),
+
+        Rule(
+            [
+                '/project/<pid>/gitlab/hgrid/root/',
+                '/project/<pid>/node/<nid>/gitlab/hgrid/root/',
+            ],
+            'get',
+            views.gitlab_root_folder,
+            json_renderer,
+        ),
+
+    ],
+    'prefix': '/api/v1'
+}
diff --git a/website/addons/gitlab/settings/defaults.py b/website/addons/gitlab/settings/defaults.py
new file mode 100644
index 00000000000..78d66a6a401
--- /dev/null
+++ b/website/addons/gitlab/settings/defaults.py
@@ -0,0 +1,11 @@
+DEFAULT_HOSTS = ['gitlab.com']
+
+# GitLab hook domain
+HOOK_DOMAIN = None
+HOOK_CONTENT_TYPE = 'json'
+HOOK_EVENTS = ['push']  # Only log commits
+
+# Max render size in bytes; no max if None
+MAX_RENDER_SIZE = None
+
+CACHE = False
diff --git a/website/addons/gitlab/static/gitlab-node-cfg.js b/website/addons/gitlab/static/gitlab-node-cfg.js
new file mode 100644
index 00000000000..ad852dc42b6
--- /dev/null
+++ b/website/addons/gitlab/static/gitlab-node-cfg.js
@@ -0,0 +1,190 @@
+'use strict';
+
+var ko = require('knockout');
+var $ = require('jquery');
+var bootbox = require('bootbox');
+var $osf = require('js/osfHelpers');
+
+var nodeApiUrl = window.contextVars.node.urls.api;
+
+var GitLabConfigHelper = (function() {
+
+    var connectExistingAccount = function(accountId) {
+        $osf.putJSON(
+                nodeApiUrl + 'gitlab/user_auth/',
+                {'external_account_id': accountId}
+            ).done(function() {
+                    if($osf.isIE()){
+                        window.location.hash = '#configureAddonsAnchor';
+                    }
+                    window.location.reload();
+            }).fail(
+                $osf.handleJSONError
+            );
+    };
+
+    var updateHidden = function(element) {
+        var repoParts = $("option:selected", element).text().split('/');
+
+        $('#gitlabUser').val($.trim(repoParts[0]));
+        $('#gitlabRepo').val($.trim(repoParts[1]));
+        $('#gitlabRepoId').val(element.val());
+    };
+
+    var displayError = function(msg) {
+        $('#addonSettingsGitLab').find('.addon-settings-message')
+            .text('Error: ' + msg)
+            .removeClass('text-success').addClass('text-danger')
+            .fadeOut(100).fadeIn();
+    };
+
+    var createRepo = function() {
+
+        var $elm = $('#addonSettingsGitLab');
+        var $select = $elm.find('select');
+
+        bootbox.prompt({
+            title: 'Name your new repo',
+            placeholder: 'Repo name',
+            callback: function (repoName) {
+                // Return if cancelled
+                if (repoName === null) {
+                    return;
+                }
+
+                if (repoName === '') {
+                    displayError('Your repo must have a name');
+                    return;
+                }
+
+                $osf.postJSON(
+                    nodeApiUrl + 'gitlab/repo/create/',
+                    {name: repoName, user: $("#gitlabUser").val()}
+                ).done(function (response) {
+                        $select.append('<option value="' + response.repo['id'] + '">' + $osf.htmlEscape(response.repo['path_with_namespace']) + '</option>');
+                        $select.val(response.repo['id']);
+                        updateHidden($select);
+                    }).fail(function () {
+                        displayError('Could not create repository');
+                    });
+            },
+            buttons:{
+                confirm:{
+                    label: 'Save',
+                    className:'btn-success'
+                }
+            }
+        });
+    };
+
+    var askImport = function() {
+        $.get('/api/v1/settings/gitlab/accounts/'
+        ).done(function(data){
+            var accounts = data.accounts.map(function(account) {
+                return {
+                    name: account.display_name,
+                    id: account.id
+                };
+            });
+            if (accounts.length > 1) {
+                bootbox.prompt({
+                    title: 'Choose GitLab Account to Import',
+                    inputType: 'select',
+                    inputOptions: ko.utils.arrayMap(
+                        accounts,
+                        function(item) {
+                            return {
+                                text: $osf.htmlEscape(item.name),
+                                value: item.id
+                            };
+                        }
+                    ),
+                    value: accounts[0].id,
+                    callback: function(accountId) {
+                        connectExistingAccount(accountId);
+                    },
+                    buttons: {
+                        confirm:{
+                            label:'Import',
+                        }
+                    }
+                });
+            } else {
+                bootbox.confirm({
+                    title: 'Import GitLab Account?',
+                    message: 'Are you sure you want to link your GitLab account with this project?',
+                    callback: function(confirmed) {
+                        if (confirmed) {
+                            connectExistingAccount(accounts[0].id);
+                        }
+                    },
+                    buttons: {
+                        confirm: {
+                            label:'Import',
+                        }
+                    }
+                });
+            }
+        }).fail(function(xhr, textStatus, error) {
+            displayError('Could not GET GitLab accounts for user.');
+        });
+    };
+
+    $(document).ready(function() {
+        $('#gitlabSelectRepo').on('change', function() {
+            var el = $(this);
+            if (el.val()) {
+                updateHidden(el);
+            }
+        });
+
+        $('#gitlabCreateRepo').on('click', function() {
+            createRepo();
+        });
+
+        $('#gitlabImportToken').on('click', function() {
+            askImport();
+        });
+
+        $('#gitlabCreateToken').on('click', function() {
+            window.oauthComplete = function(res) {
+                askImport();
+            };
+        });
+
+        $('#gitlabRemoveToken').on('click', function() {
+            bootbox.confirm({
+                title: 'Disconnect GitLab Account?',
+                message: 'Are you sure you want to remove this GitLab account?',
+                callback: function(confirm) {
+                    if(confirm) {
+                        $.ajax({
+                        type: 'DELETE',
+                        url: nodeApiUrl + 'gitlab/user_auth/'
+                    }).done(function() {
+                        window.location.reload();
+                    }).fail(
+                        $osf.handleJSONError
+                    );
+                    }
+                },
+                buttons:{
+                    confirm:{
+                        label: 'Disconnect',
+                        className: 'btn-danger'
+                    }
+                }
+            });
+        });
+
+        $('#addonSettingsGitLab .addon-settings-submit').on('click', function() {
+            if (!$('#gitlabRepo').val()) {
+                return false;
+            }
+        });
+
+    });
+
+})();
+
+module.exports = GitLabConfigHelper;
diff --git a/website/addons/gitlab/static/gitlabUserConfig.js b/website/addons/gitlab/static/gitlabUserConfig.js
new file mode 100644
index 00000000000..ff4e8bd0bb9
--- /dev/null
+++ b/website/addons/gitlab/static/gitlabUserConfig.js
@@ -0,0 +1,236 @@
+/**
+* Module that controls the GitLab user settings. Includes Knockout view-model
+* for syncing data.
+*/
+
+var ko = require('knockout');
+var $ = require('jquery');
+var Raven = require('raven-js');
+var bootbox = require('bootbox');
+require('js/osfToggleHeight');
+
+var language = require('js/osfLanguage').Addons.gitlab;
+var osfHelpers = require('js/osfHelpers');
+var addonSettings = require('js/addonSettings');
+
+var ExternalAccount = addonSettings.ExternalAccount;
+
+var $modal = $('#gitlabInputCredentials');
+
+
+function ViewModel(url) {
+    var self = this;
+    const otherString = 'Other (Please Specify)';
+
+    self.properName = 'GitLab';
+    self.clientId = ko.observable();
+    self.urls = ko.observable({});
+    self.hosts = ko.observableArray([]);
+    self.selectedHost = ko.observable();    // Host specified in select element
+    self.customHost = ko.observable();      // Host specified in input element
+    // Whether the initial data has been loaded
+    self.loaded = ko.observable(false);
+    self.accounts = ko.observableArray();
+
+    // Designated host, specified from select or input element
+    self.host = ko.pureComputed(function() {
+        return self.useCustomHost() ? self.customHost() : self.selectedHost();
+    });
+    // Hosts visible in select element. Includes presets and "Other" option
+    self.visibleHosts = ko.pureComputed(function() {
+        return self.hosts().concat([otherString]);
+    });
+    // Whether to use select element or input element for host designation
+    self.useCustomHost = ko.pureComputed(function() {
+        return self.selectedHost() === otherString;
+    });
+    self.showApiTokenInput = ko.pureComputed(function() {
+        return Boolean(self.selectedHost());
+    });
+    self.tokenUrl = ko.pureComputed(function() {
+        return self.host() ? 'https://' + self.host() + '/profile/personal_access_tokens' : null;
+    });
+
+    // Flashed messages
+    self.message = ko.observable('');
+    self.messageClass = ko.observable('text-info');
+
+    /** Reset all fields from GitLab host selection modal */
+    self.clearModal = function() {
+        self.message('');
+        self.messageClass('text-info');
+        self.clientId(null);
+        self.selectedHost(null);
+        self.customHost(null);
+    };
+
+    self.updateAccounts = function() {
+        var url = self.urls().accounts;
+        var request = $.get(url);
+        request.done(function(data) {
+            self.accounts($.map(data.accounts, function(account) {
+                var externalAccount =  new ExternalAccount(account);
+                externalAccount.gitlabHost = account.host;
+                externalAccount.gitlabUrl = account.host_url;
+                return externalAccount;
+            }));
+            $('#gitlab-header').osfToggleHeight({height: 160});
+        });
+        request.fail(function(xhr, status, error) {
+            Raven.captureMessage('Error while updating addon account', {
+                extra: {
+                    url: url,
+                    status: status,
+                    error: error
+                }
+            });
+        });
+        return request;
+    };
+
+    /** Send POST request to authorize GitLab */
+    self.sendAuth = function() {
+        // Selection should not be empty
+        if( !self.selectedHost() ){
+            self.changeMessage("Please select a GitLab repository.", 'text-danger');
+            return;
+        }
+
+        if ( !self.useCustomHost() && !self.clientId() ) {
+            self.changeMessage("Please enter your Personal Access Token.", 'text-danger');
+            return;
+        }
+
+        if ( self.useCustomHost() && (!self.customHost() || !self.clientId()) ) {
+            self.changeMessage("Please enter a GitLab host and your Personal Access Token.", 'text-danger');
+            return;
+        }
+
+        var url = self.urls().create;
+
+        return osfHelpers.postJSON(
+            url,
+            ko.toJS({
+                host: self.host,
+                clientId: self.clientId
+            })
+        ).done(function() {
+            self.updateAccounts();
+            self.clearModal();
+            $modal.modal('hide');
+
+        }).fail(function(xhr, textStatus, error) {
+            var errorMessage = (xhr.status === 401) ? 'Auth Error' : 'Other error';
+            self.changeMessage(errorMessage, 'text-danger');
+            Raven.captureMessage('Could not authenticate with GitLab', {
+                extra: {
+                    url: url,
+                    textStatus: textStatus,
+                    error: error
+                }
+            });
+        });
+    };
+
+    self.askDisconnect = function(account) {
+        var self = this;
+        bootbox.confirm({
+            title: 'Disconnect GitLab Account?',
+            message: '<p class="overflow">' +
+                'Are you sure you want to disconnect the GitLab account on <strong>' +
+                osfHelpers.htmlEscape(account.name) + '</strong>? This will revoke access to GitLab for all projects associated with this account.' +
+                '</p>',
+            callback: function (confirm) {
+                if (confirm) {
+                    self.disconnectAccount(account);
+                }
+            },
+            buttons:{
+                confirm:{
+                    label:'Disconnect',
+                    className:'btn-danger'
+                }
+            }
+        });
+    };
+
+    self.disconnectAccount = function(account) {
+        var self = this;
+        var url = '/api/v1/oauth/accounts/' + account.id + '/';
+        var request = $.ajax({
+            url: url,
+            type: 'DELETE'
+        });
+        request.done(function(data) {
+            self.updateAccounts();
+        });
+        request.fail(function(xhr, status, error) {
+            Raven.captureMessage('Error while removing addon authorization for ' + account.id, {
+                extra: {
+                    url: url,
+                    status: status,
+                    error: error
+                }
+            });
+        });
+        return request;
+    };
+
+    /** Change the flashed status message */
+    self.changeMessage = function(text, css, timeout) {
+        self.message(text);
+        var cssClass = css || 'text-info';
+        self.messageClass(cssClass);
+        if (timeout) {
+            // Reset message after timeout period
+            setTimeout(function() {
+                self.message('');
+                self.messageClass('text-info');
+            }, timeout);
+        }
+    };
+
+    // Update observables with data from the server
+    self.fetch = function() {
+        $.ajax({
+            url: url,
+            type: 'GET',
+            dataType: 'json'
+        }).done(function (response) {
+            var data = response.result;
+            self.urls(data.urls);
+            self.hosts(data.hosts);
+            self.loaded(true);
+            self.updateAccounts();
+        }).fail(function (xhr, textStatus, error) {
+            self.changeMessage(language.userSettingsError, 'text-danger');
+            Raven.captureMessage('Could not GET GitLab settings', {
+                extra: {
+                    url: url,
+                    textStatus: textStatus,
+                    error: error
+                }
+            });
+        });
+    };
+
+    self.selectionChanged = function() {
+        self.changeMessage('','');
+    };
+
+}
+
+function GitLabUserConfig(selector, url) {
+    // Initialization code
+    var self = this;
+    self.selector = selector;
+    self.url = url;
+    // On success, instantiate and bind the ViewModel
+    self.viewModel = new ViewModel(url);
+    osfHelpers.applyBindings(self.viewModel, self.selector);
+}
+
+module.exports = {
+    GitLabViewModel: ViewModel,
+    GitLabUserConfig: GitLabUserConfig    // for backwards-compat
+};
diff --git a/website/addons/gitlab/templates/gitlab_credentials_modal.mako b/website/addons/gitlab/templates/gitlab_credentials_modal.mako
new file mode 100644
index 00000000000..edd7ca13644
--- /dev/null
+++ b/website/addons/gitlab/templates/gitlab_credentials_modal.mako
@@ -0,0 +1,63 @@
+<div id="gitlabInputCredentials" class="modal fade">
+    <div class="modal-dialog modal-lg">
+        <div class="modal-content">
+            <div class="modal-header"><h3>Connect a GitLab Account</h3></div>
+            <form>
+                <div class="modal-body">
+                    <div class="row">
+                        <div class="col-sm-6">
+                            <!-- Select GitLab installation -->
+                            <div class="form-group">
+                                <label for="hostSelect">GitLab Repository</label>
+                                <select class="form-control" id="hostSelect"
+                                        data-bind="options: visibleHosts,
+                                                   optionsCaption: 'Select a GitLab repository',
+                                                   value: selectedHost,
+                                                   event: { change: selectionChanged }">
+                                </select>
+                            </div>
+
+                            <!-- Custom input -->
+                            <div data-bind="if: useCustomHost">
+                                <div class="input-group">
+                                    <div class="input-group-addon">https://</div>
+                                    <input type="text" class="form-control" name="customHost" data-bind="value: customHost">
+                                </div>
+                                <div class="text-info" style="text-align: center">
+                                    <em>Only GitLab repositories v4.0 or higher are supported.</em>
+                                </div>
+                            </div>
+
+                        </div>
+
+                        <div class="col-sm-6">
+                            <!-- Personal Access Token Input-->
+                            <div class="form-group" data-bind="if: showApiTokenInput">
+                                <label for="clientId">
+                                    Personal Access Token
+                                    <!-- Link to API token generation page -->
+                                    <a data-bind="attr: {href: tokenUrl}"
+                                       target="_blank" class="text-muted addon-external-link">
+                                        (Get from GitLab <i class="fa fa-external-link-square"></i>)
+                                    </a>
+                                </label>
+                                <input class="form-control" name="clientId" data-bind="value: clientId"/>
+                            </div>
+                        </div>
+                    </div><!-- end row -->
+                    <!-- Flashed Messages -->
+                    <div class="help-block">
+                        <p data-bind="html: message, attr: {class: messageClass}"></p>
+                    </div>
+                </div><!-- end modal-body -->
+
+                <div class="modal-footer">
+                    <a href="#" class="btn btn-default" data-bind="click: clearModal" data-dismiss="modal">Cancel</a>
+                    <!-- Save Button -->
+                    <button data-bind="click: sendAuth" class="btn btn-success">Save</button>
+
+                </div><!-- end modal-footer -->
+            </form>
+        </div><!-- end modal-content -->
+    </div>
+</div>
diff --git a/website/addons/gitlab/tests/__init__.py b/website/addons/gitlab/tests/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/website/addons/gitlab/tests/test_serializer.py b/website/addons/gitlab/tests/test_serializer.py
new file mode 100644
index 00000000000..1b24aeb4273
--- /dev/null
+++ b/website/addons/gitlab/tests/test_serializer.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+"""Serializer tests for the GitLab addon."""
+import mock
+from nose.tools import *  # noqa (PEP8 asserts)
+
+from website.addons.base.testing.serializers import StorageAddonSerializerTestSuiteMixin
+from website.addons.gitlab.api import GitLabClient
+from website.addons.gitlab.tests.factories import GitLabAccountFactory
+from website.addons.gitlab.serializer import GitLabSerializer
+
+from tests.base import OsfTestCase
+
+class TestGitLabSerializer(StorageAddonSerializerTestSuiteMixin, OsfTestCase):
+
+    addon_short_name = 'gitlab'
+
+    Serializer = GitLabSerializer
+    ExternalAccountFactory = GitLabAccountFactory
+    client = GitLabClient()
+
+    def set_provider_id(self, pid):
+        self.node_settings.repo = pid
+    
+    ## Overrides ##
+
+    def setUp(self):
+        super(TestGitLabSerializer, self).setUp()
+        self.mock_api_user = mock.patch("website.addons.gitlab.api.GitLabClient.user")
+        self.mock_api_user.return_value = mock.Mock()
+        self.mock_api_user.start()
+
+    def tearDown(self):
+        self.mock_api_user.stop()
+        super(TestGitLabSerializer, self).tearDown()
diff --git a/website/addons/gitlab/tests/utils.py b/website/addons/gitlab/tests/utils.py
new file mode 100644
index 00000000000..8e395dda827
--- /dev/null
+++ b/website/addons/gitlab/tests/utils.py
@@ -0,0 +1,146 @@
+import mock
+import github3
+from website.addons.gitlab.api import GitLabClient
+from github3.repos.branch import Branch
+
+from website.addons.base.testing import OAuthAddonTestCaseMixin, AddonTestCase
+from website.addons.gitlab.model import GitLabProvider
+from website.addons.gitlab.tests.factories import GitLabAccountFactory
+
+
+class GitLabAddonTestCase(OAuthAddonTestCaseMixin, AddonTestCase):
+    ADDON_SHORT_NAME = 'gitlab'
+    ExternalAccountFactory = GitLabAccountFactory
+    Provider = GitLabProvider
+
+    def set_node_settings(self, settings):
+        super(GitLabAddonTestCase, self).set_node_settings(settings)
+        settings.repo = 'osfgitlabtest'
+        settings.user = 'osfio'
+
+def create_mock_gitlab(user='osfio', private=False):
+    """Factory for mock GitLab objects.
+    Example: ::
+
+        >>> gitlab = create_mock_gitlab(user='osfio')
+        >>> gitlab.branches(user='osfio', repo='hello-world')
+        >>> [{u'commit': {u'sha': u'e22d92d5d90bb8f9695e9a5e2e2311a5c1997230',
+        ...   u'url': u'https://api.gitlab.com/repos/osfio/mock-repo/commits/e22d92d5d90bb8f9695e9a5e2e2311a5c1997230'},
+        ...  u'name': u'dev'},
+        ... {u'commit': {u'sha': u'444a74d0d90a4aea744dacb31a14f87b5c30759c',
+        ...   u'url': u'https://api.gitlab.com/repos/osfio/mock-repo/commits/444a74d0d90a4aea744dacb31a14f87b5c30759c'},
+        ...  u'name': u'master'},
+        ... {u'commit': {u'sha': u'c6eaaf6708561c3d4439c0c8dd99c2e33525b1e6',
+        ...   u'url': u'https://api.gitlab.com/repos/osfio/mock-repo/commits/c6eaaf6708561c3d4439c0c8dd99c2e33525b1e6'},
+        ...  u'name': u'no-bundle'}]
+
+    :param str user: GitLab username.
+    :param bool private: Whether repo is private.
+    :return: An autospecced GitLab Mock object
+    """
+    gitlab_mock = mock.create_autospec(GitLabClient)
+    gitlab_mock.repo.return_value = github3.repos.Repository.from_json({
+    u'archive_url': u'https://api.gitlab.com/repos/{user}/mock-repo/{{archive_format}}{{/ref}}'.format(user=user),
+     u'assignees_url': u'https://api.gitlab.com/repos/{user}/mock-repo/assignees{{/user}}'.format(user=user),
+     u'blobs_url': u'https://api.gitlab.com/repos/{user}/mock-repo/git/blobs{{/sha}}'.format(user=user),
+     u'branches_url': u'https://api.gitlab.com/repos/{user}/mock-repo/branches{{/branch}}'.format(user=user),
+     u'clone_url': u'https://gitlab.com/{user}/mock-repo.git'.format(user=user),
+     u'collaborators_url': u'https://api.gitlab.com/repos/{user}/mock-repo/collaborators{{/collaborator}}'.format(user=user),
+     u'comments_url': u'https://api.gitlab.com/repos/{user}/mock-repo/comments{{/number}}'.format(user=user),
+     u'commits_url': u'https://api.gitlab.com/repos/{user}/mock-repo/commits{{/sha}}'.format(user=user),
+     u'compare_url': u'https://api.gitlab.com/repos/{user}/mock-repo/compare/{{base}}...{{head}}',
+     u'contents_url': u'https://api.gitlab.com/repos/{user}/mock-repo/contents/{{+path}}'.format(user=user),
+     u'contributors_url': u'https://api.gitlab.com/repos/{user}/mock-repo/contributors'.format(user=user),
+     u'created_at': u'2013-06-30T18:29:18Z',
+     u'default_branch': u'dev',
+     u'description': u'Simple, Pythonic, text processing--Sentiment analysis, part-of-speech tagging, noun phrase extraction, translation, and more.',
+     u'downloads_url': u'https://api.gitlab.com/repos/{user}/mock-repo/downloads'.format(user=user),
+     u'events_url': u'https://api.gitlab.com/repos/{user}/mock-repo/events'.format(user=user),
+     u'fork': False,
+     u'forks': 89,
+     u'forks_count': 89,
+     u'forks_url': u'https://api.gitlab.com/repos/{user}/mock-repo/forks',
+     u'full_name': u'{user}/mock-repo',
+     u'git_commits_url': u'https://api.gitlab.com/repos/{user}/mock-repo/git/commits{{/sha}}'.format(user=user),
+     u'git_refs_url': u'https://api.gitlab.com/repos/{user}/mock-repo/git/refs{{/sha}}'.format(user=user),
+     u'git_tags_url': u'https://api.gitlab.com/repos/{user}/mock-repo/git/tags{{/sha}}'.format(user=user),
+     u'git_url': u'git://gitlab.com/{user}/mock-repo.git'.format(user=user),
+     u'has_downloads': True,
+     u'has_issues': True,
+     u'has_wiki': True,
+     u'homepage': u'https://mock-repo.readthedocs.org/',
+     u'hooks_url': u'https://api.gitlab.com/repos/{user}/mock-repo/hooks'.format(user=user),
+     u'html_url': u'https://gitlab.com/{user}/mock-repo'.format(user=user),
+     u'id': 11075275,
+     u'issue_comment_url': u'https://api.gitlab.com/repos/{user}/mock-repo/issues/comments/{{number}}'.format(user=user),
+     u'issue_events_url': u'https://api.gitlab.com/repos/{user}/mock-repo/issues/events{{/number}}'.format(user=user),
+     u'issues_url': u'https://api.gitlab.com/repos/{user}/mock-repo/issues{{/number}}'.format(user=user),
+     u'keys_url': u'https://api.gitlab.com/repos/{user}/mock-repo/keys{{/key_id}}'.format(user=user),
+     u'labels_url': u'https://api.gitlab.com/repos/{user}/mock-repo/labels{{/name}}'.format(user=user),
+     u'language': u'Python',
+     u'languages_url': u'https://api.gitlab.com/repos/{user}/mock-repo/languages'.format(user=user),
+     u'master_branch': u'dev',
+     u'merges_url': u'https://api.gitlab.com/repos/{user}/mock-repo/merges'.format(user=user),
+     u'milestones_url': u'https://api.gitlab.com/repos/{user}/mock-repo/milestones{{/number}}'.format(user=user),
+     u'mirror_url': None,
+     u'name': u'mock-repo',
+     u'network_count': 89,
+     u'notifications_url': u'https://api.gitlab.com/repos/{user}/mock-repo/notifications{{?since,all,participating}}'.format(user=user),
+     u'open_issues': 2,
+     u'open_issues_count': 2,
+     u'owner': {u'avatar_url': u'https://gravatar.com/avatar/c74f9cfd7776305a82ede0b765d65402?d=https%3A%2F%2Fidenticons.gitlab.com%2F3959fe3bcd263a12c28ae86a66ec75ef.png&r=x',
+      u'events_url': u'https://api.gitlab.com/users/{user}/events{{/privacy}}'.format(user=user),
+      u'followers_url': u'https://api.gitlab.com/users/{user}/followers'.format(user=user),
+      u'following_url': u'https://api.gitlab.com/users/{user}/following{{/other_user}}'.format(user=user),
+      u'gists_url': u'https://api.gitlab.com/users/{user}/gists{{/gist_id}}'.format(user=user),
+      u'gravatar_id': u'c74f9cfd7776305a82ede0b765d65402',
+      u'html_url': u'https://gitlab.com/{user}'.format(user=user),
+      u'id': 2379650,
+      u'login': user,
+      u'organizations_url': u'https://api.gitlab.com/users/{user}/orgs'.format(user=user),
+      u'received_events_url': u'https://api.gitlab.com/users/{user}/received_events',
+      u'repos_url': u'https://api.gitlab.com/users/{user}/repos'.format(user=user),
+      u'site_admin': False,
+      u'starred_url': u'https://api.gitlab.com/users/{user}/starred{{/owner}}{{/repo}}',
+      u'subscriptions_url': u'https://api.gitlab.com/users/{user}/subscriptions'.format(user=user),
+      u'type': u'User',
+      u'url': u'https://api.gitlab.com/users/{user}'.format(user=user)},
+     u'private': private,
+     u'pulls_url': u'https://api.gitlab.com/repos/{user}/mock-repo/pulls{{/number}}'.format(user=user),
+     u'pushed_at': u'2013-12-30T16:05:54Z',
+     u'releases_url': u'https://api.gitlab.com/repos/{user}/mock-repo/releases{{/id}}'.format(user=user),
+     u'size': 8717,
+     u'ssh_url': u'git@gitlab.com:{user}/mock-repo.git'.format(user=user),
+     u'stargazers_count': 1469,
+     u'stargazers_url': u'https://api.gitlab.com/repos/{user}/mock-repo/stargazers'.format(user=user),
+     u'statuses_url': u'https://api.gitlab.com/repos/{user}/mock-repo/statuses/{{sha}}'.format(user=user),
+     u'subscribers_count': 86,
+     u'subscribers_url': u'https://api.gitlab.com/repos/{user}/mock-repo/subscribers'.format(user=user),
+     u'subscription_url': u'https://api.gitlab.com/repos/{user}/mock-repo/subscription'.format(user=user),
+     u'svn_url': u'https://gitlab.com/{user}/mock-repo'.format(user=user),
+     u'tags_url': u'https://api.gitlab.com/repos/{user}/mock-repo/tags'.format(user=user),
+     u'teams_url': u'https://api.gitlab.com/repos/{user}/mock-repo/teams'.format(user=user),
+     u'trees_url': u'https://api.gitlab.com/repos/{user}/mock-repo/git/trees{{/sha}}'.format(user=user),
+     u'updated_at': u'2014-01-12T21:23:50Z',
+     u'url': u'https://api.gitlab.com/repos/{user}/mock-repo'.format(user=user),
+     u'watchers': 1469,
+     u'watchers_count': 1469,
+     # NOTE: permissions are only available if authorized on the repo
+     'permissions': {
+        'push': True
+     }
+     })
+
+    gitlab_mock.branches.return_value = [
+        Branch.from_json({u'commit': {u'sha': u'e22d92d5d90bb8f9695e9a5e2e2311a5c1997230',
+           u'url': u'https://api.gitlab.com/repos/{user}/mock-repo/commits/e22d92d5d90bb8f9695e9a5e2e2311a5c1997230'.format(user=user)},
+          u'name': u'dev'}),
+         Branch.from_json({u'commit': {u'sha': u'444a74d0d90a4aea744dacb31a14f87b5c30759c',
+           u'url': u'https://api.gitlab.com/repos/{user}/mock-repo/commits/444a74d0d90a4aea744dacb31a14f87b5c30759c'.format(user=user)},
+          u'name': u'master'}),
+         Branch.from_json({u'commit': {u'sha': u'c6eaaf6708561c3d4439c0c8dd99c2e33525b1e6',
+           u'url': u'https://api.gitlab.com/repos/{user}/mock-repo/commits/c6eaaf6708561c3d4439c0c8dd99c2e33525b1e6'.format(user=user)},
+          u'name': u'no-bundle'})
+      ]
+
+    return gitlab_mock
diff --git a/website/files/models/__init__.py b/website/files/models/__init__.py
new file mode 100644
index 00000000000..434e4b5a7ba
--- /dev/null
+++ b/website/files/models/__init__.py
@@ -0,0 +1,12 @@
+from website.files.models.base import *  # noqa
+
+from website.files.models.s3 import *  # noqa
+from website.files.models.box import *  # noqa
+from website.files.models.github import *  # noqa
+from website.files.models.gitlab import *  # noqa
+from website.files.models.dropbox import *  # noqa
+from website.files.models.figshare import *  # noqa
+from website.files.models.dataverse import *  # noqa
+from website.files.models.osfstorage import *  # noqa
+from website.files.models.owncloud import *  # noqa
+from website.files.models.googledrive import *  # noqa
diff --git a/website/static/js/fangorn.js b/website/static/js/fangorn.js
index bbe2eeeb046..ca39bb92530 100644
--- a/website/static/js/fangorn.js
+++ b/website/static/js/fangorn.js
@@ -375,7 +375,7 @@ function inheritFromParent(item, parent, fields) {
         item.data[field] = item.data[field] || parent.data[field];
     });
 
-    if(item.data.provider === 'github' || item.data.provider === 'bitbucket'){
+    if(item.data.provider === 'github' || item.data.provider === 'bitbucket' || item.data.provider === 'gitlab'){
         item.data.branch = parent.data.branch;
     }
 }
@@ -544,7 +544,7 @@ function doItemOp(operation, to, from, rename, conflict) {
     }
 
     var options = {};
-    if(from.data.provider === 'github' || from.data.provider === 'bitbucket'){
+    if(from.data.provider === 'github' || from.data.provider === 'bitbucket' || from.data.provider === 'gitlab'){
         options.branch = from.data.branch;
         moveSpec.branch = from.data.branch;
     }
@@ -1029,7 +1029,7 @@ function _createFolder(event, dismissCallback, helpText) {
     var path = parent.data.path || '/';
     var options = {name: val, kind: 'folder'};
 
-    if (parent.data.provider === 'github') {
+    if ((parent.data.provider === 'github') || (parent.data.provider === 'gitlab')) {
         extra.branch = parent.data.branch;
         options.branch = parent.data.branch;
     }
@@ -1038,7 +1038,7 @@ function _createFolder(event, dismissCallback, helpText) {
         method: 'PUT',
         background: true,
         config: $osf.setXHRAuthorization,
-        url: waterbutler.buildCreateFolderUrl(path, parent.data.provider, parent.data.nodeId, options)
+        url: waterbutler.buildCreateFolderUrl(path, parent.data.provider, parent.data.nodeId, options, extra)
     }).then(function(item) {
         item = tb.options.lazyLoadPreprocess.call(this, item).data;
         inheritFromParent({data: item}, parent, ['branch']);
@@ -2522,7 +2522,7 @@ function allowedToMove(folder, item, mustBeIntra) {
         item.data.permissions.edit &&
         (!mustBeIntra || (item.data.provider === folder.data.provider && item.data.nodeId === folder.data.nodeId)) &&
         !(item.data.provider === 'figshare' && item.data.extra && item.data.extra.status === 'public') &&
-        (item.data.provider !== 'bitbucket')
+        (item.data.provider !== 'bitbucket') && (item.data.provider !== 'gitlab')
     );
 }
 
diff --git a/website/static/js/logTextParser.js b/website/static/js/logTextParser.js
index 248a7313ac1..d9e5b539290 100644
--- a/website/static/js/logTextParser.js
+++ b/website/static/js/logTextParser.js
@@ -718,7 +718,18 @@ var LogPieces = {
             }
             return m('span', 'a');
         }
-    }
+    },
+
+    gitlab_repo: {
+        view: function(ctrl, logObject){
+            var gitlab_user = logObject.attributes.params.gitlab_user;
+            var gitlab_repo = logObject.attributes.params.gitlab_repo;
+            if (paramIsReturned(gitlab_repo, logObject) && paramIsReturned(gitlab_user, logObject)) {
+                return m('span', gitlab_user + '/' + gitlab_repo);
+            }
+            return m('span', '');
+        }
+    },
 };
 
 module.exports = {
diff --git a/website/static/js/osfLanguage.js b/website/static/js/osfLanguage.js
index cba16d0ddab..24a4bce9868 100644
--- a/website/static/js/osfLanguage.js
+++ b/website/static/js/osfLanguage.js
@@ -75,6 +75,7 @@ module.exports = {
             confirmDeauth: 'Are you sure you want to disconnect the Bitbucket account? ' +
                 'This will revoke access to Bitbucket for all projects you have ' +
                 'associated with this account.',
+        },
         gitlab: {
             confirmDeauth: 'Are you sure you want to disconnect the GitLab account? ' +
                 'This will revoke access to GitLab for all projects you have ' +
diff --git a/website/static/storageAddons.json b/website/static/storageAddons.json
index 15358899253..795349c000b 100644
--- a/website/static/storageAddons.json
+++ b/website/static/storageAddons.json
@@ -23,6 +23,10 @@
         "fullName": "GitHub",
         "externalView": true
     },
+    "gitlab": {
+        "fullName": "GitLab",
+        "externalView": true
+    },
     "googledrive": {
         "fullName": "Google Drive",
         "externalView": true

From 898a8de8c2b1aed6c607ecc0df25d492f054c4a0 Mon Sep 17 00:00:00 2001
From: Rafael de Lucena Valle <rafaeldelucena@gmail.com>
Date: Mon, 3 Jul 2017 13:57:18 -0400
Subject: [PATCH 058/192] Implement initial GitLab addon     [3/3]

---
 api/base/settings/defaults.py                 |   2 +-
 framework/addons/data/addons.json             |  30 +
 website/addons/gitlab/api.py                  | 144 +++++
 website/addons/gitlab/requirements.txt        |   3 +
 website/addons/gitlab/settings/__init__.py    |  10 +
 website/addons/gitlab/static/files.js         |   1 +
 .../gitlab/static/gitlabLogActionList.json    |  10 +
 website/addons/gitlab/static/user-cfg.js      |  11 +
 .../templates/gitlab_user_settings.mako       |  45 ++
 website/addons/gitlab/tests/test_models.py    | 246 ++++++++
 website/addons/gitlab/tests/test_views.py     | 560 ++++++++++++++++++
 website/addons/gitlab/views.py                | 440 ++++++++++++++
 website/static/js/filepage/index.js           |   6 +-
 13 files changed, 1504 insertions(+), 4 deletions(-)
 create mode 100644 website/addons/gitlab/api.py
 create mode 100644 website/addons/gitlab/requirements.txt
 create mode 100644 website/addons/gitlab/settings/__init__.py
 create mode 100644 website/addons/gitlab/static/files.js
 create mode 100644 website/addons/gitlab/static/gitlabLogActionList.json
 create mode 100644 website/addons/gitlab/static/user-cfg.js
 create mode 100644 website/addons/gitlab/templates/gitlab_user_settings.mako
 create mode 100644 website/addons/gitlab/tests/test_models.py
 create mode 100644 website/addons/gitlab/tests/test_views.py
 create mode 100644 website/addons/gitlab/views.py

diff --git a/api/base/settings/defaults.py b/api/base/settings/defaults.py
index 187e566ef98..b0a03a5ff82 100644
--- a/api/base/settings/defaults.py
+++ b/api/base/settings/defaults.py
@@ -257,7 +257,7 @@
 ESI_MEDIA_TYPES = osf_settings.ESI_MEDIA_TYPES
 
 ADDONS_FOLDER_CONFIGURABLE = ['box', 'dropbox', 's3', 'googledrive', 'figshare', 'owncloud']
-ADDONS_OAUTH = ADDONS_FOLDER_CONFIGURABLE + ['dataverse', 'github', 'bitbucket', 'mendeley', 'zotero', 'forward']
+ADDONS_OAUTH = ADDONS_FOLDER_CONFIGURABLE + ['dataverse', 'github', 'bitbucket', 'gitlab', 'mendeley', 'zotero', 'forward']
 
 BYPASS_THROTTLE_TOKEN = 'test-token'
 
diff --git a/framework/addons/data/addons.json b/framework/addons/data/addons.json
index 3304541f57b..99f6f6976d1 100644
--- a/framework/addons/data/addons.json
+++ b/framework/addons/data/addons.json
@@ -30,6 +30,36 @@
                 "text": "GitHub content will be registered, but version history will not be copied to the registration."
             }
         },
+        "GitLab": {
+            "Permissions": {
+                "status": "partial",
+                "text": "Making an OSF project public or private is independent of making a GitLab repo public or private. The OSF does not alter the permissions of linked GitLab repos."
+            },
+            "View / download file versions": {
+                "status": "full",
+                "text": "GitLab files and their versions can be viewed/downloaded via OSF."
+            },
+            "Add / update files": {
+                "status": "none",
+                "text": "Adding/updating files in the project via OSF is not implemented yet."
+            },
+            "Delete files": {
+                "status": "none",
+                "text": "Deleting files via OSF is not implemented yet."
+            },
+            "Logs": {
+                "status": "full",
+                "text": "GitLab dynamically updates OSF logs when files are modified outside the OSF. Changes to GitLab repos made before the repo is linked to the OSF will not be reflected in OSF logs."
+            },
+            "Forking": {
+                "status": "none",
+                "text": "Forking a project is not implemented yet."
+            },
+            "Registering": {
+                "status": "none",
+                "text": "Registration on GitLab is not implemented yet."
+            }
+        },
         "Amazon S3": {
             "Permissions": {
                 "status": "partial",
diff --git a/website/addons/gitlab/api.py b/website/addons/gitlab/api.py
new file mode 100644
index 00000000000..819c2189995
--- /dev/null
+++ b/website/addons/gitlab/api.py
@@ -0,0 +1,144 @@
+import urllib
+import requests
+
+import gitlab
+import cachecontrol
+from requests.adapters import HTTPAdapter
+
+from website.addons.gitlab.exceptions import NotFoundError
+
+# Initialize caches
+https_cache = cachecontrol.CacheControlAdapter()
+default_adapter = HTTPAdapter()
+
+class GitLabClient(object):
+
+    def __init__(self, external_account=None, access_token=None):
+
+        self.access_token = getattr(external_account, 'provider_id', None) or access_token
+
+        self.host = getattr(external_account, 'display_name', None) or 'gitlab.com'
+
+        if self.access_token:
+            self.gitlab = gitlab.Gitlab(self.host, token=self.access_token)
+        else:
+            self.gitlab = gitlab.Gitlab(self.host)
+
+    def user(self, user=None):
+        """Fetch a user or the authenticated user.
+
+        :param user: Unused; the authenticated user is always fetched
+            regardless of this argument
+        :return dict: GitLab API response
+        """
+        return self.gitlab.currentuser()
+
+    def repo(self, repo_id):
+        """Get a single GitLab repo's info.
+
+        https://docs.gitlab.com/ce/api/projects.html#get-single-project
+
+        :param str repo_id: GitLab repository id
+        :return: Dict of repo information
+        """
+        rv = self.gitlab.getproject(repo_id)
+
+        if rv:
+            return rv
+        raise NotFoundError
+
+    def repos(self):
+        return self.gitlab.getprojects()
+
+    def user_repos(self, user):
+        return self.gitlab.getprojectsowned()
+
+    def create_repo(self, repo, **kwargs):
+        return self.gitlab.createproject(repo)
+
+    def branches(self, repo_id, branch=None):
+        """List a repo's branches or get a single branch (in a list).
+
+        https://docs.gitlab.com/ce/api/branches.html#list-repository-branches
+
+        :param str repo_id: GitLab repository id
+        :param str branch: Branch name if getting a single branch;
+            all branches are listed if omitted
+        :return: List of branch dicts
+        """
+        if branch:
+            return self.gitlab.getbranch(repo_id, branch)
+
+        return self.gitlab.getbranches(repo_id)
+
+    def starball(self, user, repo, repo_id, ref='master'):
+        """Get link for archive download.
+
+        :param str repo_id: GitLab repository id (used to build the URI)
+        :param str ref: Git reference (defaults to 'master')
+        :returns: Tuple of (response headers, archive content);
+            user and repo are accepted but unused in the request
+        """
+        uri = 'projects/{0}/repository/archive?sha={1}'.format(repo_id, ref)
+
+        request = self._get_api_request(uri)
+
+        return request.headers, request.content
+
+    def hooks(self, user, repo):
+        """List webhooks
+
+        https://docs.gitlab.com/ce/api/projects.html#list-project-hooks
+
+        :param str user: GitLab user name
+        :param str repo: GitLab repo name
+        :return bool: Always False; listing webhooks is not implemented yet
+        """
+        return False
+
+    def add_hook(self, user, repo, name, config, events=None, active=True):
+        """Create a webhook.
+
+        https://docs.gitlab.com/ce/api/projects.html#add-project-hook
+
+        :param str user: GitLab user name
+        :param str repo: GitLab repo name
+        :return bool: Always False; webhook creation is not implemented yet
+        """
+        return False
+
+    def delete_hook(self, user, repo, _id):
+        """Delete a webhook.
+
+        https://docs.gitlab.com/ce/api/projects.html#delete-project-hook
+
+        :param str user: GitLab user name
+        :param str repo: GitLab repo name
+        :return bool: Always False; webhook deletion is not implemented
+            yet, and no exception is raised
+        """
+        return False
+
+    def _get_api_request(self, uri):
+        headers = {'PRIVATE-TOKEN': '{}'.format(self.access_token)}
+
+        return requests.get('https://{0}/{1}/{2}'.format(self.host, 'api/v4', uri),
+                            verify=True, headers=headers)
+
+    def revoke_token(self):
+        return False
+
+
+def ref_to_params(branch=None, sha=None):
+
+    params = urllib.urlencode({
+        key: value
+        for key, value in {
+            'branch': branch,
+            'sha': sha,
+        }.iteritems()
+        if value
+    })
+    if params:
+        return '?' + params
+    return ''
diff --git a/website/addons/gitlab/requirements.txt b/website/addons/gitlab/requirements.txt
new file mode 100644
index 00000000000..09ef966d1b9
--- /dev/null
+++ b/website/addons/gitlab/requirements.txt
@@ -0,0 +1,3 @@
+cachecontrol==0.10.2
+pyapi-gitlab==7.8.5
+python-magic==0.4.6
diff --git a/website/addons/gitlab/settings/__init__.py b/website/addons/gitlab/settings/__init__.py
new file mode 100644
index 00000000000..228e7ddf51c
--- /dev/null
+++ b/website/addons/gitlab/settings/__init__.py
@@ -0,0 +1,10 @@
+import logging
+from .defaults import *  # noqa
+
+
+logger = logging.getLogger(__name__)
+
+try:
+    from .local import *  # noqa
+except ImportError as error:
+    logger.warn('No local.py settings file found')
diff --git a/website/addons/gitlab/static/files.js b/website/addons/gitlab/static/files.js
new file mode 100644
index 00000000000..a46a889c7fb
--- /dev/null
+++ b/website/addons/gitlab/static/files.js
@@ -0,0 +1 @@
+require('./gitlabFangornConfig.js');
diff --git a/website/addons/gitlab/static/gitlabLogActionList.json b/website/addons/gitlab/static/gitlabLogActionList.json
new file mode 100644
index 00000000000..731efcafb2b
--- /dev/null
+++ b/website/addons/gitlab/static/gitlabLogActionList.json
@@ -0,0 +1,10 @@
+{
+  "gitlab_file_added" : "${user} added file ${path} to GitLab repo ${gitlab_repo} in ${node}",
+  "gitlab_file_removed" : "${user} removed file ${path} in GitLab repo ${gitlab_repo} in ${node}",
+  "gitlab_file_updated" : "${user} updated file ${path} in GitLab repo ${gitlab_repo} in ${node}",
+  "gitlab_folder_created" : "${user} created folder ${path} in GitLab repo ${gitlab_repo} in ${node}",
+  "gitlab_node_authorized" : "${user} authorized the GitLab addon for ${node}",
+  "gitlab_node_deauthorized" : "${user} deauthorized the GitLab addon for ${node}",
+  "gitlab_node_deauthorized_no_user" : "GitLab addon for ${node} deauthorized",
+  "gitlab_repo_linked" : "${user} linked GitLab repo ${gitlab_repo} to ${node}"
+}
diff --git a/website/addons/gitlab/static/user-cfg.js b/website/addons/gitlab/static/user-cfg.js
new file mode 100644
index 00000000000..312f2ece6d0
--- /dev/null
+++ b/website/addons/gitlab/static/user-cfg.js
@@ -0,0 +1,11 @@
+var $osf = require('js/osfHelpers');
+var GitLabViewModel = require('./gitlabUserConfig.js').GitLabViewModel;
+
+// Endpoint for GitLab user settings
+var url = '/api/v1/settings/gitlab/';
+
+var gitlabViewModel = new GitLabViewModel(url);
+$osf.applyBindings(gitlabViewModel, '#gitlabAddonScope');
+
+// Load initial GitLab data
+gitlabViewModel.fetch();
diff --git a/website/addons/gitlab/templates/gitlab_user_settings.mako b/website/addons/gitlab/templates/gitlab_user_settings.mako
new file mode 100644
index 00000000000..aea509e46c6
--- /dev/null
+++ b/website/addons/gitlab/templates/gitlab_user_settings.mako
@@ -0,0 +1,45 @@
+<!-- Authorization -->
+<div id='gitlabAddonScope' class='addon-settings addon-generic scripted'
+     data-addon-short-name="${ addon_short_name }"
+     data-addon-name="${ addon_full_name }">
+
+    <%include file="gitlab_credentials_modal.mako"/>
+
+    <h4 class="addon-title">
+        <img class="addon-icon" src=${addon_icon_url}>
+        <span data-bind="text: properName"></span>
+        <small>
+            <a href="#gitlabInputCredentials" data-toggle="modal" class="pull-right text-primary">Connect or Reauthorize Account</a>
+        </small>
+    </h4>
+
+    <div class="addon-auth-table" id="${addon_short_name}-header">
+        <!-- ko foreach: accounts -->
+        <a data-bind="click: $root.askDisconnect.bind($root)" class="text-danger pull-right default-authorized-by">Disconnect Account</a>
+        <div class="m-h-lg addon-auth-table" id="${addon_short_name}-header">
+            <table class="table table-hover">
+                <thead>
+                    <tr class="user-settings-addon-auth">
+                        <th class="text-muted default-authorized-by">Authorized on <a data-bind="attr: {href: gitlabHost}"><em data-bind="text: gitlabHost"></em></a></th>
+                    </tr>
+                </thead>
+                <!-- ko if: connectedNodes().length > 0 -->
+                <tbody data-bind="foreach: connectedNodes()">
+                    <tr>
+                        <td class="authorized-nodes">
+                            <!-- ko if: title --><a data-bind="attr: {href: urls.view}, text: title"></a><!-- /ko -->
+                            <!-- ko if: !title --><em>Private project</em><!-- /ko -->
+                        </td>
+                        <td>
+                            <a data-bind="click: $parent.deauthorizeNode.bind($parent)">
+                                <i class="fa fa-times text-danger pull-right" title="Deauthorize Project"></i>
+                            </a>
+                        </td>
+                    </tr>
+                </tbody>
+                <!-- /ko -->
+            </table>
+        </div>
+        <!-- /ko -->
+    </div>
+</div>
diff --git a/website/addons/gitlab/tests/test_models.py b/website/addons/gitlab/tests/test_models.py
new file mode 100644
index 00000000000..ca712b7f0be
--- /dev/null
+++ b/website/addons/gitlab/tests/test_models.py
@@ -0,0 +1,246 @@
+# -*- coding: utf-8 -*-
+
+import mock
+import unittest
+from nose.tools import *  # noqa
+
+from tests.base import OsfTestCase, get_default_metaschema
+from tests.factories import ExternalAccountFactory, ProjectFactory, UserFactory
+
+from framework.auth import Auth
+
+from website.addons.gitlab.exceptions import NotFoundError, GitLabError
+from website.addons.gitlab import settings as gitlab_settings
+from website.addons.gitlab.model import GitLabUserSettings
+from website.addons.gitlab.model import GitLabNodeSettings
+from website.addons.gitlab.tests.factories import (
+    GitLabAccountFactory,
+    GitLabNodeSettingsFactory,
+    GitLabUserSettingsFactory
+)
+from website.addons.base.testing import models
+
+from .utils import create_mock_gitlab
+mock_gitlab = create_mock_gitlab()
+
+
+class TestNodeSettings(models.OAuthAddonNodeSettingsTestSuiteMixin, OsfTestCase):
+
+    short_name = 'gitlab'
+    full_name = 'GitLab'
+    ExternalAccountFactory = GitLabAccountFactory
+
+    NodeSettingsFactory = GitLabNodeSettingsFactory
+    NodeSettingsClass = GitLabNodeSettings
+    UserSettingsFactory = GitLabUserSettingsFactory
+
+    ## Mixin Overrides ##
+
+    def _node_settings_class_kwargs(self, node, user_settings):
+        return {
+            'user_settings': self.user_settings,
+            'repo': 'mock',
+            'user': 'abc',
+            'owner': self.node,
+            'repo_id': 123
+        }
+
+    def test_set_folder(self):
+        # GitLab doesn't use folderpicker, and the nodesettings model
+        # does not need a `set_repo` method
+        pass
+
+    def test_serialize_settings(self):
+        # GitLab's serialized_settings are a little different from
+        # common storage addons.
+        settings = self.node_settings.serialize_waterbutler_settings()
+        expected = {'host': 'https://abc', 'owner': 'abc', 'repo': 'mock', 'repo_id': 123}
+        assert_equal(settings, expected)
+
+    @mock.patch(
+        'website.addons.gitlab.model.GitLabUserSettings.revoke_remote_oauth_access',
+        mock.PropertyMock()
+    )
+    def test_complete_has_auth_not_verified(self):
+        super(TestNodeSettings, self).test_complete_has_auth_not_verified()
+
+    @mock.patch('website.addons.gitlab.api.GitLabClient.repos')
+    def test_to_json(self, mock_repos):
+        mock_repos.return_value = {}
+        super(TestNodeSettings, self).test_to_json()
+
+    @mock.patch('website.addons.gitlab.api.GitLabClient.repos')
+    def test_to_json_user_is_owner(self, mock_repos):
+        mock_repos.return_value = {}
+        result = self.node_settings.to_json(self.user)
+        assert_true(result['user_has_auth'])
+        assert_equal(result['gitlab_user'], 'abc')
+        assert_true(result['is_owner'])
+        assert_true(result['valid_credentials'])
+        assert_equal(result.get('gitlab_repo', None), 'mock')
+
+    @mock.patch('website.addons.gitlab.api.GitLabClient.repos')
+    def test_to_json_user_is_not_owner(self, mock_repos):
+        mock_repos.return_value = {}
+        not_owner = UserFactory()
+        result = self.node_settings.to_json(not_owner)
+        assert_false(result['user_has_auth'])
+        assert_equal(result['gitlab_user'], 'abc')
+        assert_false(result['is_owner'])
+        assert_true(result['valid_credentials'])
+        assert_equal(result.get('repo_names', None), None)
+
+
+class TestUserSettings(models.OAuthAddonUserSettingTestSuiteMixin, OsfTestCase):
+
+    short_name = 'gitlab'
+    full_name = 'GitLab'
+    ExternalAccountFactory = GitLabAccountFactory
+
+    def test_public_id(self):
+        assert_equal(self.user.external_accounts[0].display_name, self.user_settings.public_id)
+
+
+class TestCallbacks(OsfTestCase):
+
+    def setUp(self):
+
+        super(TestCallbacks, self).setUp()
+
+        self.project = ProjectFactory.build()
+        self.consolidated_auth = Auth(self.project.creator)
+        self.non_authenticator = UserFactory()
+        self.project.save()
+        self.project.add_contributor(
+            contributor=self.non_authenticator,
+            auth=self.consolidated_auth,
+        )
+
+        self.project.add_addon('gitlab', auth=self.consolidated_auth)
+        self.project.creator.add_addon('gitlab')
+        self.external_account = GitLabAccountFactory()
+        self.project.creator.external_accounts.append(self.external_account)
+        self.project.creator.save()
+        self.node_settings = self.project.get_addon('gitlab')
+        self.user_settings = self.project.creator.get_addon('gitlab')
+        self.node_settings.user_settings = self.user_settings
+        self.node_settings.user = 'Queen'
+        self.node_settings.repo = 'Sheer-Heart-Attack'
+        self.node_settings.external_account = self.external_account
+        self.node_settings.save()
+        self.node_settings.set_auth
+
+
+    @mock.patch('website.addons.gitlab.api.GitLabClient.repo')
+    def test_before_make_public(self, mock_repo):
+        mock_repo.side_effect = NotFoundError
+
+        result = self.node_settings.before_make_public(self.project)
+        assert_is(result, None)
+
+    def test_before_page_load_not_contributor(self):
+        message = self.node_settings.before_page_load(self.project, UserFactory())
+        assert_false(message)
+
+    def test_before_page_load_not_logged_in(self):
+        message = self.node_settings.before_page_load(self.project, None)
+        assert_false(message)
+
+    def test_before_remove_contributor_authenticator(self):
+        message = self.node_settings.before_remove_contributor(
+            self.project, self.project.creator
+        )
+        assert_true(message)
+
+    def test_before_remove_contributor_not_authenticator(self):
+        message = self.node_settings.before_remove_contributor(
+            self.project, self.non_authenticator
+        )
+        assert_false(message)
+
+    def test_after_remove_contributor_authenticator_self(self):
+        message = self.node_settings.after_remove_contributor(
+            self.project, self.project.creator, self.consolidated_auth
+        )
+        assert_equal(
+            self.node_settings.user_settings,
+            None
+        )
+        assert_true(message)
+        assert_not_in("You can re-authenticate", message)
+
+    def test_after_remove_contributor_authenticator_not_self(self):
+        auth = Auth(user=self.non_authenticator)
+        message = self.node_settings.after_remove_contributor(
+            self.project, self.project.creator, auth
+        )
+        assert_equal(
+            self.node_settings.user_settings,
+            None
+        )
+        assert_true(message)
+        assert_in("You can re-authenticate", message)
+
+    def test_after_remove_contributor_not_authenticator(self):
+        self.node_settings.after_remove_contributor(
+            self.project, self.non_authenticator, self.consolidated_auth
+        )
+        assert_not_equal(
+            self.node_settings.user_settings,
+            None,
+        )
+
+    def test_after_fork_authenticator(self):
+        fork = ProjectFactory()
+        clone, message = self.node_settings.after_fork(
+            self.project, fork, self.project.creator,
+        )
+        assert_equal(
+            self.node_settings.user_settings,
+            clone.user_settings,
+        )
+
+    def test_after_fork_not_authenticator(self):
+        fork = ProjectFactory()
+        clone, message = self.node_settings.after_fork(
+            self.project, fork, self.non_authenticator,
+        )
+        assert_equal(
+            clone.user_settings,
+            None,
+        )
+
+    def test_after_delete(self):
+        self.project.remove_node(Auth(user=self.project.creator))
+        # Ensure that changes to node settings have been saved
+        self.node_settings.reload()
+        assert_true(self.node_settings.user_settings is None)
+
+    @mock.patch('website.archiver.tasks.archive')
+    def test_does_not_get_copied_to_registrations(self, mock_archive):
+        registration = self.project.register_node(
+            schema=get_default_metaschema(),
+            auth=Auth(user=self.project.creator),
+            data='hodor',
+        )
+        assert_false(registration.has_addon('gitlab'))
+
+
+
+class TestGitLabNodeSettings(OsfTestCase):
+
+    def setUp(self):
+        OsfTestCase.setUp(self)
+        self.user = UserFactory()
+        self.user.add_addon('gitlab')
+        self.user_settings = self.user.get_addon('gitlab')
+        self.external_account = GitLabAccountFactory()
+        self.user_settings.owner.external_accounts.append(self.external_account)
+        self.user_settings.owner.save()
+        self.node_settings = GitLabNodeSettingsFactory(user_settings=self.user_settings)
+
+    @mock.patch('website.addons.gitlab.api.GitLabClient.delete_hook')
+    def test_delete_hook_no_hook(self, mock_delete_hook):
+        res = self.node_settings.delete_hook()
+        assert_false(res)
+        assert_false(mock_delete_hook.called)
diff --git a/website/addons/gitlab/tests/test_views.py b/website/addons/gitlab/tests/test_views.py
new file mode 100644
index 00000000000..6b7ebfad5bb
--- /dev/null
+++ b/website/addons/gitlab/tests/test_views.py
@@ -0,0 +1,560 @@
+# -*- coding: utf-8 -*-
+import httplib as http
+
+import mock
+import datetime
+import unittest
+
+from nose.tools import *  # noqa (PEP8 asserts)
+from tests.base import OsfTestCase, get_default_metaschema
+from tests.factories import ProjectFactory, UserFactory, AuthUserFactory
+
+from github3.repos.branch import Branch
+
+from framework.exceptions import HTTPError
+from framework.auth import Auth
+
+from website.util import api_url_for
+from website.addons.base.testing.views import (
+    OAuthAddonAuthViewsTestCaseMixin, OAuthAddonConfigViewsTestCaseMixin
+)
+from website.addons.gitlab import views, utils
+from website.addons.gitlab.api import GitLabClient
+from website.addons.gitlab.model import GitLabProvider
+from website.addons.gitlab.serializer import GitLabSerializer
+from website.addons.gitlab.utils import check_permissions
+from website.addons.gitlab.tests.utils import create_mock_gitlab, GitLabAddonTestCase
+from website.addons.gitlab.tests.factories import GitLabAccountFactory
+
+
+class TestGitLabAuthViews(GitLabAddonTestCase, OAuthAddonAuthViewsTestCaseMixin):
+    """Auth-view tests; the generic OAuth cases come from the mixin."""
+    
+    @mock.patch(
+        'website.addons.gitlab.model.GitLabUserSettings.revoke_remote_oauth_access',
+        mock.PropertyMock()
+    )
+    def test_delete_external_account(self):
+        # Remote revocation is patched out so no real GitLab call happens.
+        super(TestGitLabAuthViews, self).test_delete_external_account()
+
+
+class TestGitLabConfigViews(GitLabAddonTestCase, OAuthAddonConfigViewsTestCaseMixin):
+    folder = None
+    Serializer = GitLabSerializer
+    client = GitLabClient
+
+    ## Overrides ##
+
+    def setUp(self):
+        super(TestGitLabConfigViews, self).setUp()
+        self.mock_api_user = mock.patch("website.addons.gitlab.api.GitLabClient.user")
+        self.mock_api_user.return_value = mock.Mock()
+        self.mock_api_user.start()
+
+    def tearDown(self):
+        self.mock_api_user.stop()
+        super(TestGitLabConfigViews, self).tearDown()
+
+    def test_folder_list(self):
+        # GH only lists root folder (repos), this test is superfluous
+        pass
+
+    @mock.patch('website.addons.gitlab.model.GitLabNodeSettings.add_hook')
+    @mock.patch('website.addons.gitlab.views.GitLabClient.repo')
+    def test_set_config(self, mock_repo, mock_add_hook):
+        # GH selects repos, not folders, so this needs to be overriden
+        mock_repo.return_value = 'repo_name'
+        url = self.project.api_url_for('{0}_set_config'.format(self.ADDON_SHORT_NAME))
+        res = self.app.post_json(url, {
+            'gitlab_user': 'octocat',
+            'gitlab_repo': 'repo_name',
+        }, auth=self.user.auth)
+        assert_equal(res.status_code, http.OK)
+        self.project.reload()
+        assert_equal(
+            self.project.logs[-1].action,
+            '{0}_repo_linked'.format(self.ADDON_SHORT_NAME)
+        )
+        mock_add_hook.assert_called_once()
+
+
+# TODO: Test remaining CRUD methods
+# TODO: Test exception handling
+class TestCRUD(OsfTestCase):
+    """Shared fixture for CRUD tests: a project whose gitlab addon is
+    pointed at the mock repo returned by create_mock_gitlab.
+    """
+
+    def setUp(self):
+        super(TestCRUD, self).setUp()
+        self.gitlab = create_mock_gitlab(user='fred', private=False)
+        self.user = AuthUserFactory()
+        self.consolidated_auth = Auth(user=self.user)
+        self.project = ProjectFactory(creator=self.user)
+        self.project.add_addon('gitlab', auth=self.consolidated_auth)
+        self.project.creator.add_addon('gitlab')
+        self.node_settings = self.project.get_addon('gitlab')
+        self.node_settings.user_settings = self.project.creator.get_addon('gitlab')
+        # Set the node addon settings to correspond to the values of the mock repo
+        self.node_settings.user = self.gitlab.repo.return_value.owner.login
+        self.node_settings.repo = self.gitlab.repo.return_value.name
+        self.node_settings.save()
+
+
+class TestGitLabViews(OsfTestCase):
+    """View and utility tests driven by a mocked GitLab client.
+
+    setUp wires a project (creator plus one non-authenticating
+    contributor) to the mock repo from create_mock_gitlab so that
+    utils.get_refs, utils.check_permissions, and the webhook callback can
+    be exercised without network access.
+    """
+
+    def setUp(self):
+        super(TestGitLabViews, self).setUp()
+        self.user = AuthUserFactory()
+        self.consolidated_auth = Auth(user=self.user)
+
+        self.project = ProjectFactory(creator=self.user)
+        self.non_authenticator = UserFactory()
+        self.project.add_contributor(
+            contributor=self.non_authenticator,
+            auth=self.consolidated_auth,
+        )
+        self.project.save()
+        self.project.add_addon('gitlab', auth=self.consolidated_auth)
+        self.project.creator.add_addon('gitlab')
+        self.project.creator.external_accounts.append(GitLabAccountFactory())
+        self.project.creator.save()
+
+        self.gitlab = create_mock_gitlab(user='fred', private=False)
+
+        self.node_settings = self.project.get_addon('gitlab')
+        self.node_settings.user_settings = self.project.creator.get_addon('gitlab')
+        # Set the node addon settings to correspond to the values of the mock repo
+        self.node_settings.user = self.gitlab.repo.return_value.owner.login
+        self.node_settings.repo = self.gitlab.repo.return_value.name
+        self.node_settings.save()
+
+    def _get_sha_for_branch(self, branch=None, mock_branches=None):
+        """Return the commit sha the mock records for *branch* (default:
+        the mock repo's default branch).
+
+        NOTE(review): if no mock branch matches, ``branch_sha`` is never
+        bound and the return raises UnboundLocalError — tolerable in a
+        test helper, but worth confirming callers always pass a known
+        branch.
+        """
+        gitlab_mock = self.gitlab
+        if mock_branches is None:
+            mock_branches = gitlab_mock.branches
+        if branch is None:  # Get default branch name
+            branch = self.gitlab.repo.return_value.default_branch
+        for each in mock_branches.return_value:
+            if each.name == branch:
+                branch_sha = each.commit.sha
+        return branch_sha
+
+    # Tests for _get_refs
+    @mock.patch('website.addons.gitlab.api.GitLabClient.branches')
+    @mock.patch('website.addons.gitlab.api.GitLabClient.repo')
+    def test_get_refs_defaults(self, mock_repo, mock_branches):
+        gitlab_mock = self.gitlab
+        mock_repo.return_value = gitlab_mock.repo.return_value
+        mock_branches.return_value = gitlab_mock.branches.return_value
+        branch, sha, branches = utils.get_refs(self.node_settings)
+        assert_equal(
+            branch,
+            gitlab_mock.repo.return_value.default_branch
+        )
+        assert_equal(sha, self._get_sha_for_branch(branch=None))  # Get refs for default branch
+        assert_equal(
+            branches,
+            gitlab_mock.branches.return_value
+        )
+
+    @mock.patch('website.addons.gitlab.api.GitLabClient.branches')
+    @mock.patch('website.addons.gitlab.api.GitLabClient.repo')
+    def test_get_refs_branch(self, mock_repo, mock_branches):
+        gitlab_mock = self.gitlab
+        mock_repo.return_value = gitlab_mock.repo.return_value
+        mock_branches.return_value = gitlab_mock.branches.return_value
+        branch, sha, branches = utils.get_refs(self.node_settings, 'master')
+        assert_equal(branch, 'master')
+        branch_sha = self._get_sha_for_branch('master')
+        assert_equal(sha, branch_sha)
+        assert_equal(
+            branches,
+            gitlab_mock.branches.return_value
+        )
+
+    def test_before_fork(self):
+        url = self.project.api_url + 'fork/before/'
+        res = self.app.get(url, auth=self.user.auth).maybe_follow()
+        assert_equal(len(res.json['prompts']), 1)
+
+    @mock.patch('website.addons.gitlab.model.GitLabUserSettings.has_auth')
+    def test_before_register(self, mock_has_auth):
+        mock_has_auth.return_value = True
+        url = self.project.api_url + 'beforeregister/'
+        res = self.app.get(url, auth=self.user.auth).maybe_follow()
+        # NOTE(review): prompts[1] assumes a fixed prompt ordering from the
+        # beforeregister endpoint — confirm the index is stable.
+        assert_true('GitLab' in res.json['prompts'][1])
+        
+    def test_get_refs_sha_no_branch(self):
+        # A sha without a branch is ambiguous and must be rejected.
+        with assert_raises(HTTPError):
+            utils.get_refs(self.node_settings, sha='12345')
+
+    def test_get_refs_registered_missing_branch(self):
+        gitlab_mock = self.gitlab
+        self.node_settings.registration_data = {
+            'branches': [
+                branch.to_json()
+                for branch in gitlab_mock.branches.return_value
+            ]
+        }
+        self.node_settings.owner.is_registration = True
+        with assert_raises(HTTPError):
+            utils.get_refs(self.node_settings, branch='nothere')
+
+    # Tests for _check_permissions
+    # make a user with no authorization; make sure check_permissions returns false
+    def test_permissions_no_auth(self):
+        gitlab_mock = self.gitlab
+        # project is set to private right now
+        connection = gitlab_mock
+        non_authenticated_user = UserFactory()
+        non_authenticated_auth = Auth(user=non_authenticated_user)
+        branch = 'master'
+        assert_false(check_permissions(self.node_settings, non_authenticated_auth, connection, branch))
+
+    # make a repository that doesn't allow push access for this user;
+    # make sure check_permissions returns false
+    @mock.patch('website.addons.gitlab.model.GitLabUserSettings.has_auth')
+    @mock.patch('website.addons.gitlab.api.GitLabClient.repo')
+    def test_permissions_no_access(self, mock_repo, mock_has_auth):
+        gitlab_mock = self.gitlab
+        mock_has_auth.return_value = True
+        connection = gitlab_mock
+        branch = 'master'
+        mock_repository = mock.NonCallableMock()
+        mock_repository.user = 'fred'
+        mock_repository.repo = 'mock-repo'
+        mock_repository.to_json.return_value = {
+            'user': 'fred',
+            'repo': 'mock-repo',
+            'permissions': {
+                'push': False,  # this is key
+            },
+        }
+        mock_repo.return_value = mock_repository
+        assert_false(check_permissions(self.node_settings, self.consolidated_auth, connection, branch, repo=mock_repository))
+
+    # make a branch with a different commit than the commit being passed into check_permissions
+    @mock.patch('website.addons.gitlab.model.GitLabUserSettings.has_auth')
+    def test_permissions_not_head(self, mock_has_auth):
+        gitlab_mock = self.gitlab
+        mock_has_auth.return_value = True
+        connection = gitlab_mock
+        mock_branch = mock.NonCallableMock()
+        mock_branch.commit.sha = '67890'
+        sha = '12345'
+        assert_false(check_permissions(self.node_settings, self.consolidated_auth, connection, mock_branch, sha=sha))
+
+    # make sure permissions are not granted for editing a registration
+    @mock.patch('website.addons.gitlab.model.GitLabUserSettings.has_auth')
+    def test_permissions(self, mock_has_auth):
+        gitlab_mock = self.gitlab
+        mock_has_auth.return_value = True
+        connection = gitlab_mock
+        self.node_settings.owner.is_registration = True
+        assert_false(check_permissions(self.node_settings, self.consolidated_auth, connection, 'master'))
+
+    def check_hook_urls(self, urls, node, path, sha):
+        """Assert that hook log params carry the expected view/download URLs."""
+        url = node.web_url_for('addon_view_or_download_file', path=path, provider='gitlab')
+        expected_urls = {
+            'view': '{0}?ref={1}'.format(url, sha),
+            'download': '{0}?action=download&ref={1}'.format(url, sha)
+        }
+
+        assert_equal(urls['view'], expected_urls['view'])
+        assert_equal(urls['download'], expected_urls['download'])
+
+    @mock.patch('website.addons.gitlab.views.verify_hook_signature')
+    def test_hook_callback_add_file_not_thro_osf(self, mock_verify):
+        url = "/api/v1/project/{0}/gitlab/hook/".format(self.project._id)
+        timestamp = str(datetime.datetime.utcnow())
+        self.app.post_json(
+            url,
+            {
+                "test": True,
+                "commits": [{
+                    "id": "b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
+                    "distinct": True,
+                    "message": "foo",
+                    "timestamp": timestamp,
+                    "url": "https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
+                    "author": {"name": "Illidan", "email": "njqpw@osf.io"},
+                    "committer": {"name": "Testor", "email": "test@osf.io", "username": "tester"},
+                    "added": ["PRJWN3TV"],
+                    "removed": [],
+                    "modified": [],
+                }]
+            },
+            content_type="application/json",
+        ).maybe_follow()
+        self.project.reload()
+        assert_equal(self.project.logs[-1].action, "gitlab_file_added")
+        urls = self.project.logs[-1].params['urls']
+        self.check_hook_urls(
+            urls,
+            self.project,
+            path='PRJWN3TV',
+            sha='b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
+        )
+
+    @mock.patch('website.addons.gitlab.views.verify_hook_signature')
+    def test_hook_callback_modify_file_not_thro_osf(self, mock_verify):
+        url = "/api/v1/project/{0}/gitlab/hook/".format(self.project._id)
+        timestamp = str(datetime.datetime.utcnow())
+        self.app.post_json(
+            url,
+            {"test": True,
+                 "commits": [{"id": "b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
+                              "distinct": True,
+                              "message": " foo",
+                              "timestamp": timestamp,
+                              "url": "https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
+                              "author": {"name": "Illidan", "email": "njqpw@osf.io"},
+                              "committer": {"name": "Testor", "email": "test@osf.io",
+                                            "username": "tester"},
+                              "added": [], "removed":[], "modified":["PRJWN3TV"]}]},
+            content_type="application/json").maybe_follow()
+        self.project.reload()
+        assert_equal(self.project.logs[-1].action, "gitlab_file_updated")
+        urls = self.project.logs[-1].params['urls']
+        self.check_hook_urls(
+            urls,
+            self.project,
+            path='PRJWN3TV',
+            sha='b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
+        )
+
+    @mock.patch('website.addons.gitlab.views.verify_hook_signature')
+    def test_hook_callback_remove_file_not_thro_osf(self, mock_verify):
+        url = "/api/v1/project/{0}/gitlab/hook/".format(self.project._id)
+        timestamp = str(datetime.datetime.utcnow())
+        self.app.post_json(
+            url,
+            {"test": True,
+             "commits": [{"id": "b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
+                          "distinct": True,
+                          "message": "foo",
+                          "timestamp": timestamp,
+                          "url": "https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
+                          "author": {"name": "Illidan", "email": "njqpw@osf.io"},
+                          "committer": {"name": "Testor", "email": "test@osf.io", "username": "tester"},
+                          "added": [], "removed": ["PRJWN3TV"], "modified":[]}]},
+            content_type="application/json").maybe_follow()
+        self.project.reload()
+        assert_equal(self.project.logs[-1].action, "gitlab_file_removed")
+        urls = self.project.logs[-1].params['urls']
+        assert_equal(urls, {})
+
+    @mock.patch('website.addons.gitlab.views.verify_hook_signature')
+    def test_hook_callback_add_file_thro_osf(self, mock_verify):
+        # Commits whose message marks them as made via the OSF must not
+        # generate a duplicate log entry.
+        url = "/api/v1/project/{0}/gitlab/hook/".format(self.project._id)
+        self.app.post_json(
+            url,
+            {"test": True,
+             "commits": [{"id": "b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
+                          "distinct": True,
+                          "message": "Added via the Open Science Framework",
+                          "timestamp": "2014-01-08T14:15:51-08:00",
+                          "url": "https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
+                          "author": {"name": "Illidan", "email": "njqpw@osf.io"},
+                          "committer": {"name": "Testor", "email": "test@osf.io", "username": "tester"},
+                          "added": ["PRJWN3TV"], "removed":[], "modified":[]}]},
+            content_type="application/json").maybe_follow()
+        self.project.reload()
+        assert_not_equal(self.project.logs[-1].action, "gitlab_file_added")
+
+    @mock.patch('website.addons.gitlab.views.verify_hook_signature')
+    def test_hook_callback_modify_file_thro_osf(self, mock_verify):
+        url = "/api/v1/project/{0}/gitlab/hook/".format(self.project._id)
+        self.app.post_json(
+            url,
+            {"test": True,
+             "commits": [{"id": "b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
+                          "distinct": True,
+                          "message": "Updated via the Open Science Framework",
+                          "timestamp": "2014-01-08T14:15:51-08:00",
+                          "url": "https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
+                          "author": {"name": "Illidan", "email": "njqpw@osf.io"},
+                          "committer": {"name": "Testor", "email": "test@osf.io", "username": "tester"},
+                          "added": [], "removed":[], "modified":["PRJWN3TV"]}]},
+            content_type="application/json").maybe_follow()
+        self.project.reload()
+        assert_not_equal(self.project.logs[-1].action, "gitlab_file_updated")
+
+    @mock.patch('website.addons.gitlab.views.verify_hook_signature')
+    def test_hook_callback_remove_file_thro_osf(self, mock_verify):
+        url = "/api/v1/project/{0}/gitlab/hook/".format(self.project._id)
+        self.app.post_json(
+            url,
+            {"test": True,
+             "commits": [{"id": "b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
+                          "distinct": True,
+                          "message": "Deleted via the Open Science Framework",
+                          "timestamp": "2014-01-08T14:15:51-08:00",
+                          "url": "https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
+                          "author": {"name": "Illidan", "email": "njqpw@osf.io"},
+                          "committer": {"name": "Testor", "email": "test@osf.io", "username": "tester"},
+                          "added": [], "removed":["PRJWN3TV"], "modified":[]}]},
+            content_type="application/json").maybe_follow()
+        self.project.reload()
+        assert_not_equal(self.project.logs[-1].action, "gitlab_file_removed")
+
+
+class TestRegistrationsWithGitLab(OsfTestCase):
+    """Fixture for registration-related tests: a project whose gitlab
+    addon is configured against a fixed user/repo pair.
+    """
+
+    def setUp(self):
+
+        super(TestRegistrationsWithGitLab, self).setUp()
+        self.project = ProjectFactory.build()
+        self.project.save()
+        self.consolidated_auth = Auth(user=self.project.creator)
+
+        self.project.add_addon('gitlab', auth=self.consolidated_auth)
+        self.project.creator.add_addon('gitlab')
+        self.node_settings = self.project.get_addon('gitlab')
+        self.user_settings = self.project.creator.get_addon('gitlab')
+        self.node_settings.user_settings = self.user_settings
+        self.node_settings.user = 'Queen'
+        self.node_settings.repo = 'Sheer-Heart-Attack'
+        self.node_settings.save()
+
+
+class TestGitLabSettings(OsfTestCase):
+    """Tests for the legacy settings endpoints (link repo, deauthorize)
+    against a project pre-linked to the 'Queen/Sheer-Heart-Attack' repo.
+    """
+
+    def setUp(self):
+
+        super(TestGitLabSettings, self).setUp()
+        self.gitlab = create_mock_gitlab(user='fred', private=False)
+        self.project = ProjectFactory.build()
+        self.project.save()
+        self.auth = self.project.creator.auth
+        self.consolidated_auth = Auth(user=self.project.creator)
+
+        self.project.add_addon('gitlab', auth=self.consolidated_auth)
+        self.project.creator.add_addon('gitlab')
+        self.node_settings = self.project.get_addon('gitlab')
+        self.user_settings = self.project.creator.get_addon('gitlab')
+        self.node_settings.user_settings = self.user_settings
+        self.node_settings.user = 'Queen'
+        self.node_settings.repo = 'Sheer-Heart-Attack'
+        self.node_settings.save()
+
+    @mock.patch('website.addons.gitlab.model.GitLabNodeSettings.add_hook')
+    @mock.patch('website.addons.gitlab.api.GitLabClient.repo')
+    def test_link_repo(self, mock_repo, mock_add_hook):
+        """Linking a new repo updates node settings, logs, and adds a hook."""
+        gitlab_mock = self.gitlab
+        mock_repo.return_value = gitlab_mock.repo.return_value
+
+        url = self.project.api_url + 'gitlab/settings/'
+        self.app.post_json(
+            url,
+            {
+                'gitlab_user': 'queen',
+                'gitlab_repo': 'night at the opera',
+            },
+            auth=self.auth
+        ).maybe_follow()
+
+        self.project.reload()
+        self.node_settings.reload()
+
+        assert_equal(self.node_settings.user, 'queen')
+        assert_equal(self.node_settings.repo, 'night at the opera')
+        assert_equal(self.project.logs[-1].action, 'gitlab_repo_linked')
+        mock_add_hook.assert_called_once()
+
+    @mock.patch('website.addons.gitlab.model.GitLabNodeSettings.add_hook')
+    @mock.patch('website.addons.gitlab.api.GitLabClient.repo')
+    def test_link_repo_no_change(self, mock_repo, mock_add_hook):
+        """Re-posting the already-linked repo is a no-op: no log, no hook."""
+        gitlab_mock = self.gitlab
+        mock_repo.return_value = gitlab_mock.repo.return_value
+
+        log_count = len(self.project.logs)
+
+        url = self.project.api_url + 'gitlab/settings/'
+        self.app.post_json(
+            url,
+            {
+                'gitlab_user': 'Queen',
+                'gitlab_repo': 'Sheer-Heart-Attack',
+            },
+            auth=self.auth
+        ).maybe_follow()
+
+        self.project.reload()
+        self.node_settings.reload()
+
+        assert_equal(len(self.project.logs), log_count)
+        assert_false(mock_add_hook.called)
+
+    @mock.patch('website.addons.gitlab.api.GitLabClient.repo')
+    def test_link_repo_non_existent(self, mock_repo):
+        """Linking an unreachable repo returns HTTP 400."""
+
+        mock_repo.return_value = None
+
+        url = self.project.api_url + 'gitlab/settings/'
+        res = self.app.post_json(
+            url,
+            {
+                'gitlab_user': 'queen',
+                'gitlab_repo': 'night at the opera',
+            },
+            auth=self.auth,
+            expect_errors=True
+        ).maybe_follow()
+
+        assert_equal(res.status_code, 400)
+
+    @mock.patch('website.addons.gitlab.api.GitLabClient.branches')
+    def test_link_repo_registration(self, mock_branches):
+        """Registrations are read-only: re-linking must return HTTP 400."""
+
+        mock_branches.return_value = [
+            Branch.from_json({
+                'name': 'master',
+                'commit': {
+                    'sha': '6dcb09b5b57875f334f61aebed695e2e4193db5e',
+                    'url': 'https://api.gitlab.com/repos/octocat/Hello-World/commits/c5b97d5ae6c19d5c5df71a34c7fbeeda2479ccbc',
+                }
+            }),
+            Branch.from_json({
+                'name': 'develop',
+                'commit': {
+                    'sha': '6dcb09b5b57875asdasedawedawedwedaewdwdass',
+                    'url': 'https://api.gitlab.com/repos/octocat/Hello-World/commits/cdcb09b5b57875asdasedawedawedwedaewdwdass',
+                }
+            })
+        ]
+
+        registration = self.project.register_node(
+            schema=get_default_metaschema(),
+            auth=self.consolidated_auth,
+            data=''
+        )
+
+        url = registration.api_url + 'gitlab/settings/'
+        res = self.app.post_json(
+            url,
+            {
+                'gitlab_user': 'queen',
+                'gitlab_repo': 'night at the opera',
+            },
+            auth=self.auth,
+            expect_errors=True
+        ).maybe_follow()
+
+        assert_equal(res.status_code, 400)
+
+    @mock.patch('website.addons.gitlab.model.GitLabNodeSettings.delete_hook')
+    def test_deauthorize(self, mock_delete_hook):
+        """Deauthorizing clears user/repo/user_settings and logs the event."""
+
+        url = self.project.api_url + 'gitlab/user_auth/'
+
+        self.app.delete(url, auth=self.auth).maybe_follow()
+
+        self.project.reload()
+        self.node_settings.reload()
+        assert_equal(self.node_settings.user, None)
+        assert_equal(self.node_settings.repo, None)
+        assert_equal(self.node_settings.user_settings, None)
+
+        assert_equal(self.project.logs[-1].action, 'gitlab_node_deauthorized')
+
+
+# Allow running this test module directly.
+if __name__ == '__main__':
+    unittest.main()
diff --git a/website/addons/gitlab/views.py b/website/addons/gitlab/views.py
new file mode 100644
index 00000000000..5c2654a86fc
--- /dev/null
+++ b/website/addons/gitlab/views.py
@@ -0,0 +1,440 @@
+# -*- coding: utf-8 -*-
+"""Views for the node settings page."""
+from dateutil.parser import parse as dateparse
+import httplib as http
+import logging
+
+from furl import furl
+from flask import request, make_response
+
+from framework.exceptions import HTTPError
+
+from modularodm import Q
+from modularodm.storage.base import KeyExistsException
+from website.oauth.models import ExternalAccount
+
+from website.addons.base import generic_views
+from website.addons.gitlab.api import GitLabClient, ref_to_params
+from website.addons.gitlab.exceptions import NotFoundError, GitLabError
+from website.addons.gitlab.settings import DEFAULT_HOSTS
+from website.addons.gitlab.serializer import GitLabSerializer
+from website.addons.gitlab.utils import (
+    get_refs, check_permissions,
+    verify_hook_signature, MESSAGES
+)
+
+from website.models import NodeLog
+from website.project.decorators import (
+    must_have_addon, must_be_addon_authorizer,
+    must_have_permission, must_not_be_registration,
+    must_be_contributor_or_public, must_be_valid_project,
+)
+from website.util import rubeus
+
+from framework.auth.decorators import must_be_logged_in
+from website.util import api_url_for
+
+logger = logging.getLogger(__name__)
+
+SHORT_NAME = 'gitlab'
+FULL_NAME = 'GitLab'
+
+############
+# Generics #
+############
+
+# Generic account-list view bound to the GitLab serializer.
+gitlab_account_list = generic_views.account_list(
+    SHORT_NAME,
+    GitLabSerializer
+)
+
+# Generic import-auth view bound to the GitLab serializer.
+gitlab_import_auth = generic_views.import_auth(
+    SHORT_NAME,
+    GitLabSerializer
+)
+
+def _get_folders(node_addon, folder_id):
+    """Return nothing: GitLab links whole repos, so there is no folder
+    hierarchy for the generic folder_list view to enumerate.
+    """
+    pass
+
+gitlab_folder_list = generic_views.folder_list(
+    SHORT_NAME,
+    FULL_NAME,
+    _get_folders
+)
+
+gitlab_get_config = generic_views.get_config(
+    SHORT_NAME,
+    GitLabSerializer
+)
+
+gitlab_deauthorize_node = generic_views.deauthorize_node(
+    SHORT_NAME
+)
+
+gitlab_root_folder = generic_views.root_folder(
+    SHORT_NAME
+)
+
+@must_be_logged_in
+def gitlab_user_config_get(auth, **kwargs):
+    """View for getting a JSON representation of the logged-in user's
+    GitLab user settings.
+
+    Returns a (payload, status) tuple: whether the user has auth, the
+    account-management URLs, and the configured default hosts.
+    """
+
+    user_addon = auth.user.get_addon('gitlab')
+    user_has_auth = False
+    if user_addon:
+        user_has_auth = user_addon.has_auth
+
+    return {
+        'result': {
+            'userHasAuth': user_has_auth,
+            'urls': {
+                'create': api_url_for('gitlab_add_user_account'),
+                'accounts': api_url_for('gitlab_account_list'),
+            },
+            'hosts': DEFAULT_HOSTS,
+        },
+    }, http.OK
+
+@must_be_logged_in
+def gitlab_add_user_account(auth, **kwargs):
+    """Verifies new external account credentials and adds to user's list"""
+
+    f = furl()
+    # NOTE(review): request.json.get('host') may be None, making .rstrip()
+    # raise AttributeError (a 500 rather than a 400) — confirm the client
+    # always sends 'host'.
+    f.host = request.json.get('host').rstrip('/')
+    f.scheme = 'https'
+    clientId = request.json.get('clientId')
+    clientSecret = request.json.get('clientSecret')
+
+    try:
+        # Create a new external account for this host/client pair...
+        account = ExternalAccount(
+            provider='gitlab',
+            provider_name='GitLab',
+            display_name=f.host,       # no username; show host
+            oauth_key=f.host,          # hijacked; now host
+            oauth_secret=clientSecret,   # hijacked; now clientSecret
+            provider_id=clientId,   # hijacked; now clientId
+        )
+        account.save()
+    except KeyExistsException:
+        # ... or get the old one
+        account = ExternalAccount.find_one(
+            Q('provider', 'eq', 'gitlab') &
+            Q('provider_id', 'eq', clientId)
+        )
+
+    user = auth.user
+    if account not in user.external_accounts:
+        user.external_accounts.append(account)
+
+    # Ensure the user-level addon exists, then persist everything.
+    user.get_or_add_addon('gitlab', auth=auth)
+    user.save()
+
+    return {}
+
+#################
+# Special Cased #
+#################
+
+@must_not_be_registration
+@must_have_addon(SHORT_NAME, 'user')
+@must_have_addon(SHORT_NAME, 'node')
+@must_be_addon_authorizer(SHORT_NAME)
+@must_have_permission('write')
+def gitlab_set_config(auth, **kwargs):
+    """Select the GitLab repo linked to this node.
+
+    Expects ``gitlab_user``, ``gitlab_repo`` and ``gitlab_repo_id`` in the
+    JSON body; verifies the repo is reachable, swaps webhooks, and records
+    a ``gitlab_repo_linked`` log. Returns {} on success, 400 on bad input
+    or an inaccessible repo.
+    """
+    node_settings = kwargs.get('node_addon', None)
+    node = kwargs.get('node', None)
+    user_settings = kwargs.get('user_addon', None)
+
+    try:
+        if not node:
+            node = node_settings.owner
+        if not user_settings:
+            user_settings = node_settings.user_settings
+    except AttributeError:
+        raise HTTPError(http.BAD_REQUEST)
+
+    # Parse request
+    gitlab_user_name = request.json.get('gitlab_user', '')
+    gitlab_repo_name = request.json.get('gitlab_repo', '')
+    gitlab_repo_id = request.json.get('gitlab_repo_id', '')
+
+    # All three fields are required to identify the repo unambiguously.
+    if not gitlab_user_name or not gitlab_repo_name or not gitlab_repo_id:
+        raise HTTPError(http.BAD_REQUEST)
+
+    # Verify that repo exists and that user can access
+    connection = GitLabClient(external_account=node_settings.external_account)
+    repo = connection.repo(gitlab_repo_id)
+    if repo is None:
+        if user_settings:
+            message = (
+                'Cannot access repo. Either the repo does not exist '
+                'or your account does not have permission to view it.'
+            )
+        else:
+            message = (
+                'Cannot access repo.'
+            )
+        return {'message': message}, http.BAD_REQUEST
+
+    changed = (
+        gitlab_user_name != node_settings.user or
+        gitlab_repo_name != node_settings.repo or
+        gitlab_repo_id != node_settings.repo_id
+    )
+
+    # Update hooks
+    if changed:
+
+        # Delete existing hook, if any
+        node_settings.delete_hook()
+
+        # Update node settings
+        node_settings.user = gitlab_user_name
+        node_settings.repo = gitlab_repo_name
+        node_settings.repo_id = gitlab_repo_id
+
+        # Log repo select
+        node.add_log(
+            action='gitlab_repo_linked',
+            params={
+                'project': node.parent_id,
+                'node': node._id,
+                'gitlab': {
+                    'user': gitlab_user_name,
+                    'repo': gitlab_repo_name,
+                    'repo_id': gitlab_repo_id,
+                }
+            },
+            auth=auth,
+        )
+
+        # Add new hook
+        if node_settings.user and node_settings.repo:
+            node_settings.add_hook(save=False)
+
+        node_settings.save()
+
+    return {}
+
+@must_be_contributor_or_public
+@must_have_addon('gitlab', 'node')
+def gitlab_download_starball(node_addon, **kwargs):
+    """Stream an archive of the linked repo at ``ref`` (default 'master')
+    to the client, copying the upstream response headers through.
+    """
+
+    ref = request.args.get('ref', 'master')
+
+    connection = GitLabClient(external_account=node_addon.external_account)
+    headers, data = connection.starball(
+        node_addon.user, node_addon.repo, node_addon.repo_id, ref
+    )
+
+    resp = make_response(data)
+    # Forward upstream headers (content type, disposition, etc.) verbatim.
+    for key, value in headers.iteritems():
+        resp.headers[key] = value
+
+    return resp
+
+#########
+# HGrid #
+#########
+
+@must_be_contributor_or_public
+@must_have_addon('gitlab', 'node')
+def gitlab_root_folder(*args, **kwargs):
+    """View function returning the root container for a GitLab repo. In
+    contrast to other add-ons, this is exposed via the API for GitLab to
+    accommodate switching between branches and commits.
+
+    """
+    node_settings = kwargs['node_addon']
+    auth = kwargs['auth']
+    # Remaining query params (e.g. branch/sha) are forwarded verbatim.
+    data = request.args.to_dict()
+
+    return gitlab_hgrid_data(node_settings, auth=auth, **data)
+
+def gitlab_hgrid_data(node_settings, auth, **kwargs):
+
+    # Quit if no repo linked
+    if not node_settings.complete:
+        return
+
+    connection = GitLabClient(external_account=node_settings.external_account)
+
+    # Initialize repo here in the event that it is set in the privacy check
+    # below. This potentially saves an API call in _check_permissions, below.
+    repo = None
+
+    # Quit if privacy mismatch and not contributor
+    node = node_settings.owner
+    if node.is_public or node.is_contributor(auth.user):
+        try:
+            repo = connection.repo(node_settings.repo_id)
+        except NotFoundError:
+            logger.error('Could not access GitLab repo')
+            return None
+
+    try:
+        branch, sha, branches = get_refs(node_settings, branch=kwargs.get('branch'), sha=kwargs.get('sha'), connection=connection)
+    except (NotFoundError, GitLabError):
+        logger.error('GitLab repo not found')
+        return
+
+    if branch is not None:
+        ref = ref_to_params(branch, sha)
+        can_edit = check_permissions(node_settings, auth, connection, branch, sha, repo=repo)
+    else:
+        ref = None
+        can_edit = False
+
+    permissions = {
+        'edit': can_edit,
+        'view': True,
+        'private': node_settings.is_private
+    }
+    urls = {
+        'upload': node_settings.owner.api_url + 'gitlab/file/' + branch,
+        'fetch': node_settings.owner.api_url + 'gitlab/hgrid/' + branch,
+        'branch': node_settings.owner.api_url + 'gitlab/hgrid/root/' + branch,
+        'zip': 'https://{0}/{1}/repository/archive.zip?ref={2}'.format(node_settings.external_account.display_name, repo['path_with_namespace'], branch),
+        'repo': 'https://{0}/{1}/tree/{2}'.format(node_settings.external_account.display_name, repo['path_with_namespace'], branch)
+    }
+
+    branch_names = [each['name'] for each in branches]
+    if not branch_names:
+        branch_names = [branch]  # if repo un-init-ed then still add default branch to list of branches
+
+    return [rubeus.build_addon_root(
+        node_settings,
+        repo['path_with_namespace'],
+        urls=urls,
+        permissions=permissions,
+        branches=branch_names,
+        private_key=kwargs.get('view_only', None),
+        default_branch=repo['default_branch'],
+    )]
+
+#########
+# Repos #
+#########
+
+@must_have_addon(SHORT_NAME, 'user')
+@must_have_addon(SHORT_NAME, 'node')
+@must_be_addon_authorizer(SHORT_NAME)
+@must_have_permission('write')
+def gitlab_create_repo(**kwargs):
+    repo_name = request.json.get('name')
+    user = request.json.get('user')
+
+    if not repo_name:
+        raise HTTPError(http.BAD_REQUEST)
+
+    node_settings = kwargs['node_addon']
+    connection = GitLabClient(external_account=node_settings.external_account)
+
+    try:
+        repo = connection.create_repo(repo_name, auto_init=True)
+    except GitLabError:
+        raise HTTPError(http.BAD_REQUEST)
+
+    return {
+        'user': user,
+        'repo': repo,
+    }
+
+def add_hook_log(node, gitlab, action, path, date, committer, include_urls=False,
+                 sha=None, save=False):
+    """Add log event for commit from webhook payload.
+
+    :param node: Node to add logs to
+    :param gitlab: GitLab node settings record
+    :param path: Path to file
+    :param date: Date of commit
+    :param committer: Committer name
+    :param include_urls: Include URLs in `params`
+    :param sha: SHA of updated file
+    :param save: Save changes
+
+    """
+    gitlab_data = {
+        'user': gitlab.user,
+        'repo': gitlab.repo,
+    }
+
+    urls = {}
+
+    if include_urls:
+        url = node.web_url_for('addon_view_or_download_file', path=path, provider=SHORT_NAME)
+
+        urls = {
+            'view': '{0}?ref={1}'.format(url, sha),
+            'download': '{0}?action=download&ref={1}'.format(url, sha)
+        }
+
+    node.add_log(
+        action=action,
+        params={
+            'project': node.parent_id,
+            'node': node._id,
+            'path': path,
+            'gitlab': gitlab_data,
+            'urls': urls,
+        },
+        auth=None,
+        foreign_user=committer,
+        log_date=date,
+        save=save,
+    )
+
+
+@must_be_valid_project
+@must_not_be_registration
+@must_have_addon('gitlab', 'node')
+def gitlab_hook_callback(node_addon, **kwargs):
+    """Add logs for commits from outside OSF.
+
+    """
+    if request.json is None:
+        return {}
+
+    # Fail if hook signature is invalid
+    verify_hook_signature(
+        node_addon,
+        request.data,
+        request.headers,
+    )
+
+    node = kwargs['node'] or kwargs['project']
+
+    payload = request.json
+
+    for commit in payload.get('commits', []):
+
+        # TODO: Look up OSF user by commit
+
+        # Skip if pushed by OSF
+        if commit['message'] and commit['message'] in MESSAGES.values():
+            continue
+
+        _id = commit['id']
+        date = dateparse(commit['timestamp'])
+        committer = commit['committer']['name']
+
+        # Add logs
+        for path in commit.get('added', []):
+            add_hook_log(
+                node, node_addon, 'gitlab_' + NodeLog.FILE_ADDED,
+                path, date, committer, include_urls=True, sha=_id,
+            )
+        for path in commit.get('modified', []):
+            add_hook_log(
+                node, node_addon, 'gitlab_' + NodeLog.FILE_UPDATED,
+                path, date, committer, include_urls=True, sha=_id,
+            )
+        for path in commit.get('removed', []):
+            add_hook_log(
+                node, node_addon, 'gitlab_' + NodeLog.FILE_REMOVED,
+                path, date, committer,
+            )
+
+    node.save()
diff --git a/website/static/js/filepage/index.js b/website/static/js/filepage/index.js
index c81cb61956a..b3b9b41b4fb 100644
--- a/website/static/js/filepage/index.js
+++ b/website/static/js/filepage/index.js
@@ -136,8 +136,8 @@ var FileViewPage = {
                     '. It needs to be checked in before any changes can be made.'
                 ])));
             }
-        } else if (self.file.provider === 'bitbucket') {
-            self.canEdit = function() { return false; };  // Bitbucket is read-only
+        } else if (self.file.provider === 'bitbucket' || self.file.provider === 'gitlab') {
+            self.canEdit = function() { return false; };  // Bitbucket and GitLab are read-only
         } else {
             self.canEdit = function() {
                 return self.context.currentUser.canEdit;
@@ -168,7 +168,7 @@ var FileViewPage = {
                     {sha: $osf.urlParams().branch}
                 );
             }
-            else if (self.file.provider === 'bitbucket') {
+            else if (self.file.provider === 'bitbucket' || self.file.provider === 'gitlab') {
                 self.file.urls.revisions = waterbutler.buildRevisionsUrl(
                     self.file.path, self.file.provider, self.node.id,
                     {branch: $osf.urlParams().branch}

From 595a3f8e0396faf06055ba6a1d7d30da0f489b2f Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Mon, 3 Jul 2017 17:31:18 -0400
Subject: [PATCH 059/192] Port GitLab to Django app

[#OSF-8169]
---
 Dockerfile                                    |   2 +
 {website/addons => addons}/gitlab/README.md   |   0
 addons/gitlab/__init__.py                     |   1 +
 {website/addons => addons}/gitlab/api.py      |  11 +-
 addons/gitlab/apps.py                         | 125 +++++++
 .../addons => addons}/gitlab/exceptions.py    |   0
 .../gitlab/migrations}/__init__.py            |   0
 .../model.py => addons/gitlab/models.py       | 125 +++----
 .../addons => addons}/gitlab/requirements.txt |   0
 {website/addons => addons}/gitlab/routes.py   |   2 +-
 .../addons => addons}/gitlab/serializer.py    |  12 +-
 .../gitlab/settings/__init__.py               |   0
 .../gitlab/settings/defaults.py               |   0
 .../gitlab/static/comicon.png                 | Bin
 .../addons => addons}/gitlab/static/files.js  |   0
 .../gitlab/static/gitlabFangornConfig.js      |  10 +-
 .../gitlab/static/gitlabLogActionList.json    |   0
 addons/gitlab/static/gitlabNodeConfig.js      | 227 +++++++++++++
 addons/gitlab/static/gitlabUserConfig.js      | 187 +++++++++++
 addons/gitlab/static/node-cfg.js              |  16 +
 .../gitlab/static/user-cfg.js                 |   2 +-
 .../templates/gitlab_credentials_modal.mako   |   4 +-
 .../templates/gitlab_node_settings.mako       | 103 ++++++
 .../templates/gitlab_user_settings.mako       |   0
 addons/gitlab/tests/__init__.py               |   0
 addons/gitlab/tests/conftest.py               |   1 +
 .../gitlab/tests/factories.py                 |  13 +-
 .../gitlab/tests/test_models.py               |  60 ++--
 addons/gitlab/tests/test_serializer.py        |  50 +++
 .../gitlab/tests/test_utils.py                |   8 +-
 .../gitlab/tests/test_views.py                | 314 +++++++++---------
 addons/gitlab/tests/utils.py                  | 109 ++++++
 {website/addons => addons}/gitlab/utils.py    |   4 +-
 {website/addons => addons}/gitlab/views.py    | 137 ++------
 api/base/settings/defaults.py                 |   1 +
 framework/addons/data/addons.json             |  12 +-
 website/addons/gitlab/__init__.py             |  36 --
 .../addons/gitlab/static/gitlab-node-cfg.js   | 190 -----------
 .../addons/gitlab/static/gitlabUserConfig.js  | 236 -------------
 website/addons/gitlab/static/node-cfg.js      |   7 -
 .../templates/gitlab_node_settings.mako       |  99 ------
 .../addons/gitlab/tests/test_serializer.py    |  34 --
 website/addons/gitlab/tests/utils.py          | 146 --------
 website/files/models/__init__.py              |  12 -
 website/files/models/gitlab.py                |  20 --
 website/static/js/osfLanguage.js              |   3 +-
 46 files changed, 1131 insertions(+), 1188 deletions(-)
 rename {website/addons => addons}/gitlab/README.md (100%)
 create mode 100644 addons/gitlab/__init__.py
 rename {website/addons => addons}/gitlab/api.py (91%)
 create mode 100644 addons/gitlab/apps.py
 rename {website/addons => addons}/gitlab/exceptions.py (100%)
 rename {website/addons/gitlab/tests => addons/gitlab/migrations}/__init__.py (100%)
 rename website/addons/gitlab/model.py => addons/gitlab/models.py (80%)
 rename {website/addons => addons}/gitlab/requirements.txt (100%)
 rename {website/addons => addons}/gitlab/routes.py (98%)
 rename {website/addons => addons}/gitlab/serializer.py (85%)
 rename {website/addons => addons}/gitlab/settings/__init__.py (100%)
 rename {website/addons => addons}/gitlab/settings/defaults.py (100%)
 rename {website/addons => addons}/gitlab/static/comicon.png (100%)
 rename {website/addons => addons}/gitlab/static/files.js (100%)
 rename {website/addons => addons}/gitlab/static/gitlabFangornConfig.js (96%)
 rename {website/addons => addons}/gitlab/static/gitlabLogActionList.json (100%)
 create mode 100644 addons/gitlab/static/gitlabNodeConfig.js
 create mode 100644 addons/gitlab/static/gitlabUserConfig.js
 create mode 100644 addons/gitlab/static/node-cfg.js
 rename {website/addons => addons}/gitlab/static/user-cfg.js (91%)
 rename {website/addons => addons}/gitlab/templates/gitlab_credentials_modal.mako (97%)
 create mode 100644 addons/gitlab/templates/gitlab_node_settings.mako
 rename {website/addons => addons}/gitlab/templates/gitlab_user_settings.mako (100%)
 create mode 100644 addons/gitlab/tests/__init__.py
 create mode 100644 addons/gitlab/tests/conftest.py
 rename {website/addons => addons}/gitlab/tests/factories.py (56%)
 rename {website/addons => addons}/gitlab/tests/test_models.py (80%)
 create mode 100644 addons/gitlab/tests/test_serializer.py
 rename {website/addons => addons}/gitlab/tests/test_utils.py (87%)
 rename {website/addons => addons}/gitlab/tests/test_views.py (59%)
 create mode 100644 addons/gitlab/tests/utils.py
 rename {website/addons => addons}/gitlab/utils.py (96%)
 rename {website/addons => addons}/gitlab/views.py (69%)
 delete mode 100644 website/addons/gitlab/__init__.py
 delete mode 100644 website/addons/gitlab/static/gitlab-node-cfg.js
 delete mode 100644 website/addons/gitlab/static/gitlabUserConfig.js
 delete mode 100644 website/addons/gitlab/static/node-cfg.js
 delete mode 100644 website/addons/gitlab/templates/gitlab_node_settings.mako
 delete mode 100644 website/addons/gitlab/tests/test_serializer.py
 delete mode 100644 website/addons/gitlab/tests/utils.py
 delete mode 100644 website/files/models/__init__.py
 delete mode 100644 website/files/models/gitlab.py

diff --git a/Dockerfile b/Dockerfile
index e225a6b1ee2..fce73424b70 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -103,6 +103,7 @@ COPY ./addons/dropbox/requirements.txt ./addons/dropbox/
 #COPY ./addons/figshare/requirements.txt ./addons/figshare/
 #COPY ./addons/forward/requirements.txt ./addons/forward/
 COPY ./addons/github/requirements.txt ./addons/github/
+COPY ./addons/gitlab/requirements.txt ./addons/gitlab/
 #COPY ./addons/googledrive/requirements.txt ./addons/googledrive/
 COPY ./addons/mendeley/requirements.txt ./addons/mendeley/
 #COPY ./addons/osfstorage/requirements.txt ./addons/osfstorage/
@@ -152,6 +153,7 @@ COPY ./addons/dropbox/static/ ./addons/dropbox/static/
 COPY ./addons/figshare/static/ ./addons/figshare/static/
 COPY ./addons/forward/static/ ./addons/forward/static/
 COPY ./addons/github/static/ ./addons/github/static/
+COPY ./addons/gitlab/static/ ./addons/gitlab/static/
 COPY ./addons/googledrive/static/ ./addons/googledrive/static/
 COPY ./addons/mendeley/static/ ./addons/mendeley/static/
 COPY ./addons/osfstorage/static/ ./addons/osfstorage/static/
diff --git a/website/addons/gitlab/README.md b/addons/gitlab/README.md
similarity index 100%
rename from website/addons/gitlab/README.md
rename to addons/gitlab/README.md
diff --git a/addons/gitlab/__init__.py b/addons/gitlab/__init__.py
new file mode 100644
index 00000000000..0f2d76092eb
--- /dev/null
+++ b/addons/gitlab/__init__.py
@@ -0,0 +1 @@
+default_app_config = 'addons.gitlab.apps.GitLabAddonConfig'
diff --git a/website/addons/gitlab/api.py b/addons/gitlab/api.py
similarity index 91%
rename from website/addons/gitlab/api.py
rename to addons/gitlab/api.py
index 819c2189995..fd9b2e8f785 100644
--- a/website/addons/gitlab/api.py
+++ b/addons/gitlab/api.py
@@ -5,7 +5,8 @@
 import cachecontrol
 from requests.adapters import HTTPAdapter
 
-from website.addons.gitlab.exceptions import NotFoundError
+from addons.gitlab.exceptions import NotFoundError
+from addons.gitlab.settings import DEFAULT_HOSTS
 
 # Initialize caches
 https_cache = cachecontrol.CacheControlAdapter()
@@ -13,11 +14,9 @@
 
 class GitLabClient(object):
 
-    def __init__(self, external_account=None, access_token=None):
-
-        self.access_token = getattr(external_account, 'provider_id', None) or access_token
-
-        self.host = getattr(external_account, 'display_name', None) or 'gitlab.com'
+    def __init__(self, external_account=None, access_token=None, host=None):
+        self.access_token = getattr(external_account, 'oauth_key', None) or access_token
+        self.host = getattr(external_account, 'oauth_secret', None) or host or DEFAULT_HOSTS[0]
 
         if self.access_token:
             self.gitlab = gitlab.Gitlab(self.host, token=self.access_token)
diff --git a/addons/gitlab/apps.py b/addons/gitlab/apps.py
new file mode 100644
index 00000000000..4a103949ac1
--- /dev/null
+++ b/addons/gitlab/apps.py
@@ -0,0 +1,125 @@
+import logging
+import os
+
+from addons.base.apps import BaseAddonAppConfig
+from addons.gitlab.api import GitLabClient, ref_to_params
+from addons.gitlab.exceptions import NotFoundError, GitLabError
+from addons.gitlab.utils import get_refs, check_permissions
+from website.util import rubeus
+
+logger = logging.getLogger(__name__)
+
+def gitlab_hgrid_data(node_settings, auth, **kwargs):
+
+    # Quit if no repo linked
+    if not node_settings.complete:
+        return
+
+    connection = GitLabClient(external_account=node_settings.external_account)
+
+    # Initialize repo here in the event that it is set in the privacy check
+    # below. This potentially saves an API call in _check_permissions, below.
+    repo = None
+
+    # Quit if privacy mismatch and not contributor
+    node = node_settings.owner
+    if node.is_public or node.is_contributor(auth.user):
+        try:
+            repo = connection.repo(node_settings.repo_id)
+        except NotFoundError:
+            logger.error('Could not access GitLab repo')
+            return None
+
+    try:
+        branch, sha, branches = get_refs(node_settings, branch=kwargs.get('branch'), sha=kwargs.get('sha'), connection=connection)
+    except (NotFoundError, GitLabError):
+        logger.error('GitLab repo not found')
+        return
+
+    if branch is not None:
+        ref = ref_to_params(branch, sha)
+        can_edit = check_permissions(node_settings, auth, connection, branch, sha, repo=repo)
+    else:
+        ref = None
+        can_edit = False
+
+    permissions = {
+        'edit': can_edit,
+        'view': True,
+        'private': node_settings.is_private
+    }
+    urls = {
+        'upload': node_settings.owner.api_url + 'gitlab/file/' + ref,
+        'fetch': node_settings.owner.api_url + 'gitlab/hgrid/' + ref,
+        'branch': node_settings.owner.api_url + 'gitlab/hgrid/root/' + ref,
+        'zip': 'https://{0}/{1}/repository/archive.zip?branch={2}'.format(node_settings.external_account.oauth_secret, repo['path_with_namespace'], ref),
+        'repo': 'https://{0}/{1}/tree/{2}'.format(node_settings.external_account.oauth_secret, repo['path_with_namespace'], ref)
+    }
+
+    branch_names = [each['name'] for each in branches]
+    if not branch_names:
+        branch_names = [branch]  # if repo un-init-ed then still add default branch to list of branches
+
+    return [rubeus.build_addon_root(
+        node_settings,
+        repo['path_with_namespace'],
+        urls=urls,
+        permissions=permissions,
+        branches=branch_names,
+        private_key=kwargs.get('view_only', None),
+        default_branch=repo['default_branch'],
+    )]
+
+HERE = os.path.dirname(os.path.abspath(__file__))
+NODE_SETTINGS_TEMPLATE = os.path.join(HERE, 'templates', 'gitlab_node_settings.mako')
+USER_SETTINGS_TEMPLATE = os.path.join(HERE, 'templates', 'gitlab_user_settings.mako')
+
+class GitLabAddonConfig(BaseAddonAppConfig):
+
+    name = 'addons.gitlab'
+    label = 'addons_gitlab'
+    full_name = 'GitLab'
+    short_name = 'gitlab'
+    configs = ['accounts', 'node']
+    categories = ['storage']
+    owners = ['user', 'node']
+    has_hgrid_files = True
+    max_file_size = 100  # MB
+    node_settings_template = NODE_SETTINGS_TEMPLATE
+    user_settings_template = USER_SETTINGS_TEMPLATE
+
+    @property
+    def get_hgrid_data(self):
+        return gitlab_hgrid_data
+
+    FILE_ADDED = 'gitlab_file_added'
+    FILE_REMOVED = 'gitlab_file_removed'
+    FILE_UPDATED = 'gitlab_file_updated'
+    FOLDER_CREATED = 'gitlab_folder_created'
+    NODE_AUTHORIZED = 'gitlab_node_authorized'
+    NODE_DEAUTHORIZED = 'gitlab_node_deauthorized'
+    NODE_DEAUTHORIZED_NO_USER = 'gitlab_node_deauthorized_no_user'
+    REPO_LINKED = 'gitlab_repo_linked'
+
+    actions = (
+        FILE_ADDED,
+        FILE_REMOVED,
+        FILE_UPDATED,
+        FOLDER_CREATED,
+        NODE_AUTHORIZED,
+        NODE_DEAUTHORIZED,
+        NODE_DEAUTHORIZED_NO_USER,
+        REPO_LINKED)
+
+    @property
+    def routes(self):
+        from . import routes
+        return [routes.api_routes]
+
+    @property
+    def user_settings(self):
+        return self.get_model('UserSettings')
+
+    @property
+    def node_settings(self):
+        return self.get_model('NodeSettings')
diff --git a/website/addons/gitlab/exceptions.py b/addons/gitlab/exceptions.py
similarity index 100%
rename from website/addons/gitlab/exceptions.py
rename to addons/gitlab/exceptions.py
diff --git a/website/addons/gitlab/tests/__init__.py b/addons/gitlab/migrations/__init__.py
similarity index 100%
rename from website/addons/gitlab/tests/__init__.py
rename to addons/gitlab/migrations/__init__.py
diff --git a/website/addons/gitlab/model.py b/addons/gitlab/models.py
similarity index 80%
rename from website/addons/gitlab/model.py
rename to addons/gitlab/models.py
index e18024a83fb..14d27d8fc0b 100644
--- a/website/addons/gitlab/model.py
+++ b/addons/gitlab/models.py
@@ -1,82 +1,73 @@
 # -*- coding: utf-8 -*-
-
 import os
 import urlparse
 
+from django.db import models
 import markupsafe
-from modularodm import fields
 
+from addons.base import exceptions
+from addons.base.models import (BaseOAuthNodeSettings, BaseOAuthUserSettings,
+                                BaseStorageAddon)
+from addons.gitlab import utils
+from addons.gitlab.api import GitLabClient
+from addons.gitlab.serializer import GitLabSerializer
+from addons.gitlab import settings as gitlab_settings
+from addons.gitlab.exceptions import ApiError, NotFoundError, GitLabError
 from framework.auth import Auth
-
+from osf.models.files import File, Folder, BaseFileNode
 from website import settings
 from website.util import web_url_for
-from website.addons.base import exceptions
-from website.addons.base import AddonOAuthUserSettingsBase, AddonOAuthNodeSettingsBase
-from website.addons.base import StorageAddonBase
-
-from website.addons.gitlab import utils
-from website.addons.gitlab.api import GitLabClient
-from website.addons.gitlab.serializer import GitLabSerializer
-from website.addons.gitlab import settings as gitlab_settings
-from website.addons.gitlab.exceptions import ApiError, NotFoundError, GitLabError
-from website.oauth.models import ExternalProvider
-
 
 hook_domain = gitlab_settings.HOOK_DOMAIN or settings.DOMAIN
 
 
-class GitLabProvider(ExternalProvider):
-    name = 'GitLab'
-    short_name = 'gitlab'
+class GitLabFileNode(BaseFileNode):
+    _provider = 'gitlab'
 
-    @property
-    def auth_url_base(self):
-        return 'https://{0}{1}'.format('gitlab.com', '/oauth/authorize')
 
-    @property
-    def callback_url(self):
-        return 'https://{0}{1}'.format('gitlab.com', '/oauth/token')
+class GitLabFolder(GitLabFileNode, Folder):
+    pass
 
-    @property
-    def client_secret(self):
-        return ''
 
-    @property
-    def client_id(self):
-        return ''
+class GitLabFile(GitLabFileNode, File):
+    version_identifier = 'branch'
 
-    def handle_callback(self, response):
-        """View called when the OAuth flow is completed. Adds a new GitLabUserSettings
-        record to the user and saves the account info.
-        """
-        client = GitLabClient(
-            access_token=response['access_token']
-        )
+    def touch(self, auth_header, revision=None, ref=None, branch=None, **kwargs):
+        revision = revision or ref or branch
+        return super(GitLabFile, self).touch(auth_header, revision=revision, **kwargs)
+
+class GitLabProvider(object):
+    name = 'GitLab'
+    short_name = 'gitlab'
+    serializer = GitLabSerializer
 
-        user_info = client.user()
+    def __init__(self, account=None):
+        super(GitLabProvider, self).__init__()  # this does exactly nothing...
+        # provide an unauthenticated session by default
+        self.account = account
 
-        return {
-            'provider_id': client.host,
-            'profile_url': user_info['web_url'],
-            'oauth_key': response['access_token'],
-            'display_name': client.host
-        }
+    def __repr__(self):
+        return '<{name}: {status}>'.format(
+            name=self.__class__.__name__,
+            status=self.account.display_name if self.account else 'anonymous'
+        )
 
 
-class GitLabUserSettings(AddonOAuthUserSettingsBase):
+class UserSettings(BaseStorageAddon, BaseOAuthUserSettings):
     oauth_provider = GitLabProvider
     serializer = GitLabSerializer
 
 
-class GitLabNodeSettings(StorageAddonBase, AddonOAuthNodeSettingsBase):
+class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
     oauth_provider = GitLabProvider
     serializer = GitLabSerializer
 
-    user = fields.StringField()
-    repo = fields.StringField()
-    repo_id = fields.StringField()
-    hook_id = fields.StringField()
-    hook_secret = fields.StringField()
+    user = models.TextField(blank=True, null=True)
+    repo = models.TextField(blank=True, null=True)
+    repo_id = models.TextField(blank=True, null=True)
+    hook_id = models.TextField(blank=True, null=True)
+    hook_secret = models.TextField(blank=True, null=True)
+    user_settings = models.ForeignKey(UserSettings, null=True, blank=True)
 
     @property
     def folder_id(self):
@@ -136,7 +127,7 @@ def deauthorize(self, auth=None, log=True):
         self.clear_auth()
 
     def delete(self, save=False):
-        super(GitLabNodeSettings, self).delete(save=False)
+        super(NodeSettings, self).delete(save=False)
         self.deauthorize(log=False)
         if save:
             self.save()
@@ -158,7 +149,7 @@ def is_private(self):
 
     def to_json(self, user):
 
-        ret = super(GitLabNodeSettings, self).to_json(user)
+        ret = super(NodeSettings, self).to_json(user)
         user_settings = user.get_addon('gitlab')
         ret.update({
             'user_has_auth': user_settings and user_settings.has_auth,
@@ -203,13 +194,13 @@ def to_json(self, user):
     def serialize_waterbutler_credentials(self):
         if not self.complete or not self.repo:
             raise exceptions.AddonError('Addon is not authorized')
-        return {'token': self.external_account.provider_id}
+        return {'token': self.external_account.oauth_key}
 
     def serialize_waterbutler_settings(self):
         if not self.complete:
             raise exceptions.AddonError('Repo is not configured')
         return {
-            'host': 'https://{}'.format(self.external_account.display_name),
+            'host': 'https://{}'.format(self.external_account.oauth_secret),
             'owner': self.user,
             'repo': self.repo,
             'repo_id': self.repo_id
@@ -226,8 +217,8 @@ def create_waterbutler_log(self, auth, action, metadata):
         else:
             sha = metadata['extra']['fileSha']
             urls = {
-                'view': '{0}?ref={1}'.format(url, sha),
-                'download': '{0}?action=download&ref={1}'.format(url, sha)
+                'view': '{0}?branch={1}'.format(url, sha),
+                'download': '{0}?action=download&branch={1}'.format(url, sha)
             }
 
         self.owner.add_log(
@@ -301,7 +292,7 @@ def before_page_load(self, node, user):
                 message += (
                     ' The files in this GitLab repo can be viewed on GitLab '
                     '<u><a href="{url}">here</a></u>.'
-                ).format(url = repo['http_url_to_repo'])
+                ).format(url=repo['http_url_to_repo'])
             messages.append(message)
             return messages
 
@@ -314,7 +305,7 @@ def before_remove_contributor_message(self, node, removed):
 
         """
         try:
-            message = (super(GitLabNodeSettings, self).before_remove_contributor_message(node, removed) +
+            message = (super(NodeSettings, self).before_remove_contributor_message(node, removed) +
             'You can download the contents of this repository before removing '
             'this contributor <u><a href="{url}">here</a></u>.'.format(
                 url=node.api_url + 'gitlab/tarball/'
@@ -364,32 +355,18 @@ def after_fork(self, node, fork, user, save=True):
         :param bool save: Save settings after callback
         :return tuple: Tuple of cloned settings and alert message
         """
-        clone, _ = super(GitLabNodeSettings, self).after_fork(
+        clone = super(NodeSettings, self).after_fork(
             node, fork, user, save=False
         )
 
         # Copy authentication if authenticated by forking user
         if self.user_settings and self.user_settings.owner == user:
             clone.user_settings = self.user_settings
-            message = (
-                'GitLab authorization copied to forked {cat}.'
-            ).format(
-                cat=markupsafe.escape(fork.project_or_component),
-            )
-        else:
-            message = (
-                'GitLab authorization not copied to forked {cat}. You may '
-                'authorize this fork on the <u><a href={url}>Settings</a></u> '
-                'page.'
-            ).format(
-                cat=markupsafe.escape(fork.project_or_component),
-                url=fork.url + 'settings/'
-            )
 
         if save:
             clone.save()
 
-        return clone, message
+        return clone
 
     def before_make_public(self, node):
         try:
diff --git a/website/addons/gitlab/requirements.txt b/addons/gitlab/requirements.txt
similarity index 100%
rename from website/addons/gitlab/requirements.txt
rename to addons/gitlab/requirements.txt
diff --git a/website/addons/gitlab/routes.py b/addons/gitlab/routes.py
similarity index 98%
rename from website/addons/gitlab/routes.py
rename to addons/gitlab/routes.py
index 17a24919ca4..8f6c1a15a79 100644
--- a/website/addons/gitlab/routes.py
+++ b/addons/gitlab/routes.py
@@ -2,7 +2,7 @@
 
 from framework.routing import Rule, json_renderer
 
-from website.addons.gitlab import views
+from addons.gitlab import views
 
 api_routes = {
     'rules': [
diff --git a/website/addons/gitlab/serializer.py b/addons/gitlab/serializer.py
similarity index 85%
rename from website/addons/gitlab/serializer.py
rename to addons/gitlab/serializer.py
index a94b4cd4e99..deded311e6f 100644
--- a/website/addons/gitlab/serializer.py
+++ b/addons/gitlab/serializer.py
@@ -1,10 +1,8 @@
-from website.addons.base.serializer import StorageAddonSerializer
-
+from addons.base.serializer import StorageAddonSerializer
+from addons.gitlab.api import GitLabClient
+from addons.gitlab.exceptions import GitLabError
 from website.util import api_url_for
 
-from website.addons.gitlab.api import GitLabClient
-from website.addons.gitlab.exceptions import GitLabError
-
 class GitLabSerializer(StorageAddonSerializer):
 
     addon_short_name = 'gitlab'
@@ -12,7 +10,7 @@ class GitLabSerializer(StorageAddonSerializer):
     # Include host information with more informative labels / formatting
     def serialize_account(self, external_account):
         ret = super(GitLabSerializer, self).serialize_account(external_account)
-        host = external_account.display_name
+        host = external_account.oauth_secret
         ret.update({
             'host': host,
             'host_url': 'https://{0}'.format(host),
@@ -22,7 +20,7 @@ def serialize_account(self, external_account):
 
     def credentials_are_valid(self, user_settings, client):
         if user_settings:
-            client = client or GitLabClient(external_account=user_settings.external_accounts[0])
+            client = client or GitLabClient(external_account=user_settings.external_accounts.first())
             try:
                 client.user()
             except (GitLabError, IndexError):
diff --git a/website/addons/gitlab/settings/__init__.py b/addons/gitlab/settings/__init__.py
similarity index 100%
rename from website/addons/gitlab/settings/__init__.py
rename to addons/gitlab/settings/__init__.py
diff --git a/website/addons/gitlab/settings/defaults.py b/addons/gitlab/settings/defaults.py
similarity index 100%
rename from website/addons/gitlab/settings/defaults.py
rename to addons/gitlab/settings/defaults.py
diff --git a/website/addons/gitlab/static/comicon.png b/addons/gitlab/static/comicon.png
similarity index 100%
rename from website/addons/gitlab/static/comicon.png
rename to addons/gitlab/static/comicon.png
diff --git a/website/addons/gitlab/static/files.js b/addons/gitlab/static/files.js
similarity index 100%
rename from website/addons/gitlab/static/files.js
rename to addons/gitlab/static/files.js
diff --git a/website/addons/gitlab/static/gitlabFangornConfig.js b/addons/gitlab/static/gitlabFangornConfig.js
similarity index 96%
rename from website/addons/gitlab/static/gitlabFangornConfig.js
rename to addons/gitlab/static/gitlabFangornConfig.js
index facb41a7bcb..8daade84226 100644
--- a/website/addons/gitlab/static/gitlabFangornConfig.js
+++ b/addons/gitlab/static/gitlabFangornConfig.js
@@ -17,10 +17,6 @@ function _formatRepoUrl(item, branch) {
     return item.data.urls.repo.substring(0, item.data.urls.repo.indexOf('/tree/') + 6) + branch;
 }
 
-function _formatZipUrl(item, branch) {
-    return item.data.urls.zip.substring(0, item.data.urls.zip.indexOf('?ref=') + 5) + branch;
-}
-
 function _getCurrentBranch(item) {
     var branch;
     if (item.data.branch === undefined) {
@@ -78,7 +74,7 @@ var _gitlabItemButtons = {
                     buttons.push(
                         m.component(Fangorn.Components.button, {
                             onclick: function (event) {
-                                window.location = _formatZipUrl(item, branch);
+                                window.location = waterbutler.buildTreeBeardDownloadZip(item, {'branch': item.data.branch});
                             },
                             icon: 'fa fa-download',
                             className: 'text-primary'
@@ -133,8 +129,8 @@ function changeBranch(item, ref){
 }
 
 function _resolveLazyLoad(item) {
-    var branch = _getCurrentBranch(item);
-    return waterbutler.buildTreeBeardMetadata(item, {ref: branch});
+    var _branch = _getCurrentBranch(item);
+    return waterbutler.buildTreeBeardMetadata(item, {branch: _branch});
 }
 
 function _fangornLazyLoadOnLoad (tree, event) {
diff --git a/website/addons/gitlab/static/gitlabLogActionList.json b/addons/gitlab/static/gitlabLogActionList.json
similarity index 100%
rename from website/addons/gitlab/static/gitlabLogActionList.json
rename to addons/gitlab/static/gitlabLogActionList.json
diff --git a/addons/gitlab/static/gitlabNodeConfig.js b/addons/gitlab/static/gitlabNodeConfig.js
new file mode 100644
index 00000000000..06ff30786a4
--- /dev/null
+++ b/addons/gitlab/static/gitlabNodeConfig.js
@@ -0,0 +1,227 @@
+var ko = require('knockout');
+var $ = require('jquery');
+var bootbox = require('bootbox');
+var $osf = require('js/osfHelpers');
+var oop = require('js/oop');
+var UserViewModel = require('./gitlabUserConfig.js').GitLabViewModel;
+
+var nodeApiUrl = window.contextVars.node.urls.api;
+
+var connectExistingAccount = function(accountId) {
+    $osf.putJSON(
+            nodeApiUrl + 'gitlab/user_auth/',
+            {'external_account_id': accountId}
+        ).done(function() {
+                if($osf.isIE()){
+                    window.location.hash = '#configureAddonsAnchor';
+                }
+                window.location.reload();
+        }).fail(
+            $osf.handleJSONError
+        );
+};
+
+var updateHidden = function(element) {
+    var repoParts = $("option:selected", element).text().split('/');
+
+    $('#gitlabUser').val($.trim(repoParts[0]));
+    $('#gitlabRepo').val($.trim(repoParts[1]));
+    $('#gitlabRepoId').val(element.val());
+};
+
+var displayError = function(msg) {
+    $('#addonSettingsGitLab').find('.addon-settings-message')
+        .text('Error: ' + msg)
+        .removeClass('text-success').addClass('text-danger')
+        .fadeOut(100).fadeIn();
+};
+
+var createRepo = function() {
+
+    var $elm = $('#addonSettingsGitLab');
+    var $select = $elm.find('select');
+
+    bootbox.prompt({
+        title: 'Name your new repo',
+        placeholder: 'Repo name',
+        callback: function (repoName) {
+            // Return if cancelled
+            if (repoName === null) {
+                return;
+            }
+
+            if (repoName === '') {
+                displayError('Your repo must have a name');
+                return;
+            }
+
+            $osf.postJSON(
+                nodeApiUrl + 'gitlab/repo/create/',
+                {name: repoName, user: $("#gitlabUser").val()}
+            ).done(function (response) {
+                    $select.append('<option value="' + response.repo['id'] + '">' + $osf.htmlEscape(response.repo['path_with_namespace']) + '</option>');
+                    $select.val(response.repo['id']);
+                    updateHidden($select);
+                }).fail(function () {
+                    displayError('Could not create repository');
+                });
+        },
+        buttons:{
+            confirm:{
+                label: 'Save',
+                className:'btn-success'
+            }
+        }
+    });
+};
+
+var askImport = function() {
+    $.get('/api/v1/settings/gitlab/accounts/'
+    ).done(function(data){
+        var accounts = data.accounts.map(function(account) {
+            return {
+                name: account.display_name,
+                id: account.id
+            };
+        });
+        if (accounts.length > 1) {
+            bootbox.prompt({
+                title: 'Choose GitLab Account to Import',
+                inputType: 'select',
+                inputOptions: ko.utils.arrayMap(
+                    accounts,
+                    function(item) {
+                        return {
+                            text: $osf.htmlEscape(item.name),
+                            value: item.id
+                        };
+                    }
+                ),
+                value: accounts[0].id,
+                callback: function(accountId) {
+                    connectExistingAccount(accountId);
+                },
+                buttons: {
+                    confirm:{
+                        label:'Import',
+                    }
+                }
+            });
+        } else {
+            bootbox.confirm({
+                title: 'Import GitLab Account?',
+                message: 'Are you sure you want to link your GitLab account with this project?',
+                callback: function(confirmed) {
+                    if (confirmed) {
+                        connectExistingAccount(accounts[0].id);
+                    }
+                },
+                buttons: {
+                    confirm: {
+                        label:'Import',
+                    }
+                }
+            });
+        }
+    }).fail(function(xhr, textStatus, error) {
+        displayError('Could not GET GitLab accounts for user.');
+    });
+};
+
+$(document).ready(function() {
+    $('#gitlabSelectRepo').on('change', function() {
+        var el = $(this);
+        if (el.val()) {
+            updateHidden(el);
+        }
+    });
+
+    $('#gitlabCreateRepo').on('click', function() {
+        createRepo();
+    });
+
+    $('#gitlabImportToken').on('click', function() {
+        askImport();
+    });
+
+    $('#gitlabRemoveToken').on('click', function() {
+        bootbox.confirm({
+            title: 'Disconnect GitLab Account?',
+            message: 'Are you sure you want to remove this GitLab account?',
+            callback: function(confirm) {
+                if(confirm) {
+                    $.ajax({
+                    type: 'DELETE',
+                    url: nodeApiUrl + 'gitlab/user_auth/'
+                }).done(function() {
+                    window.location.reload();
+                }).fail(
+                    $osf.handleJSONError
+                );
+                }
+            },
+            buttons:{
+                confirm:{
+                    label: 'Disconnect',
+                    className: 'btn-danger'
+                }
+            }
+        });
+    });
+
+    $('#addonSettingsGitLab .addon-settings-submit').on('click', function() {
+        if (!$('#gitlabRepo').val()) {
+            return false;
+        }
+    });
+
+});
+
+var ViewModel = oop.extend(UserViewModel,{
+    constructor: function(url){
+        var self = this;
+        self.name = 'gitlab';
+        self.properName = 'GitLab';
+        self.accounts = ko.observableArray();
+        self.message = ko.observable('');
+        self.messageClass = ko.observable('');
+        const otherString = 'Other (Please Specify)';
+
+        self.url = url;
+        self.properName = 'GitLab';
+        self.apiToken = ko.observable();
+        self.urls = ko.observable({});
+        self.hosts = ko.observableArray([]);
+        self.selectedHost = ko.observable();    // Host specified in select element
+        self.customHost = ko.observable();      // Host specified in input element
+        // Whether the initial data has been loaded
+        self.loaded = ko.observable(false);
+
+        // Designated host, specified from select or input element
+        self.host = ko.pureComputed(function() {
+            return self.useCustomHost() ? self.customHost() : self.selectedHost();
+        });
+        // Hosts visible in select element. Includes presets and "Other" option
+        self.visibleHosts = ko.pureComputed(function() {
+            return self.hosts().concat([otherString]);
+        });
+        // Whether to use select element or input element for host designation
+        self.useCustomHost = ko.pureComputed(function() {
+            return self.selectedHost() === otherString;
+        });
+        self.showApiTokenInput = ko.pureComputed(function() {
+            return Boolean(self.selectedHost());
+        });
+        self.tokenUrl = ko.pureComputed(function() {
+            return self.host() ? 'https://' + self.host() + '/profile/personal_access_tokens' : null;
+        });
+    },
+    authSuccessCallback: function() {
+        askImport();
+    }
+
+});
+
+module.exports = {
+    GitLabViewModel: ViewModel
+};
diff --git a/addons/gitlab/static/gitlabUserConfig.js b/addons/gitlab/static/gitlabUserConfig.js
new file mode 100644
index 00000000000..d234d9cff08
--- /dev/null
+++ b/addons/gitlab/static/gitlabUserConfig.js
@@ -0,0 +1,187 @@
+/**
+* Module that controls the GitLab user settings. Includes Knockout view-model
+* for syncing data.
+*/
+
+var ko = require('knockout');
+var $ = require('jquery');
+var Raven = require('raven-js');
+var bootbox = require('bootbox');
+require('js/osfToggleHeight');
+
+var language = require('js/osfLanguage').Addons.gitlab;
+var osfHelpers = require('js/osfHelpers');
+var addonSettings = require('js/addonSettings');
+var oop = require('js/oop');
+var OAuthAddonSettingsViewModel = require('js/addonSettings.js').OAuthAddonSettingsViewModel;
+
+var ExternalAccount = addonSettings.ExternalAccount;
+
+var $modal = $('#gitlabInputCredentials');
+
+
+var ViewModel = oop.extend(OAuthAddonSettingsViewModel,{
+    constructor: function(url){
+        var self = this;
+        self.name = 'gitlab';
+        self.properName = 'GitLab';
+        self.accounts = ko.observableArray();
+        self.message = ko.observable('');
+        self.messageClass = ko.observable('');
+        const otherString = 'Other (Please Specify)';
+
+        self.url = url;
+        self.properName = 'GitLab';
+        self.apiToken = ko.observable();
+        self.urls = ko.observable({});
+        self.hosts = ko.observableArray([]);
+        self.selectedHost = ko.observable();    // Host specified in select element
+        self.customHost = ko.observable();      // Host specified in input element
+        // Whether the initial data has been loaded
+        self.loaded = ko.observable(false);
+
+        // Designated host, specified from select or input element
+        self.host = ko.pureComputed(function() {
+            return self.useCustomHost() ? self.customHost() : self.selectedHost();
+        });
+        // Hosts visible in select element. Includes presets and "Other" option
+        self.visibleHosts = ko.pureComputed(function() {
+            return self.hosts().concat([otherString]);
+        });
+        // Whether to use select element or input element for host designation
+        self.useCustomHost = ko.pureComputed(function() {
+            return self.selectedHost() === otherString;
+        });
+        self.showApiTokenInput = ko.pureComputed(function() {
+            return Boolean(self.selectedHost());
+        });
+        self.tokenUrl = ko.pureComputed(function() {
+            return self.host() ? 'https://' + self.host() + '/profile/personal_access_tokens' : null;
+        });
+    },
+    clearModal: function() {
+        /** Reset all fields from GitLab host selection modal */
+        var self = this;
+        self.message('');
+        self.messageClass('text-info');
+        self.apiToken(null);
+        self.selectedHost(null);
+        self.customHost(null);
+    },
+    updateAccounts: function() {
+        var self = this;
+        var url = self.urls().accounts;
+        var request = $.get(url);
+        request.done(function(data) {
+            self.accounts($.map(data.accounts, function(account) {
+                var externalAccount =  new ExternalAccount(account);
+                externalAccount.gitlabHost = account.host;
+                externalAccount.gitlabUrl = account.host_url;
+                return externalAccount;
+            }));
+            $('#gitlab-header').osfToggleHeight({height: 160});
+        });
+        request.fail(function(xhr, status, error) {
+            Raven.captureMessage('Error while updating addon account', {
+                extra: {
+                    url: url,
+                    status: status,
+                    error: error
+                }
+            });
+        });
+        return request;
+    },
+    authSuccessCallback: function() {
+        // Override for NS-specific auth success behavior
+        // TODO: generalize this when rewriting addon configs for ember
+        return;
+    },
+    sendAuth:  function() {
+        /** Send POST request to authorize GitLab */
+        // Selection should not be empty
+        var self = this;
+        if( !self.selectedHost() ){
+            self.setMessage("Please select a GitLab repository.", 'text-danger');
+            return;
+        }
+
+        if ( !self.useCustomHost() && !self.apiToken() ) {
+            self.setMessage("Please enter your Personal Access Token.", 'text-danger');
+            return;
+        }
+
+        if ( self.useCustomHost() && (!self.customHost() || !self.apiToken()) ) {
+            self.setMessage("Please enter a GitLab host and your Personal Access Token.", 'text-danger');
+            return;
+        }
+
+        var url = self.urls().create;
+
+        return osfHelpers.postJSON(
+            url,
+            ko.toJS({
+                host: self.host,
+                access_token: self.apiToken
+            })
+        ).done(function() {
+            self.updateAccounts();
+            self.clearModal();
+            $modal.modal('hide');
+            self.authSuccessCallback();
+        }).fail(function(xhr, textStatus, error) {
+            var errorMessage = (xhr.status === 401) ? 'Auth Error' : 'Other error';
+            self.setMessage(errorMessage, 'text-danger');
+            Raven.captureMessage('Could not authenticate with GitLab', {
+                extra: {
+                    url: url,
+                    textStatus: textStatus,
+                    error: error
+                }
+            });
+        });
+    },
+    fetch: function() {
+        // Update observables with data from the server
+        var self = this;
+        $.ajax({
+            url: self.url,
+            type: 'GET',
+            dataType: 'json'
+        }).done(function (response) {
+            var data = response.result;
+            self.urls(data.urls);
+            self.hosts(data.hosts);
+            self.loaded(true);
+            self.updateAccounts();
+        }).fail(function (xhr, textStatus, error) {
+            self.setMessage(language.userSettingsError, 'text-danger');
+            Raven.captureMessage('Could not GET GitLab settings', {
+                extra: {
+                    url: self.url,
+                    textStatus: textStatus,
+                    error: error
+                }
+            });
+        });
+    },
+    selectionChanged: function() {
+        var self = this;
+        self.setMessage('','');
+    }
+});
+
+function GitLabUserConfig(selector, url) {
+    // Initialization code
+    var self = this;
+    self.selector = selector;
+    self.url = url;
+    // On success, instantiate and bind the ViewModel
+    self.viewModel = new ViewModel(url);
+    osfHelpers.applyBindings(self.viewModel, self.selector);
+}
+
+module.exports = {
+    GitLabViewModel: ViewModel,
+    GitLabUserConfig: GitLabUserConfig    // for backwards-compat
+};
diff --git a/addons/gitlab/static/node-cfg.js b/addons/gitlab/static/node-cfg.js
new file mode 100644
index 00000000000..4b1a265acb4
--- /dev/null
+++ b/addons/gitlab/static/node-cfg.js
@@ -0,0 +1,16 @@
+'use strict';
+
+var $ = require('jquery');
+var AddonHelper = require('js/addonHelper');
+
+$(window.contextVars.gitlabSettingsSelector).on('submit', AddonHelper.onSubmitSettings);
+
+// Endpoint for GitLab user settings
+var url = '/api/v1/settings/gitlab/';
+var $osf = require('js/osfHelpers');
+var GitLabViewModel = require('./gitlabNodeConfig.js').GitLabViewModel;
+var gitlabViewModel = new GitLabViewModel(url);
+
+// Load initial GitLab data
+gitlabViewModel.fetch();
+$osf.applyBindings(gitlabViewModel, '#gitlabScope');
diff --git a/website/addons/gitlab/static/user-cfg.js b/addons/gitlab/static/user-cfg.js
similarity index 91%
rename from website/addons/gitlab/static/user-cfg.js
rename to addons/gitlab/static/user-cfg.js
index 312f2ece6d0..ef792e97159 100644
--- a/website/addons/gitlab/static/user-cfg.js
+++ b/addons/gitlab/static/user-cfg.js
@@ -8,4 +8,4 @@ var gitlabViewModel = new GitLabViewModel(url);
 $osf.applyBindings(gitlabViewModel, '#gitlabAddonScope');
 
 // Load initial GitLab data
-gitlabViewModel.fetch();
+gitlabViewModel.fetch(url);
diff --git a/website/addons/gitlab/templates/gitlab_credentials_modal.mako b/addons/gitlab/templates/gitlab_credentials_modal.mako
similarity index 97%
rename from website/addons/gitlab/templates/gitlab_credentials_modal.mako
rename to addons/gitlab/templates/gitlab_credentials_modal.mako
index edd7ca13644..401d5d5a881 100644
--- a/website/addons/gitlab/templates/gitlab_credentials_modal.mako
+++ b/addons/gitlab/templates/gitlab_credentials_modal.mako
@@ -33,7 +33,7 @@
                         <div class="col-sm-6">
                             <!-- Personal Access Token Input-->
                             <div class="form-group" data-bind="if: showApiTokenInput">
-                                <label for="clientId">
+                              <label for="apiToken">
                                     Personal Access Token
                                     <!-- Link to API token generation page -->
                                     <a data-bind="attr: {href: tokenUrl}"
@@ -41,7 +41,7 @@
                                         (Get from GitLab <i class="fa fa-external-link-square"></i>)
                                     </a>
                                 </label>
-                                <input class="form-control" name="clientId" data-bind="value: clientId"/>
+                                <input class="form-control" name="apiToken" data-bind="value: apiToken"/>
                             </div>
                         </div>
                     </div><!-- end row -->
diff --git a/addons/gitlab/templates/gitlab_node_settings.mako b/addons/gitlab/templates/gitlab_node_settings.mako
new file mode 100644
index 00000000000..bc3f7eb9edd
--- /dev/null
+++ b/addons/gitlab/templates/gitlab_node_settings.mako
@@ -0,0 +1,103 @@
+<div id="${addon_short_name}Scope" class="scripted">
+    <!-- Add credentials modal -->
+    <%include file="gitlab_credentials_modal.mako"/>
+
+    <form role="form" id="addonSettingsGitLab" data-addon="${addon_short_name}">
+        <div>
+            <h4 class="addon-title">
+                <img class="addon-icon" src="${addon_icon_url}">
+                GitLab
+                <small class="authorized-by">
+                    % if node_has_auth:
+                            authorized by
+                            <a href="${auth_osf_url}" target="_blank">
+                                ${auth_osf_name}
+                            </a>
+                        % if not is_registration:
+                            <a id="gitlabRemoveToken" class="text-danger pull-right addon-auth" >
+                              Disconnect Account
+                            </a>
+                        % endif
+                    % else:
+                        % if user_has_auth:
+                            <a id="gitlabImportToken" class="text-primary pull-right addon-auth">
+                               Import Account from Profile
+                            </a>
+                        % else:
+                            <a href="#gitlabInputCredentials" data-toggle="modal" class="text-primary pull-right addon-auth">
+                               Connect Account
+                            </a>
+                        % endif
+                    % endif
+                </small>
+            </h4>
+        </div>
+
+        % if node_has_auth and valid_credentials:
+
+            <input type="hidden" id="gitlabUser" name="gitlab_user" value="${gitlab_user}" />
+            <input type="hidden" id="gitlabRepo" name="gitlab_repo" value="${gitlab_repo}" />
+            <input type="hidden" id="gitlabRepoId" name="gitlab_repo_id" value="${gitlab_repo_id}" />
+
+            <p><strong>Current Repo: </strong>
+
+            % if is_owner and not is_registration:
+            </p>
+            <div class="row">
+                <div class="col-md-6 m-b-sm">
+                    <select id="gitlabSelectRepo" class="form-control" ${'disabled' if not is_owner or is_registration else ''}>
+                        <option>-----</option>
+                            % if is_owner:
+                                % if repos:
+                                  % for repo in repos:
+                                      <option value="${repo['id']}" ${'selected' if repo['id'] == int(gitlab_repo_id) else ''}>${repo['path_with_namespace']}</option>
+                                  % endfor
+                                % endif
+                            % else:
+                                <option selected>${gitlab_repo_full_name}</option>
+                            % endif
+                    </select>
+                </div>
+
+                <div class="col-md-6 m-b-sm">
+                    <button class="btn btn-success addon-settings-submit">
+                        Save
+                    </button>
+                    <a id="gitlabCreateRepo" class="btn btn-success pull-right">Create Repo</a>
+                </div>
+            </div>
+            % elif gitlab_repo_full_name:
+                <a href="${files_url}">${gitlab_repo_full_name}</a></p>
+            % else:
+                <span>None</span></p>
+            % endif
+        % endif
+
+        ${self.on_submit()}
+
+        % if node_has_auth and not valid_credentials:
+            <div class="addon-settings-message text-danger p-t-sm">
+                % if is_owner:
+                    Could not retrieve GitLab settings at this time. The GitLab addon credentials
+                    may no longer be valid. Try deauthorizing and reauthorizing GitLab on your
+                    <a href="${addons_url}">account settings page</a>.
+                % else:
+                    Could not retrieve GitLab settings at this time. The GitLab addon credentials
+                    may no longer be valid. Contact ${auth_osf_name} to verify.
+                % endif
+            </div>
+        % else:
+            <div class="addon-settings-message p-t-sm" style="display: none"></div>
+        % endif
+
+    </form>
+
+</div>
+<%def name="on_submit()">
+    <script type="text/javascript">
+        window.contextVars = $.extend({}, window.contextVars, {
+            ## Short name never changes
+            'gitlabSettingsSelector': '#addonSettingsGitLab'
+        });
+    </script>
+</%def>
diff --git a/website/addons/gitlab/templates/gitlab_user_settings.mako b/addons/gitlab/templates/gitlab_user_settings.mako
similarity index 100%
rename from website/addons/gitlab/templates/gitlab_user_settings.mako
rename to addons/gitlab/templates/gitlab_user_settings.mako
diff --git a/addons/gitlab/tests/__init__.py b/addons/gitlab/tests/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/addons/gitlab/tests/conftest.py b/addons/gitlab/tests/conftest.py
new file mode 100644
index 00000000000..da9f243685b
--- /dev/null
+++ b/addons/gitlab/tests/conftest.py
@@ -0,0 +1 @@
+from osf_tests.conftest import *  # noqa
diff --git a/website/addons/gitlab/tests/factories.py b/addons/gitlab/tests/factories.py
similarity index 56%
rename from website/addons/gitlab/tests/factories.py
rename to addons/gitlab/tests/factories.py
index eab2edcdbdb..8c71aabce30 100644
--- a/website/addons/gitlab/tests/factories.py
+++ b/addons/gitlab/tests/factories.py
@@ -1,9 +1,10 @@
 # -*- coding: utf-8 -*-
 
 from factory import Sequence, SubFactory
-from tests.factories import ExternalAccountFactory, ModularOdmFactory, ProjectFactory, UserFactory
+from factory.django import DjangoModelFactory
+from osf_tests.factories import ExternalAccountFactory, UserFactory, ProjectFactory
 
-from website.addons.gitlab.model import GitLabNodeSettings, GitLabUserSettings
+from addons.gitlab.models import NodeSettings, UserSettings
 
 
 class GitLabAccountFactory(ExternalAccountFactory):
@@ -13,16 +14,16 @@ class GitLabAccountFactory(ExternalAccountFactory):
     display_name = 'abc'
 
 
-class GitLabUserSettingsFactory(ModularOdmFactory):
+class GitLabUserSettingsFactory(DjangoModelFactory):
     class Meta:
-        model = GitLabUserSettings
+        model = UserSettings
 
     owner = SubFactory(UserFactory)
 
 
-class GitLabNodeSettingsFactory(ModularOdmFactory):
+class GitLabNodeSettingsFactory(DjangoModelFactory):
     class Meta:
-        model = GitLabNodeSettings
+        model = NodeSettings
 
     owner = SubFactory(ProjectFactory)
     user_settings = SubFactory(GitLabUserSettingsFactory)
diff --git a/website/addons/gitlab/tests/test_models.py b/addons/gitlab/tests/test_models.py
similarity index 80%
rename from website/addons/gitlab/tests/test_models.py
rename to addons/gitlab/tests/test_models.py
index ca712b7f0be..e6729267902 100644
--- a/website/addons/gitlab/tests/test_models.py
+++ b/addons/gitlab/tests/test_models.py
@@ -1,37 +1,38 @@
 # -*- coding: utf-8 -*-
 
 import mock
+import pytest
 import unittest
 from nose.tools import *  # noqa
 
 from tests.base import OsfTestCase, get_default_metaschema
-from tests.factories import ExternalAccountFactory, ProjectFactory, UserFactory
+from osf_tests.factories import ProjectFactory, UserFactory
 
 from framework.auth import Auth
 
-from website.addons.gitlab.exceptions import NotFoundError, GitLabError
-from website.addons.gitlab import settings as gitlab_settings
-from website.addons.gitlab.model import GitLabUserSettings
-from website.addons.gitlab.model import GitLabNodeSettings
-from website.addons.gitlab.tests.factories import (
+from addons.base.tests.models import (OAuthAddonNodeSettingsTestSuiteMixin,
+                                      OAuthAddonUserSettingTestSuiteMixin)
+from addons.gitlab.exceptions import NotFoundError
+from addons.gitlab.models import NodeSettings
+from addons.gitlab.tests.factories import (
     GitLabAccountFactory,
     GitLabNodeSettingsFactory,
     GitLabUserSettingsFactory
 )
-from website.addons.base.testing import models
 
 from .utils import create_mock_gitlab
 mock_gitlab = create_mock_gitlab()
 
+pytestmark = pytest.mark.django_db
 
-class TestNodeSettings(models.OAuthAddonNodeSettingsTestSuiteMixin, OsfTestCase):
+class TestNodeSettings(OAuthAddonNodeSettingsTestSuiteMixin, unittest.TestCase):
 
     short_name = 'gitlab'
     full_name = 'GitLab'
     ExternalAccountFactory = GitLabAccountFactory
 
     NodeSettingsFactory = GitLabNodeSettingsFactory
-    NodeSettingsClass = GitLabNodeSettings
+    NodeSettingsClass = NodeSettings
     UserSettingsFactory = GitLabUserSettingsFactory
 
     ## Mixin Overrides ##
@@ -42,7 +43,7 @@ def _node_settings_class_kwargs(self, node, user_settings):
             'repo': 'mock',
             'user': 'abc',
             'owner': self.node,
-            'repo_id': 123
+            'repo_id': '123'
         }
 
     def test_set_folder(self):
@@ -54,22 +55,22 @@ def test_serialize_settings(self):
         # GitLab's serialized_settings are a little different from
         # common storage addons.
         settings = self.node_settings.serialize_waterbutler_settings()
-        expected = {'host': 'https://abc', 'owner': 'abc', 'repo': 'mock', 'repo_id': 123}
+        expected = {'host': 'https://some-super-secret', 'owner': 'abc', 'repo': 'mock', 'repo_id': '123'}
         assert_equal(settings, expected)
 
     @mock.patch(
-        'website.addons.gitlab.model.GitLabUserSettings.revoke_remote_oauth_access',
+        'addons.gitlab.models.UserSettings.revoke_remote_oauth_access',
         mock.PropertyMock()
     )
     def test_complete_has_auth_not_verified(self):
         super(TestNodeSettings, self).test_complete_has_auth_not_verified()
 
-    @mock.patch('website.addons.gitlab.api.GitLabClient.repos')
+    @mock.patch('addons.gitlab.api.GitLabClient.repos')
     def test_to_json(self, mock_repos):
         mock_repos.return_value = {}
         super(TestNodeSettings, self).test_to_json()
 
-    @mock.patch('website.addons.gitlab.api.GitLabClient.repos')
+    @mock.patch('addons.gitlab.api.GitLabClient.repos')
     def test_to_json_user_is_owner(self, mock_repos):
         mock_repos.return_value = {}
         result = self.node_settings.to_json(self.user)
@@ -79,7 +80,7 @@ def test_to_json_user_is_owner(self, mock_repos):
         assert_true(result['valid_credentials'])
         assert_equal(result.get('gitlab_repo', None), 'mock')
 
-    @mock.patch('website.addons.gitlab.api.GitLabClient.repos')
+    @mock.patch('addons.gitlab.api.GitLabClient.repos')
     def test_to_json_user_is_not_owner(self, mock_repos):
         mock_repos.return_value = {}
         not_owner = UserFactory()
@@ -91,15 +92,12 @@ def test_to_json_user_is_not_owner(self, mock_repos):
         assert_equal(result.get('repo_names', None), None)
 
 
-class TestUserSettings(models.OAuthAddonUserSettingTestSuiteMixin, OsfTestCase):
+class TestUserSettings(OAuthAddonUserSettingTestSuiteMixin, unittest.TestCase):
 
     short_name = 'gitlab'
     full_name = 'GitLab'
     ExternalAccountFactory = GitLabAccountFactory
 
-    def test_public_id(self):
-        assert_equal(self.user.external_accounts[0].display_name, self.user_settings.public_id)
-
 
 class TestCallbacks(OsfTestCase):
 
@@ -109,7 +107,9 @@ def setUp(self):
 
         self.project = ProjectFactory.build()
         self.consolidated_auth = Auth(self.project.creator)
+        self.project.creator.save()
         self.non_authenticator = UserFactory()
+        self.non_authenticator.save()
         self.project.save()
         self.project.add_contributor(
             contributor=self.non_authenticator,
@@ -119,7 +119,7 @@ def setUp(self):
         self.project.add_addon('gitlab', auth=self.consolidated_auth)
         self.project.creator.add_addon('gitlab')
         self.external_account = GitLabAccountFactory()
-        self.project.creator.external_accounts.append(self.external_account)
+        self.project.creator.external_accounts.add(self.external_account)
         self.project.creator.save()
         self.node_settings = self.project.get_addon('gitlab')
         self.user_settings = self.project.creator.get_addon('gitlab')
@@ -130,8 +130,7 @@ def setUp(self):
         self.node_settings.save()
         self.node_settings.set_auth
 
-
-    @mock.patch('website.addons.gitlab.api.GitLabClient.repo')
+    @mock.patch('addons.gitlab.api.GitLabClient.repo')
     def test_before_make_public(self, mock_repo):
         mock_repo.side_effect = NotFoundError
 
@@ -167,7 +166,7 @@ def test_after_remove_contributor_authenticator_self(self):
             None
         )
         assert_true(message)
-        assert_not_in("You can re-authenticate", message)
+        assert_not_in('You can re-authenticate', message)
 
     def test_after_remove_contributor_authenticator_not_self(self):
         auth = Auth(user=self.non_authenticator)
@@ -179,7 +178,7 @@ def test_after_remove_contributor_authenticator_not_self(self):
             None
         )
         assert_true(message)
-        assert_in("You can re-authenticate", message)
+        assert_in('You can re-authenticate', message)
 
     def test_after_remove_contributor_not_authenticator(self):
         self.node_settings.after_remove_contributor(
@@ -192,7 +191,7 @@ def test_after_remove_contributor_not_authenticator(self):
 
     def test_after_fork_authenticator(self):
         fork = ProjectFactory()
-        clone, message = self.node_settings.after_fork(
+        clone = self.node_settings.after_fork(
             self.project, fork, self.project.creator,
         )
         assert_equal(
@@ -202,7 +201,7 @@ def test_after_fork_authenticator(self):
 
     def test_after_fork_not_authenticator(self):
         fork = ProjectFactory()
-        clone, message = self.node_settings.after_fork(
+        clone = self.node_settings.after_fork(
             self.project, fork, self.non_authenticator,
         )
         assert_equal(
@@ -226,20 +225,19 @@ def test_does_not_get_copied_to_registrations(self, mock_archive):
         assert_false(registration.has_addon('gitlab'))
 
 
-
-class TestGitLabNodeSettings(OsfTestCase):
+class TestGitLabNodeSettings(unittest.TestCase):
 
     def setUp(self):
-        OsfTestCase.setUp(self)
+        super(TestGitLabNodeSettings, self).setUp()
         self.user = UserFactory()
         self.user.add_addon('gitlab')
         self.user_settings = self.user.get_addon('gitlab')
         self.external_account = GitLabAccountFactory()
-        self.user_settings.owner.external_accounts.append(self.external_account)
+        self.user_settings.owner.external_accounts.add(self.external_account)
         self.user_settings.owner.save()
         self.node_settings = GitLabNodeSettingsFactory(user_settings=self.user_settings)
 
-    @mock.patch('website.addons.gitlab.api.GitLabClient.delete_hook')
+    @mock.patch('addons.gitlab.api.GitLabClient.delete_hook')
     def test_delete_hook_no_hook(self, mock_delete_hook):
         res = self.node_settings.delete_hook()
         assert_false(res)
diff --git a/addons/gitlab/tests/test_serializer.py b/addons/gitlab/tests/test_serializer.py
new file mode 100644
index 00000000000..66a90084770
--- /dev/null
+++ b/addons/gitlab/tests/test_serializer.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+"""Serializer tests for the GitLab addon."""
+import mock
+import pytest
+
+from tests.base import OsfTestCase
+from addons.base.tests.serializers import StorageAddonSerializerTestSuiteMixin
+from addons.gitlab.api import GitLabClient
+from addons.gitlab.tests.factories import GitLabAccountFactory
+from addons.gitlab.serializer import GitLabSerializer
+
+pytestmark = pytest.mark.django_db
+
+class TestGitLabSerializer(StorageAddonSerializerTestSuiteMixin, OsfTestCase):
+
+    addon_short_name = 'gitlab'
+
+    Serializer = GitLabSerializer
+    ExternalAccountFactory = GitLabAccountFactory
+    client = GitLabClient()
+
+    def set_provider_id(self, pid):
+        self.node_settings.repo = pid
+
+    ## Overrides ##
+
+    def setUp(self):
+        super(TestGitLabSerializer, self).setUp()
+        self.mock_api_user = mock.patch('addons.gitlab.api.GitLabClient.user')
+        self.mock_api_user.return_value = mock.Mock()
+        self.mock_api_user.start()
+
+    def tearDown(self):
+        self.mock_api_user.stop()
+        super(TestGitLabSerializer, self).tearDown()
+
+    def test_serialize_account(self):
+        ea = self.ExternalAccountFactory()
+        expected = {
+            'id': ea._id,
+            'provider_id': ea.provider_id,
+            'provider_name': ea.provider_name,
+            'provider_short_name': ea.provider,
+            'display_name': ea.display_name,
+            'profile_url': ea.profile_url,
+            'nodes': [],
+            'host': ea.oauth_secret,
+            'host_url': 'https://{0}'.format(ea.oauth_secret),
+        }
+        assert self.ser.serialize_account(ea) == expected
diff --git a/website/addons/gitlab/tests/test_utils.py b/addons/gitlab/tests/test_utils.py
similarity index 87%
rename from website/addons/gitlab/tests/test_utils.py
rename to addons/gitlab/tests/test_utils.py
index dedff482614..5d0903d23d4 100644
--- a/website/addons/gitlab/tests/test_utils.py
+++ b/addons/gitlab/tests/test_utils.py
@@ -7,9 +7,9 @@
 
 from tests.base import OsfTestCase
 
-from website.addons.gitlab import utils
-from website.addons.base.exceptions import HookError
-from website.addons.gitlab.model import GitLabNodeSettings
+from addons.gitlab import utils
+from addons.base.exceptions import HookError
+from addons.gitlab.models import NodeSettings
 
 
 def make_signature(secret, data):
@@ -25,7 +25,7 @@ class TestHookVerify(OsfTestCase):
 
     def setUp(self):
         super(TestHookVerify, self).setUp()
-        self.node_settings = GitLabNodeSettings(
+        self.node_settings = NodeSettings(
             hook_secret='speakfriend',
         )
 
diff --git a/website/addons/gitlab/tests/test_views.py b/addons/gitlab/tests/test_views.py
similarity index 59%
rename from website/addons/gitlab/tests/test_views.py
rename to addons/gitlab/tests/test_views.py
index 6b7ebfad5bb..24486187b29 100644
--- a/website/addons/gitlab/tests/test_views.py
+++ b/addons/gitlab/tests/test_views.py
@@ -3,41 +3,47 @@
 
 import mock
 import datetime
+import pytest
 import unittest
 
 from nose.tools import *  # noqa (PEP8 asserts)
 from tests.base import OsfTestCase, get_default_metaschema
-from tests.factories import ProjectFactory, UserFactory, AuthUserFactory
+from osf_tests.factories import ProjectFactory, UserFactory, AuthUserFactory
 
 from github3.repos.branch import Branch
 
 from framework.exceptions import HTTPError
 from framework.auth import Auth
 
-from website.util import api_url_for
-from website.addons.base.testing.views import (
+from addons.base.tests.views import (
     OAuthAddonAuthViewsTestCaseMixin, OAuthAddonConfigViewsTestCaseMixin
 )
-from website.addons.gitlab import views, utils
-from website.addons.gitlab.api import GitLabClient
-from website.addons.gitlab.model import GitLabProvider
-from website.addons.gitlab.serializer import GitLabSerializer
-from website.addons.gitlab.utils import check_permissions
-from website.addons.gitlab.tests.utils import create_mock_gitlab, GitLabAddonTestCase
-from website.addons.gitlab.tests.factories import GitLabAccountFactory
+from addons.gitlab import utils
+from addons.gitlab.api import GitLabClient
+from addons.gitlab.serializer import GitLabSerializer
+from addons.gitlab.utils import check_permissions
+from addons.gitlab.tests.utils import create_mock_gitlab, GitLabAddonTestCase
+from addons.gitlab.tests.factories import GitLabAccountFactory
 
+pytestmark = pytest.mark.django_db
+
+class TestGitLabAuthViews(GitLabAddonTestCase, OAuthAddonAuthViewsTestCaseMixin, OsfTestCase):
 
-class TestGitLabAuthViews(GitLabAddonTestCase, OAuthAddonAuthViewsTestCaseMixin):
-    
     @mock.patch(
-        'website.addons.gitlab.model.GitLabUserSettings.revoke_remote_oauth_access',
+        'addons.gitlab.models.UserSettings.revoke_remote_oauth_access',
         mock.PropertyMock()
     )
     def test_delete_external_account(self):
         super(TestGitLabAuthViews, self).test_delete_external_account()
 
+    def test_oauth_start(self):
+        pass
+
+    def test_oauth_finish(self):
+        pass
+
 
-class TestGitLabConfigViews(GitLabAddonTestCase, OAuthAddonConfigViewsTestCaseMixin):
+class TestGitLabConfigViews(GitLabAddonTestCase, OAuthAddonConfigViewsTestCaseMixin, OsfTestCase):
     folder = None
     Serializer = GitLabSerializer
     client = GitLabClient
@@ -46,7 +52,7 @@ class TestGitLabConfigViews(GitLabAddonTestCase, OAuthAddonConfigViewsTestCaseMi
 
     def setUp(self):
         super(TestGitLabConfigViews, self).setUp()
-        self.mock_api_user = mock.patch("website.addons.gitlab.api.GitLabClient.user")
+        self.mock_api_user = mock.patch('addons.gitlab.api.GitLabClient.user')
         self.mock_api_user.return_value = mock.Mock()
         self.mock_api_user.start()
 
@@ -58,8 +64,8 @@ def test_folder_list(self):
         # GH only lists root folder (repos), this test is superfluous
         pass
 
-    @mock.patch('website.addons.gitlab.model.GitLabNodeSettings.add_hook')
-    @mock.patch('website.addons.gitlab.views.GitLabClient.repo')
+    @mock.patch('addons.gitlab.models.NodeSettings.add_hook')
+    @mock.patch('addons.gitlab.views.GitLabClient.repo')
     def test_set_config(self, mock_repo, mock_add_hook):
         # GH selects repos, not folders, so this needs to be overriden
         mock_repo.return_value = 'repo_name'
@@ -67,11 +73,12 @@ def test_set_config(self, mock_repo, mock_add_hook):
         res = self.app.post_json(url, {
             'gitlab_user': 'octocat',
             'gitlab_repo': 'repo_name',
+            'gitlab_repo_id': '123',
         }, auth=self.user.auth)
         assert_equal(res.status_code, http.OK)
         self.project.reload()
         assert_equal(
-            self.project.logs[-1].action,
+            self.project.logs.latest().action,
             '{0}_repo_linked'.format(self.ADDON_SHORT_NAME)
         )
         mock_add_hook.assert_called_once()
@@ -113,7 +120,7 @@ def setUp(self):
         self.project.save()
         self.project.add_addon('gitlab', auth=self.consolidated_auth)
         self.project.creator.add_addon('gitlab')
-        self.project.creator.external_accounts.append(GitLabAccountFactory())
+        self.project.creator.external_accounts.add(GitLabAccountFactory())
         self.project.creator.save()
 
         self.gitlab = create_mock_gitlab(user='fred', private=False)
@@ -121,8 +128,9 @@ def setUp(self):
         self.node_settings = self.project.get_addon('gitlab')
         self.node_settings.user_settings = self.project.creator.get_addon('gitlab')
         # Set the node addon settings to correspond to the values of the mock repo
-        self.node_settings.user = self.gitlab.repo.return_value.owner.login
-        self.node_settings.repo = self.gitlab.repo.return_value.name
+        self.node_settings.user = self.gitlab.repo.return_value['owner']['name']
+        self.node_settings.repo = self.gitlab.repo.return_value['name']
+        self.node_settings.repo_id = self.gitlab.repo.return_value['id']
         self.node_settings.save()
 
     def _get_sha_for_branch(self, branch=None, mock_branches=None):
@@ -130,15 +138,15 @@ def _get_sha_for_branch(self, branch=None, mock_branches=None):
         if mock_branches is None:
             mock_branches = gitlab_mock.branches
         if branch is None:  # Get default branch name
-            branch = self.gitlab.repo.return_value.default_branch
+            branch = self.gitlab.repo.return_value['default_branch']
         for each in mock_branches.return_value:
-            if each.name == branch:
-                branch_sha = each.commit.sha
+            if each['name'] == branch:
+                branch_sha = each['commit']['id']
         return branch_sha
 
     # Tests for _get_refs
-    @mock.patch('website.addons.gitlab.api.GitLabClient.branches')
-    @mock.patch('website.addons.gitlab.api.GitLabClient.repo')
+    @mock.patch('addons.gitlab.api.GitLabClient.branches')
+    @mock.patch('addons.gitlab.api.GitLabClient.repo')
     def test_get_refs_defaults(self, mock_repo, mock_branches):
         gitlab_mock = self.gitlab
         mock_repo.return_value = gitlab_mock.repo.return_value
@@ -146,7 +154,7 @@ def test_get_refs_defaults(self, mock_repo, mock_branches):
         branch, sha, branches = utils.get_refs(self.node_settings)
         assert_equal(
             branch,
-            gitlab_mock.repo.return_value.default_branch
+            gitlab_mock.repo.return_value['default_branch']
         )
         assert_equal(sha, self._get_sha_for_branch(branch=None))  # Get refs for default branch
         assert_equal(
@@ -154,8 +162,8 @@ def test_get_refs_defaults(self, mock_repo, mock_branches):
             gitlab_mock.branches.return_value
         )
 
-    @mock.patch('website.addons.gitlab.api.GitLabClient.branches')
-    @mock.patch('website.addons.gitlab.api.GitLabClient.repo')
+    @mock.patch('addons.gitlab.api.GitLabClient.branches')
+    @mock.patch('addons.gitlab.api.GitLabClient.repo')
     def test_get_refs_branch(self, mock_repo, mock_branches):
         gitlab_mock = self.gitlab
         mock_repo.return_value = gitlab_mock.repo.return_value
@@ -174,29 +182,17 @@ def test_before_fork(self):
         res = self.app.get(url, auth=self.user.auth).maybe_follow()
         assert_equal(len(res.json['prompts']), 1)
 
-    @mock.patch('website.addons.gitlab.model.GitLabUserSettings.has_auth')
+    @mock.patch('addons.gitlab.models.UserSettings.has_auth')
     def test_before_register(self, mock_has_auth):
         mock_has_auth.return_value = True
         url = self.project.api_url + 'beforeregister/'
         res = self.app.get(url, auth=self.user.auth).maybe_follow()
         assert_true('GitLab' in res.json['prompts'][1])
-        
+
     def test_get_refs_sha_no_branch(self):
         with assert_raises(HTTPError):
             utils.get_refs(self.node_settings, sha='12345')
 
-    def test_get_refs_registered_missing_branch(self):
-        gitlab_mock = self.gitlab
-        self.node_settings.registration_data = {
-            'branches': [
-                branch.to_json()
-                for branch in gitlab_mock.branches.return_value
-            ]
-        }
-        self.node_settings.owner.is_registration = True
-        with assert_raises(HTTPError):
-            utils.get_refs(self.node_settings, branch='nothere')
-
     # Tests for _check_permissions
     # make a user with no authorization; make sure check_permissions returns false
     def test_permissions_no_auth(self):
@@ -210,82 +206,82 @@ def test_permissions_no_auth(self):
 
     # make a repository that doesn't allow push access for this user;
     # make sure check_permissions returns false
-    @mock.patch('website.addons.gitlab.model.GitLabUserSettings.has_auth')
-    @mock.patch('website.addons.gitlab.api.GitLabClient.repo')
+    @mock.patch('addons.gitlab.models.UserSettings.has_auth')
+    @mock.patch('addons.gitlab.api.GitLabClient.repo')
     def test_permissions_no_access(self, mock_repo, mock_has_auth):
         gitlab_mock = self.gitlab
         mock_has_auth.return_value = True
         connection = gitlab_mock
         branch = 'master'
-        mock_repository = mock.NonCallableMock()
-        mock_repository.user = 'fred'
-        mock_repository.repo = 'mock-repo'
-        mock_repository.to_json.return_value = {
+        mock_repository = {
             'user': 'fred',
             'repo': 'mock-repo',
             'permissions': {
-                'push': False,  # this is key
+                'project_access': {'access_level': 20, 'notification_level': 3}
             },
         }
         mock_repo.return_value = mock_repository
         assert_false(check_permissions(self.node_settings, self.consolidated_auth, connection, branch, repo=mock_repository))
 
     # make a branch with a different commit than the commit being passed into check_permissions
-    @mock.patch('website.addons.gitlab.model.GitLabUserSettings.has_auth')
+    @mock.patch('addons.gitlab.models.UserSettings.has_auth')
     def test_permissions_not_head(self, mock_has_auth):
         gitlab_mock = self.gitlab
         mock_has_auth.return_value = True
         connection = gitlab_mock
-        mock_branch = mock.NonCallableMock()
-        mock_branch.commit.sha = '67890'
+        mock_branch = {
+            'commit': {'id': '67890'}
+        }
+        connection.branches.return_value = mock_branch
         sha = '12345'
         assert_false(check_permissions(self.node_settings, self.consolidated_auth, connection, mock_branch, sha=sha))
 
     # make sure permissions are not granted for editing a registration
-    @mock.patch('website.addons.gitlab.model.GitLabUserSettings.has_auth')
+    @mock.patch('addons.gitlab.models.UserSettings.has_auth')
     def test_permissions(self, mock_has_auth):
         gitlab_mock = self.gitlab
         mock_has_auth.return_value = True
         connection = gitlab_mock
-        self.node_settings.owner.is_registration = True
-        assert_false(check_permissions(self.node_settings, self.consolidated_auth, connection, 'master'))
+        with mock.patch('osf.models.node.AbstractNode.is_registration', new_callable=mock.PropertyMock) as mock_is_reg:
+            mock_is_reg.return_value = True
+            assert_false(check_permissions(self.node_settings, self.consolidated_auth, connection, 'master'))
 
     def check_hook_urls(self, urls, node, path, sha):
         url = node.web_url_for('addon_view_or_download_file', path=path, provider='gitlab')
         expected_urls = {
-            'view': '{0}?ref={1}'.format(url, sha),
-            'download': '{0}?action=download&ref={1}'.format(url, sha)
+            'view': '{0}?branch={1}'.format(url, sha),
+            'download': '{0}?action=download&branch={1}'.format(url, sha)
         }
 
         assert_equal(urls['view'], expected_urls['view'])
         assert_equal(urls['download'], expected_urls['download'])
 
-    @mock.patch('website.addons.gitlab.views.verify_hook_signature')
+    @mock.patch('addons.gitlab.views.verify_hook_signature')
     def test_hook_callback_add_file_not_thro_osf(self, mock_verify):
-        url = "/api/v1/project/{0}/gitlab/hook/".format(self.project._id)
+        url = '/api/v1/project/{0}/gitlab/hook/'.format(self.project._id)
         timestamp = str(datetime.datetime.utcnow())
         self.app.post_json(
             url,
             {
-                "test": True,
-                "commits": [{
-                    "id": "b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
-                    "distinct": True,
-                    "message": "foo",
-                    "timestamp": timestamp,
-                    "url": "https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
-                    "author": {"name": "Illidan", "email": "njqpw@osf.io"},
-                    "committer": {"name": "Testor", "email": "test@osf.io", "username": "tester"},
-                    "added": ["PRJWN3TV"],
-                    "removed": [],
-                    "modified": [],
+                'test': True,
+                'commits': [{
+                    'id': 'b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
+                    'distinct': True,
+                    'message': 'foo',
+                    'timestamp': timestamp,
+                    'url': 'https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
+                    'author': {'name': 'Illidan', 'email': 'njqpw@osf.io'},
+                    'committer': {'name': 'Testor', 'email': 'test@osf.io', 'username': 'tester'},
+                    'added': ['PRJWN3TV'],
+                    'removed': [],
+                    'modified': [],
                 }]
             },
-            content_type="application/json",
+            content_type='application/json',
         ).maybe_follow()
         self.project.reload()
-        assert_equal(self.project.logs[-1].action, "gitlab_file_added")
-        urls = self.project.logs[-1].params['urls']
+        assert_equal(self.project.logs.latest().action, 'gitlab_file_added')
+        urls = self.project.logs.latest().params['urls']
         self.check_hook_urls(
             urls,
             self.project,
@@ -293,26 +289,26 @@ def test_hook_callback_add_file_not_thro_osf(self, mock_verify):
             sha='b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
         )
 
-    @mock.patch('website.addons.gitlab.views.verify_hook_signature')
+    @mock.patch('addons.gitlab.views.verify_hook_signature')
     def test_hook_callback_modify_file_not_thro_osf(self, mock_verify):
-        url = "/api/v1/project/{0}/gitlab/hook/".format(self.project._id)
+        url = '/api/v1/project/{0}/gitlab/hook/'.format(self.project._id)
         timestamp = str(datetime.datetime.utcnow())
         self.app.post_json(
             url,
-            {"test": True,
-                 "commits": [{"id": "b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
-                              "distinct": True,
-                              "message": " foo",
-                              "timestamp": timestamp,
-                              "url": "https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
-                              "author": {"name": "Illidan", "email": "njqpw@osf.io"},
-                              "committer": {"name": "Testor", "email": "test@osf.io",
-                                            "username": "tester"},
-                              "added": [], "removed":[], "modified":["PRJWN3TV"]}]},
-            content_type="application/json").maybe_follow()
+            {'test': True,
+                 'commits': [{'id': 'b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
+                              'distinct': True,
+                              'message': ' foo',
+                              'timestamp': timestamp,
+                              'url': 'https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
+                              'author': {'name': 'Illidan', 'email': 'njqpw@osf.io'},
+                              'committer': {'name': 'Testor', 'email': 'test@osf.io',
+                                            'username': 'tester'},
+                              'added': [], 'removed':[], 'modified':['PRJWN3TV']}]},
+            content_type='application/json').maybe_follow()
         self.project.reload()
-        assert_equal(self.project.logs[-1].action, "gitlab_file_updated")
-        urls = self.project.logs[-1].params['urls']
+        assert_equal(self.project.logs.latest().action, 'gitlab_file_updated')
+        urls = self.project.logs.latest().params['urls']
         self.check_hook_urls(
             urls,
             self.project,
@@ -320,80 +316,80 @@ def test_hook_callback_modify_file_not_thro_osf(self, mock_verify):
             sha='b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
         )
 
-    @mock.patch('website.addons.gitlab.views.verify_hook_signature')
+    @mock.patch('addons.gitlab.views.verify_hook_signature')
     def test_hook_callback_remove_file_not_thro_osf(self, mock_verify):
-        url = "/api/v1/project/{0}/gitlab/hook/".format(self.project._id)
+        url = '/api/v1/project/{0}/gitlab/hook/'.format(self.project._id)
         timestamp = str(datetime.datetime.utcnow())
         self.app.post_json(
             url,
-            {"test": True,
-             "commits": [{"id": "b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
-                          "distinct": True,
-                          "message": "foo",
-                          "timestamp": timestamp,
-                          "url": "https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
-                          "author": {"name": "Illidan", "email": "njqpw@osf.io"},
-                          "committer": {"name": "Testor", "email": "test@osf.io", "username": "tester"},
-                          "added": [], "removed": ["PRJWN3TV"], "modified":[]}]},
-            content_type="application/json").maybe_follow()
+            {'test': True,
+             'commits': [{'id': 'b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
+                          'distinct': True,
+                          'message': 'foo',
+                          'timestamp': timestamp,
+                          'url': 'https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
+                          'author': {'name': 'Illidan', 'email': 'njqpw@osf.io'},
+                          'committer': {'name': 'Testor', 'email': 'test@osf.io', 'username': 'tester'},
+                          'added': [], 'removed': ['PRJWN3TV'], 'modified':[]}]},
+            content_type='application/json').maybe_follow()
         self.project.reload()
-        assert_equal(self.project.logs[-1].action, "gitlab_file_removed")
-        urls = self.project.logs[-1].params['urls']
+        assert_equal(self.project.logs.latest().action, 'gitlab_file_removed')
+        urls = self.project.logs.latest().params['urls']
         assert_equal(urls, {})
 
-    @mock.patch('website.addons.gitlab.views.verify_hook_signature')
+    @mock.patch('addons.gitlab.views.verify_hook_signature')
     def test_hook_callback_add_file_thro_osf(self, mock_verify):
-        url = "/api/v1/project/{0}/gitlab/hook/".format(self.project._id)
+        url = '/api/v1/project/{0}/gitlab/hook/'.format(self.project._id)
         self.app.post_json(
             url,
-            {"test": True,
-             "commits": [{"id": "b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
-                          "distinct": True,
-                          "message": "Added via the Open Science Framework",
-                          "timestamp": "2014-01-08T14:15:51-08:00",
-                          "url": "https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
-                          "author": {"name": "Illidan", "email": "njqpw@osf.io"},
-                          "committer": {"name": "Testor", "email": "test@osf.io", "username": "tester"},
-                          "added": ["PRJWN3TV"], "removed":[], "modified":[]}]},
-            content_type="application/json").maybe_follow()
+            {'test': True,
+             'commits': [{'id': 'b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
+                          'distinct': True,
+                          'message': 'Added via the Open Science Framework',
+                          'timestamp': '2014-01-08T14:15:51-08:00',
+                          'url': 'https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
+                          'author': {'name': 'Illidan', 'email': 'njqpw@osf.io'},
+                          'committer': {'name': 'Testor', 'email': 'test@osf.io', 'username': 'tester'},
+                          'added': ['PRJWN3TV'], 'removed':[], 'modified':[]}]},
+            content_type='application/json').maybe_follow()
         self.project.reload()
-        assert_not_equal(self.project.logs[-1].action, "gitlab_file_added")
+        assert_not_equal(self.project.logs.latest().action, 'gitlab_file_added')
 
-    @mock.patch('website.addons.gitlab.views.verify_hook_signature')
+    @mock.patch('addons.gitlab.views.verify_hook_signature')
     def test_hook_callback_modify_file_thro_osf(self, mock_verify):
-        url = "/api/v1/project/{0}/gitlab/hook/".format(self.project._id)
+        url = '/api/v1/project/{0}/gitlab/hook/'.format(self.project._id)
         self.app.post_json(
             url,
-            {"test": True,
-             "commits": [{"id": "b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
-                          "distinct": True,
-                          "message": "Updated via the Open Science Framework",
-                          "timestamp": "2014-01-08T14:15:51-08:00",
-                          "url": "https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
-                          "author": {"name": "Illidan", "email": "njqpw@osf.io"},
-                          "committer": {"name": "Testor", "email": "test@osf.io", "username": "tester"},
-                          "added": [], "removed":[], "modified":["PRJWN3TV"]}]},
-            content_type="application/json").maybe_follow()
+            {'test': True,
+             'commits': [{'id': 'b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
+                          'distinct': True,
+                          'message': 'Updated via the Open Science Framework',
+                          'timestamp': '2014-01-08T14:15:51-08:00',
+                          'url': 'https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
+                          'author': {'name': 'Illidan', 'email': 'njqpw@osf.io'},
+                          'committer': {'name': 'Testor', 'email': 'test@osf.io', 'username': 'tester'},
+                          'added': [], 'removed':[], 'modified':['PRJWN3TV']}]},
+            content_type='application/json').maybe_follow()
         self.project.reload()
-        assert_not_equal(self.project.logs[-1].action, "gitlab_file_updated")
+        assert_not_equal(self.project.logs.latest().action, 'gitlab_file_updated')
 
-    @mock.patch('website.addons.gitlab.views.verify_hook_signature')
+    @mock.patch('addons.gitlab.views.verify_hook_signature')
     def test_hook_callback_remove_file_thro_osf(self, mock_verify):
-        url = "/api/v1/project/{0}/gitlab/hook/".format(self.project._id)
+        url = '/api/v1/project/{0}/gitlab/hook/'.format(self.project._id)
         self.app.post_json(
             url,
-            {"test": True,
-             "commits": [{"id": "b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
-                          "distinct": True,
-                          "message": "Deleted via the Open Science Framework",
-                          "timestamp": "2014-01-08T14:15:51-08:00",
-                          "url": "https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce",
-                          "author": {"name": "Illidan", "email": "njqpw@osf.io"},
-                          "committer": {"name": "Testor", "email": "test@osf.io", "username": "tester"},
-                          "added": [], "removed":["PRJWN3TV"], "modified":[]}]},
-            content_type="application/json").maybe_follow()
+            {'test': True,
+             'commits': [{'id': 'b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
+                          'distinct': True,
+                          'message': 'Deleted via the Open Science Framework',
+                          'timestamp': '2014-01-08T14:15:51-08:00',
+                          'url': 'https://gitlab.com/tester/addontesting/commit/b08dbb5b6fcd74a592e5281c9d28e2020a1db4ce',
+                          'author': {'name': 'Illidan', 'email': 'njqpw@osf.io'},
+                          'committer': {'name': 'Testor', 'email': 'test@osf.io', 'username': 'tester'},
+                          'added': [], 'removed':['PRJWN3TV'], 'modified':[]}]},
+            content_type='application/json').maybe_follow()
         self.project.reload()
-        assert_not_equal(self.project.logs[-1].action, "gitlab_file_removed")
+        assert_not_equal(self.project.logs.latest().action, 'gitlab_file_removed')
 
 
 class TestRegistrationsWithGitLab(OsfTestCase):
@@ -421,8 +417,7 @@ def setUp(self):
 
         super(TestGitLabSettings, self).setUp()
         self.gitlab = create_mock_gitlab(user='fred', private=False)
-        self.project = ProjectFactory.build()
-        self.project.save()
+        self.project = ProjectFactory()
         self.auth = self.project.creator.auth
         self.consolidated_auth = Auth(user=self.project.creator)
 
@@ -433,10 +428,11 @@ def setUp(self):
         self.node_settings.user_settings = self.user_settings
         self.node_settings.user = 'Queen'
         self.node_settings.repo = 'Sheer-Heart-Attack'
+        self.node_settings.repo_id = 'sheer-heart-attack'
         self.node_settings.save()
 
-    @mock.patch('website.addons.gitlab.model.GitLabNodeSettings.add_hook')
-    @mock.patch('website.addons.gitlab.api.GitLabClient.repo')
+    @mock.patch('addons.gitlab.models.NodeSettings.add_hook')
+    @mock.patch('addons.gitlab.api.GitLabClient.repo')
     def test_link_repo(self, mock_repo, mock_add_hook):
         gitlab_mock = self.gitlab
         mock_repo.return_value = gitlab_mock.repo.return_value
@@ -447,6 +443,7 @@ def test_link_repo(self, mock_repo, mock_add_hook):
             {
                 'gitlab_user': 'queen',
                 'gitlab_repo': 'night at the opera',
+                'gitlab_repo_id': 'abc',
             },
             auth=self.auth
         ).maybe_follow()
@@ -456,23 +453,24 @@ def test_link_repo(self, mock_repo, mock_add_hook):
 
         assert_equal(self.node_settings.user, 'queen')
         assert_equal(self.node_settings.repo, 'night at the opera')
-        assert_equal(self.project.logs[-1].action, 'gitlab_repo_linked')
+        assert_equal(self.project.logs.latest().action, 'gitlab_repo_linked')
         mock_add_hook.assert_called_once()
 
-    @mock.patch('website.addons.gitlab.model.GitLabNodeSettings.add_hook')
-    @mock.patch('website.addons.gitlab.api.GitLabClient.repo')
+    @mock.patch('addons.gitlab.models.NodeSettings.add_hook')
+    @mock.patch('addons.gitlab.api.GitLabClient.repo')
     def test_link_repo_no_change(self, mock_repo, mock_add_hook):
         gitlab_mock = self.gitlab
         mock_repo.return_value = gitlab_mock.repo.return_value
 
-        log_count = len(self.project.logs)
+        log_count = self.project.logs.count()
 
         url = self.project.api_url + 'gitlab/settings/'
         self.app.post_json(
             url,
             {
-                'gitlab_user': 'Queen',
-                'gitlab_repo': 'Sheer-Heart-Attack',
+                'gitlab_user': self.node_settings.user,
+                'gitlab_repo': self.node_settings.repo,
+                'gitlab_repo_id': self.node_settings.repo_id,
             },
             auth=self.auth
         ).maybe_follow()
@@ -480,10 +478,10 @@ def test_link_repo_no_change(self, mock_repo, mock_add_hook):
         self.project.reload()
         self.node_settings.reload()
 
-        assert_equal(len(self.project.logs), log_count)
+        assert_equal(self.project.logs.count(), log_count)
         assert_false(mock_add_hook.called)
 
-    @mock.patch('website.addons.gitlab.api.GitLabClient.repo')
+    @mock.patch('addons.gitlab.api.GitLabClient.repo')
     def test_link_repo_non_existent(self, mock_repo):
 
         mock_repo.return_value = None
@@ -501,7 +499,7 @@ def test_link_repo_non_existent(self, mock_repo):
 
         assert_equal(res.status_code, 400)
 
-    @mock.patch('website.addons.gitlab.api.GitLabClient.branches')
+    @mock.patch('addons.gitlab.api.GitLabClient.branches')
     def test_link_repo_registration(self, mock_branches):
 
         mock_branches.return_value = [
@@ -540,7 +538,7 @@ def test_link_repo_registration(self, mock_branches):
 
         assert_equal(res.status_code, 400)
 
-    @mock.patch('website.addons.gitlab.model.GitLabNodeSettings.delete_hook')
+    @mock.patch('addons.gitlab.models.NodeSettings.delete_hook')
     def test_deauthorize(self, mock_delete_hook):
 
         url = self.project.api_url + 'gitlab/user_auth/'
@@ -553,7 +551,7 @@ def test_deauthorize(self, mock_delete_hook):
         assert_equal(self.node_settings.repo, None)
         assert_equal(self.node_settings.user_settings, None)
 
-        assert_equal(self.project.logs[-1].action, 'gitlab_node_deauthorized')
+        assert_equal(self.project.logs.latest().action, 'gitlab_node_deauthorized')
 
 
 if __name__ == '__main__':
diff --git a/addons/gitlab/tests/utils.py b/addons/gitlab/tests/utils.py
new file mode 100644
index 00000000000..c4f8ee9d0d7
--- /dev/null
+++ b/addons/gitlab/tests/utils.py
@@ -0,0 +1,109 @@
+import mock
+from addons.gitlab.api import GitLabClient
+
+from addons.base.tests.base import OAuthAddonTestCaseMixin, AddonTestCase
+from addons.gitlab.models import GitLabProvider
+from addons.gitlab.tests.factories import GitLabAccountFactory
+
+
+class GitLabAddonTestCase(OAuthAddonTestCaseMixin, AddonTestCase):
+    ADDON_SHORT_NAME = 'gitlab'
+    ExternalAccountFactory = GitLabAccountFactory
+    Provider = GitLabProvider
+
+    def set_node_settings(self, settings):
+        super(GitLabAddonTestCase, self).set_node_settings(settings)
+        settings.repo = 'osfgitlabtest'
+        settings.user = 'osfio'
+
+def create_mock_gitlab(user='osfio', private=False):
+    """Factory for mock GitLab objects.
+    Example: ::
+
+        >>> gitlab = create_mock_gitlab(user='osfio')
+        >>> gitlab.branches(user='osfio', repo='hello-world')
+        >>> [{u'commit': {u'sha': u'e22d92d5d90bb8f9695e9a5e2e2311a5c1997230',
+        ...   u'url': u'https://api.gitlab.com/repos/osfio/mock-repo/commits/e22d92d5d90bb8f9695e9a5e2e2311a5c1997230'},
+        ...  u'name': u'dev'},
+        ... {u'commit': {u'sha': u'444a74d0d90a4aea744dacb31a14f87b5c30759c',
+        ...   u'url': u'https://api.gitlab.com/repos/osfio/mock-repo/commits/444a74d0d90a4aea744dacb31a14f87b5c30759c'},
+        ...  u'name': u'master'},
+        ... {u'commit': {u'sha': u'c6eaaf6708561c3d4439c0c8dd99c2e33525b1e6',
+        ...   u'url': u'https://api.gitlab.com/repos/osfio/mock-repo/commits/c6eaaf6708561c3d4439c0c8dd99c2e33525b1e6'},
+        ...  u'name': u'no-bundle'}]
+
+    :param str user: GitLab username.
+    :param bool private: Whether repo is private.
+    :return: An autospecced GitLab Mock object
+    """
+    gitlab_mock = mock.create_autospec(GitLabClient)
+    gitlab_mock.repo.return_value = {
+        u'approvals_before_merge': 0,
+        u'archived': False,
+        u'avatar_url': None,
+        u'builds_enabled': True,
+        u'container_registry_enabled': True,
+        u'created_at': u'2017-07-05T16:40:26.428Z',
+        u'creator_id': 1444024,
+        u'default_branch': u'master',
+        u'description': u'For testing',
+        u'forks_count': 0,
+        u'http_url_to_repo': u'https://gitlab.com/{}/mock-repo.git'.format(user),
+        u'id': 3643758,
+        u'issues_enabled': True,
+        u'last_activity_at': u'2017-07-05T16:40:26.428Z',
+        u'lfs_enabled': True,
+        u'merge_requests_enabled': True,
+        u'name': u'mock-repo',
+        u'name_with_namespace': u'{} / mock-repo'.format(user),
+        u'namespace': {u'full_path': u'{}'.format(user),
+            u'id': 1748448,
+            u'kind': u'user',
+            u'name': u'{}'.format(user),
+            u'path': u'{}'.format(user)},
+        u'only_allow_merge_if_all_discussions_are_resolved': False,
+        u'only_allow_merge_if_build_succeeds': False,
+        u'open_issues_count': 0,
+        u'owner': {u'avatar_url': u'https://secure.gravatar.com/avatar/a7fa245b01a35ad586d8e2fa5bd7be5f?s=80&d=identicon',
+            u'id': 1444024,
+            u'name': u'{}'.format(user),
+            u'state': u'active',
+            u'username': u'{}'.format(user),
+            u'web_url': u'https://gitlab.com/{}'.format(user)},
+        u'path': u'mock-repo',
+        u'path_with_namespace': u'{}/mock-repo'.format(user),
+        u'permissions': {u'group_access': None,
+            u'project_access': {u'access_level': 40, u'notification_level': 3}},
+        u'public': False,
+        u'public_builds': True,
+        u'request_access_enabled': False,
+        u'shared_runners_enabled': True,
+        u'shared_with_groups': [],
+        u'snippets_enabled': True,
+        u'ssh_url_to_repo': u'git@gitlab.com:{}/mock-repo.git'.format(user),
+        u'star_count': 0,
+        u'tag_list': [],
+        u'visibility_level': 0,
+        u'web_url': u'https://gitlab.com/{}/mock-repo'.format(user),
+        u'wiki_enabled': True
+    }
+    gitlab_mock.branches.return_value = [{
+        u'commit': {u'author_email': u'{}@gmail.com'.format(user),
+            u'author_name': u'{}'.format(user),
+            u'authored_date': u'2017-07-05T16:43:04.000+00:00',
+            u'committed_date': u'2017-07-05T16:43:04.000+00:00',
+            u'committer_email': u'{}@gmail.com'.format(user),
+            u'committer_name': u'{}'.format(user),
+            u'created_at': u'2017-07-05T16:43:04.000+00:00',
+            u'id': u'f064566f133ddfad636ceec72c5937cc0044c371',
+            u'message': u'Add readme.md',
+            u'parent_ids': [],
+            u'short_id': u'f064566f',
+            u'title': u'Add readme.md'},
+        u'developers_can_merge': False,
+        u'developers_can_push': False,
+        u'merged': False,
+        u'name': u'master',
+        u'protected': True
+    }]
+    return gitlab_mock
diff --git a/website/addons/gitlab/utils.py b/addons/gitlab/utils.py
similarity index 96%
rename from website/addons/gitlab/utils.py
rename to addons/gitlab/utils.py
index a33ecea1d48..7cadd83573e 100644
--- a/website/addons/gitlab/utils.py
+++ b/addons/gitlab/utils.py
@@ -5,9 +5,9 @@
 import httplib as http
 
 from framework.exceptions import HTTPError
-from website.addons.base.exceptions import HookError
+from addons.base.exceptions import HookError
 
-from website.addons.gitlab.api import GitLabClient
+from addons.gitlab.api import GitLabClient
 
 MESSAGE_BASE = 'via the Open Science Framework'
 MESSAGES = {
diff --git a/website/addons/gitlab/views.py b/addons/gitlab/views.py
similarity index 69%
rename from website/addons/gitlab/views.py
rename to addons/gitlab/views.py
index 5c2654a86fc..732b2491c4a 100644
--- a/website/addons/gitlab/views.py
+++ b/addons/gitlab/views.py
@@ -4,36 +4,28 @@
 import httplib as http
 import logging
 
-from furl import furl
+from django.core.exceptions import ValidationError
 from flask import request, make_response
 
 from framework.exceptions import HTTPError
 
-from modularodm import Q
-from modularodm.storage.base import KeyExistsException
-from website.oauth.models import ExternalAccount
-
-from website.addons.base import generic_views
-from website.addons.gitlab.api import GitLabClient, ref_to_params
-from website.addons.gitlab.exceptions import NotFoundError, GitLabError
-from website.addons.gitlab.settings import DEFAULT_HOSTS
-from website.addons.gitlab.serializer import GitLabSerializer
-from website.addons.gitlab.utils import (
-    get_refs, check_permissions,
-    verify_hook_signature, MESSAGES
-)
-
-from website.models import NodeLog
+from addons.base import generic_views
+from addons.gitlab.api import GitLabClient
+from addons.gitlab.apps import gitlab_hgrid_data
+from addons.gitlab.exceptions import GitLabError
+from addons.gitlab.settings import DEFAULT_HOSTS
+from addons.gitlab.serializer import GitLabSerializer
+from addons.gitlab.utils import verify_hook_signature, MESSAGES
+from framework.auth.decorators import must_be_logged_in
+from osf.models import ExternalAccount, NodeLog
 from website.project.decorators import (
     must_have_addon, must_be_addon_authorizer,
     must_have_permission, must_not_be_registration,
     must_be_contributor_or_public, must_be_valid_project,
 )
-from website.util import rubeus
-
-from framework.auth.decorators import must_be_logged_in
 from website.util import api_url_for
 
+
 logger = logging.getLogger(__name__)
 
 SHORT_NAME = 'gitlab'
@@ -71,10 +63,6 @@ def _get_folders(node_addon, folder_id):
     SHORT_NAME
 )
 
-gitlab_root_folder = generic_views.root_folder(
-    SHORT_NAME
-)
-
 @must_be_logged_in
 def gitlab_user_config_get(auth, **kwargs):
     """View for getting a JSON representation of the logged-in user's
@@ -101,32 +89,38 @@ def gitlab_user_config_get(auth, **kwargs):
 def gitlab_add_user_account(auth, **kwargs):
     """Verifies new external account credentials and adds to user's list"""
 
-    f = furl()
-    f.host = request.json.get('host').rstrip('/')
-    f.scheme = 'https'
-    clientId = request.json.get('clientId')
-    clientSecret = request.json.get('clientSecret')
+    host = request.json.get('host').rstrip('/')
+    access_token = request.json.get('access_token')
+
+    client = GitLabClient(access_token=access_token, host=host)
+    try:
+        user_info = client.user()
+    except:
+        # TODO: does gitlab even throw errors?
+        raise
+
+    if user_info.get('message') == '401 Unauthorized':
+        raise HTTPError(http.UNAUTHORIZED)
 
     try:
         account = ExternalAccount(
             provider='gitlab',
             provider_name='GitLab',
-            display_name=f.host,       # no username; show host
-            oauth_key=f.host,          # hijacked; now host
-            oauth_secret=clientSecret,   # hijacked; now clientSecret
-            provider_id=clientId,   # hijacked; now clientId
+            display_name=user_info['username'],
+            oauth_key=access_token,
+            oauth_secret=host,  # Hijacked to allow multiple hosts
+            provider_id=user_info['web_url'],   # unique for host/username
         )
         account.save()
-    except KeyExistsException:
+    except ValidationError:
         # ... or get the old one
-        account = ExternalAccount.find_one(
-            Q('provider', 'eq', 'gitlab') &
-            Q('provider_id', 'eq', clientId)
+        account = ExternalAccount.objects.get(
+            provider='gitlab', provider_id=user_info['web_url']
         )
 
     user = auth.user
-    if account not in user.external_accounts:
-        user.external_accounts.append(account)
+    if not user.external_accounts.filter(id=account.id).exists():
+        user.external_accounts.add(account)
 
     user.get_or_add_addon('gitlab', auth=auth)
     user.save()
@@ -222,7 +216,7 @@ def gitlab_set_config(auth, **kwargs):
 @must_have_addon('gitlab', 'node')
 def gitlab_download_starball(node_addon, **kwargs):
 
-    ref = request.args.get('ref', 'master')
+    ref = request.args.get('branch', 'master')
 
     connection = GitLabClient(external_account=node_addon.external_account)
     headers, data = connection.starball(
@@ -253,67 +247,6 @@ def gitlab_root_folder(*args, **kwargs):
 
     return gitlab_hgrid_data(node_settings, auth=auth, **data)
 
-def gitlab_hgrid_data(node_settings, auth, **kwargs):
-
-    # Quit if no repo linked
-    if not node_settings.complete:
-        return
-
-    connection = GitLabClient(external_account=node_settings.external_account)
-
-    # Initialize repo here in the event that it is set in the privacy check
-    # below. This potentially saves an API call in _check_permissions, below.
-    repo = None
-
-    # Quit if privacy mismatch and not contributor
-    node = node_settings.owner
-    if node.is_public or node.is_contributor(auth.user):
-        try:
-            repo = connection.repo(node_settings.repo_id)
-        except NotFoundError:
-            logger.error('Could not access GitLab repo')
-            return None
-
-    try:
-        branch, sha, branches = get_refs(node_settings, branch=kwargs.get('branch'), sha=kwargs.get('sha'), connection=connection)
-    except (NotFoundError, GitLabError):
-        logger.error('GitLab repo not found')
-        return
-
-    if branch is not None:
-        ref = ref_to_params(branch, sha)
-        can_edit = check_permissions(node_settings, auth, connection, branch, sha, repo=repo)
-    else:
-        ref = None
-        can_edit = False
-
-    permissions = {
-        'edit': can_edit,
-        'view': True,
-        'private': node_settings.is_private
-    }
-    urls = {
-        'upload': node_settings.owner.api_url + 'gitlab/file/' + branch,
-        'fetch': node_settings.owner.api_url + 'gitlab/hgrid/' + branch,
-        'branch': node_settings.owner.api_url + 'gitlab/hgrid/root/' + branch,
-        'zip': 'https://{0}/{1}/repository/archive.zip?ref={2}'.format(node_settings.external_account.display_name, repo['path_with_namespace'], branch),
-        'repo': 'https://{0}/{1}/tree/{2}'.format(node_settings.external_account.display_name, repo['path_with_namespace'], branch)
-    }
-
-    branch_names = [each['name'] for each in branches]
-    if not branch_names:
-        branch_names = [branch]  # if repo un-init-ed then still add default branch to list of branches
-
-    return [rubeus.build_addon_root(
-        node_settings,
-        repo['path_with_namespace'],
-        urls=urls,
-        permissions=permissions,
-        branches=branch_names,
-        private_key=kwargs.get('view_only', None),
-        default_branch=repo['default_branch'],
-    )]
-
 #########
 # Repos #
 #########
@@ -367,8 +300,8 @@ def add_hook_log(node, gitlab, action, path, date, committer, include_urls=False
         url = node.web_url_for('addon_view_or_download_file', path=path, provider=SHORT_NAME)
 
         urls = {
-            'view': '{0}?ref={1}'.format(url, sha),
-            'download': '{0}?action=download&ref={1}'.format(url, sha)
+            'view': '{0}?branch={1}'.format(url, sha),
+            'download': '{0}?action=download&branch={1}'.format(url, sha)
         }
 
     node.add_log(
diff --git a/api/base/settings/defaults.py b/api/base/settings/defaults.py
index b0a03a5ff82..aacd7566673 100644
--- a/api/base/settings/defaults.py
+++ b/api/base/settings/defaults.py
@@ -103,6 +103,7 @@
     'addons.figshare',
     'addons.forward',
     'addons.github',
+    'addons.gitlab',
     'addons.googledrive',
     'addons.mendeley',
     'addons.owncloud',
diff --git a/framework/addons/data/addons.json b/framework/addons/data/addons.json
index 99f6f6976d1..0c5dddcb3cc 100644
--- a/framework/addons/data/addons.json
+++ b/framework/addons/data/addons.json
@@ -48,16 +48,16 @@
                 "text": "Deleting files via OSF is not implemented yet."
             },
             "Logs": {
-                "status": "full",
-                "text": "GitLab dynamically updates OSF logs when files are modified outside the OSF. Changes to GitLab repos made before the repo is linked to the OSF will not be reflected in OSF logs."
+                "status": "none",
+                "text": "OSF does not keep track of changes made using GitLab directly."
             },
             "Forking": {
-                "status": "none",
-                "text": "Forking a project is not implemented yet."
+                "status": "partial",
+                "text": "Forking a project or component does not copy GitLab authorization unless the user forking the project is the same user who authorized the GitLab add-on in the source project being forked."
             },
             "Registering": {
-                "status": "none",
-                "text": "Registration on Gitlab is not implemented yet."
+                "status": "partial",
+                "text": "GitLab content will be registered, but version history will not be copied to the registration."
             }
         },
         "Amazon S3": {
diff --git a/website/addons/gitlab/__init__.py b/website/addons/gitlab/__init__.py
deleted file mode 100644
index 8e1a79447d6..00000000000
--- a/website/addons/gitlab/__init__.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import os
-
-from website.addons.gitlab import routes, views, model
-
-MODELS = [
-    model.GitLabUserSettings,
-    model.GitLabNodeSettings,
-]
-USER_SETTINGS_MODEL = model.GitLabUserSettings
-NODE_SETTINGS_MODEL = model.GitLabNodeSettings
-
-ROUTES = [routes.api_routes]
-
-SHORT_NAME = 'gitlab'
-FULL_NAME = 'GitLab'
-
-OWNERS = ['user', 'node']
-
-ADDED_DEFAULT = []
-ADDED_MANDATORY = []
-
-VIEWS = []
-CONFIGS = ['accounts', 'node']
-
-CATEGORIES = ['storage']
-
-INCLUDE_JS = {}
-
-INCLUDE_CSS = {}
-
-HAS_HGRID_FILES = True
-GET_HGRID_DATA = views.gitlab_hgrid_data
-
-HERE = os.path.dirname(os.path.abspath(__file__))
-NODE_SETTINGS_TEMPLATE = os.path.join(HERE, 'templates', 'gitlab_node_settings.mako')
-USER_SETTINGS_TEMPLATE = os.path.join(HERE, 'templates', 'gitlab_user_settings.mako')
diff --git a/website/addons/gitlab/static/gitlab-node-cfg.js b/website/addons/gitlab/static/gitlab-node-cfg.js
deleted file mode 100644
index ad852dc42b6..00000000000
--- a/website/addons/gitlab/static/gitlab-node-cfg.js
+++ /dev/null
@@ -1,190 +0,0 @@
-'use strict';
-
-var ko = require('knockout');
-var $ = require('jquery');
-var bootbox = require('bootbox');
-var $osf = require('js/osfHelpers');
-
-var nodeApiUrl = window.contextVars.node.urls.api;
-
-var GitLabConfigHelper = (function() {
-
-    var connectExistingAccount = function(accountId) {
-        $osf.putJSON(
-                nodeApiUrl + 'gitlab/user_auth/',
-                {'external_account_id': accountId}
-            ).done(function() {
-                    if($osf.isIE()){
-                        window.location.hash = '#configureAddonsAnchor';
-                    }
-                    window.location.reload();
-            }).fail(
-                $osf.handleJSONError
-            );
-    };
-
-    var updateHidden = function(element) {
-        var repoParts = $("option:selected", element).text().split('/');
-
-        $('#gitlabUser').val($.trim(repoParts[0]));
-        $('#gitlabRepo').val($.trim(repoParts[1]));
-        $('#gitlabRepoId').val(element.val());
-    };
-
-    var displayError = function(msg) {
-        $('#addonSettingsGitLab').find('.addon-settings-message')
-            .text('Error: ' + msg)
-            .removeClass('text-success').addClass('text-danger')
-            .fadeOut(100).fadeIn();
-    };
-
-    var createRepo = function() {
-
-        var $elm = $('#addonSettingsGitLab');
-        var $select = $elm.find('select');
-
-        bootbox.prompt({
-            title: 'Name your new repo',
-            placeholder: 'Repo name',
-            callback: function (repoName) {
-                // Return if cancelled
-                if (repoName === null) {
-                    return;
-                }
-
-                if (repoName === '') {
-                    displayError('Your repo must have a name');
-                    return;
-                }
-
-                $osf.postJSON(
-                    nodeApiUrl + 'gitlab/repo/create/',
-                    {name: repoName, user: $("#gitlabUser").val()}
-                ).done(function (response) {
-                        $select.append('<option value="' + response.repo['id'] + '">' + $osf.htmlEscape(response.repo['path_with_namespace']) + '</option>');
-                        $select.val(response.repo['id']);
-                        updateHidden($select);
-                    }).fail(function () {
-                        displayError('Could not create repository');
-                    });
-            },
-            buttons:{
-                confirm:{
-                    label: 'Save',
-                    className:'btn-success'
-                }
-            }
-        });
-    };
-
-    var askImport = function() {
-        $.get('/api/v1/settings/gitlab/accounts/'
-        ).done(function(data){
-            var accounts = data.accounts.map(function(account) {
-                return {
-                    name: account.display_name,
-                    id: account.id
-                };
-            });
-            if (accounts.length > 1) {
-                bootbox.prompt({
-                    title: 'Choose GitLab Account to Import',
-                    inputType: 'select',
-                    inputOptions: ko.utils.arrayMap(
-                        accounts,
-                        function(item) {
-                            return {
-                                text: $osf.htmlEscape(item.name),
-                                value: item.id
-                            };
-                        }
-                    ),
-                    value: accounts[0].id,
-                    callback: function(accountId) {
-                        connectExistingAccount(accountId);
-                    },
-                    buttons: {
-                        confirm:{
-                            label:'Import',
-                        }
-                    }
-                });
-            } else {
-                bootbox.confirm({
-                    title: 'Import GitLab Account?',
-                    message: 'Are you sure you want to link your GitLab account with this project?',
-                    callback: function(confirmed) {
-                        if (confirmed) {
-                            connectExistingAccount(accounts[0].id);
-                        }
-                    },
-                    buttons: {
-                        confirm: {
-                            label:'Import',
-                        }
-                    }
-                });
-            }
-        }).fail(function(xhr, textStatus, error) {
-            displayError('Could not GET GitLab accounts for user.');
-        });
-    };
-
-    $(document).ready(function() {
-        $('#gitlabSelectRepo').on('change', function() {
-            var el = $(this);
-            if (el.val()) {
-                updateHidden(el);
-            }
-        });
-
-        $('#gitlabCreateRepo').on('click', function() {
-            createRepo();
-        });
-
-        $('#gitlabImportToken').on('click', function() {
-            askImport();
-        });
-
-        $('#gitlabCreateToken').on('click', function() {
-            window.oauthComplete = function(res) {
-                askImport();
-            };
-        });
-
-        $('#gitlabRemoveToken').on('click', function() {
-            bootbox.confirm({
-                title: 'Disconnect GitLab Account?',
-                message: 'Are you sure you want to remove this GitLab account?',
-                callback: function(confirm) {
-                    if(confirm) {
-                        $.ajax({
-                        type: 'DELETE',
-                        url: nodeApiUrl + 'gitlab/user_auth/'
-                    }).done(function() {
-                        window.location.reload();
-                    }).fail(
-                        $osf.handleJSONError
-                    );
-                    }
-                },
-                buttons:{
-                    confirm:{
-                        label: 'Disconnect',
-                        className: 'btn-danger'
-                    }
-                }
-            });
-        });
-
-        $('#addonSettingsGitLab .addon-settings-submit').on('click', function() {
-            if (!$('#gitlabRepo').val()) {
-                return false;
-            }
-        });
-
-    });
-
-})();
-
-module.exports = GitLabConfigHelper;
diff --git a/website/addons/gitlab/static/gitlabUserConfig.js b/website/addons/gitlab/static/gitlabUserConfig.js
deleted file mode 100644
index ff4e8bd0bb9..00000000000
--- a/website/addons/gitlab/static/gitlabUserConfig.js
+++ /dev/null
@@ -1,236 +0,0 @@
-/**
-* Module that controls the GitLab user settings. Includes Knockout view-model
-* for syncing data.
-*/
-
-var ko = require('knockout');
-var $ = require('jquery');
-var Raven = require('raven-js');
-var bootbox = require('bootbox');
-require('js/osfToggleHeight');
-
-var language = require('js/osfLanguage').Addons.gitlab;
-var osfHelpers = require('js/osfHelpers');
-var addonSettings = require('js/addonSettings');
-
-var ExternalAccount = addonSettings.ExternalAccount;
-
-var $modal = $('#gitlabInputCredentials');
-
-
-function ViewModel(url) {
-    var self = this;
-    const otherString = 'Other (Please Specify)';
-
-    self.properName = 'GitLab';
-    self.clientId = ko.observable();
-    self.urls = ko.observable({});
-    self.hosts = ko.observableArray([]);
-    self.selectedHost = ko.observable();    // Host specified in select element
-    self.customHost = ko.observable();      // Host specified in input element
-    // Whether the initial data has been loaded
-    self.loaded = ko.observable(false);
-    self.accounts = ko.observableArray();
-
-    // Designated host, specified from select or input element
-    self.host = ko.pureComputed(function() {
-        return self.useCustomHost() ? self.customHost() : self.selectedHost();
-    });
-    // Hosts visible in select element. Includes presets and "Other" option
-    self.visibleHosts = ko.pureComputed(function() {
-        return self.hosts().concat([otherString]);
-    });
-    // Whether to use select element or input element for host designation
-    self.useCustomHost = ko.pureComputed(function() {
-        return self.selectedHost() === otherString;
-    });
-    self.showApiTokenInput = ko.pureComputed(function() {
-        return Boolean(self.selectedHost());
-    });
-    self.tokenUrl = ko.pureComputed(function() {
-        return self.host() ? 'https://' + self.host() + '/profile/personal_access_tokens' : null;
-    });
-
-    // Flashed messages
-    self.message = ko.observable('');
-    self.messageClass = ko.observable('text-info');
-
-    /** Reset all fields from GitLab host selection modal */
-    self.clearModal = function() {
-        self.message('');
-        self.messageClass('text-info');
-        self.clientId(null);
-        self.selectedHost(null);
-        self.customHost(null);
-    };
-
-    self.updateAccounts = function() {
-        var url = self.urls().accounts;
-        var request = $.get(url);
-        request.done(function(data) {
-            self.accounts($.map(data.accounts, function(account) {
-                var externalAccount =  new ExternalAccount(account);
-                externalAccount.gitlabHost = account.host;
-                externalAccount.gitlabUrl = account.host_url;
-                return externalAccount;
-            }));
-            $('#gitlab-header').osfToggleHeight({height: 160});
-        });
-        request.fail(function(xhr, status, error) {
-            Raven.captureMessage('Error while updating addon account', {
-                extra: {
-                    url: url,
-                    status: status,
-                    error: error
-                }
-            });
-        });
-        return request;
-    };
-
-    /** Send POST request to authorize GitLab */
-    self.sendAuth = function() {
-        // Selection should not be empty
-        if( !self.selectedHost() ){
-            self.changeMessage("Please select a GitLab repository.", 'text-danger');
-            return;
-        }
-
-        if ( !self.useCustomHost() && !self.clientId() ) {
-            self.changeMessage("Please enter your Personal Access Token.", 'text-danger');
-            return;
-        }
-
-        if ( self.useCustomHost() && (!self.customHost() || !self.clientId()) ) {
-            self.changeMessage("Please enter a GitLab host and your Personal Access Token.", 'text-danger');
-            return;
-        }
-
-        var url = self.urls().create;
-
-        return osfHelpers.postJSON(
-            url,
-            ko.toJS({
-                host: self.host,
-                clientId: self.clientId
-            })
-        ).done(function() {
-            self.updateAccounts();
-            self.clearModal();
-            $modal.modal('hide');
-
-        }).fail(function(xhr, textStatus, error) {
-            var errorMessage = (xhr.status === 401) ? 'Auth Error' : 'Other error';
-            self.changeMessage(errorMessage, 'text-danger');
-            Raven.captureMessage('Could not authenticate with GitLab', {
-                extra: {
-                    url: url,
-                    textStatus: textStatus,
-                    error: error
-                }
-            });
-        });
-    };
-
-    self.askDisconnect = function(account) {
-        var self = this;
-        bootbox.confirm({
-            title: 'Disconnect GitLab Account?',
-            message: '<p class="overflow">' +
-                'Are you sure you want to disconnect the GitLab account on <strong>' +
-                osfHelpers.htmlEscape(account.name) + '</strong>? This will revoke access to GitLab for all projects associated with this account.' +
-                '</p>',
-            callback: function (confirm) {
-                if (confirm) {
-                    self.disconnectAccount(account);
-                }
-            },
-            buttons:{
-                confirm:{
-                    label:'Disconnect',
-                    className:'btn-danger'
-                }
-            }
-        });
-    };
-
-    self.disconnectAccount = function(account) {
-        var self = this;
-        var url = '/api/v1/oauth/accounts/' + account.id + '/';
-        var request = $.ajax({
-            url: url,
-            type: 'DELETE'
-        });
-        request.done(function(data) {
-            self.updateAccounts();
-        });
-        request.fail(function(xhr, status, error) {
-            Raven.captureMessage('Error while removing addon authorization for ' + account.id, {
-                extra: {
-                    url: url,
-                    status: status,
-                    error: error
-                }
-            });
-        });
-        return request;
-    };
-
-    /** Change the flashed status message */
-    self.changeMessage = function(text, css, timeout) {
-        self.message(text);
-        var cssClass = css || 'text-info';
-        self.messageClass(cssClass);
-        if (timeout) {
-            // Reset message after timeout period
-            setTimeout(function() {
-                self.message('');
-                self.messageClass('text-info');
-            }, timeout);
-        }
-    };
-
-    // Update observables with data from the server
-    self.fetch = function() {
-        $.ajax({
-            url: url,
-            type: 'GET',
-            dataType: 'json'
-        }).done(function (response) {
-            var data = response.result;
-            self.urls(data.urls);
-            self.hosts(data.hosts);
-            self.loaded(true);
-            self.updateAccounts();
-        }).fail(function (xhr, textStatus, error) {
-            self.changeMessage(language.userSettingsError, 'text-danger');
-            Raven.captureMessage('Could not GET GitLab settings', {
-                extra: {
-                    url: url,
-                    textStatus: textStatus,
-                    error: error
-                }
-            });
-        });
-    };
-
-    self.selectionChanged = function() {
-        self.changeMessage('','');
-    };
-
-}
-
-function GitLabUserConfig(selector, url) {
-    // Initialization code
-    var self = this;
-    self.selector = selector;
-    self.url = url;
-    // On success, instantiate and bind the ViewModel
-    self.viewModel = new ViewModel(url);
-    osfHelpers.applyBindings(self.viewModel, self.selector);
-}
-
-module.exports = {
-    GitLabViewModel: ViewModel,
-    GitLabUserConfig: GitLabUserConfig    // for backwards-compat
-};
diff --git a/website/addons/gitlab/static/node-cfg.js b/website/addons/gitlab/static/node-cfg.js
deleted file mode 100644
index 3256d572eed..00000000000
--- a/website/addons/gitlab/static/node-cfg.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict';
-
-var $ = require('jquery');
-require('./gitlab-node-cfg.js');
-var AddonHelper = require('js/addonHelper');
-
-$(window.contextVars.gitlabSettingsSelector).on('submit', AddonHelper.onSubmitSettings);
diff --git a/website/addons/gitlab/templates/gitlab_node_settings.mako b/website/addons/gitlab/templates/gitlab_node_settings.mako
deleted file mode 100644
index b4c548b9220..00000000000
--- a/website/addons/gitlab/templates/gitlab_node_settings.mako
+++ /dev/null
@@ -1,99 +0,0 @@
-<form role="form" id="addonSettingsGitLab" data-addon="${addon_short_name}">
-
-    <div>
-        <h4 class="addon-title">
-            <img class="addon-icon" src="${addon_icon_url}">
-            GitLab
-            <small class="authorized-by">
-                % if node_has_auth:
-                        authorized by
-                        <a href="${auth_osf_url}" target="_blank">
-                            ${auth_osf_name}
-                        </a>
-                    % if not is_registration:
-                        <a id="gitlabRemoveToken" class="text-danger pull-right addon-auth" >
-                          Disconnect Account
-                        </a>
-                    % endif
-                % else:
-                    % if user_has_auth:
-                        <a id="gitlabImportToken" class="text-primary pull-right addon-auth">
-                           Import Account from Profile
-                        </a>
-                    % else:
-                        <a id="gitlabCreateToken" class="text-primary pull-right addon-auth">
-                           Connect Account
-                        </a>
-                    % endif
-                % endif
-            </small>
-        </h4>
-    </div>
-
-    % if node_has_auth and valid_credentials:
-
-        <input type="hidden" id="gitlabUser" name="gitlab_user" value="${gitlab_user}" />
-        <input type="hidden" id="gitlabRepo" name="gitlab_repo" value="${gitlab_repo}" />
-        <input type="hidden" id="gitlabRepoId" name="gitlab_repo_id" value="${gitlab_repo_id}" />
-
-        <p><strong>Current Repo: </strong>
-
-        % if is_owner and not is_registration:
-        </p>
-        <div class="row">
-            <div class="col-md-6 m-b-sm">
-                <select id="gitlabSelectRepo" class="form-control" ${'disabled' if not is_owner or is_registration else ''}>
-                    <option>-----</option>
-                        % if is_owner:
-                            % if repos:
-                              % for repo in repos:
-                                  <option value="${repo['id']}" ${'selected' if repo['id'] == int(gitlab_repo_id) else ''}>${repo['path_with_namespace']}</option>
-                              % endfor
-                            % endif
-                        % else:
-                            <option selected>${gitlab_repo_full_name}</option>
-                        % endif
-                </select>
-            </div>
-
-            <div class="col-md-6 m-b-sm">
-                <button class="btn btn-success addon-settings-submit">
-                    Save
-                </button>
-                <a id="gitlabCreateRepo" class="btn btn-success pull-right">Create Repo</a>
-            </div>
-        </div>
-        % elif gitlab_repo_full_name:
-            <a href="${files_url}">${gitlab_repo_full_name}</a></p>
-        % else:
-            <span>None</span></p>
-        % endif
-    % endif
-
-    ${self.on_submit()}
-
-    % if node_has_auth and not valid_credentials:
-        <div class="addon-settings-message text-danger p-t-sm">
-            % if is_owner:
-                Could not retrieve GitLab settings at this time. The GitLab addon credentials
-                may no longer be valid. Try deauthorizing and reauthorizing GitLab on your
-                <a href="${addons_url}">account settings page</a>.
-            % else:
-                Could not retrieve GitLab settings at this time. The GitLab addon credentials
-                may no longer be valid. Contact ${auth_osf_name} to verify.
-            % endif
-        </div>
-    % else:
-        <div class="addon-settings-message p-t-sm" style="display: none"></div>
-    % endif
-
-</form>
-
-<%def name="on_submit()">
-    <script type="text/javascript">
-        window.contextVars = $.extend({}, window.contextVars, {
-            ## Short name never changes
-            'gitlabSettingsSelector': '#addonSettingsGitLab'
-        });
-    </script>
-</%def>
diff --git a/website/addons/gitlab/tests/test_serializer.py b/website/addons/gitlab/tests/test_serializer.py
deleted file mode 100644
index 1b24aeb4273..00000000000
--- a/website/addons/gitlab/tests/test_serializer.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Serializer tests for the GitLab addon."""
-import mock
-from nose.tools import *  # noqa (PEP8 asserts)
-
-from website.addons.base.testing.serializers import StorageAddonSerializerTestSuiteMixin
-from website.addons.gitlab.api import GitLabClient
-from website.addons.gitlab.tests.factories import GitLabAccountFactory
-from website.addons.gitlab.serializer import GitLabSerializer
-
-from tests.base import OsfTestCase
-
-class TestGitLabSerializer(StorageAddonSerializerTestSuiteMixin, OsfTestCase):
-
-    addon_short_name = 'gitlab'
-
-    Serializer = GitLabSerializer
-    ExternalAccountFactory = GitLabAccountFactory
-    client = GitLabClient()
-
-    def set_provider_id(self, pid):
-        self.node_settings.repo = pid
-    
-    ## Overrides ##
-
-    def setUp(self):
-        super(TestGitLabSerializer, self).setUp()
-        self.mock_api_user = mock.patch("website.addons.gitlab.api.GitLabClient.user")
-        self.mock_api_user.return_value = mock.Mock()
-        self.mock_api_user.start()
-
-    def tearDown(self):
-        self.mock_api_user.stop()
-        super(TestGitLabSerializer, self).tearDown()
diff --git a/website/addons/gitlab/tests/utils.py b/website/addons/gitlab/tests/utils.py
deleted file mode 100644
index 8e395dda827..00000000000
--- a/website/addons/gitlab/tests/utils.py
+++ /dev/null
@@ -1,146 +0,0 @@
-import mock
-import github3
-from website.addons.gitlab.api import GitLabClient
-from github3.repos.branch import Branch
-
-from website.addons.base.testing import OAuthAddonTestCaseMixin, AddonTestCase
-from website.addons.gitlab.model import GitLabProvider
-from website.addons.gitlab.tests.factories import GitLabAccountFactory
-
-
-class GitLabAddonTestCase(OAuthAddonTestCaseMixin, AddonTestCase):
-    ADDON_SHORT_NAME = 'gitlab'
-    ExternalAccountFactory = GitLabAccountFactory
-    Provider = GitLabProvider
-
-    def set_node_settings(self, settings):
-        super(GitLabAddonTestCase, self).set_node_settings(settings)
-        settings.repo = 'osfgitlabtest'
-        settings.user = 'osfio'
-
-def create_mock_gitlab(user='osfio', private=False):
-    """Factory for mock GitLab objects.
-    Example: ::
-
-        >>> gitlab = create_mock_gitlab(user='osfio')
-        >>> gitlab.branches(user='osfio', repo='hello-world')
-        >>> [{u'commit': {u'sha': u'e22d92d5d90bb8f9695e9a5e2e2311a5c1997230',
-        ...   u'url': u'https://api.gitlab.com/repos/osfio/mock-repo/commits/e22d92d5d90bb8f9695e9a5e2e2311a5c1997230'},
-        ...  u'name': u'dev'},
-        ... {u'commit': {u'sha': u'444a74d0d90a4aea744dacb31a14f87b5c30759c',
-        ...   u'url': u'https://api.gitlab.com/repos/osfio/mock-repo/commits/444a74d0d90a4aea744dacb31a14f87b5c30759c'},
-        ...  u'name': u'master'},
-        ... {u'commit': {u'sha': u'c6eaaf6708561c3d4439c0c8dd99c2e33525b1e6',
-        ...   u'url': u'https://api.gitlab.com/repos/osfio/mock-repo/commits/c6eaaf6708561c3d4439c0c8dd99c2e33525b1e6'},
-        ...  u'name': u'no-bundle'}]
-
-    :param str user: GitLab username.
-    :param bool private: Whether repo is private.
-    :return: An autospecced GitLab Mock object
-    """
-    gitlab_mock = mock.create_autospec(GitLabClient)
-    gitlab_mock.repo.return_value = github3.repos.Repository.from_json({
-    u'archive_url': u'https://api.gitlab.com/repos/{user}/mock-repo/{{archive_format}}{{/ref}}'.format(user=user),
-     u'assignees_url': u'https://api.gitlab.com/repos/{user}/mock-repo/assignees{{/user}}'.format(user=user),
-     u'blobs_url': u'https://api.gitlab.com/repos/{user}/mock-repo/git/blobs{{/sha}}'.format(user=user),
-     u'branches_url': u'https://api.gitlab.com/repos/{user}/mock-repo/branches{{/bra.format(user=user)nch}}'.format(user=user),
-     u'clone_url': u'https://gitlab.com/{user}/mock-repo.git'.format(user=user),
-     u'collaborators_url': u'https://api.gitlab.com/repos/{user}/mock-repo/collaborators{{/collaborator}}'.format(user=user),
-     u'comments_url': u'https://api.gitlab.com/repos/{user}/mock-repo/comments{{/number}}'.format(user=user),
-     u'commits_url': u'https://api.gitlab.com/repos/{user}/mock-repo/commits{{/sha}}'.format(user=user),
-     u'compare_url': u'https://api.gitlab.com/repos/{user}/mock-repo/compare/{{base}}...{{head}}',
-     u'contents_url': u'https://api.gitlab.com/repos/{user}/mock-repo/contents/{{+path}}'.format(user=user),
-     u'contributors_url': u'https://api.gitlab.com/repos/{user}/mock-repo/contributors'.format(user=user),
-     u'created_at': u'2013-06-30T18:29:18Z',
-     u'default_branch': u'dev',
-     u'description': u'Simple, Pythonic, text processing--Sentiment analysis, part-of-speech tagging, noun phrase extraction, translation, and more.',
-     u'downloads_url': u'https://api.gitlab.com/repos/{user}/mock-repo/downloads'.format(user=user),
-     u'events_url': u'https://api.gitlab.com/repos/{user}/mock-repo/events'.format(user=user),
-     u'fork': False,
-     u'forks': 89,
-     u'forks_count': 89,
-     u'forks_url': u'https://api.gitlab.com/repos/{user}/mock-repo/forks',
-     u'full_name': u'{user}/mock-repo',
-     u'git_commits_url': u'https://api.gitlab.com/repos/{user}/mock-repo/git/commits{{/sha}}'.format(user=user),
-     u'git_refs_url': u'https://api.gitlab.com/repos/{user}/mock-repo/git/refs{{/sha}}'.format(user=user),
-     u'git_tags_url': u'https://api.gitlab.com/repos/{user}/mock-repo/git/tags{{/sha}}'.format(user=user),
-     u'git_url': u'git://gitlab.com/{user}/mock-repo.git'.format(user=user),
-     u'has_downloads': True,
-     u'has_issues': True,
-     u'has_wiki': True,
-     u'homepage': u'https://mock-repo.readthedocs.org/',
-     u'hooks_url': u'https://api.gitlab.com/repos/{user}/mock-repo/hooks'.format(user=user),
-     u'html_url': u'https://gitlab.com/{user}/mock-repo'.format(user=user),
-     u'id': 11075275,
-     u'issue_comment_url': u'https://api.gitlab.com/repos/{user}/mock-repo/issues/comments/{{number}}'.format(user=user),
-     u'issue_events_url': u'https://api.gitlab.com/repos/{user}/mock-repo/issues/events{{/number}}'.format(user=user),
-     u'issues_url': u'https://api.gitlab.com/repos/{user}/mock-repo/issues{{/number}}'.format(user=user),
-     u'keys_url': u'https://api.gitlab.com/repos/{user}/mock-repo/keys{{/key_id}}'.format(user=user),
-     u'labels_url': u'https://api.gitlab.com/repos/{user}/mock-repo/labels{{/name}}'.format(user=user),
-     u'language': u'Python',
-     u'languages_url': u'https://api.gitlab.com/repos/{user}/mock-repo/languages'.format(user=user),
-     u'master_branch': u'dev',
-     u'merges_url': u'https://api.gitlab.com/repos/{user}/mock-repo/merges'.format(user=user),
-     u'milestones_url': u'https://api.gitlab.com/repos/{user}/mock-repo/milestones{{/number}}'.format(user=user),
-     u'mirror_url': None,
-     u'name': u'mock-repo',
-     u'network_count': 89,
-     u'notifications_url': u'https://api.gitlab.com/repos/{user}/mock-repo/notifications{{?since,all,participating}}'.format(user=user),
-     u'open_issues': 2,
-     u'open_issues_count': 2,
-     u'owner': {u'avatar_url': u'https://gravatar.com/avatar/c74f9cfd7776305a82ede0b765d65402?d=https%3A%2F%2Fidenticons.gitlab.com%2F3959fe3bcd263a12c28ae86a66ec75ef.png&r=x',
-      u'events_url': u'https://api.gitlab.com/users/{user}/events{{/privacy}}'.format(user=user),
-      u'followers_url': u'https://api.gitlab.com/users/{user}/followers'.format(user=user),
-      u'following_url': u'https://api.gitlab.com/users/{user}/following{{/other_user}}'.format(user=user),
-      u'gists_url': u'https://api.gitlab.com/users/{user}/gists{{/gist_id}}'.format(user=user),
-      u'gravatar_id': u'c74f9cfd7776305a82ede0b765d65402',
-      u'html_url': u'https://gitlab.com/{user}'.format(user=user),
-      u'id': 2379650,
-      u'login': user,
-      u'organizations_url': u'https://api.gitlab.com/users/{user}/orgs'.format(user=user),
-      u'received_events_url': u'https://api.gitlab.com/users/{user}/received_events',
-      u'repos_url': u'https://api.gitlab.com/users/{user}/repos'.format(user=user),
-      u'site_admin': False,
-      u'starred_url': u'https://api.gitlab.com/users/{user}/starred{{/owner}}{{/repo}}',
-      u'subscriptions_url': u'https://api.gitlab.com/users/{user}/subscriptions'.format(user=user),
-      u'type': u'User',
-      u'url': u'https://api.gitlab.com/users/{user}'.format(user=user)},
-     u'private': private,
-     u'pulls_url': u'https://api.gitlab.com/repos/{user}/mock-repo/pulls{{/number}}'.format(user=user),
-     u'pushed_at': u'2013-12-30T16:05:54Z',
-     u'releases_url': u'https://api.gitlab.com/repos/{user}/mock-repo/releases{{/id}}'.format(user=user),
-     u'size': 8717,
-     u'ssh_url': u'git@gitlab.com:{user}/mock-repo.git'.format(user=user),
-     u'stargazers_count': 1469,
-     u'stargazers_url': u'https://api.gitlab.com/repos/{user}/mock-repo/stargazers'.format(user=user),
-     u'statuses_url': u'https://api.gitlab.com/repos/{user}/mock-repo/statuses/{{sha}}'.format(user=user),
-     u'subscribers_count': 86,
-     u'subscribers_url': u'https://api.gitlab.com/repos/{user}/mock-repo/subscribers'.format(user=user),
-     u'subscription_url': u'https://api.gitlab.com/repos/{user}/mock-repo/subscription'.format(user=user),
-     u'svn_url': u'https://gitlab.com/{user}/mock-repo'.format(user=user),
-     u'tags_url': u'https://api.gitlab.com/repos/{user}/mock-repo/tags'.format(user=user),
-     u'teams_url': u'https://api.gitlab.com/repos/{user}/mock-repo/teams'.format(user=user),
-     u'trees_url': u'https://api.gitlab.com/repos/{user}/mock-repo/git/trees{{/sha}}'.format(user=user),
-     u'updated_at': u'2014-01-12T21:23:50Z',
-     u'url': u'https://api.gitlab.com/repos/{user}/mock-repo'.format(user=user),
-     u'watchers': 1469,
-     u'watchers_count': 1469,
-     # NOTE: permissions are only available if authorized on the repo
-     'permissions': {
-        'push': True
-     }
-     })
-
-    gitlab_mock.branches.return_value = [
-        Branch.from_json({u'commit': {u'sha': u'e22d92d5d90bb8f9695e9a5e2e2311a5c1997230',
-           u'url': u'https://api.gitlab.com/repos/{user}/mock-repo/commits/e22d92d5d90bb8f9695e9a5e2e2311a5c1997230'.format(user=user)},
-          u'name': u'dev'}),
-         Branch.from_json({u'commit': {u'sha': u'444a74d0d90a4aea744dacb31a14f87b5c30759c',
-           u'url': u'https://api.gitlab.com/repos/{user}/mock-repo/commits/444a74d0d90a4aea744dacb31a14f87b5c30759c'.format(user=user)},
-          u'name': u'master'}),
-         Branch.from_json({u'commit': {u'sha': u'c6eaaf6708561c3d4439c0c8dd99c2e33525b1e6',
-           u'url': u'https://api.gitlab.com/repos/{user}/mock-repo/commits/c6eaaf6708561c3d4439c0c8dd99c2e33525b1e6'.format(user=user)},
-          u'name': u'no-bundle'})
-      ]
-
-    return gitlab_mock
diff --git a/website/files/models/__init__.py b/website/files/models/__init__.py
deleted file mode 100644
index 434e4b5a7ba..00000000000
--- a/website/files/models/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from website.files.models.base import *  # noqa
-
-from website.files.models.s3 import *  # noqa
-from website.files.models.box import *  # noqa
-from website.files.models.github import *  # noqa
-from website.files.models.gitlab import *  # noqa
-from website.files.models.dropbox import *  # noqa
-from website.files.models.figshare import *  # noqa
-from website.files.models.dataverse import *  # noqa
-from website.files.models.osfstorage import *  # noqa
-from website.files.models.owncloud import *  # noqa
-from website.files.models.googledrive import *  # noqa
diff --git a/website/files/models/gitlab.py b/website/files/models/gitlab.py
deleted file mode 100644
index cfa7a373706..00000000000
--- a/website/files/models/gitlab.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from website.files.models.base import File, Folder, FileNode
-
-
-__all__ = ('GitLabFile', 'GitLabFolder', 'GitLabFileNode')
-
-
-class GitLabFileNode(FileNode):
-    provider = 'gitlab'
-
-
-class GitLabFolder(GitLabFileNode, Folder):
-    pass
-
-
-class GitLabFile(GitLabFileNode, File):
-    version_identifier = 'ref'
-
-    def touch(self, auth_header, revision=None, ref=None, branch=None, **kwargs):
-        revision = revision or ref or branch
-        return super(GitLabFile, self).touch(auth_header, revision=revision, **kwargs)
diff --git a/website/static/js/osfLanguage.js b/website/static/js/osfLanguage.js
index 24a4bce9868..6a3a9ea7321 100644
--- a/website/static/js/osfLanguage.js
+++ b/website/static/js/osfLanguage.js
@@ -65,11 +65,12 @@ module.exports = {
                 'This will revoke access to figshare for all projects you have ' +
                 'associated with this account.',
         },
-        // TODO
         github: {
             confirmDeauth: 'Are you sure you want to disconnect the GitHub account? ' +
                 'This will revoke access to GitHub for all projects you have ' +
                 'associated with this account.',
+            userSettingsError: 'Could not retrieve settings. Please refresh the page or ' +
+                'contact ' + SUPPORT_LINK + ' if the problem persists.',
         },
         bitbucket: {
             confirmDeauth: 'Are you sure you want to disconnect the Bitbucket account? ' +

From e9f0d63aac911d023368827ea3f40968d6dee7fc Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Fri, 21 Jul 2017 16:19:49 -0400
Subject: [PATCH 060/192] Add premigrate script for long-running queries

[#OSF-7968]
---
 scripts/premigrate_created_modified.py | 319 +++++++++++++++++++++++++
 website/settings/defaults.py           |   2 +
 2 files changed, 321 insertions(+)
 create mode 100644 scripts/premigrate_created_modified.py

diff --git a/scripts/premigrate_created_modified.py b/scripts/premigrate_created_modified.py
new file mode 100644
index 00000000000..2bc6d734715
--- /dev/null
+++ b/scripts/premigrate_created_modified.py
@@ -0,0 +1,319 @@
+import argparse
+import logging
+
+import django
+from django.db import connection, transaction
+
+from framework.celery_tasks import app as celery_app
+
+
+logger = logging.getLogger(__name__)
+
+ADD_COLUMNS = [
+    'ALTER TABLE osf_basefilenode ADD COLUMN created timestamp with time zone;',
+    'ALTER TABLE osf_basefilenode ADD COLUMN modified timestamp with time zone;',
+    "ALTER TABLE osf_blacklistguid ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_blacklistguid ADD COLUMN modified timestamp with time zone;",
+    'ALTER TABLE osf_fileversion ADD COLUMN created timestamp with time zone;',
+    'ALTER TABLE osf_fileversion ADD COLUMN modified timestamp with time zone;',
+    "ALTER TABLE osf_guid ADD COLUMN modified timestamp with time zone;",
+    'ALTER TABLE osf_nodelog ADD COLUMN created timestamp with time zone;',
+    'ALTER TABLE osf_nodelog ADD COLUMN modified timestamp with time zone;',
+    "ALTER TABLE osf_pagecounter ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_pagecounter ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_abstractnode ADD COLUMN last_logged timestamp with time zone;",
+    "ALTER TABLE osf_institution ADD COLUMN last_logged timestamp with time zone;",
+]
+
+POPULATE_COLUMNS = [
+    "SET statement_timeout = 10000; UPDATE osf_basefilenode SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch' WHERE id IN (SELECT id FROM osf_basefilenode WHERE created IS NULL LIMIT 1000) RETURNING id;",
+    "SET statement_timeout = 10000; UPDATE osf_blacklistguid SET created='epoch', modified='epoch' WHERE id IN (SELECT id FROM osf_blacklistguid WHERE created IS NULL LIMIT 1000) RETURNING id;",
+    "SET statement_timeout = 10000; UPDATE osf_fileversion SET created=date_created, modified='epoch' WHERE id IN (SELECT id FROM osf_fileversion WHERE created IS NULL LIMIT 1000) RETURNING id;",
+    "SET statement_timeout = 10000; UPDATE osf_guid SET modified='epoch' WHERE id IN (SELECT id FROM osf_guid WHERE modified IS NULL LIMIT 1000) RETURNING id;",
+    "SET statement_timeout = 10000; UPDATE osf_nodelog SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch' WHERE id IN (SELECT id FROM osf_nodelog WHERE created IS NULL LIMIT 1000) RETURNING id;",
+    "SET statement_timeout = 10000; UPDATE osf_pagecounter SET created='epoch', modified='epoch' WHERE id IN (SELECT id FROM osf_pagecounter WHERE created IS NULL LIMIT 1000) RETURNING id;",
+]
+
+FINALIZE_MIGRATION = [
+    "UPDATE osf_basefilenode SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch' WHERE created IS NULL;",
+    'ALTER TABLE osf_basefilenode ALTER COLUMN created SET NOT NULL;',
+    'ALTER TABLE osf_basefilenode ALTER COLUMN modified SET NOT NULL;',
+    "UPDATE osf_blacklistguid SET created='epoch', modified='epoch' WHERE created IS NULL;",
+    "ALTER TABLE osf_blacklistguid ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_blacklistguid ALTER COLUMN modified SET NOT NULL;",
+    "UPDATE osf_fileversion SET created=date_created, modified='epoch' WHERE created IS NULL;",
+    'ALTER TABLE osf_fileversion ALTER COLUMN created SET NOT NULL;',
+    'ALTER TABLE osf_fileversion ALTER COLUMN modified SET NOT NULL;',
+    'ALTER TABLE osf_fileversion DROP COLUMN date_created;',
+    "UPDATE osf_guid SET modified='epoch' WHERE modified IS NULL;",
+    "ALTER TABLE osf_guid ALTER COLUMN modified SET NOT NULL;",
+    "UPDATE osf_nodelog SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch' WHERE created IS NULL;",
+    'ALTER TABLE osf_nodelog ALTER COLUMN created SET NOT NULL;',
+    'ALTER TABLE osf_nodelog ALTER COLUMN modified SET NOT NULL;',
+    "UPDATE osf_pagecounter SET created='epoch', modified='epoch' WHERE created IS NULL;",
+    "ALTER TABLE osf_pagecounter ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_pagecounter ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_alternativecitation ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_alternativecitation ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_apioauth2application ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_apioauth2personaltoken ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_apioauth2personaltoken ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_apioauth2scope ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_apioauth2scope ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_archivejob ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_archivejob ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_archivetarget ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_archivetarget ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_citationstyle ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_citationstyle ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_conference ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_conference ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_draftregistration ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_draftregistration ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_draftregistrationapproval ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_draftregistrationapproval ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_draftregistrationlog ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_draftregistrationlog ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_embargo ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_embargo ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_embargoterminationapproval ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_embargoterminationapproval ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_externalaccount ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_externalaccount ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_identifier ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_identifier ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_institution ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_institution ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_mailrecord ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_mailrecord ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_metaschema ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_metaschema ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_nodelicense ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_nodelicense ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_nodelicenserecord ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_nodelicenserecord ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_noderelation ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_noderelation ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_notificationdigest ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_notificationdigest ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_notificationsubscription ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_notificationsubscription ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_osfuser ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_osfuser ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_preprintprovider ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_preprintprovider ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_privatelink ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_queuedmail ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_queuedmail ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_registrationapproval ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_registrationapproval ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_retraction ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_retraction ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_subject ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_subject ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_tag ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_tag ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_useractivitycounter ADD COLUMN created timestamp with time zone;",
+    "ALTER TABLE osf_useractivitycounter ADD COLUMN modified timestamp with time zone;",
+    "ALTER TABLE osf_abstractnode RENAME COLUMN date_created TO created;",
+    "ALTER TABLE osf_abstractnode RENAME COLUMN date_modified TO modified;",
+    "ALTER TABLE osf_apioauth2application RENAME COLUMN date_created TO created;",
+    "ALTER TABLE osf_comment RENAME COLUMN date_created TO created;",
+    "ALTER TABLE osf_comment RENAME COLUMN date_modified TO modified;",
+    "ALTER TABLE osf_fileversion RENAME COLUMN date_modified TO external_modified;",
+    "ALTER TABLE osf_preprintservice RENAME COLUMN date_created TO created;",
+    "ALTER TABLE osf_preprintservice RENAME COLUMN date_modified TO modified;",
+    "ALTER TABLE osf_privatelink RENAME COLUMN date_created TO created;",
+    "ALTER TABLE osf_session RENAME COLUMN date_created TO created;",
+    "ALTER TABLE osf_session RENAME COLUMN date_modified TO modified;",
+    """
+    UPDATE osf_abstractnode
+    SET last_logged=(
+        SELECT date
+        FROM osf_nodelog
+        WHERE node_id = "osf_abstractnode"."id"
+        ORDER BY date DESC
+        LIMIT 1)
+    WHERE (SELECT COUNT(id) FROM osf_nodelog WHERE node_id = "osf_abstractnode"."id" LIMIT 1) > 0;
+    """,
+    """
+    UPDATE osf_abstractnode
+    SET last_logged=modified
+    WHERE (SELECT COUNT(id) FROM osf_nodelog WHERE node_id = "osf_abstractnode"."id" LIMIT 1) = 0;
+    """,
+    "UPDATE osf_alternativecitation SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_apioauth2application SET modified='epoch';",
+    "UPDATE osf_apioauth2personaltoken SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_apioauth2scope SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_archivejob SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_archivetarget SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_citationstyle SET created=date_parsed, modified='epoch';",
+    "UPDATE osf_conference SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_draftregistration SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_draftregistrationapproval SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_draftregistrationlog SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_embargo SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_embargoterminationapproval SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_externalaccount SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_identifier SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_institution SET created='epoch', modified='epoch';",
+    "UPDATE osf_mailrecord SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_metaschema SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_nodelicense SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_nodelicenserecord SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    """
+    UPDATE osf_noderelation SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch'
+    WHERE LENGTH(_id) > 5;
+    UPDATE osf_noderelation SET created='epoch', modified='epoch'
+    WHERE LENGTH(_id) <= 5;
+    """,
+    "UPDATE osf_notificationdigest SET created=timestamp, modified='epoch';",
+    "UPDATE osf_notificationsubscription SET created='epoch', modified='epoch';",
+    "UPDATE osf_osfuser SET created='epoch', modified='epoch';",
+    "UPDATE osf_preprintprovider SET created='epoch', modified='epoch';",
+    "UPDATE osf_privatelink SET modified='epoch';",
+    "UPDATE osf_queuedmail SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_registrationapproval SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_retraction SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_subject SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
+    "UPDATE osf_tag SET created='epoch', modified='epoch';",
+    "UPDATE osf_useractivitycounter SET created='epoch', modified='epoch';",
+    "ALTER TABLE osf_alternativecitation ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_alternativecitation ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_apioauth2application ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_apioauth2personaltoken ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_apioauth2personaltoken ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_apioauth2scope ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_apioauth2scope ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_archivejob ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_archivejob ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_archivetarget ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_archivetarget ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_citationstyle ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_citationstyle ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_conference ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_conference ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_draftregistration ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_draftregistration ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_draftregistrationapproval ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_draftregistrationapproval ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_draftregistrationlog ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_draftregistrationlog ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_embargo ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_embargo ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_embargoterminationapproval ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_embargoterminationapproval ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_externalaccount ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_externalaccount ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_identifier ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_identifier ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_institution ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_institution ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_mailrecord ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_mailrecord ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_metaschema ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_metaschema ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_nodelicense ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_nodelicense ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_nodelicenserecord ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_nodelicenserecord ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_noderelation ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_noderelation ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_notificationdigest ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_notificationdigest ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_notificationsubscription ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_notificationsubscription ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_osfuser ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_osfuser ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_preprintprovider ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_preprintprovider ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_privatelink ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_queuedmail ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_queuedmail ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_registrationapproval ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_registrationapproval ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_retraction ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_retraction ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_subject ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_subject ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_tag ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_tag ALTER COLUMN modified SET NOT NULL;",
+    "ALTER TABLE osf_useractivitycounter ALTER COLUMN created SET NOT NULL;",
+    "ALTER TABLE osf_useractivitycounter ALTER COLUMN modified SET NOT NULL;"
+]
+
+@celery_app.task
+def run_sql(sql):
+    table = sql.split(' ')[5]
+    logger.info('Updating table {}'.format(table))
+    with transaction.atomic():
+        with connection.cursor() as cursor:
+            cursor.execute(sql)
+            rows = cursor.fetchall()
+            if not rows:
+                raise Exception('Sentry notification that {} is migrated'.format(table))
+
+@celery_app.task(name='scripts.premigrate_created_modified')
+def migrate():
+    # Note:
+    # To update data slowly without requiring lots of downtime,
+    # add the following to CELERYBEAT_SCHEDULE in website/settings:
+    #
+    #   '1-minute-incremental-migrations':{
+    #       'task': 'scripts.premigrate_created_modified',
+    #       'schedule': crontab(minute='*/1'),
+    #   },
+    #
+    # And let it run for about a week, until every statement in POPULATE_COLUMNS has been executed
+    for statement in POPULATE_COLUMNS:
+        run_sql.delay(statement)
+
+def add_columns():
+    for statement in ADD_COLUMNS:
+        with connection.cursor() as cursor:
+            cursor.execute(statement)
+
+def finalize_migration():
+    for statement in FINALIZE_MIGRATION:
+        with connection.cursor() as cursor:
+            cursor.execute(statement)
+
+def main():
+    django.setup()
+    parser = argparse.ArgumentParser(
+        description='Handles long-running, non-breaking db changes slowly without requiring much downtime'
+    )
+    parser.add_argument(
+        '--dry',
+        action='store_true',
+        dest='dry_run',
+        help='Run migration and roll back changes to db',
+    )
+    parser.add_argument(
+        '--start',
+        action='store_true',
+        dest='start',
+        help='Adds columns',
+    )
+    parser.add_argument(
+        '--finish',
+        action='store_true',
+        dest='finish',
+        help='Sets NOT NULL',
+    )
+    pargs = parser.parse_args()
+    if pargs.start and pargs.finish:
+        raise Exception('Cannot start and finish in the same run')
+    with transaction.atomic():
+        if pargs.start:
+            add_columns()
+        elif pargs.finish:
+            raise Exception('Not until data is migrated')
+            finalize_migration()
+        else:
+            raise Exception('Must specify start or finish')
+        if pargs.dry_run:
+            raise Exception('Dry Run -- Transaction aborted.')
+
+if __name__ == '__main__':
+    main()
diff --git a/website/settings/defaults.py b/website/settings/defaults.py
index eb69cb0a70e..f57739f5a7c 100644
--- a/website/settings/defaults.py
+++ b/website/settings/defaults.py
@@ -380,6 +380,7 @@ def parent_dir(path):
     'scripts.approve_embargo_terminations',
     'scripts.approve_registrations',
     'scripts.embargo_registrations',
+    'scripts.premigrate_created_modified',
     'scripts.refresh_addon_tokens',
     'scripts.retract_registrations',
     'website.archiver.tasks',
@@ -440,6 +441,7 @@ def parent_dir(path):
     'scripts.analytics.run_keen_snapshots',
     'scripts.analytics.run_keen_events',
     'scripts.generate_sitemap',
+    'scripts.premigrate_created_modified',
 )
 
 # Modules that need metrics and release requirements

From 277b6ef06fe859c507fb8adf3a64c7e85292cbb8 Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Mon, 3 Jul 2017 17:32:21 -0400
Subject: [PATCH 061/192] Add GitLab migrations

---
 addons/gitlab/migrations/0001_initial.py  | 58 +++++++++++++++++++++++
 osf/migrations/0065_auto_20171024_1330.py | 50 +++++++++++++++++++
 2 files changed, 108 insertions(+)
 create mode 100644 addons/gitlab/migrations/0001_initial.py
 create mode 100644 osf/migrations/0065_auto_20171024_1330.py

diff --git a/addons/gitlab/migrations/0001_initial.py b/addons/gitlab/migrations/0001_initial.py
new file mode 100644
index 00000000000..ea4946e463e
--- /dev/null
+++ b/addons/gitlab/migrations/0001_initial.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.4 on 2017-10-24 18:30
+from __future__ import unicode_literals
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+import osf.models.base
+import osf.utils.datetime_aware_jsonfield
+
+
+class Migration(migrations.Migration):
+
+    initial = True
+
+    dependencies = [
+        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+        ('osf', '0065_auto_20171024_1330'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='NodeSettings',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('_id', models.CharField(db_index=True, default=osf.models.base.generate_object_id, max_length=24, unique=True)),
+                ('deleted', models.BooleanField(default=False)),
+                ('user', models.TextField(blank=True, null=True)),
+                ('repo', models.TextField(blank=True, null=True)),
+                ('repo_id', models.TextField(blank=True, null=True)),
+                ('hook_id', models.TextField(blank=True, null=True)),
+                ('hook_secret', models.TextField(blank=True, null=True)),
+                ('external_account', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='addons_gitlab_node_settings', to='osf.ExternalAccount')),
+                ('owner', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='addons_gitlab_node_settings', to='osf.AbstractNode')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='UserSettings',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('_id', models.CharField(db_index=True, default=osf.models.base.generate_object_id, max_length=24, unique=True)),
+                ('deleted', models.BooleanField(default=False)),
+                ('oauth_grants', osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONField(blank=True, default=dict, encoder=osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONEncoder)),
+                ('owner', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='addons_gitlab_user_settings', to=settings.AUTH_USER_MODEL)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='user_settings',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='addons_gitlab.UserSettings'),
+        ),
+    ]
diff --git a/osf/migrations/0065_auto_20171024_1330.py b/osf/migrations/0065_auto_20171024_1330.py
new file mode 100644
index 00000000000..8999ebfedda
--- /dev/null
+++ b/osf/migrations/0065_auto_20171024_1330.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.4 on 2017-10-24 18:30
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('osf', '0064_auto_20171019_0918'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='GitLabFileNode',
+            fields=[
+            ],
+            options={
+                'proxy': True,
+                'indexes': [],
+            },
+            bases=('osf.basefilenode',),
+        ),
+        migrations.AlterField(
+            model_name='basefilenode',
+            name='type',
+            field=models.CharField(choices=[('osf.trashedfilenode', 'trashed file node'), ('osf.trashedfile', 'trashed file'), ('osf.trashedfolder', 'trashed folder'), ('osf.osfstoragefilenode', 'osf storage file node'), ('osf.osfstoragefile', 'osf storage file'), ('osf.osfstoragefolder', 'osf storage folder'), ('osf.bitbucketfilenode', 'bitbucket file node'), ('osf.bitbucketfolder', 'bitbucket folder'), ('osf.bitbucketfile', 'bitbucket file'), ('osf.boxfilenode', 'box file node'), ('osf.boxfolder', 'box folder'), ('osf.boxfile', 'box file'), ('osf.dataversefilenode', 'dataverse file node'), ('osf.dataversefolder', 'dataverse folder'), ('osf.dataversefile', 'dataverse file'), ('osf.dropboxfilenode', 'dropbox file node'), ('osf.dropboxfolder', 'dropbox folder'), ('osf.dropboxfile', 'dropbox file'), ('osf.figsharefilenode', 'figshare file node'), ('osf.figsharefolder', 'figshare folder'), ('osf.figsharefile', 'figshare file'), ('osf.githubfilenode', 'github file node'), ('osf.githubfolder', 'github folder'), ('osf.githubfile', 'github file'), ('osf.gitlabfilenode', 'git lab file node'), ('osf.gitlabfolder', 'git lab folder'), ('osf.gitlabfile', 'git lab file'), ('osf.googledrivefilenode', 'google drive file node'), ('osf.googledrivefolder', 'google drive folder'), ('osf.googledrivefile', 'google drive file'), ('osf.owncloudfilenode', 'owncloud file node'), ('osf.owncloudfolder', 'owncloud folder'), ('osf.owncloudfile', 'owncloud file'), ('osf.s3filenode', 's3 file node'), ('osf.s3folder', 's3 folder'), ('osf.s3file', 's3 file')], db_index=True, max_length=255),
+        ),
+        migrations.CreateModel(
+            name='GitLabFile',
+            fields=[
+            ],
+            options={
+                'proxy': True,
+                'indexes': [],
+            },
+            bases=('osf.gitlabfilenode', models.Model),
+        ),
+        migrations.CreateModel(
+            name='GitLabFolder',
+            fields=[
+            ],
+            options={
+                'proxy': True,
+                'indexes': [],
+            },
+            bases=('osf.gitlabfilenode', models.Model),
+        ),
+    ]

From a6dc02f5916f69aa7fa4d66b43cbf0cb850327cd Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Mon, 25 Sep 2017 14:17:06 -0400
Subject: [PATCH 062/192] Templates, views, urls, and javascript for validating
 custom taxonomies

[#OSF-8559]
---
 admin/preprint_providers/forms.py             |  31 +++-
 admin/preprint_providers/urls.py              |   1 +
 admin/preprint_providers/views.py             |  58 ++++++-
 admin/static/css/institutions.css             |   9 +
 .../preprint_providers/preprintProviders.js   |  54 ++++++
 .../templates/preprint_providers/detail.html  |   4 +-
 .../update_preprint_provider_form.html        | 162 ++++++++++++++++++
 7 files changed, 307 insertions(+), 12 deletions(-)

diff --git a/admin/preprint_providers/forms.py b/admin/preprint_providers/forms.py
index d8b4582c1cd..7e9987fff86 100644
--- a/admin/preprint_providers/forms.py
+++ b/admin/preprint_providers/forms.py
@@ -1,14 +1,14 @@
 import bleach
 
-from django.forms import ModelForm, CheckboxSelectMultiple, MultipleChoiceField, HiddenInput, CharField
+from django import forms
 
 from osf.models import PreprintProvider, Subject
 from admin.base.utils import get_subject_rules, get_toplevel_subjects, get_nodelicense_choices
 
 
-class PreprintProviderForm(ModelForm):
-    toplevel_subjects = MultipleChoiceField(widget=CheckboxSelectMultiple(), required=False)
-    subjects_chosen = CharField(widget=HiddenInput(), required=False)
+class PreprintProviderForm(forms.ModelForm):
+    toplevel_subjects = forms.MultipleChoiceField(widget=forms.CheckboxSelectMultiple(), required=False)
+    subjects_chosen = forms.CharField(widget=forms.HiddenInput(), required=False)
 
     class Meta:
         model = PreprintProvider
@@ -16,8 +16,8 @@ class Meta:
         exclude = ['primary_identifier_name']
 
         widgets = {
-            'licenses_acceptable': CheckboxSelectMultiple(),
-            'subjects_acceptable': HiddenInput(),
+            'licenses_acceptable': forms.CheckboxSelectMultiple(),
+            'subjects_acceptable': forms.HiddenInput(),
         }
 
     def __init__(self, *args, **kwargs):
@@ -59,3 +59,22 @@ def clean_footer_links(self, *args, **kwargs):
             styles=['text-align', 'vertical-align'],
             strip=True
         )
+
+
+class PreprintProviderCustomTaxonomyForm(forms.Form):
+    custom_taxonomy_json = forms.CharField(widget=forms.Textarea, initial='{"include": [], "exclude": [], "custom": {}}', required=False)
+    add_missing = forms.BooleanField(required=False)
+    provider_id = forms.IntegerField(widget=forms.HiddenInput())
+    include = forms.ChoiceField(choices=[], required=False)
+    exclude = forms.ChoiceField(choices=[], required=False)
+    custom_name = forms.CharField(required=False)
+    custom_parent = forms.CharField(required=False)
+    bepress = forms.ChoiceField(choices=[], required=False)
+
+    def __init__(self, *args, **kwargs):
+        super(PreprintProviderCustomTaxonomyForm, self).__init__(*args, **kwargs)
+        subject_choices = [(x, x) for x in Subject.objects.all().values_list('text', flat=True)]
+        for name, field in self.fields.iteritems():
+            if hasattr(field, 'choices'):
+                if field.choices == []:
+                    field.choices = subject_choices
diff --git a/admin/preprint_providers/urls.py b/admin/preprint_providers/urls.py
index dd2da1218ae..1d72e3abdea 100644
--- a/admin/preprint_providers/urls.py
+++ b/admin/preprint_providers/urls.py
@@ -9,6 +9,7 @@
     url(r'^(?P<preprint_provider_id>[a-z0-9]+)/import/$', views.ImportPreprintProvider.as_view(), name='import'),
     url(r'^get_subjects/$', views.SubjectDynamicUpdateView.as_view(), name='get_subjects'),
     url(r'^get_descendants/$', views.GetSubjectDescendants.as_view(), name='get_descendants'),
+    url(r'^process_custom_taxonomy/$', views.ProcessCustomTaxonomy.as_view(), name='process_custom_taxonomy'),
     url(r'^rules_to_subjects/$', views.RulesToSubjects.as_view(), name='rules_to_subjects'),
     url(r'^(?P<preprint_provider_id>[a-z0-9]+)/cannot_delete/$', views.CannotDeleteProvider.as_view(), name='cannot_delete'),
     url(r'^(?P<preprint_provider_id>[a-z0-9]+)/$', views.PreprintProviderDetail.as_view(), name='detail'),
diff --git a/admin/preprint_providers/views.py b/admin/preprint_providers/views.py
index ad97906d49b..26c265f4c00 100644
--- a/admin/preprint_providers/views.py
+++ b/admin/preprint_providers/views.py
@@ -12,9 +12,10 @@
 
 from admin.base import settings
 from admin.base.forms import ImportFileForm
-from admin.preprint_providers.forms import PreprintProviderForm
+from admin.preprint_providers.forms import PreprintProviderForm, PreprintProviderCustomTaxonomyForm
 from osf.models import PreprintProvider, Subject, NodeLicense
 from osf.models.preprint_provider import rules_to_subjects
+from osf.management.commands.populate_custom_taxonomies import validate_input, migrate
 
 # When preprint_providers exclusively use Subject relations for creation, set this to False
 SHOW_TAXONOMIES_IN_PREPRINT_PROVIDER_CREATE = True
@@ -96,16 +97,25 @@ def get_context_data(self, *args, **kwargs):
 
         subject_html = '<ul class="three-cols">'
         for parent in preprint_provider.top_level_subjects:
-            subject_html += '<li>{}</li>'.format(parent.text)
+            mapped_text = ''
+            if parent.bepress_subject and parent.text != parent.bepress_subject.text:
+                mapped_text = ' (mapped from {})'.format(parent.bepress_subject.text)
+            subject_html = subject_html + '<li>{}'.format(parent.text) + mapped_text + '</li>'
             child_html = '<ul>'
             for child in parent.children.all():
                 grandchild_html = ''
                 if child.id in subject_ids:
-                    child_html += '<li>{}</li>'.format(child.text)
+                    child_mapped_text = ''
+                    if child.bepress_subject and child.text != child.bepress_subject.text:
+                        child_mapped_text = ' (mapped from {})'.format(child.bepress_subject.text)
+                    child_html = child_html + '<li>{}'.format(child.text) + child_mapped_text + '</li>'
                     grandchild_html = '<ul>'
                     for grandchild in child.children.all():
                         if grandchild.id in subject_ids:
-                            grandchild_html += '<li>{}</li>'.format(grandchild.text)
+                            grandchild_mapped_text = ''
+                            if grandchild.bepress_subject and grandchild.text != grandchild.bepress_subject.text:
+                                grandchild_mapped_text = ' (mapped from {})'.format(grandchild.bepress_subject.text)
+                            grandchild_html = grandchild_html + '<li>{}'.format(grandchild.text) + grandchild_mapped_text + '</li>'
                     grandchild_html += '</ul>'
                 child_html += grandchild_html
 
@@ -114,6 +124,11 @@ def get_context_data(self, *args, **kwargs):
 
         subject_html += '</ul>'
         preprint_provider_attributes['subjects_acceptable'] = subject_html
+        # subjects_with_mapped_taxonomies = [sub for sub in preprint_provider.subjects.all() if sub.text != sub.bepress_subject.text]
+        # mapped_taxonomies_html = '<ul>'
+        # for subject in subjects_with_mapped_taxonomies:
+        #     mapped_taxonomies_html += '<li>BePress: <b>{}</b> ==> New: <b>{}</b>'.format(subject.bepress_subject.text, subject.text)
+        # kwargs['mapped_taxonomies'] = mapped_taxonomies_html + '</ul>'
 
         kwargs['preprint_provider'] = preprint_provider_attributes
         kwargs['subject_ids'] = list(subject_ids)
@@ -123,6 +138,8 @@ def get_context_data(self, *args, **kwargs):
         fields['subjects_chosen'] = ', '.join(str(i) for i in subject_ids)
         kwargs['show_taxonomies'] = False if preprint_provider.subjects.exists() else True
         kwargs['form'] = PreprintProviderForm(initial=fields)
+        kwargs['taxonomy_form'] = PreprintProviderCustomTaxonomyForm()
+        kwargs['no_add_list'] = ['custom_taxonomy_json', 'add_missing']
         kwargs['import_form'] = ImportFileForm()
         kwargs['tinymce_apikey'] = settings.TINYMCE_APIKEY
         return kwargs
@@ -159,6 +176,39 @@ def get_success_url(self, *args, **kwargs):
         return reverse_lazy('preprint_providers:detail', kwargs={'preprint_provider_id': self.kwargs.get('preprint_provider_id')})
 
 
+class ProcessCustomTaxonomy(PermissionRequiredMixin, View):
+
+    permission_required = 'osf.view_preprintprovider'
+    raise_exception = True
+
+    def post(self, request, *args, **kwargs):
+        provider_form = PreprintProviderCustomTaxonomyForm(request.POST)
+        if provider_form.is_valid():
+            provider = PreprintProvider.objects.get(id=provider_form.cleaned_data['provider_id'])
+            try:
+                taxonomy_json = json.loads(provider_form.cleaned_data['custom_taxonomy_json'])
+                add_missing = provider_form.cleaned_data['add_missing']
+                if request.is_ajax():
+                    # An ajax request is for validation only, so run that validation!
+                    try:
+                        response_data = validate_input(custom_provider=provider, data=taxonomy_json, add_missing=add_missing)
+                    except (RuntimeError, AssertionError) as script_feedback:
+                        response_data = {'message': script_feedback.message, 'feedback_type': 'error'}
+                    if not response_data:
+                        response_data = {'message': 'Custom taxonomy validated!', 'feedback_type': 'success'}
+                else:
+                    # Actually do the migration of the custom taxonomies
+                    migrate(provider=provider._id, data=taxonomy_json, add_missing=add_missing)
+                    return redirect('preprint_providers:detail', preprint_provider_id=provider.id)
+
+            except ValueError as error:
+                response_data = {
+                    'message': 'There is an error with the submitted JSON. Here are some details: ' + error.message,
+                    'feedback_type': 'error'
+                }
+            # For validation-only (ajax) requests, return the validation outcome -- or the JSON parse error -- as a JsonResponse
+            return JsonResponse(response_data)
+
 class ExportPreprintProvider(PermissionRequiredMixin, View):
     permission_required = 'osf.change_preprintprovider'
     raise_exception = True
diff --git a/admin/static/css/institutions.css b/admin/static/css/institutions.css
index 82ee270be88..8dcb8b3fb15 100644
--- a/admin/static/css/institutions.css
+++ b/admin/static/css/institutions.css
@@ -51,3 +51,12 @@ form p label {
 .firstlevel_subjects {
     width: 120%;
 }
+
+
+.select2-container.select2-container--default {
+    width: 75% !important;
+}
+
+#id_custom_taxonomy_json {
+    width: 100%;
+}
diff --git a/admin/static/js/preprint_providers/preprintProviders.js b/admin/static/js/preprint_providers/preprintProviders.js
index 8f0b6c0d3c3..24d3cb002ca 100644
--- a/admin/static/js/preprint_providers/preprintProviders.js
+++ b/admin/static/js/preprint_providers/preprintProviders.js
@@ -159,4 +159,58 @@ $(document).ready(function() {
             }
         });
     });
+
+    $( ".taxonomy-action-button" ).click(function() {
+        var taxonomyTextField=$("#id_custom_taxonomy_json");
+        var content = JSON.parse(taxonomyTextField.val());
+        var value = $("#" + $(this).attr("value")).val();
+        var subjects = content[$(this).attr("id")];
+        if (subjects.indexOf(value) == -1) {
+            subjects.push(value);
+        }
+        taxonomyTextField.val(JSON.stringify(content, undefined, 4));
+    });
+
+    $( "#id-add-custom" ).click(function() {
+        var taxonomyTextField=$("#id_custom_taxonomy_json");
+        var name = $("#id_custom_name").val();
+        var parent = $("#id_custom_parent").val();
+        var bepress = $("#id_bepress").val();
+        var content = JSON.parse(taxonomyTextField.val());
+        if (content["custom"][name] === undefined) {
+            content["custom"][name] = {
+                "parent": parent,
+                "bepress": bepress
+            };
+        }
+
+        taxonomyTextField.val(JSON.stringify(content, undefined, 4));
+    });
+
+    $("#id-validate-custom").on("click", function(event) {
+       checkTaxonomy();
+    });
+
+
+    function checkTaxonomy() {
+        var taxonomyForm = $("#taxonomy-form").serializeArray();
+        $.ajax({
+            url: window.templateVars.processCustomTaxonomyUrl,
+            type: "POST",
+            data: taxonomyForm,
+            success: function(json) {
+                var alert_class_div = (json["feedback_type"] == "success") ? "<div class='alert alert-info'>" : "<div class='alert alert-danger'>";
+                $("#taxonomy-field-info").html(alert_class_div + json["message"]+ "</div>");
+            }
+        });
+    };
+
+    $("#show-custom-taxonomy-form").click(function() {
+        $("#custom-taxonomy-form").toggle();
+    });
+
+    $("#id_include").select2();
+    $("#id_exclude").select2();
+    $("#id_bepress").select2();
+
 });
diff --git a/admin/templates/preprint_providers/detail.html b/admin/templates/preprint_providers/detail.html
index a2930af6e10..c88d9ae26ae 100644
--- a/admin/templates/preprint_providers/detail.html
+++ b/admin/templates/preprint_providers/detail.html
@@ -47,10 +47,9 @@ <h2>{{ preprint_provider.name }}</h2>
                 </table>
             </div>
         </div>
-        <div class="row" id="form-view" style="display:none;">
+        <div id="form-view" style="display:none;">
             {% include 'preprint_providers/update_preprint_provider_form.html' %}
         </div>
-    </div>
 
 {% endblock content %}
 {% block bottom_js %}
@@ -60,6 +59,7 @@ <h2>{{ preprint_provider.name }}</h2>
             'getSubjectsUrl': '{% url 'preprint_providers:get_subjects' %}',
             'getDescendantsUrl': '{% url 'preprint_providers:get_descendants' %}',
             'rulesToSubjectsUrl': '{% url 'preprint_providers:rules_to_subjects' %}',
+            'processCustomTaxonomyUrl': "{% url 'preprint_providers:process_custom_taxonomy' %}",
             'originalSubjects': {{ subject_ids | escapejs }}
         };
     </script>
diff --git a/admin/templates/preprint_providers/update_preprint_provider_form.html b/admin/templates/preprint_providers/update_preprint_provider_form.html
index a99d33fd371..4025a91bfcc 100644
--- a/admin/templates/preprint_providers/update_preprint_provider_form.html
+++ b/admin/templates/preprint_providers/update_preprint_provider_form.html
@@ -52,7 +52,10 @@
             tinymce.init(FOOTER_LINKS_CONFIG);
 
         </script>
+        <link href="https://cdnjs.cloudflare.com/ajax/libs/select2/4.0.4/css/select2.min.css" rel="stylesheet" />
+        <script src="https://cdnjs.cloudflare.com/ajax/libs/select2/4.0.4/js/select2.min.js"></script>
     </head>
+<div class="row">
     <div class="col-md-9">
         <form action="" method="post">
             {% csrf_token %}
@@ -108,3 +111,162 @@ <h4>Import from JSON</h4>
             </form>
         </div>
     </div>
+</div>
+<br>
+<br>
+<div class="row">
+    <div class="col-md-12">
+        <div>
+            <h2>Custom Taxonomy</h2>
+            <p>Use the form below to build a custom taxonomy. Enter the JSON for the custom taxonomy directly in the text box, or use the form fields below to build the taxonomy one entry at a time.</p>
+            <p>Click <a role="button" data-toggle="modal" href="#exampleModal">here</a> to see an example of a full custom taxonomy
+                and for more information about building a custom taxonomy.
+            </p>
+        </div>
+        <form id="taxonomy-form" action="{% url 'preprint_providers:process_custom_taxonomy' %}" method="post">
+            {% csrf_token %}
+            {{ taxonomy_form.non_field_errors }}
+            {{ taxonomy_form.provider_id }}
+
+            <div>
+                <div class="fieldWrapper">
+                    {{ taxonomy_form.add_missing.errors }}
+                    <div class="row">
+                        <div class="col-md-2">
+                            {{ taxonomy_form.add_missing.label_tag }}
+                        </div>
+                        <div class="col-md-10">
+                            {{ taxonomy_form.add_missing }}
+                        </div>
+                    </div>
+                </div>
+                <div class="fieldWrapper">
+                    {{ taxonomy_form.include.errors }}
+                    <div class="row">
+                        <div class="col-md-2">
+                            {{ taxonomy_form.include.label_tag }}
+                        </div>
+                        <div class="col-md-10">
+                            {{ taxonomy_form.include }}
+                            <button type="button" class="taxonomy-action-button" id={{ taxonomy_form.include.html_name }} value={{ taxonomy_form.include.id_for_label }}>Add</button>
+                        </div>
+                    </div>
+                </div>
+                <div class="fieldWrapper">
+                    {{ taxonomy_form.exclude.errors }}
+                    <div class="row">
+                        <div class="col-md-2">
+                            {{ taxonomy_form.exclude.label_tag }}
+                        </div>
+                        <div class="col-md-10">
+                            {{ taxonomy_form.exclude }}
+                            <button type="button" class="taxonomy-action-button" id={{ taxonomy_form.exclude.html_name }} value={{ taxonomy_form.exclude.id_for_label }}>Add</button>
+                        </div>
+                    </div>
+                </div>
+            </div>
+
+            <div>
+                <div><b>Custom:</b></div>
+                <div class="panel panel-default">
+                    <div class="panel-body">
+                        {{ taxonomy_form.custom_name.errors }}
+                        <p>
+                            {{ taxonomy_form.custom_name.label_tag }}
+                            {{ taxonomy_form.custom_name }}
+                        </p>
+                        {{ taxonomy_form.custom_parent.errors }}
+                        <p>
+                            {{ taxonomy_form.custom_parent.label_tag }}
+                            {{ taxonomy_form.custom_parent }}
+                        </p>
+                        {{ taxonomy_form.bepress.errors }}
+                        <p>
+                            {{ taxonomy_form.bepress.label_tag }}
+                            {{ taxonomy_form.bepress }}
+                        </p>
+                        <div class="pull-right">
+                            <button type="button" id="id-add-custom">Add</button>
+                        </div>
+                    </div>
+                </div>
+            </div>
+            <div>
+                <div class="fieldWrapper">
+                    {{ taxonomy_form.custom_taxonomy_json.errors }}
+                    <div class="row">
+                        <div class="col-md-12">
+                            <h4>Built Custom Taxonomy</h4>
+                            {{ taxonomy_form.custom_taxonomy_json }}
+                        </div>
+                    </div>
+                </div>
+            </div>
+
+            <div>
+                <button type="button" id="id-validate-custom">Validate</button>
+                <p>Click the 'Validate' button to check your taxonomy before clicking the 'Save' button to submit.</p>
+                <div id="taxonomy-field-info"></div>
+                <div>
+                </div>
+            </div>
+            <div>
+                <input class="form-button" type="submit" value="Save" />
+            </div>
+        </form>
+    </div>
+</div>
+
+<!-- Modal explaining Custom Taxonomy -->
+<div class="modal fade" id="exampleModal" tabindex="-1" role="dialog">
+    <div class="modal-dialog" role="document">
+        <div class="modal-content">
+            <div class="modal-header">
+                <h5 class="modal-title" id="exampleModalLabel">Custom Taxonomy Example</h5>
+                <button type="button" class="close" data-dismiss="modal"><span aria-hidden="true">&times;</span></button>
+            </div>
+            <div class="modal-body">
+                <pre>
+{
+    "include": [
+        "Rehabilitation and Therapy",
+        "Theatre and Performance Studies"
+    ],
+    "exclude": [
+        "Acting",
+        "Playwriting",
+        "Theatre History",
+        "Other Theatre and Performance Studies",
+        "Dramatic Literature, Criticism and Theory"
+    ],
+    "custom": {
+        "Medicine and Health": {
+            "parent": "Rehabilitation and Therapy",
+            "bepress": "Sociology"
+        },
+        "Sport and Exercise Science": {
+            "parent": "",
+            "bepress": "Kinesiology"
+        },
+        "Exercise Immunology": {
+            "parent": "Sport and Exercise Science",
+            "bepress": "Exercise Science"
+        },
+        "Motor Control and Development": {
+            "parent": "Sport and Exercise Science",
+            "bepress": "Motor Control"
+        }
+    }
+}
+                </pre>
+            </div>
+            <div class="modal-footer">
+                <button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
+            </div>
+        </div>
+    </div>
+</div>
+
+<script>
+    $("#id_provider_id").val({{ preprint_provider.id }})
+</script>

From 0810fe43b41b9fb516d57423f96d4f5c9e5b8391 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 28 Sep 2017 11:25:34 -0400
Subject: [PATCH 063/192] Disable the templates panel for django debug toolbar

was causing massive slowdowns for rendering template for custom taxonomy
form in the admin.
---
 admin/base/settings/defaults.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/admin/base/settings/defaults.py b/admin/base/settings/defaults.py
index 02379cdea8a..e51cab57647 100644
--- a/admin/base/settings/defaults.py
+++ b/admin/base/settings/defaults.py
@@ -242,5 +242,9 @@
     INSTALLED_APPS += ('debug_toolbar', )
     MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
     DEBUG_TOOLBAR_CONFIG = {
-        'SHOW_TOOLBAR_CALLBACK': lambda(_): True
+        'SHOW_TOOLBAR_CALLBACK': lambda(_): True,
+        'DISABLE_PANELS': {
+            'debug_toolbar.panels.templates.TemplatesPanel',
+            'debug_toolbar.panels.redirects.RedirectsPanel'
+        }
     }

From d42a533e8c5939ac7b88e063312a271adb8aacaf Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 28 Sep 2017 12:12:02 -0400
Subject: [PATCH 064/192] Break out custom taxonomy form into separate template

---
 .../templates/preprint_providers/create.html  |   4 +-
 .../templates/preprint_providers/detail.html  |   1 +
 .../enter_custom_taxonomy.html                | 161 ++++++++++++++++++
 .../update_preprint_provider_form.html        | 160 -----------------
 4 files changed, 163 insertions(+), 163 deletions(-)
 create mode 100644 admin/templates/preprint_providers/enter_custom_taxonomy.html

diff --git a/admin/templates/preprint_providers/create.html b/admin/templates/preprint_providers/create.html
index 17e66cf2dc4..dda5c03bc8a 100644
--- a/admin/templates/preprint_providers/create.html
+++ b/admin/templates/preprint_providers/create.html
@@ -17,9 +17,7 @@
                 <h2>Create A Preprint Provider</h2>
             </div>
         </div>
-        <div class="row">
-            {% include 'preprint_providers/update_preprint_provider_form.html' %}
-        </div>
+        {% include 'preprint_providers/update_preprint_provider_form.html' %}
     </div>
 
 {% endblock content %}
diff --git a/admin/templates/preprint_providers/detail.html b/admin/templates/preprint_providers/detail.html
index c88d9ae26ae..625f662b453 100644
--- a/admin/templates/preprint_providers/detail.html
+++ b/admin/templates/preprint_providers/detail.html
@@ -49,6 +49,7 @@ <h2>{{ preprint_provider.name }}</h2>
         </div>
         <div id="form-view" style="display:none;">
             {% include 'preprint_providers/update_preprint_provider_form.html' %}
+            {% include 'preprint_providers/enter_custom_taxonomy.html' %}
         </div>
 
 {% endblock content %}
diff --git a/admin/templates/preprint_providers/enter_custom_taxonomy.html b/admin/templates/preprint_providers/enter_custom_taxonomy.html
new file mode 100644
index 00000000000..ae318fbde7c
--- /dev/null
+++ b/admin/templates/preprint_providers/enter_custom_taxonomy.html
@@ -0,0 +1,161 @@
+<head>
+    <link href="https://cdnjs.cloudflare.com/ajax/libs/select2/4.0.4/css/select2.min.css" rel="stylesheet" />
+    <script src="https://cdnjs.cloudflare.com/ajax/libs/select2/4.0.4/js/select2.min.js"></script>
+</head>
+
+<div class="row">
+    <div class="col-md-12">
+        <div>
+            <h2>Custom Taxonomy</h2>
+            <p>Use the form below to build a custom taxonomy. Enter the JSON for the custom taxonomy directly in the text box, or use the form fields below to build the taxonomy one entry at a time.</p>
+            <p>Click <a role="button" data-toggle="modal" href="#exampleModal">here</a> to see an example of a full custom taxonomy
+                and for more information about building a custom taxonomy.
+            </p>
+        </div>
+        <form id="taxonomy-form" action="{% url 'preprint_providers:process_custom_taxonomy' %}" method="post">
+            {% csrf_token %}
+            {{ taxonomy_form.non_field_errors }}
+            {{ taxonomy_form.provider_id }}
+
+            <div>
+                <div class="fieldWrapper">
+                    {{ taxonomy_form.add_missing.errors }}
+                    <div class="row">
+                        <div class="col-md-2">
+                            {{ taxonomy_form.add_missing.label_tag }}
+                        </div>
+                        <div class="col-md-10">
+                            {{ taxonomy_form.add_missing }}
+                        </div>
+                    </div>
+                </div>
+                <div class="fieldWrapper">
+                    {{ taxonomy_form.include.errors }}
+                    <div class="row">
+                        <div class="col-md-2">
+                            {{ taxonomy_form.include.label_tag }}
+                        </div>
+                        <div class="col-md-10">
+                            {{ taxonomy_form.include }}
+                            <button type="button" class="taxonomy-action-button" id={{ taxonomy_form.include.html_name }} value={{ taxonomy_form.include.id_for_label }}>Add</button>
+                        </div>
+                    </div>
+                </div>
+                <div class="fieldWrapper">
+                    {{ taxonomy_form.exclude.errors }}
+                    <div class="row">
+                        <div class="col-md-2">
+                            {{ taxonomy_form.exclude.label_tag }}
+                        </div>
+                        <div class="col-md-10">
+                            {{ taxonomy_form.exclude }}
+                            <button type="button" class="taxonomy-action-button" id={{ taxonomy_form.exclude.html_name }} value={{ taxonomy_form.exclude.id_for_label }}>Add</button>
+                        </div>
+                    </div>
+                </div>
+            </div>
+
+            <div>
+                <div><b>Custom:</b></div>
+                <div class="panel panel-default">
+                    <div class="panel-body">
+                        {{ taxonomy_form.custom_name.errors }}
+                        <p>
+                            {{ taxonomy_form.custom_name.label_tag }}
+                            {{ taxonomy_form.custom_name }}
+                        </p>
+                        {{ taxonomy_form.custom_parent.errors }}
+                        <p>
+                            {{ taxonomy_form.custom_parent.label_tag }}
+                            {{ taxonomy_form.custom_parent }}
+                        </p>
+                        {{ taxonomy_form.bepress.errors }}
+                        <p>
+                            {{ taxonomy_form.bepress.label_tag }}
+                            {{ taxonomy_form.bepress }}
+                        </p>
+                        <div class="pull-right">
+                            <button type="button" id="id-add-custom">Add</button>
+                        </div>
+                    </div>
+                </div>
+            </div>
+            <div>
+                <div class="fieldWrapper">
+                    {{ taxonomy_form.custom_taxonomy_json.errors }}
+                    <div class="row">
+                        <div class="col-md-12">
+                            <h4>Built Custom Taxonomy</h4>
+                            {{ taxonomy_form.custom_taxonomy_json }}
+                        </div>
+                    </div>
+                </div>
+            </div>
+
+            <div>
+                <button type="button" id="id-validate-custom">Validate</button>
+                <p>Click the 'Validate' button to check your taxonomy before clicking the 'Save' button to submit.</p>
+                <div id="taxonomy-field-info"></div>
+                <div>
+                </div>
+            </div>
+            <div>
+                <input class="form-button" type="submit" value="Save" />
+            </div>
+        </form>
+    </div>
+</div>
+
+<!-- Modal explaining Custom Taxonomy -->
+<div class="modal fade" id="exampleModal" tabindex="-1" role="dialog">
+    <div class="modal-dialog" role="document">
+        <div class="modal-content">
+            <div class="modal-header">
+                <h5 class="modal-title" id="exampleModalLabel">Custom Taxonomy Example</h5>
+                <button type="button" class="close" data-dismiss="modal"><span aria-hidden="true">&times;</span></button>
+            </div>
+            <div class="modal-body">
+                <pre>
+{
+    "include": [
+        "Rehabilitation and Therapy",
+        "Theatre and Performance Studies"
+    ],
+    "exclude": [
+        "Acting",
+        "Playwriting",
+        "Theatre History",
+        "Other Theatre and Performance Studies",
+        "Dramatic Literature, Criticism and Theory"
+    ],
+    "custom": {
+        "Medicine and Health": {
+            "parent": "Rehabilitation and Therapy",
+            "bepress": "Sociology"
+        },
+        "Sport and Exercise Science": {
+            "parent": "",
+            "bepress": "Kinesiology"
+        },
+        "Exercise Immunology": {
+            "parent": "Sport and Exercise Science",
+            "bepress": "Exercise Science"
+        },
+        "Motor Control and Development": {
+            "parent": "Sport and Exercise Science",
+            "bepress": "Motor Control"
+        }
+    }
+}
+                </pre>
+            </div>
+            <div class="modal-footer">
+                <button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
+            </div>
+        </div>
+    </div>
+</div>
+
+<script>
+    $("#id_provider_id").val({{ preprint_provider.id }})
+</script>
diff --git a/admin/templates/preprint_providers/update_preprint_provider_form.html b/admin/templates/preprint_providers/update_preprint_provider_form.html
index 4025a91bfcc..2e2e69cae06 100644
--- a/admin/templates/preprint_providers/update_preprint_provider_form.html
+++ b/admin/templates/preprint_providers/update_preprint_provider_form.html
@@ -52,8 +52,6 @@
             tinymce.init(FOOTER_LINKS_CONFIG);
 
         </script>
-        <link href="https://cdnjs.cloudflare.com/ajax/libs/select2/4.0.4/css/select2.min.css" rel="stylesheet" />
-        <script src="https://cdnjs.cloudflare.com/ajax/libs/select2/4.0.4/js/select2.min.js"></script>
     </head>
 <div class="row">
     <div class="col-md-9">
@@ -112,161 +110,3 @@ <h4>Import from JSON</h4>
         </div>
     </div>
 </div>
-<br>
-<br>
-<div class="row">
-    <div class="col-md-12">
-        <div>
-            <h2>Custom Taxonomy</h2>
-            <p>Use the form below to build a custom taxonomy. Enter the JSON for the custom taxonomy directly in the text box, or use the form fields below to build the taxonomy one entry at a time.</p>
-            <p>Click <a role="button" data-toggle="modal" href="#exampleModal">here</a> to see an example of a full custom taxonomy
-                and for more information about building a custom taxonomy.
-            </p>
-        </div>
-        <form id="taxonomy-form" action="{% url 'preprint_providers:process_custom_taxonomy' %}" method="post">
-            {% csrf_token %}
-            {{ taxonomy_form.non_field_errors }}
-            {{ taxonomy_form.provider_id }}
-
-            <div>
-                <div class="fieldWrapper">
-                    {{ taxonomy_form.add_missing.errors }}
-                    <div class="row">
-                        <div class="col-md-2">
-                            {{ taxonomy_form.add_missing.label_tag }}
-                        </div>
-                        <div class="col-md-10">
-                            {{ taxonomy_form.add_missing }}
-                        </div>
-                    </div>
-                </div>
-                <div class="fieldWrapper">
-                    {{ taxonomy_form.include.errors }}
-                    <div class="row">
-                        <div class="col-md-2">
-                            {{ taxonomy_form.include.label_tag }}
-                        </div>
-                        <div class="col-md-10">
-                            {{ taxonomy_form.include }}
-                            <button type="button" class="taxonomy-action-button" id={{ taxonomy_form.include.html_name }} value={{ taxonomy_form.include.id_for_label }}>Add</button>
-                        </div>
-                    </div>
-                </div>
-                <div class="fieldWrapper">
-                    {{ taxonomy_form.exclude.errors }}
-                    <div class="row">
-                        <div class="col-md-2">
-                            {{ taxonomy_form.exclude.label_tag }}
-                        </div>
-                        <div class="col-md-10">
-                            {{ taxonomy_form.exclude }}
-                            <button type="button" class="taxonomy-action-button" id={{ taxonomy_form.exclude.html_name }} value={{ taxonomy_form.exclude.id_for_label }}>Add</button>
-                        </div>
-                    </div>
-                </div>
-            </div>
-
-            <div>
-                <div><b>Custom:</b></div>
-                <div class="panel panel-default">
-                    <div class="panel-body">
-                        {{ taxonomy_form.custom_name.errors }}
-                        <p>
-                            {{ taxonomy_form.custom_name.label_tag }}
-                            {{ taxonomy_form.custom_name }}
-                        </p>
-                        {{ taxonomy_form.custom_parent.errors }}
-                        <p>
-                            {{ taxonomy_form.custom_parent.label_tag }}
-                            {{ taxonomy_form.custom_parent }}
-                        </p>
-                        {{ taxonomy_form.bepress.errors }}
-                        <p>
-                            {{ taxonomy_form.bepress.label_tag }}
-                            {{ taxonomy_form.bepress }}
-                        </p>
-                        <div class="pull-right">
-                            <button type="button" id="id-add-custom">Add</button>
-                        </div>
-                    </div>
-                </div>
-            </div>
-            <div>
-                <div class="fieldWrapper">
-                    {{ taxonomy_form.custom_taxonomy_json.errors }}
-                    <div class="row">
-                        <div class="col-md-12">
-                            <h4>Built Custom Taxonomy</h4>
-                            {{ taxonomy_form.custom_taxonomy_json }}
-                        </div>
-                    </div>
-                </div>
-            </div>
-
-            <div>
-                <button type="button" id="id-validate-custom">Validate</button>
-                <p>Click the 'Validate' button to check your taxonomy before clicking the 'Save' button to submit.</p>
-                <div id="taxonomy-field-info"></div>
-                <div>
-                </div>
-            </div>
-            <div>
-                <input class="form-button" type="submit" value="Save" />
-            </div>
-        </form>
-    </div>
-</div>
-
-<!-- Modal explaining Custom Taxonomy -->
-<div class="modal fade" id="exampleModal" tabindex="-1" role="dialog">
-    <div class="modal-dialog" role="document">
-        <div class="modal-content">
-            <div class="modal-header">
-                <h5 class="modal-title" id="exampleModalLabel">Custom Taxonomy Example</h5>
-                <button type="button" class="close" data-dismiss="modal"><span aria-hidden="true">&times;</span></button>
-            </div>
-            <div class="modal-body">
-                <pre>
-{
-    "include": [
-        "Rehabilitation and Therapy",
-        "Theatre and Performance Studies"
-    ],
-    "exclude": [
-        "Acting",
-        "Playwriting",
-        "Theatre History",
-        "Other Theatre and Performance Studies",
-        "Dramatic Literature, Criticism and Theory"
-    ],
-    "custom": {
-        "Medicine and Health": {
-            "parent": "Rehabilitation and Therapy",
-            "bepress": "Sociology"
-        },
-        "Sport and Exercise Science": {
-            "parent": "",
-            "bepress": "Kinesiology"
-        },
-        "Exercise Immunology": {
-            "parent": "Sport and Exercise Science",
-            "bepress": "Exercise Science"
-        },
-        "Motor Control and Development": {
-            "parent": "Sport and Exercise Science",
-            "bepress": "Motor Control"
-        }
-    }
-}
-                </pre>
-            </div>
-            <div class="modal-footer">
-                <button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
-            </div>
-        </div>
-    </div>
-</div>
-
-<script>
-    $("#id_provider_id").val({{ preprint_provider.id }})
-</script>

From e87a3f26b35a93843339011ec531c53188c554f7 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 28 Sep 2017 12:55:51 -0400
Subject: [PATCH 065/192] Remove add_missing argument from forms and views

---
 admin/preprint_providers/forms.py                    |  1 -
 admin/preprint_providers/views.py                    | 11 ++---------
 .../preprint_providers/enter_custom_taxonomy.html    | 12 +-----------
 3 files changed, 3 insertions(+), 21 deletions(-)

diff --git a/admin/preprint_providers/forms.py b/admin/preprint_providers/forms.py
index 7e9987fff86..998b5129579 100644
--- a/admin/preprint_providers/forms.py
+++ b/admin/preprint_providers/forms.py
@@ -63,7 +63,6 @@ def clean_footer_links(self, *args, **kwargs):
 
 class PreprintProviderCustomTaxonomyForm(forms.Form):
     custom_taxonomy_json = forms.CharField(widget=forms.Textarea, initial='{"include": [], "exclude": [], "custom": {}}', required=False)
-    add_missing = forms.BooleanField(required=False)
     provider_id = forms.IntegerField(widget=forms.HiddenInput())
     include = forms.ChoiceField(choices=[], required=False)
     exclude = forms.ChoiceField(choices=[], required=False)
diff --git a/admin/preprint_providers/views.py b/admin/preprint_providers/views.py
index 26c265f4c00..55bb40160f5 100644
--- a/admin/preprint_providers/views.py
+++ b/admin/preprint_providers/views.py
@@ -124,11 +124,6 @@ def get_context_data(self, *args, **kwargs):
 
         subject_html += '</ul>'
         preprint_provider_attributes['subjects_acceptable'] = subject_html
-        # subjects_with_mapped_taxonomies = [sub for sub in preprint_provider.subjects.all() if sub.text != sub.bepress_subject.text]
-        # mapped_taxonomies_html = '<ul>'
-        # for subject in subjects_with_mapped_taxonomies:
-        #     mapped_taxonomies_html += '<li>BePress: <b>{}</b> ==> New: <b>{}</b>'.format(subject.bepress_subject.text, subject.text)
-        # kwargs['mapped_taxonomies'] = mapped_taxonomies_html + '</ul>'
 
         kwargs['preprint_provider'] = preprint_provider_attributes
         kwargs['subject_ids'] = list(subject_ids)
@@ -139,7 +134,6 @@ def get_context_data(self, *args, **kwargs):
         kwargs['show_taxonomies'] = False if preprint_provider.subjects.exists() else True
         kwargs['form'] = PreprintProviderForm(initial=fields)
         kwargs['taxonomy_form'] = PreprintProviderCustomTaxonomyForm()
-        kwargs['no_add_list'] = ['custom_taxonomy_json', 'add_missing']
         kwargs['import_form'] = ImportFileForm()
         kwargs['tinymce_apikey'] = settings.TINYMCE_APIKEY
         return kwargs
@@ -187,18 +181,17 @@ def post(self, request, *args, **kwargs):
             provider = PreprintProvider.objects.get(id=provider_form.cleaned_data['provider_id'])
             try:
                 taxonomy_json = json.loads(provider_form.cleaned_data['custom_taxonomy_json'])
-                add_missing = provider_form.cleaned_data['add_missing']
                 if request.is_ajax():
                     # An ajax request is for validation only, so run that validation!
                     try:
-                        response_data = validate_input(custom_provider=provider, data=taxonomy_json, add_missing=add_missing)
+                        response_data = validate_input(custom_provider=provider, data=taxonomy_json)
                     except (RuntimeError, AssertionError) as script_feedback:
                         response_data = {'message': script_feedback.message, 'feedback_type': 'error'}
                     if not response_data:
                         response_data = {'message': 'Custom taxonomy validated!', 'feedback_type': 'success'}
                 else:
                     # Actually do the migration of the custom taxonomies
-                    migrate(provider=provider._id, data=taxonomy_json, add_missing=add_missing)
+                    migrate(provider=provider._id, data=taxonomy_json)
                     return redirect('preprint_providers:detail', preprint_provider_id=provider.id)
 
             except ValueError as error:
diff --git a/admin/templates/preprint_providers/enter_custom_taxonomy.html b/admin/templates/preprint_providers/enter_custom_taxonomy.html
index ae318fbde7c..51a4fdd5ea0 100644
--- a/admin/templates/preprint_providers/enter_custom_taxonomy.html
+++ b/admin/templates/preprint_providers/enter_custom_taxonomy.html
@@ -18,17 +18,6 @@ <h2>Custom Taxonomy</h2>
             {{ taxonomy_form.provider_id }}
 
             <div>
-                <div class="fieldWrapper">
-                    {{ taxonomy_form.add_missing.errors }}
-                    <div class="row">
-                        <div class="col-md-2">
-                            {{ taxonomy_form.add_missing.label_tag }}
-                        </div>
-                        <div class="col-md-10">
-                            {{ taxonomy_form.add_missing }}
-                        </div>
-                    </div>
-                </div>
                 <div class="fieldWrapper">
                     {{ taxonomy_form.include.errors }}
                     <div class="row">
@@ -113,6 +102,7 @@ <h4>Built Custom Taxonomy</h4>
             <div class="modal-header">
                 <h5 class="modal-title" id="exampleModalLabel">Custom Taxonomy Example</h5>
                 <button type="button" class="close" data-dismiss="modal"><span aria-hidden="true">&times;</span></button>
+                <p>For a custom taxonomy </p>
             </div>
             <div class="modal-body">
                 <pre>

From e44325c247e1db48755ef2f88608eb2096cd7650 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 28 Sep 2017 14:14:26 -0400
Subject: [PATCH 066/192] Add more text in custom taxonomy example

---
 .../enter_custom_taxonomy.html                | 44 ++++++++++++++-----
 1 file changed, 34 insertions(+), 10 deletions(-)

diff --git a/admin/templates/preprint_providers/enter_custom_taxonomy.html b/admin/templates/preprint_providers/enter_custom_taxonomy.html
index 51a4fdd5ea0..e7e278b59ab 100644
--- a/admin/templates/preprint_providers/enter_custom_taxonomy.html
+++ b/admin/templates/preprint_providers/enter_custom_taxonomy.html
@@ -7,10 +7,13 @@
     <div class="col-md-12">
         <div>
             <h2>Custom Taxonomy</h2>
-            <p>Use the form below to build a custom taxonomy. Enter the JSON for the custom taxonomy directly in the text box, or use the form fields below to build the taxonomy one entry at a time.</p>
-            <p>Click <a role="button" data-toggle="modal" href="#exampleModal">here</a> to see an example of a full custom taxonomy
-                and for more information about building a custom taxonomy.
-            </p>
+            <p>Use the form below to build a custom taxonomy. In each dropdown menu, start typing to select an existing subject. After selecting a subject, or entering
+                a custom mapping, click the "Add" button to add it to the taxonomy. The result will be displayed in the 'Built Custom Taxonomy' section.</p>
+
+            <p>To remove an entry, delete it from the text box in the Built Custom Taxonomy section. If deleting the last item in a list, be sure to also remove the
+                final comma.</p>
+
+            <p>Click <a role="button" data-toggle="modal" href="#exampleModal">here</a> to see an example of a full custom taxonomy and for more information about each field.</p>
         </div>
         <form id="taxonomy-form" action="{% url 'preprint_providers:process_custom_taxonomy' %}" method="post">
             {% csrf_token %}
@@ -100,11 +103,28 @@ <h4>Built Custom Taxonomy</h4>
     <div class="modal-dialog" role="document">
         <div class="modal-content">
             <div class="modal-header">
-                <h5 class="modal-title" id="exampleModalLabel">Custom Taxonomy Example</h5>
+                <h4 class="modal-title" id="exampleModalLabel">Custom Taxonomy Example</h4>
                 <button type="button" class="close" data-dismiss="modal"><span aria-hidden="true">&times;</span></button>
-                <p>For a custom taxonomy </p>
             </div>
             <div class="modal-body">
+                <div>
+                    <ul>
+                        <li><b>include</b>: These subjects, and their children will be included in the custom taxonomy.
+                            If a second level subject is included, that second level will become the top of the subject tree, and its parent will not be in the taxonomy.</li>
+                        <li><b>exclude</b>: These subjects, and their children will *not* be included in the custom taxonomy. Useful to include a top level subject, but then specify
+                        a certain child and its descendants to not be included.</li>
+                        <li><b>custom</b>: A new taxonomy name, along with information about which existing Bepress subject to replace with the new name.
+                            <ul>
+                                <li>custom name: the new name for the subject</li>
+                                <li>custom parent: the parent of the subject. Leave blank if it is a top-level subject.
+                                    *Note*: if adding a new child of an existing bepress parent, you must also add a 'custom' parent with the same name that maps to the existing
+                                    bepress subject. See JSON below for an example.
+                                </li>
+                                <li>bepress: the existing subject that you would like to replace with the subject listed in the custom name field.</li>
+                            </ul>
+                        </li>
+                    </ul>
+                </div>
                 <pre>
 {
     "include": [
@@ -127,13 +147,17 @@ <h5 class="modal-title" id="exampleModalLabel">Custom Taxonomy Example</h5>
             "parent": "",
             "bepress": "Kinesiology"
         },
-        "Exercise Immunology": {
-            "parent": "Sport and Exercise Science",
-            "bepress": "Exercise Science"
-        },
         "Motor Control and Development": {
             "parent": "Sport and Exercise Science",
             "bepress": "Motor Control"
+        },
+        "Super Engineering": {
+            "parent": "Engineering",
+            "bepress": "Aerospace Engineering"
+        },
+        "Engineering": {
+            "parent": "",
+            "bepress": "Engineering"
         }
     }
 }

From 66bd47bd4b2670c14ca046149b07e640d844bbe6 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 28 Sep 2017 15:56:30 -0400
Subject: [PATCH 067/192] Import management command methods when needed for db
 errors on tests

---
 admin/preprint_providers/views.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/admin/preprint_providers/views.py b/admin/preprint_providers/views.py
index 55bb40160f5..eaae2b1aa8e 100644
--- a/admin/preprint_providers/views.py
+++ b/admin/preprint_providers/views.py
@@ -15,7 +15,6 @@
 from admin.preprint_providers.forms import PreprintProviderForm, PreprintProviderCustomTaxonomyForm
 from osf.models import PreprintProvider, Subject, NodeLicense
 from osf.models.preprint_provider import rules_to_subjects
-from osf.management.commands.populate_custom_taxonomies import validate_input, migrate
 
 # When preprint_providers exclusively use Subject relations for creation, set this to False
 SHOW_TAXONOMIES_IN_PREPRINT_PROVIDER_CREATE = True
@@ -176,6 +175,9 @@ class ProcessCustomTaxonomy(PermissionRequiredMixin, View):
     raise_exception = True
 
     def post(self, request, *args, **kwargs):
+        # Import here to avoid test DB access errors when importing preprint provider views
+        from osf.management.commands.populate_custom_taxonomies import validate_input, migrate
+
         provider_form = PreprintProviderCustomTaxonomyForm(request.POST)
         if provider_form.is_valid():
             provider = PreprintProvider.objects.get(id=provider_form.cleaned_data['provider_id'])

From d7f2867bc0b2037334705037dce15d6857782c57 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Fri, 29 Sep 2017 14:39:22 -0400
Subject: [PATCH 068/192] Add tests for process custom taxonomy view

---
 admin_tests/preprint_providers/test_views.py | 59 ++++++++++++++++++++
 1 file changed, 59 insertions(+)

diff --git a/admin_tests/preprint_providers/test_views.py b/admin_tests/preprint_providers/test_views.py
index 70fdd511e51..6f7ba5b21e0 100644
--- a/admin_tests/preprint_providers/test_views.py
+++ b/admin_tests/preprint_providers/test_views.py
@@ -338,3 +338,62 @@ def test_get_subject_descendants(self):
             content_dict['all_descendants'],
             [self.child_1.id, self.child_2.id, self.grandchild_1.id]
         )
+
+
+class TestProcessCustomTaxonomy(AdminTestCase):
+    def setUp(self):
+
+        self.user = AuthUserFactory()
+
+        self.subject1 = SubjectFactory()
+        self.subject2 = SubjectFactory()
+        self.subject3 = SubjectFactory()
+
+        self.subject1_1 = SubjectFactory(parent=self.subject1)
+        self.subject2_1 = SubjectFactory(parent=self.subject2)
+        self.subject3_1 = SubjectFactory(parent=self.subject3)
+
+        self.subject1_1_1 = SubjectFactory(parent=self.subject1_1)
+
+        self.preprint_provider = PreprintProviderFactory()
+        self.request = RequestFactory().get('/fake_path')
+        self.view = views.ProcessCustomTaxonomy()
+        self.view = setup_user_view(self.view, self.request, user=self.user)
+
+    def test_process_taxonomy_changes_subjects(self):
+        custom_taxonomy = {
+            'include': [self.subject1.text, self.subject3_1.text],
+            'exclude': [self.subject1_1.text],
+            'custom': {
+                'Changed Subject Name': {'parent': self.subject2.text, 'bepress': self.subject2_1.text},
+                self.subject2.text: {'parent': '', 'bepress': self.subject2.text}
+            }
+        }
+        self.request.POST = {
+            'custom_taxonomy_json': json.dumps(custom_taxonomy),
+            'provider_id': self.preprint_provider.id
+        }
+
+        self.view.post(self.request)
+
+        actual_preprint_provider_subjects = self.preprint_provider.subjects.all().values_list('text', flat=True)
+        expected_subjects = [self.subject1.text, self.subject2.text, self.subject3_1.text, 'Changed Subject Name']
+
+        nt.assert_items_equal(actual_preprint_provider_subjects, expected_subjects)
+        assert self.preprint_provider.subjects.get(text='Changed Subject Name').parent.text == self.subject2.text
+
+    def test_process_taxonomy_invalid_returns_feedback(self):
+        custom_taxonomy = {
+            'include': [],
+            'exclude': [],
+            'custom': {
+                'Changed Subject Name': {'parent': self.subject2.text, 'bepress': self.subject2_1.text},
+            }
+        }
+        self.request.POST = {
+            'custom_taxonomy_json': json.dumps(custom_taxonomy),
+            'provider_id': self.preprint_provider.id
+        }
+
+        with nt.assert_raises(AssertionError):
+            self.view.post(self.request)

From 636fa57b75ba02c22759bb3b2184976ef4e0bd16 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Fri, 29 Sep 2017 14:57:41 -0400
Subject: [PATCH 069/192] Hide Custom Taxonomy panel if Provider already has
 related subjects

---
 admin/templates/preprint_providers/detail.html | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/admin/templates/preprint_providers/detail.html b/admin/templates/preprint_providers/detail.html
index 625f662b453..749b01e0a22 100644
--- a/admin/templates/preprint_providers/detail.html
+++ b/admin/templates/preprint_providers/detail.html
@@ -49,7 +49,9 @@ <h2>{{ preprint_provider.name }}</h2>
         </div>
         <div id="form-view" style="display:none;">
             {% include 'preprint_providers/update_preprint_provider_form.html' %}
+            {% if show_taxonomies %}
             {% include 'preprint_providers/enter_custom_taxonomy.html' %}
+            {% endif %}
         </div>
 
 {% endblock content %}

From c0d2b911846b1700be6a13ce6e8f980420b6386f Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Tue, 24 Oct 2017 15:17:01 -0400
Subject: [PATCH 070/192] change permission for modifying preprintprovider

---
 admin/preprint_providers/views.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/admin/preprint_providers/views.py b/admin/preprint_providers/views.py
index eaae2b1aa8e..5ba78610fef 100644
--- a/admin/preprint_providers/views.py
+++ b/admin/preprint_providers/views.py
@@ -171,7 +171,7 @@ def get_success_url(self, *args, **kwargs):
 
 class ProcessCustomTaxonomy(PermissionRequiredMixin, View):
 
-    permission_required = 'osf.view_preprintprovider'
+    permission_required = 'osf.change_preprintprovider'
     raise_exception = True
 
     def post(self, request, *args, **kwargs):

From 5e1c2fc8f4014a50170763b8c18ab97737cf939d Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Wed, 25 Oct 2017 17:31:43 -0500
Subject: [PATCH 071/192] Assert model_cls is a Node or a Registration.

---
 api/base/utils.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/api/base/utils.py b/api/base/utils.py
index 35c7050bc8e..dc1880a0321 100644
--- a/api/base/utils.py
+++ b/api/base/utils.py
@@ -13,7 +13,7 @@
 from framework.auth import Auth
 from framework.auth.cas import CasResponse
 from framework.auth.oauth_scopes import ComposedScopes, normalize_scopes
-from osf.models import OSFUser, Contributor
+from osf.models import OSFUser, Contributor, Node, Registration
 from osf.models.base import GuidMixin
 from osf.modm_compat import to_django_query
 from osf.utils.requests import check_select_for_update
@@ -153,11 +153,15 @@ def waterbutler_url_for(request_type, provider, path, node_id, token, obj_args=N
     url.args.update(query)
     return url.url
 
+def check_model_cls(model_cls):
+    assert model_cls is Node or model_cls is Registration
 
 def default_node_list_queryset(model_cls):
+    check_model_cls(model_cls)
     return model_cls.objects.filter(is_deleted=False)
 
 def default_node_permission_queryset(user, model_cls):
+    check_model_cls(model_cls)
     if user.is_anonymous:
         return model_cls.objects.filter(is_public=True)
     sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user__id=user.id, read=True)
@@ -169,7 +173,6 @@ def default_node_list_permission_queryset(user, model_cls):
     # Django's alaising will break and the resulting QS will be empty and you will be sad.
     return default_node_permission_queryset(user, model_cls) & default_node_list_queryset(model_cls)
 
-
 def extend_querystring_params(url, params):
     scheme, netloc, path, query, _ = urlparse.urlsplit(url)
     orig_params = urlparse.parse_qs(query)

From f38abdbee07d486c6cb21872397deb15407add78 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Wed, 25 Oct 2017 17:31:58 -0500
Subject: [PATCH 072/192] Restore missing Node import.

---
 api/users/views.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/api/users/views.py b/api/users/views.py
index 582ce411645..69f33897207 100644
--- a/api/users/views.py
+++ b/api/users/views.py
@@ -39,6 +39,7 @@
                         QuickFilesNode,
                         AbstractNode,
                         PreprintService,
+                        Node,
                         OSFUser,
                         PreprintProvider,
                         Action,)

From b8f5b6bcd6c57a743faee53a5e83e0ff155173b8 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Wed, 25 Oct 2017 17:32:48 -0500
Subject: [PATCH 073/192] Remove inner 'Q'.

---
 api/nodes/views.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/api/nodes/views.py b/api/nodes/views.py
index ddc37fb95f4..c0371f6f723 100644
--- a/api/nodes/views.py
+++ b/api/nodes/views.py
@@ -1933,7 +1933,7 @@ def get_default_queryset(self):
             raise NotFound
 
         sub_qs = type(files_list).objects.filter(_children=OuterRef('pk'), pk=files_list.pk)
-        return files_list.children.annotate(folder=Exists(sub_qs)).filter(Q(folder=True)).prefetch_related('node__guids', 'versions', 'tags', 'guids')
+        return files_list.children.annotate(folder=Exists(sub_qs)).filter(folder=True).prefetch_related('node__guids', 'versions', 'tags', 'guids')
 
     # overrides ListAPIView
     def get_queryset(self):

From 4df02c98c6067e8a03e2aac69eb615cf056305df Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Wed, 25 Oct 2017 20:53:43 -0500
Subject: [PATCH 074/192] Fix unused imports.

---
 api/nodes/views.py              | 2 +-
 api/preprint_providers/views.py | 4 +---
 api/users/views.py              | 1 +
 3 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/api/nodes/views.py b/api/nodes/views.py
index c0371f6f723..26f6cd6cc82 100644
--- a/api/nodes/views.py
+++ b/api/nodes/views.py
@@ -98,7 +98,7 @@
 from framework.postcommit_tasks.handlers import enqueue_postcommit_task
 from osf.models import AbstractNode
 from osf.models import (Node, PrivateLink, Institution, Comment, DraftRegistration,)
-from osf.models import OSFUser, Contributor
+from osf.models import OSFUser
 from osf.models import NodeRelation, Guid
 from osf.models import BaseFileNode
 from osf.models.files import File, Folder
diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py
index 3062d016de6..364b2ae2ab4 100644
--- a/api/preprint_providers/views.py
+++ b/api/preprint_providers/views.py
@@ -4,11 +4,9 @@
 from rest_framework import permissions as drf_permissions
 from rest_framework.exceptions import NotAuthenticated
 
-from django.db.models import Q, Exists, OuterRef
-
 from framework.auth.oauth_scopes import CoreScopes
 
-from osf.models import AbstractNode, Subject, PreprintProvider, Contributor
+from osf.models import AbstractNode, Subject, PreprintProvider
 
 from reviews import permissions as reviews_permissions
 
diff --git a/api/users/views.py b/api/users/views.py
index 69f33897207..162ad81f358 100644
--- a/api/users/views.py
+++ b/api/users/views.py
@@ -40,6 +40,7 @@
                         AbstractNode,
                         PreprintService,
                         Node,
+                        Registration,
                         OSFUser,
                         PreprintProvider,
                         Action,)

From 56c148f930b5c135d88132eb9e4dd442225e11ee Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Wed, 25 Oct 2017 22:41:45 -0500
Subject: [PATCH 075/192] Missing import.

---
 api/preprint_providers/views.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py
index 364b2ae2ab4..8ddf9dfab1c 100644
--- a/api/preprint_providers/views.py
+++ b/api/preprint_providers/views.py
@@ -1,4 +1,5 @@
 from guardian.shortcuts import get_objects_for_user
+from django.db.models import Q
 
 from rest_framework import generics
 from rest_framework import permissions as drf_permissions

From cf7d3147d1ad61970411035eca9a7c7ebd2afe49 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Thu, 26 Oct 2017 11:46:39 -0500
Subject: [PATCH 076/192] Add subquery to preprints_queryset to avoid use of
 distinct.

---
 api/base/filters.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/api/base/filters.py b/api/base/filters.py
index 0c44c861c36..e19e3e84180 100644
--- a/api/base/filters.py
+++ b/api/base/filters.py
@@ -14,11 +14,11 @@
 from dateutil import parser as date_parser
 from django.core.exceptions import ValidationError
 from django.db.models import QuerySet as DjangoQuerySet
-from django.db.models import Q
+from django.db.models import Q, Exists, OuterRef
 from modularodm.query import queryset as modularodm_queryset
 from rest_framework import serializers as ser
 from rest_framework.filters import OrderingFilter
-from osf.models import Subject, PreprintProvider
+from osf.models import Subject, PreprintProvider, Node
 from osf.models.base import GuidMixin
 from reviews.workflow import States
 
@@ -498,7 +498,7 @@ def postprocess_query_param(self, key, field_name, operation):
                 operation['op'] = 'iexact'
 
     def preprints_queryset(self, base_queryset, auth_user, allow_contribs=True):
-        default_query = Q(node__isnull=False, node__is_deleted=False)
+        sub_qs = Node.objects.filter(preprints=OuterRef('pk'), is_deleted=False)
         no_user_query = Q(is_published=True, node__is_public=True)
 
         if auth_user:
@@ -506,10 +506,10 @@ def preprints_queryset(self, base_queryset, auth_user, allow_contribs=True):
             reviews_user_query = Q(node__is_public=True, provider__in=get_objects_for_user(auth_user, 'view_submissions', PreprintProvider))
             if allow_contribs:
                 contrib_user_query = ~Q(reviews_state=States.INITIAL.value) & Q(node__contributor__user_id=auth_user.id, node__contributor__read=True)
-                query = default_query & (no_user_query | contrib_user_query | admin_user_query | reviews_user_query)
+                query = (no_user_query | contrib_user_query | admin_user_query | reviews_user_query)
             else:
-                query = default_query & (no_user_query | admin_user_query | reviews_user_query)
+                query = (no_user_query | admin_user_query | reviews_user_query)
         else:
-            query = default_query & no_user_query
+            query = no_user_query
 
-        return base_queryset.filter(query)
+        return base_queryset.annotate(default=Exists(sub_qs)).filter(Q(default=True) & query)

From b50fbd9b4b5ddb68b399d892cf3a0838a46b1c13 Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Thu, 26 Oct 2017 13:55:53 -0400
Subject: [PATCH 077/192] Remove unnecessary use of lodashGet

---
 website/static/js/projectSettingsTreebeardBase.js | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/website/static/js/projectSettingsTreebeardBase.js b/website/static/js/projectSettingsTreebeardBase.js
index 7df60699548..551b98a3c77 100644
--- a/website/static/js/projectSettingsTreebeardBase.js
+++ b/website/static/js/projectSettingsTreebeardBase.js
@@ -7,7 +7,6 @@
 
 var m = require('mithril');
 var Fangorn = require('js/fangorn').Fangorn;
-var lodashGet = require('lodash.get');
 
 
 function resolveToggle(item) {
@@ -46,7 +45,7 @@ function getNodesOriginal(nodeTree, nodesOriginal) {
             visibleContributors.push(nodeTree.node.contributors[i].id);
         }
     }
-    var nodeInstitutions = lodashGet(nodeTree.node, 'affiliated_institutions', []);
+    var nodeInstitutions = nodeTree.node.affiliated_institutions || [];
 
     nodeInstitutions = nodeInstitutions.map(function(item) {
         return item.id;

From edd4340f5e33908340b77a9ae40666d9227bffca Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Thu, 26 Oct 2017 13:20:59 -0500
Subject: [PATCH 078/192] Modify incorrect test. Preprint_unpublished had the
 wrong project.

---
 api_tests/nodes/views/test_node_preprints.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/api_tests/nodes/views/test_node_preprints.py b/api_tests/nodes/views/test_node_preprints.py
index 0b156d143ba..ec23451d2e7 100644
--- a/api_tests/nodes/views/test_node_preprints.py
+++ b/api_tests/nodes/views/test_node_preprints.py
@@ -84,13 +84,14 @@ def url(self, project_published):
         return '/{}nodes/{}/preprints/?version=2.2&'.format(API_BASE, project_published._id)
 
     @pytest.fixture()
-    def preprint_unpublished(self, user_admin_contrib, provider_one, project_public, subject):
-        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False)
+    def preprint_unpublished(self, user_admin_contrib, provider_one, project_published, subject):
+        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_published, is_published=False)
 
     def test_unpublished_visible_to_admins(self, app, user_admin_contrib, preprint_unpublished, preprint_published, url):
         res = app.get(url, auth=user_admin_contrib.auth)
         assert len(res.json['data']) == 2
         assert preprint_unpublished._id in [d['id'] for d in res.json['data']]
+        assert preprint_published._id in [d['id'] for d in res.json['data']]
 
     def test_unpublished_invisible_to_write_contribs(self, app, user_write_contrib, preprint_unpublished, preprint_published, url):
         res = app.get(url, auth=user_write_contrib.auth)

From 21dcc3c8b0be48f6bbcf7c4b8984b9fcb1ae8fa9 Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Wed, 25 Oct 2017 15:00:16 -0400
Subject: [PATCH 079/192] Automatically expand first level children and addons.

---
 website/static/js/fangorn.js | 52 +++++++++++++++++++++++-------------
 website/util/rubeus.py       |  5 ++--
 2 files changed, 37 insertions(+), 20 deletions(-)

diff --git a/website/static/js/fangorn.js b/website/static/js/fangorn.js
index 8a2b02002d9..2eebce2afab 100644
--- a/website/static/js/fangorn.js
+++ b/website/static/js/fangorn.js
@@ -1588,6 +1588,7 @@ function _loadTopLevelChildren() {
  * @this Treebeard.controller
  * @private
  */
+var NO_AUTO_EXPAND_PROJECTS = ['ezcuj', 'ecmz4', 'w4wvg', 'sn64d'];
 function expandStateLoad(item) {
     var tb = this,
         icon = $('.tb-row[data-id="' + item.id + '"]').find('.tb-toggle-icon'),
@@ -1595,31 +1596,46 @@ function expandStateLoad(item) {
         addonList = [],
         i;
 
-    if (item.depth > 1 && !item.data.isAddonRoot && item.children.length === 0 && item.open) {
-        m.render(icon.get(0), tbOptions.resolveRefreshIcon());
+    if (item.children.length > 0 && item.depth === 1) {
+        // NOTE: On the RPP and a few select projects *only*: Load the top-level project's OSF Storage
+        // but do NOT lazy-load children in order to save hundreds of requests.
+        // TODO: We might want to do this for every project, but that's TBD.
+        // /sloria
+        if (window.contextVars && window.contextVars.node && NO_AUTO_EXPAND_PROJECTS.indexOf(window.contextVars.node.id) > -1) {
+            var osfsItems = item.children.filter(function(child) { return child.data.isAddonRoot && child.data.provider === 'osfstorage'; });
+            if (osfsItems.length) {
+                var osfsItem = osfsItems[0];
+                tb.updateFolder(null, osfsItem);
+            }
+        } else {
+            for (i = 0; i < item.children.length; i++) {
+                tb.updateFolder(null, item.children[i]);
+            }
+        }
+    }
+
+    if (item.children.length > 0 && item.depth === 2) {
+        for (i = 0; i < item.children.length; i++) {
+            if (item.children[i].data.isAddonRoot || item.children[i].data.addonFullName === 'OSF Storage' ) {
+                tb.updateFolder(null, item.children[i]);
+            }
+        }
+    }
+
+    if (item.depth > 2 && !item.data.isAddonRoot && item.children.length === 0 && item.open) {
+        if (icon.get(0)) {
+            m.render(icon.get(0), tbOptions.resolveRefreshIcon());
+        }
         $osf.ajaxJSON(
             'GET',
             '/api/v1/project/' + item.data.nodeID + '/files/grid/'
         ).done(function(xhr) {
             var data = xhr.data[0].children;
-            for (i = 0; i < data.length; i++) {
-                var child = tb.buildTree(data[i], item);
-                if (child.data.isAddonRoot) {
-                    addonList.push(child);
-                }
-                item.add(child);
-            }
-
-            item.open = true;
+            tb.updateFolder(data, item);
             tb.redraw();
-
-            for (i=0; i < addonList.length; ++i) {
-                tb.toggleFolder(tb.returnIndex(addonList[i].id));
+            if (icon.get(0)) {
+                m.render(icon.get(0), tbOptions.resolveToggle(item));
             }
-
-            m.render(icon.get(0), tbOptions.resolveToggle(item));
-            tb.redraw();
-
         }).fail(function(xhr) {
             item.notify.update('Unable to retrieve components.', 'danger', undefined, 3000);
             item.open = false;
diff --git a/website/util/rubeus.py b/website/util/rubeus.py
index be600692a77..21f092e362a 100644
--- a/website/util/rubeus.py
+++ b/website/util/rubeus.py
@@ -11,7 +11,6 @@
 from framework.auth.decorators import Auth
 
 from django.apps import apps
-from django.db import connection
 from django.db.models import Exists, OuterRef
 
 from website import settings
@@ -165,7 +164,6 @@ def to_hgrid(self):
         """Return the Rubeus.JS representation of the node's file data, including
         addons and components
         """
-        print(len(connection.queries))
         root = self._get_nodes(self.node)
         return [root]
 
@@ -190,6 +188,9 @@ def _serialize_node(self, node, parent=None, children=[]):
         can_view = node.can_view(auth=self.auth)
         can_edit = node.has_write_perm if hasattr(node, 'has_write_perm') else node.can_edit(auth=self.auth)
 
+        if parent and parent.root.title == parent.title:
+            children = self._get_nodes(node)['children']
+
         return {
             # TODO: Remove safe_unescape_html when mako html safe comes in
             'name': self._get_node_name(node, can_view, is_pointer),

From a953b8b70888aa6ae84bdcd4646a9b124846bedc Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Fri, 27 Oct 2017 15:05:35 -0400
Subject: [PATCH 080/192] Update tests to match updated rubeus functionality.

---
 tests/test_rubeus.py | 30 ++++++++++++++----------------
 1 file changed, 14 insertions(+), 16 deletions(-)

diff --git a/tests/test_rubeus.py b/tests/test_rubeus.py
index 3319bc11118..e55e543f9d6 100644
--- a/tests/test_rubeus.py
+++ b/tests/test_rubeus.py
@@ -256,53 +256,51 @@ def test_get_node_name(self):
         public_project = ProjectFactory(is_public=True)
         collector = rubeus.NodeFileCollector(node=public_project, auth=another_auth)
         node_name =  sanitize.unescape_entities(public_project.title)
-        assert_equal(collector._get_node_name(public_project), node_name)
+        assert_equal(collector._serialize_node(public_project)['name'], node_name)
 
         # Private  (Can't View)
         registration_private = RegistrationFactory(creator=user)
         registration_private.is_public = False
         registration_private.save()
         collector = rubeus.NodeFileCollector(node=registration_private, auth=another_auth)
-        assert_equal(collector._get_node_name(registration_private), u'Private Registration')
+        assert_equal(collector._serialize_node(registration_private)['name'], u'Private Registration')
 
         content = ProjectFactory(creator=user)
-        node = ProjectFactory(creator=user)
+        node = ProjectFactory(creator=user, is_public=True)
 
         forked_private = node.fork_node(auth=auth)
         forked_private.is_public = False
         forked_private.save()
         collector = rubeus.NodeFileCollector(node=forked_private, auth=another_auth)
-        assert_equal(collector._get_node_name(forked_private), u'Private Fork')
+        assert_equal(collector._serialize_node(forked_private)['name'], u'Private Fork')
 
-        pointer_private = node.add_pointer(content, auth=auth)
-        pointer_private.is_public = False
-        pointer_private.save()
-        collector = rubeus.NodeFileCollector(node=pointer_private, auth=another_auth)
-        assert_equal(collector._get_node_name(pointer_private), u'Private Link')
+        node.add_pointer(content, auth=auth)
+        collector = rubeus.NodeFileCollector(node=node, auth=another_auth)
+        assert_equal(collector._get_nodes(node)['children'][1]['name'], u'Private Link')
 
         private_project = ProjectFactory(is_public=False)
         collector = rubeus.NodeFileCollector(node=private_project, auth=another_auth)
-        assert_equal(collector._get_node_name(private_project), u'Private Component')
+        assert_equal(collector._serialize_node(private_project)['name'], u'Private Component')
 
         private_node = NodeFactory(is_public=False)
         collector = rubeus.NodeFileCollector(node=private_node, auth=another_auth)
-        assert_equal(collector._get_node_name(private_node), u'Private Component')
+        assert_equal(collector._serialize_node(private_node)['name'], u'Private Component')
 
-    def test_collect_components_deleted(self):
+    def test_get_nodes_deleted_component(self):
         node = NodeFactory(creator=self.project.creator, parent=self.project)
         node.is_deleted = True
         collector = rubeus.NodeFileCollector(
             self.project, Auth(user=UserFactory())
         )
-        nodes = collector._collect_components(self.project, visited=[])
-        assert_equal(len(nodes), 0)
+        nodes = collector._get_nodes(self.project)
+        assert_equal(len(nodes['children']), 0)
 
     def test_serialized_pointer_has_flag_indicating_its_a_pointer(self):
         project = ProjectFactory(creator=self.consolidated_auth.user)
         pointed_project = ProjectFactory(is_public=True)
         project.add_pointer(pointed_project, auth=self.consolidated_auth)
         serializer = rubeus.NodeFileCollector(node=project, auth=self.consolidated_auth)
-        ret = serializer._serialize_node(project)
+        ret = serializer._get_nodes(project)
         child = ret['children'][1]  # first child is OSFStorage, second child is pointer
         assert_true(child['isPointer'])
 
@@ -367,7 +365,7 @@ def test_sort_by_name_none(self):
         assert_equal(ret, sorted_files)
 
     def test_serialize_node(self):
-        ret = self.serializer._serialize_node(self.project)
+        ret = self.serializer._get_nodes(self.project)
         assert_equal(
             len(ret['children']),
             len(self.project.get_addons.return_value) + len(list(self.project.nodes))

From f16dfa1837c7f3274df944bec231cfc3d41c88f2 Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Thu, 2 Nov 2017 13:24:39 -0400
Subject: [PATCH 081/192] Bump django version

---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 6486ff8bae4..514c5df6a4f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -61,7 +61,7 @@ requests-oauthlib==0.5.0
 raven==5.32.0
 
 # API requirements
-Django==1.11.4
+Django==1.11.7
 djangorestframework==3.6.3
 django-cors-headers==1.3.1
 djangorestframework-bulk==0.2.1

From 5d1a2eb16117b7955d501d0678cf6a2ae7ac1a5e Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Fri, 3 Nov 2017 10:33:26 -0400
Subject: [PATCH 082/192] Add `on_delete` arg to all ForeignKey and OneToOne
 fields.

- `on_delete` will be required in Django 2.0
---
 addons/base/models.py           |  9 ++++++---
 addons/bitbucket/models.py      |  2 +-
 addons/box/models.py            |  2 +-
 addons/dataverse/models.py      |  2 +-
 addons/dropbox/models.py        |  2 +-
 addons/figshare/models.py       |  2 +-
 addons/github/models.py         |  2 +-
 addons/googledrive/models.py    |  2 +-
 addons/mendeley/models.py       |  2 +-
 addons/osfstorage/models.py     |  2 +-
 addons/owncloud/models.py       |  2 +-
 addons/s3/models.py             |  2 +-
 addons/wiki/models.py           |  4 ++--
 addons/zotero/models.py         |  2 +-
 osf/models/action.py            |  4 ++--
 osf/models/admin_profile.py     |  3 ++-
 osf/models/archive.py           |  8 +++++---
 osf/models/base.py              |  2 +-
 osf/models/comment.py           |  4 ++--
 osf/models/contributor.py       | 10 +++++-----
 osf/models/files.py             | 12 ++++++------
 osf/models/identifiers.py       |  2 +-
 osf/models/node_relation.py     |  4 ++--
 osf/models/nodelog.py           |  8 +++++---
 osf/models/notifications.py     |  8 +++++---
 osf/models/preprint_provider.py |  3 ++-
 osf/models/queued_mail.py       |  2 +-
 osf/models/registrations.py     | 22 ++++++++++++----------
 osf/models/sanctions.py         | 10 +++++-----
 29 files changed, 76 insertions(+), 63 deletions(-)

diff --git a/addons/base/models.py b/addons/base/models.py
index d3c3961150e..9043994e634 100644
--- a/addons/base/models.py
+++ b/addons/base/models.py
@@ -84,7 +84,8 @@ def on_delete(self):
 
 
 class BaseUserSettings(BaseAddonSettings):
-    owner = models.OneToOneField(OSFUser, blank=True, null=True, related_name='%(app_label)s_user_settings')
+    owner = models.OneToOneField(OSFUser, related_name='%(app_label)s_user_settings',
+                                 blank=True, null=True, on_delete=models.CASCADE)
 
     class Meta:
         abstract = True
@@ -347,7 +348,8 @@ def on_delete(self):
 
 
 class BaseNodeSettings(BaseAddonSettings):
-    owner = models.OneToOneField(AbstractNode, null=True, blank=True, related_name='%(app_label)s_node_settings')
+    owner = models.OneToOneField(AbstractNode, related_name='%(app_label)s_node_settings',
+                                 null=True, blank=True, on_delete=models.CASCADE)
 
     class Meta:
         abstract = True
@@ -607,7 +609,8 @@ class BaseOAuthNodeSettings(BaseNodeSettings):
     # TODO: Validate this field to be sure it matches the provider's short_name
     # NOTE: Do not set this field directly. Use ``set_auth()``
     external_account = models.ForeignKey(ExternalAccount, null=True, blank=True,
-                                         related_name='%(app_label)s_node_settings')
+                                         related_name='%(app_label)s_node_settings',
+                                         on_delete=models.CASCADE)
 
     # NOTE: Do not set this field directly. Use ``set_auth()``
     # user_settings = fields.AbstractForeignField()
diff --git a/addons/bitbucket/models.py b/addons/bitbucket/models.py
index ec43912d8d9..f8d266a7d9f 100644
--- a/addons/bitbucket/models.py
+++ b/addons/bitbucket/models.py
@@ -108,7 +108,7 @@ class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
     user = models.TextField(blank=True, null=True)
     repo = models.TextField(blank=True, null=True)
     hook_id = models.TextField(blank=True, null=True)
-    user_settings = models.ForeignKey(UserSettings, null=True, blank=True)
+    user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
 
     _api = None
 
diff --git a/addons/box/models.py b/addons/box/models.py
index 1c07880ded3..0e805acb9bc 100644
--- a/addons/box/models.py
+++ b/addons/box/models.py
@@ -99,7 +99,7 @@ class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
     folder_id = models.TextField(null=True, blank=True)
     folder_name = models.TextField(null=True, blank=True)
     folder_path = models.TextField(null=True, blank=True)
-    user_settings = models.ForeignKey(UserSettings, null=True, blank=True)
+    user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
 
     _api = None
 
diff --git a/addons/dataverse/models.py b/addons/dataverse/models.py
index 632dd2f31ed..0e09196634f 100644
--- a/addons/dataverse/models.py
+++ b/addons/dataverse/models.py
@@ -83,7 +83,7 @@ class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
     dataset_doi = models.TextField(blank=True, null=True)
     _dataset_id = models.TextField(blank=True, null=True)
     dataset = models.TextField(blank=True, null=True)
-    user_settings = models.ForeignKey(UserSettings, null=True, blank=True)
+    user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
 
     @property
     def folder_name(self):
diff --git a/addons/dropbox/models.py b/addons/dropbox/models.py
index 3e3b0df656c..b88a9837573 100644
--- a/addons/dropbox/models.py
+++ b/addons/dropbox/models.py
@@ -126,7 +126,7 @@ class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
     serializer = DropboxSerializer
 
     folder = models.TextField(null=True, blank=True)
-    user_settings = models.ForeignKey(UserSettings, null=True, blank=True)
+    user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
 
     _api = None
 
diff --git a/addons/figshare/models.py b/addons/figshare/models.py
index b9c990d5988..dfc465bff4d 100644
--- a/addons/figshare/models.py
+++ b/addons/figshare/models.py
@@ -99,7 +99,7 @@ class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
     folder_id = models.TextField(blank=True, null=True)
     folder_name = models.TextField(blank=True, null=True)
     folder_path = models.TextField(blank=True, null=True)
-    user_settings = models.ForeignKey(UserSettings, null=True, blank=True)
+    user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
 
     _api = None
 
diff --git a/addons/github/models.py b/addons/github/models.py
index 269fdb7a4de..43320a52c11 100644
--- a/addons/github/models.py
+++ b/addons/github/models.py
@@ -104,7 +104,7 @@ class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
     hook_id = models.TextField(blank=True, null=True)
     hook_secret = models.TextField(blank=True, null=True)
     registration_data = DateTimeAwareJSONField(default=dict, blank=True, null=True)
-    user_settings = models.ForeignKey(UserSettings, null=True, blank=True)
+    user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
 
     @property
     def folder_id(self):
diff --git a/addons/googledrive/models.py b/addons/googledrive/models.py
index 10760fccca9..fba67a85c32 100644
--- a/addons/googledrive/models.py
+++ b/addons/googledrive/models.py
@@ -81,7 +81,7 @@ class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
     folder_id = models.TextField(null=True, blank=True)
     folder_path = models.TextField(null=True, blank=True)
     serializer = GoogleDriveSerializer
-    user_settings = models.ForeignKey(UserSettings, null=True, blank=True)
+    user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
 
     _api = None
 
diff --git a/addons/mendeley/models.py b/addons/mendeley/models.py
index a90d83a0233..bdc9206fec1 100644
--- a/addons/mendeley/models.py
+++ b/addons/mendeley/models.py
@@ -258,7 +258,7 @@ class NodeSettings(BaseCitationsNodeSettings):
     provider_name = 'mendeley'
     oauth_provider = Mendeley
     serializer = MendeleySerializer
-    user_settings = models.ForeignKey(UserSettings, null=True, blank=True)
+    user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
 
     list_id = models.TextField(blank=True, null=True)
     _api = None
diff --git a/addons/osfstorage/models.py b/addons/osfstorage/models.py
index e047fe40ecb..0f4777a2b34 100644
--- a/addons/osfstorage/models.py
+++ b/addons/osfstorage/models.py
@@ -386,7 +386,7 @@ class NodeSettings(BaseStorageAddon, BaseNodeSettings):
     complete = True
     has_auth = True
 
-    root_node = models.ForeignKey(OsfStorageFolder, null=True, blank=True)
+    root_node = models.ForeignKey(OsfStorageFolder, null=True, blank=True, on_delete=models.CASCADE)
 
     @property
     def folder_name(self):
diff --git a/addons/owncloud/models.py b/addons/owncloud/models.py
index 84209e1a783..62a0922f597 100644
--- a/addons/owncloud/models.py
+++ b/addons/owncloud/models.py
@@ -61,7 +61,7 @@ class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
     serializer = OwnCloudSerializer
 
     folder_id = models.TextField(blank=True, null=True)
-    user_settings = models.ForeignKey(UserSettings, null=True, blank=True)
+    user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
 
     _api = None
 
diff --git a/addons/s3/models.py b/addons/s3/models.py
index ccf8f9bc4a2..0f8debc0da9 100644
--- a/addons/s3/models.py
+++ b/addons/s3/models.py
@@ -38,7 +38,7 @@ class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
     folder_id = models.TextField(blank=True, null=True)
     folder_name = models.TextField(blank=True, null=True)
     encrypt_uploads = models.BooleanField(default=ENCRYPT_UPLOADS_DEFAULT)
-    user_settings = models.ForeignKey(UserSettings, null=True, blank=True)
+    user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
 
     @property
     def folder_path(self):
diff --git a/addons/wiki/models.py b/addons/wiki/models.py
index 34d40699afb..294820b69d4 100644
--- a/addons/wiki/models.py
+++ b/addons/wiki/models.py
@@ -82,8 +82,8 @@ class NodeWikiPage(GuidMixin, BaseModel):
     version = models.IntegerField(default=1)
     date = NonNaiveDateTimeField(auto_now_add=True)
     content = models.TextField(default='', blank=True)
-    user = models.ForeignKey('osf.OSFUser', null=True, blank=True)
-    node = models.ForeignKey('osf.AbstractNode', null=True, blank=True)
+    user = models.ForeignKey('osf.OSFUser', null=True, blank=True, on_delete=models.CASCADE)
+    node = models.ForeignKey('osf.AbstractNode', null=True, blank=True, on_delete=models.CASCADE)
 
     @property
     def is_current(self):
diff --git a/addons/zotero/models.py b/addons/zotero/models.py
index cf19f249b21..c1336d22c06 100644
--- a/addons/zotero/models.py
+++ b/addons/zotero/models.py
@@ -108,7 +108,7 @@ class NodeSettings(BaseCitationsNodeSettings):
     provider_name = 'zotero'
     oauth_provider = Zotero
     serializer = ZoteroSerializer
-    user_settings = models.ForeignKey(UserSettings, null=True, blank=True)
+    user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
 
     list_id = models.TextField(blank=True, null=True)
     _api = None
diff --git a/osf/models/action.py b/osf/models/action.py
index 3012b3f7542..f3c712bf93d 100644
--- a/osf/models/action.py
+++ b/osf/models/action.py
@@ -16,8 +16,8 @@ class Action(ObjectIDMixin, BaseModel):
 
     objects = IncludeManager()
 
-    target = models.ForeignKey('PreprintService', related_name='actions')
-    creator = models.ForeignKey('OSFUser', related_name='+')
+    target = models.ForeignKey('PreprintService', related_name='actions', on_delete=models.CASCADE)
+    creator = models.ForeignKey('OSFUser', related_name='+', on_delete=models.CASCADE)
 
     trigger = models.CharField(max_length=31, choices=Triggers.choices())
     from_state = models.CharField(max_length=31, choices=States.choices())
diff --git a/osf/models/admin_profile.py b/osf/models/admin_profile.py
index 1ce9c2c5123..40df55e161e 100644
--- a/osf/models/admin_profile.py
+++ b/osf/models/admin_profile.py
@@ -4,7 +4,8 @@
 class AdminProfile(models.Model):
     primary_identifier_name = 'id'
 
-    user = models.OneToOneField('osf.OSFUser', related_name='admin_profile')
+    user = models.OneToOneField('osf.OSFUser', related_name='admin_profile',
+                                on_delete=models.CASCADE)
 
     desk_token = models.CharField(max_length=45, blank=True)
     desk_token_secret = models.CharField(max_length=45, blank=True)
diff --git a/osf/models/archive.py b/osf/models/archive.py
index 802d32a9564..5ab0b252bce 100644
--- a/osf/models/archive.py
+++ b/osf/models/archive.py
@@ -53,9 +53,11 @@ class ArchiveJob(ObjectIDMixin, BaseModel):
     datetime_initiated = NonNaiveDateTimeField(default=timezone.now, verbose_name='initiated at')
 
     dst_node = models.ForeignKey('Registration', related_name='archive_jobs',
-                                 verbose_name='destination node', null=True, blank=True)
-    src_node = models.ForeignKey('Node', verbose_name='source node', null=True, blank=True)
-    initiator = models.ForeignKey('OSFUser', null=True)
+                                 verbose_name='destination node', null=True,
+                                 blank=True, on_delete=models.CASCADE)
+    src_node = models.ForeignKey('Node', verbose_name='source node', null=True,
+                                 blank=True, on_delete=models.CASCADE)
+    initiator = models.ForeignKey('OSFUser', null=True, on_delete=models.CASCADE)
 
     target_addons = models.ManyToManyField('ArchiveTarget')
 
diff --git a/osf/models/base.py b/osf/models/base.py
index 7d6e7d2caa4..9511322279f 100644
--- a/osf/models/base.py
+++ b/osf/models/base.py
@@ -167,7 +167,7 @@ class Guid(BaseModel):
     _id = LowercaseCharField(max_length=255, null=False, blank=False, default=generate_guid, db_index=True,
                            unique=True)
     referent = GenericForeignKey()
-    content_type = models.ForeignKey(ContentType, null=True, blank=True)
+    content_type = models.ForeignKey(ContentType, null=True, blank=True, on_delete=models.CASCADE)
     object_id = models.PositiveIntegerField(null=True, blank=True)
     created = NonNaiveDateTimeField(db_index=True, auto_now_add=True)
 
diff --git a/osf/models/comment.py b/osf/models/comment.py
index e0a1ded70c4..bc5e5801875 100644
--- a/osf/models/comment.py
+++ b/osf/models/comment.py
@@ -24,9 +24,9 @@ class Comment(GuidMixin, SpamMixin, CommentableMixin, BaseModel):
     FILES = 'files'
     WIKI = 'wiki'
 
-    user = models.ForeignKey('OSFUser', null=True)
+    user = models.ForeignKey('OSFUser', null=True, on_delete=models.CASCADE)
     # the node that the comment belongs to
-    node = models.ForeignKey('AbstractNode', null=True)
+    node = models.ForeignKey('AbstractNode', null=True, on_delete=models.CASCADE)
 
     # The file or project overview page that the comment is for
     root_target = models.ForeignKey(Guid, on_delete=models.SET_NULL,
diff --git a/osf/models/contributor.py b/osf/models/contributor.py
index 310bef07305..ff00ac35bec 100644
--- a/osf/models/contributor.py
+++ b/osf/models/contributor.py
@@ -18,7 +18,7 @@ class AbstractBaseContributor(models.Model):
     write = models.BooleanField(default=False)
     admin = models.BooleanField(default=False)
     visible = models.BooleanField(default=False)
-    user = models.ForeignKey('OSFUser')
+    user = models.ForeignKey('OSFUser', on_delete=models.CASCADE)
 
     def __repr__(self):
         return ('<{self.__class__.__name__}(user={self.user}, '
@@ -42,7 +42,7 @@ def permission(self):
         return 'read'
 
 class Contributor(AbstractBaseContributor):
-    node = models.ForeignKey('AbstractNode')
+    node = models.ForeignKey('AbstractNode', on_delete=models.CASCADE)
 
     @property
     def _id(self):
@@ -55,14 +55,14 @@ class Meta:
         order_with_respect_to = 'node'
 
 class InstitutionalContributor(AbstractBaseContributor):
-    institution = models.ForeignKey('Institution')
+    institution = models.ForeignKey('Institution', on_delete=models.CASCADE)
 
     class Meta:
         unique_together = ('user', 'institution')
 
 class RecentlyAddedContributor(models.Model):
-    user = models.ForeignKey('OSFUser')  # the user who added the contributor
-    contributor = models.ForeignKey('OSFUser', related_name='recently_added_by')  # the added contributor
+    user = models.ForeignKey('OSFUser', on_delete=models.CASCADE)  # the user who added the contributor
+    contributor = models.ForeignKey('OSFUser', related_name='recently_added_by', on_delete=models.CASCADE)  # the added contributor
     date_added = NonNaiveDateTimeField(auto_now=True)
 
     class Meta:
diff --git a/osf/models/files.py b/osf/models/files.py
index b480d4e89fd..df69c93a4f3 100644
--- a/osf/models/files.py
+++ b/osf/models/files.py
@@ -81,7 +81,7 @@ class BaseFileNode(TypedModel, CommentableMixin, OptionalGuidMixin, Taggable, Ob
 
     # The User that has this file "checked out"
     # Should only be used for OsfStorage
-    checkout = models.ForeignKey('osf.OSFUser', blank=True, null=True)
+    checkout = models.ForeignKey('osf.OSFUser', blank=True, null=True, on_delete=models.CASCADE)
     # The last time the touch method was called on this FileNode
     last_touched = NonNaiveDateTimeField(null=True, blank=True)
     # A list of dictionaries sorted by the 'modified' key
@@ -91,9 +91,9 @@ class BaseFileNode(TypedModel, CommentableMixin, OptionalGuidMixin, Taggable, Ob
     # A concrete version of a FileNode, must have an identifier
     versions = models.ManyToManyField('FileVersion')
 
-    node = models.ForeignKey('osf.AbstractNode', blank=True, null=True, related_name='files')
-    parent = models.ForeignKey('self', blank=True, null=True, default=None, related_name='_children')
-    copied_from = models.ForeignKey('self', blank=True, null=True, default=None, related_name='copy_of')
+    node = models.ForeignKey('osf.AbstractNode', blank=True, null=True, related_name='files', on_delete=models.CASCADE)
+    parent = models.ForeignKey('self', blank=True, null=True, default=None, related_name='_children', on_delete=models.CASCADE)
+    copied_from = models.ForeignKey('self', blank=True, null=True, default=None, related_name='copy_of', on_delete=models.CASCADE)
 
     provider = models.CharField(max_length=25, blank=False, null=False, db_index=True)
 
@@ -103,7 +103,7 @@ class BaseFileNode(TypedModel, CommentableMixin, OptionalGuidMixin, Taggable, Ob
 
     is_deleted = False
     deleted_on = NonNaiveDateTimeField(blank=True, null=True)
-    deleted_by = models.ForeignKey('osf.OSFUser', related_name='files_deleted_by', null=True, blank=True)
+    deleted_by = models.ForeignKey('osf.OSFUser', related_name='files_deleted_by', null=True, blank=True, on_delete=models.CASCADE)
 
     objects = BaseFileNodeManager()
     active = ActiveFileNodeManager()
@@ -617,7 +617,7 @@ class FileVersion(ObjectIDMixin, BaseModel):
     about where the file is located, hashes and datetimes
     """
 
-    creator = models.ForeignKey('OSFUser', null=True, blank=True)
+    creator = models.ForeignKey('OSFUser', null=True, blank=True, on_delete=models.CASCADE)
 
     identifier = models.CharField(max_length=100, blank=False, null=False)  # max length on staging was 51
 
diff --git a/osf/models/identifiers.py b/osf/models/identifiers.py
index 0b79e0e31c8..95eaa1b0a25 100644
--- a/osf/models/identifiers.py
+++ b/osf/models/identifiers.py
@@ -9,7 +9,7 @@ class Identifier(ObjectIDMixin, BaseModel):
 
     # object to which the identifier points
     object_id = models.PositiveIntegerField(null=True, blank=True)
-    content_type = models.ForeignKey(ContentType, null=True, blank=True)
+    content_type = models.ForeignKey(ContentType, null=True, blank=True, on_delete=models.CASCADE)
     referent = GenericForeignKey()
     # category: e.g. 'ark', 'doi'
     category = models.CharField(max_length=10)  # longest was 3, 8/19/2016
diff --git a/osf/models/node_relation.py b/osf/models/node_relation.py
index 4a564fd3164..1ed0b919a42 100644
--- a/osf/models/node_relation.py
+++ b/osf/models/node_relation.py
@@ -4,8 +4,8 @@
 
 
 class NodeRelation(ObjectIDMixin, BaseModel):
-    parent = models.ForeignKey('AbstractNode', related_name='node_relations')
-    child = models.ForeignKey('AbstractNode', related_name='_parents')
+    parent = models.ForeignKey('AbstractNode', related_name='node_relations', on_delete=models.CASCADE)
+    child = models.ForeignKey('AbstractNode', related_name='_parents', on_delete=models.CASCADE)
     is_node_link = models.BooleanField(default=False, db_index=True)
 
     def __unicode__(self):
diff --git a/osf/models/nodelog.py b/osf/models/nodelog.py
index a7e8385a57f..f27f8c86bef 100644
--- a/osf/models/nodelog.py
+++ b/osf/models/nodelog.py
@@ -145,11 +145,13 @@ class NodeLog(ObjectIDMixin, BaseModel):
     action = models.CharField(max_length=255, db_index=True)  # , choices=action_choices)
     params = DateTimeAwareJSONField(default=dict)
     should_hide = models.BooleanField(default=False)
-    user = models.ForeignKey('OSFUser', related_name='logs', db_index=True, null=True, blank=True)
+    user = models.ForeignKey('OSFUser', related_name='logs', db_index=True,
+                             null=True, blank=True, on_delete=models.CASCADE)
     foreign_user = models.CharField(max_length=255, null=True, blank=True)
     node = models.ForeignKey('AbstractNode', related_name='logs',
-                             db_index=True, null=True, blank=True)
-    original_node = models.ForeignKey('AbstractNode', db_index=True, null=True, blank=True)
+                             db_index=True, null=True, blank=True, on_delete=models.CASCADE)
+    original_node = models.ForeignKey('AbstractNode', db_index=True,
+                                      null=True, blank=True, on_delete=models.CASCADE)
 
     def __unicode__(self):
         return ('({self.action!r}, user={self.user!r},, node={self.node!r}, params={self.params!r}) '
diff --git a/osf/models/notifications.py b/osf/models/notifications.py
index ec1a230fb71..fbaca680d6f 100644
--- a/osf/models/notifications.py
+++ b/osf/models/notifications.py
@@ -14,8 +14,10 @@ class NotificationSubscription(BaseModel):
 
     event_name = models.CharField(max_length=50)  # wiki_updated, comment_replies
 
-    user = models.ForeignKey('OSFUser', null=True, related_name='notification_subscriptions', blank=True)
-    node = models.ForeignKey('Node', null=True, blank=True, related_name='notification_subscriptions')
+    user = models.ForeignKey('OSFUser', related_name='notification_subscriptions',
+                             null=True, blank=True, on_delete=models.CASCADE)
+    node = models.ForeignKey('Node', related_name='notification_subscriptions',
+                             null=True, blank=True, on_delete=models.CASCADE)
 
     # Notification types
     none = models.ManyToManyField('OSFUser', related_name='+')  # reverse relationships
@@ -82,7 +84,7 @@ def remove_user_from_subscription(self, user, save=True):
             self.save()
 
 class NotificationDigest(ObjectIDMixin, BaseModel):
-    user = models.ForeignKey('OSFUser', null=True, blank=True)
+    user = models.ForeignKey('OSFUser', null=True, blank=True, on_delete=models.CASCADE)
     timestamp = NonNaiveDateTimeField()
     send_type = models.CharField(max_length=50, db_index=True, validators=[validate_subscription_type, ])
     event = models.CharField(max_length=50)
diff --git a/osf/models/preprint_provider.py b/osf/models/preprint_provider.py
index d4cb02fbe81..e6ae473ea7f 100644
--- a/osf/models/preprint_provider.py
+++ b/osf/models/preprint_provider.py
@@ -58,7 +58,8 @@ class PreprintProvider(ObjectIDMixin, ReviewProviderMixin, BaseModel):
 
     subjects_acceptable = DateTimeAwareJSONField(blank=True, default=list)
     licenses_acceptable = models.ManyToManyField(NodeLicense, blank=True, related_name='licenses_acceptable')
-    default_license = models.ForeignKey(NodeLicense, blank=True, related_name='default_license', null=True)
+    default_license = models.ForeignKey(NodeLicense, related_name='default_license',
+                                        null=True, blank=True, on_delete=models.CASCADE)
 
     class Meta:
         permissions = tuple(reviews_permissions.PERMISSIONS.items()) + (
diff --git a/osf/models/queued_mail.py b/osf/models/queued_mail.py
index de89891f4ef..f067894d25b 100644
--- a/osf/models/queued_mail.py
+++ b/osf/models/queued_mail.py
@@ -11,7 +11,7 @@
 
 
 class QueuedMail(ObjectIDMixin, BaseModel):
-    user = models.ForeignKey('OSFUser', db_index=True, null=True)
+    user = models.ForeignKey('OSFUser', db_index=True, null=True, on_delete=models.CASCADE)
     to_addr = models.CharField(max_length=255)
     send_at = NonNaiveDateTimeField(db_index=True, null=False)
 
diff --git a/osf/models/registrations.py b/osf/models/registrations.py
index bf607ba7333..497e052da9a 100644
--- a/osf/models/registrations.py
+++ b/osf/models/registrations.py
@@ -39,9 +39,9 @@ class Registration(AbstractNode):
 
     registered_meta = DateTimeAwareJSONField(default=dict, blank=True)
     # TODO Add back in once dependencies are resolved
-    registration_approval = models.ForeignKey(RegistrationApproval, null=True, blank=True)
-    retraction = models.ForeignKey(Retraction, null=True, blank=True)
-    embargo = models.ForeignKey(Embargo, null=True, blank=True)
+    registration_approval = models.ForeignKey(RegistrationApproval, null=True, blank=True, on_delete=models.CASCADE)
+    retraction = models.ForeignKey(Retraction, null=True, blank=True, on_delete=models.CASCADE)
+    embargo = models.ForeignKey(Embargo, null=True, blank=True, on_delete=models.CASCADE)
 
     registered_from = models.ForeignKey('self',
                                         related_name='registrations',
@@ -367,8 +367,9 @@ class DraftRegistrationLog(ObjectIDMixin, BaseModel):
     """
     date = NonNaiveDateTimeField(default=timezone.now)
     action = models.CharField(max_length=255)
-    draft = models.ForeignKey('DraftRegistration', related_name='logs', null=True, blank=True)
-    user = models.ForeignKey('OSFUser', null=True)
+    draft = models.ForeignKey('DraftRegistration', related_name='logs',
+                              null=True, blank=True, on_delete=models.CASCADE)
+    user = models.ForeignKey('OSFUser', null=True, on_delete=models.CASCADE)
 
     SUBMITTED = 'submitted'
     REGISTERED = 'registered'
@@ -387,9 +388,10 @@ class DraftRegistration(ObjectIDMixin, BaseModel):
     datetime_initiated = NonNaiveDateTimeField(auto_now_add=True)
     datetime_updated = NonNaiveDateTimeField(auto_now=True)
     # Original Node a draft registration is associated with
-    branched_from = models.ForeignKey('Node', null=True, related_name='registered_draft')
+    branched_from = models.ForeignKey('Node', related_name='registered_draft',
+                                      null=True, on_delete=models.CASCADE)
 
-    initiator = models.ForeignKey('OSFUser', null=True)
+    initiator = models.ForeignKey('OSFUser', null=True, on_delete=models.CASCADE)
 
     # Dictionary field mapping question id to a question's comments and answer
     # {
@@ -406,11 +408,11 @@ class DraftRegistration(ObjectIDMixin, BaseModel):
     #   }
     # }
     registration_metadata = DateTimeAwareJSONField(default=dict, blank=True)
-    registration_schema = models.ForeignKey('MetaSchema', null=True)
+    registration_schema = models.ForeignKey('MetaSchema', null=True, on_delete=models.CASCADE)
     registered_node = models.ForeignKey('Registration', null=True, blank=True,
-                                        related_name='draft_registration')
+                                        related_name='draft_registration', on_delete=models.CASCADE)
 
-    approval = models.ForeignKey('DraftRegistrationApproval', null=True, blank=True)
+    approval = models.ForeignKey('DraftRegistrationApproval', null=True, blank=True, on_delete=models.CASCADE)
 
     # Dictionary field mapping extra fields defined in the MetaSchema.schema to their
     # values. Defaults should be provided in the schema (e.g. 'paymentSent': false),
diff --git a/osf/models/sanctions.py b/osf/models/sanctions.py
index 30f9f6c2eb8..0eab93bb37e 100644
--- a/osf/models/sanctions.py
+++ b/osf/models/sanctions.py
@@ -388,7 +388,7 @@ class Embargo(PreregCallbackMixin, EmailApprovableSanction):
     APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'project/{node_id}/?token={token}'
     REJECT_URL_TEMPLATE = osf_settings.DOMAIN + 'project/{node_id}/?token={token}'
 
-    initiated_by = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True)
+    initiated_by = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)
     for_existing_registration = models.BooleanField(default=False)
 
     @property
@@ -570,7 +570,7 @@ class Retraction(EmailApprovableSanction):
     APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'project/{node_id}/?token={token}'
     REJECT_URL_TEMPLATE = osf_settings.DOMAIN + 'project/{node_id}/?token={token}'
 
-    initiated_by = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True)
+    initiated_by = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)
     justification = models.CharField(max_length=2048, null=True, blank=True)
     date_retracted = NonNaiveDateTimeField(null=True, blank=True)
 
@@ -706,7 +706,7 @@ class RegistrationApproval(PreregCallbackMixin, EmailApprovableSanction):
     APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'project/{node_id}/?token={token}'
     REJECT_URL_TEMPLATE = osf_settings.DOMAIN + 'project/{node_id}/?token={token}'
 
-    initiated_by = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True)
+    initiated_by = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)
 
     def _get_registration(self):
         return self.registrations.first()
@@ -923,8 +923,8 @@ class EmbargoTerminationApproval(EmailApprovableSanction):
     APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'project/{node_id}/?token={token}'
     REJECT_URL_TEMPLATE = osf_settings.DOMAIN + 'project/{node_id}/?token={token}'
 
-    embargoed_registration = models.ForeignKey('Registration', null=True, blank=True)
-    initiated_by = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True)
+    embargoed_registration = models.ForeignKey('Registration', null=True, blank=True, on_delete=models.CASCADE)
+    initiated_by = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)
 
     def _get_registration(self):
         return self.embargoed_registration

From 1cd64dca1805e27ff66dda7ae5ffcd775f4cc397 Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Fri, 3 Nov 2017 10:59:28 -0400
Subject: [PATCH 083/192] Pass app_name to django.conf.urls.include().

- Specifying a namespace in django.conf.urls.include() without providing an app_name will not be allowed in Django 2.0. Set the app_name attribute in the included module, or pass a 2-tuple containing the list of patterns and app_name instead.
---
 api/base/urls.py | 48 ++++++++++++++++++++++++------------------------
 1 file changed, 24 insertions(+), 24 deletions(-)

diff --git a/api/base/urls.py b/api/base/urls.py
index 6f250aa01c9..dc703635001 100644
--- a/api/base/urls.py
+++ b/api/base/urls.py
@@ -16,31 +16,31 @@
             [
                 url(r'^$', views.root, name='root'),
                 url(r'^status/', views.status_check, name='status_check'),
-                url(r'^actions/', include('api.actions.urls', namespace='actions')),
-                url(r'^addons/', include('api.addons.urls', namespace='addons')),
-                url(r'^applications/', include('api.applications.urls', namespace='applications')),
-                url(r'^citations/', include('api.citations.urls', namespace='citations')),
-                url(r'^collections/', include('api.collections.urls', namespace='collections')),
-                url(r'^comments/', include('api.comments.urls', namespace='comments')),
+                url(r'^actions/', include(('api.actions.urls', 'actions'), namespace='actions')),
+                url(r'^addons/', include(('api.addons.urls', 'addons'), namespace='addons')),
+                url(r'^applications/', include(('api.applications.urls', 'applications'), namespace='applications')),
+                url(r'^citations/', include(('api.citations.urls', 'citations'), namespace='citations')),
+                url(r'^collections/', include(('api.collections.urls', 'collections'), namespace='collections')),
+                url(r'^comments/', include(('api.comments.urls', 'comments'), namespace='comments')),
                 url(r'^docs/', RedirectView.as_view(pattern_name=views.root), name='redirect-to-root', kwargs={'version': default_version}),
-                url(r'^files/', include('api.files.urls', namespace='files')),
-                url(r'^guids/', include('api.guids.urls', namespace='guids')),
-                url(r'^identifiers/', include('api.identifiers.urls', namespace='identifiers')),
-                url(r'^institutions/', include('api.institutions.urls', namespace='institutions')),
-                url(r'^licenses/', include('api.licenses.urls', namespace='licenses')),
-                url(r'^logs/', include('api.logs.urls', namespace='logs')),
-                url(r'^metaschemas/', include('api.metaschemas.urls', namespace='metaschemas')),
-                url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
-                url(r'^preprints/', include('api.preprints.urls', namespace='preprints')),
-                url(r'^preprint_providers/', include('api.preprint_providers.urls', namespace='preprint_providers')),
-                url(r'^registrations/', include('api.registrations.urls', namespace='registrations')),
-                url(r'^search/', include('api.search.urls', namespace='search')),
-                url(r'^taxonomies/', include('api.taxonomies.urls', namespace='taxonomies')),
-                url(r'^test/', include('api.test.urls', namespace='test')),
-                url(r'^tokens/', include('api.tokens.urls', namespace='tokens')),
-                url(r'^users/', include('api.users.urls', namespace='users')),
-                url(r'^view_only_links/', include('api.view_only_links.urls', namespace='view-only-links')),
-                url(r'^wikis/', include('api.wikis.urls', namespace='wikis')),
+                url(r'^files/', include(('api.files.urls', 'files'), namespace='files')),
+                url(r'^guids/', include(('api.guids.urls', 'guids'), namespace='guids')),
+                url(r'^identifiers/', include(('api.identifiers.urls', 'identifiers'), namespace='identifiers')),
+                url(r'^institutions/', include(('api.institutions.urls', 'institutions'), namespace='institutions')),
+                url(r'^licenses/', include(('api.licenses.urls', 'licenses'), namespace='licenses')),
+                url(r'^logs/', include(('api.logs.urls', 'logs'), namespace='logs')),
+                url(r'^metaschemas/', include(('api.metaschemas.urls', 'metaschemas'), namespace='metaschemas')),
+                url(r'^nodes/', include(('api.nodes.urls', 'nodes'), namespace='nodes')),
+                url(r'^preprints/', include(('api.preprints.urls', 'preprints'), namespace='preprints')),
+                url(r'^preprint_providers/', include(('api.preprint_providers.urls', 'preprint_providers'), namespace='preprint_providers')),
+                url(r'^registrations/', include(('api.registrations.urls', 'registrations'), namespace='registrations')),
+                url(r'^search/', include(('api.search.urls', 'search'), namespace='search')),
+                url(r'^taxonomies/', include(('api.taxonomies.urls', 'taxonomies'), namespace='taxonomies')),
+                url(r'^test/', include(('api.test.urls', 'test'), namespace='test')),
+                url(r'^tokens/', include(('api.tokens.urls', 'tokens'), namespace='tokens')),
+                url(r'^users/', include(('api.users.urls', 'users'), namespace='users')),
+                url(r'^view_only_links/', include(('api.view_only_links.urls', 'view_only_links'), namespace='view-only-links')),
+                url(r'^wikis/', include(('api.wikis.urls', 'wikis'), namespace='wikis')),
             ],
         )
         ),

From 13730a3bc5617633864406ecaa44d950edf4a31c Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Fri, 3 Nov 2017 11:48:55 -0400
Subject: [PATCH 084/192] Replace `virtual_fields` with `private_fields`

- `Options.virtual_fields` is deprecated, use `private_fields` instead.
- See https://docs.djangoproject.com/en/1.11/releases/1.10/#features-deprecated-in-1-10
---
 osf/models/base.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/osf/models/base.py b/osf/models/base.py
index 9511322279f..78fa1598908 100644
--- a/osf/models/base.py
+++ b/osf/models/base.py
@@ -125,7 +125,7 @@ def reload(self):
     def refresh_from_db(self):
         super(BaseModel, self).refresh_from_db()
         # Django's refresh_from_db does not uncache GFKs
-        for field in self._meta.virtual_fields:
+        for field in self._meta.private_fields:
             if hasattr(field, 'cache_attr') and field.cache_attr in self.__dict__:
                 del self.__dict__[field.cache_attr]
 

From 52d8a0e292b3cf90a28a12e915b60d2d08dd302f Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Mon, 6 Nov 2017 15:51:10 -0500
Subject: [PATCH 085/192] Upgrade dependencies (non-breaking, minor version
 changes)

---
 requirements.txt | 56 ++++++++++++++++++++++++------------------------
 1 file changed, 28 insertions(+), 28 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 6486ff8bae4..6de6e6b978c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,34 +6,34 @@
 # To install release requirements: inv requirements --release
 
 invoke==0.15.0
-Werkzeug==0.10.4
+Werkzeug==0.12.2
 Flask==0.10.1
-gevent==1.2.1
-Mako==1.0.0
-Markdown==2.4.1
+gevent==1.2.2
+Mako==1.0.7
+Markdown==2.6.9
 Pygments==1.6
 WTForms==1.0.4
-beautifulsoup4==4.3.2
+beautifulsoup4==4.6.0
 celery==3.1.25
-httplib2==0.9
+httplib2==0.10.3
 hurry.filesize==0.9
 itsdangerous==0.24
 lxml==3.4.1
 mailchimp==2.0.9
-nameparser==0.3.3
-bcrypt==3.1.3
-python-dateutil==2.5.0
-python-gnupg==0.3.6
+nameparser==0.5.3
+bcrypt==3.1.4
+python-dateutil==2.6.1
+python-gnupg==0.4.1
 pytz==2014.9
 bleach==1.4.1
 html5lib==0.999
-blinker==1.3
+blinker==1.4
 furl==0.4.92
 elasticsearch==1.3.0
-google-api-python-client==1.2
+google-api-python-client==1.6.4
 Babel==1.3
-citeproc-py==0.3.0
-boto3==1.4.4
+citeproc-py==0.4.0
+boto3==1.4.7
 # Support SSL SNI on Python < 2.7.9
 # (http://docs.python-requests.org/en/latest/community/faq/#what-are-hostname-doesn-t-match-errors)
 ndg-httpsclient==0.3.0
@@ -54,32 +54,32 @@ sendgrid==1.5.13
 # https://github.com/jaraco/keyring/blob/9.1/setup.py
 keyring==9.1
 
-requests==2.5.3
-urllib3==1.10.4
+requests==2.18.4
+urllib3==1.22
 oauthlib==1.1.2
-requests-oauthlib==0.5.0
+requests-oauthlib==0.8.0
 raven==5.32.0
 
 # API requirements
-Django==1.11.4
-djangorestframework==3.6.3
+Django==1.11.6
+djangorestframework==3.6.4
 django-cors-headers==1.3.1
 djangorestframework-bulk==0.2.1
-pyjwt==1.4.0
+pyjwt==1.5.3
 # Issue: sorry, but this version only supports 100 named groups (https://github.com/eliben/pycparser/issues/147)
-pycparser==2.13
+pycparser==2.18
 pyjwe==1.0.0
-jsonschema==2.5.1
+jsonschema==2.6.0
 django-guardian==1.4.9
 
 # Admin requirements
-django-webpack-loader==0.2.3
-django-nose==1.4.4
+django-webpack-loader==0.5.0
+django-nose==1.4.5
 django-password-reset==0.8.2
 sendgrid-django==2.0.0
 
 # Analytics requirements
-keen==0.3.21
+keen==0.5.1
 python-geoip-geolite2==2015.0303
 
 # OSF models
@@ -89,11 +89,11 @@ git+https://github.com/cos-forks/django-extensions@master
 django-include==0.2.4
 psycopg2==2.6.2
 ujson==1.35
-sqlparse==0.2.2
+sqlparse==0.2.4
 psycogreen==1.0
 django-bulk-update==1.1.10
-ciso8601==1.0.3
+ciso8601==1.0.5
 
 # Reviews requirements
-transitions==0.5.3
+transitions==0.6.1
 enum34==1.1.6

From d743a1da471a96c5762771b7044d818b092c7f33 Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Thu, 9 Nov 2017 16:30:26 -0500
Subject: [PATCH 086/192] Specify app_name in included modules.

- Addresses RemovedInDjango20Warnings
---
 admin/base/urls.py               |  2 +-
 admin/common_auth/urls.py        |  2 ++
 admin/desk/urls.py               |  2 ++
 admin/institutions/urls.py       |  1 +
 admin/meetings/urls.py           |  2 ++
 admin/metrics/urls.py            |  2 ++
 admin/nodes/urls.py              |  1 +
 admin/pre_reg/urls.py            |  2 ++
 admin/preprint_providers/urls.py |  1 +
 admin/preprints/urls.py          |  1 +
 admin/spam/urls.py               |  2 ++
 admin/subjects/urls.py           |  2 ++
 admin/users/urls.py              |  2 ++
 api/actions/urls.py              |  2 ++
 api/addons/urls.py               |  2 ++
 api/applications/urls.py         |  2 ++
 api/base/urls.py                 | 48 ++++++++++++++++----------------
 api/citations/urls.py            |  2 ++
 api/collections/urls.py          |  2 ++
 api/comments/urls.py             |  2 ++
 api/files/urls.py                |  2 ++
 api/guids/urls.py                |  2 ++
 api/identifiers/urls.py          |  2 ++
 api/institutions/urls.py         |  2 ++
 api/licenses/urls.py             |  2 ++
 api/logs/urls.py                 |  2 ++
 api/metaschemas/urls.py          |  2 ++
 api/nodes/urls.py                |  2 ++
 api/preprint_providers/urls.py   |  2 ++
 api/preprints/urls.py            |  2 ++
 api/registrations/urls.py        |  1 +
 api/search/urls.py               |  2 ++
 api/taxonomies/urls.py           |  2 ++
 api/test/urls.py                 |  2 ++
 api/tokens/urls.py               |  2 ++
 api/users/urls.py                |  1 +
 api/view_only_links/urls.py      |  2 ++
 api/wikis/urls.py                |  1 +
 38 files changed, 90 insertions(+), 25 deletions(-)

diff --git a/admin/base/urls.py b/admin/base/urls.py
index 7e92598f2bc..57d9718124f 100644
--- a/admin/base/urls.py
+++ b/admin/base/urls.py
@@ -13,7 +13,7 @@
         base_pattern,
         include([
             url(r'^$', views.home, name='home'),
-            url(r'^admin/', include(admin.site.urls)),
+            url(r'^admin/', admin.site.urls),
             url(r'^spam/', include('admin.spam.urls', namespace='spam')),
             url(r'^institutions/', include('admin.institutions.urls', namespace='institutions')),
             url(r'^preprint_providers/', include('admin.preprint_providers.urls', namespace='preprint_providers')),
diff --git a/admin/common_auth/urls.py b/admin/common_auth/urls.py
index 644aa2b75e5..c1ee7e760eb 100644
--- a/admin/common_auth/urls.py
+++ b/admin/common_auth/urls.py
@@ -6,6 +6,8 @@
 
 from admin.common_auth import views
 
+app_name = 'admin'
+
 urlpatterns = [
     url(r'^login/?$', views.LoginView.as_view(), name='login'),
     url(r'^logout/$', views.logout_user, name='logout'),
diff --git a/admin/desk/urls.py b/admin/desk/urls.py
index eca4faacf8a..c80a4cb4108 100644
--- a/admin/desk/urls.py
+++ b/admin/desk/urls.py
@@ -2,6 +2,8 @@
 
 from admin.desk import views
 
+app_name = 'admin'
+
 urlpatterns = [
     url(r'^customer/(?P<user_id>[a-z0-9]+)/$', views.DeskCustomer.as_view(),
         name='customer'),
diff --git a/admin/institutions/urls.py b/admin/institutions/urls.py
index 258594ff4d4..cb42a6ea9e8 100644
--- a/admin/institutions/urls.py
+++ b/admin/institutions/urls.py
@@ -1,6 +1,7 @@
 from django.conf.urls import url
 from . import views
 
+app_name = 'admin'
 
 urlpatterns = [
     url(r'^$', views.InstitutionList.as_view(), name='list'),
diff --git a/admin/meetings/urls.py b/admin/meetings/urls.py
index b01baaea483..6d7a440d0cd 100644
--- a/admin/meetings/urls.py
+++ b/admin/meetings/urls.py
@@ -4,6 +4,8 @@
 
 from admin.meetings import views
 
+app_name = 'admin'
+
 urlpatterns = [
     url(r'^$', views.MeetingListView.as_view(), name='list'),
     url(r'^create/$', views.MeetingCreateFormView.as_view(), name='create'),
diff --git a/admin/metrics/urls.py b/admin/metrics/urls.py
index 2e776330184..b8618dacf39 100644
--- a/admin/metrics/urls.py
+++ b/admin/metrics/urls.py
@@ -3,6 +3,8 @@
 
 from . import views
 
+app_name = 'admin'
+
 urlpatterns = [
     url(r'^$', login(views.MetricsView.as_view()), name='metrics'),
 ]
diff --git a/admin/nodes/urls.py b/admin/nodes/urls.py
index f947988cdac..d017169399a 100644
--- a/admin/nodes/urls.py
+++ b/admin/nodes/urls.py
@@ -1,6 +1,7 @@
 from django.conf.urls import url
 from admin.nodes import views
 
+app_name = 'admin'
 
 urlpatterns = [
     url(r'^$', views.NodeFormView.as_view(),
diff --git a/admin/pre_reg/urls.py b/admin/pre_reg/urls.py
index 265835317aa..aae4e012a9f 100644
--- a/admin/pre_reg/urls.py
+++ b/admin/pre_reg/urls.py
@@ -3,6 +3,8 @@
 
 from admin.pre_reg import views
 
+app_name = 'admin'
+
 urlpatterns = [
     url(r'^$', views.DraftListView.as_view(), name='prereg'),
     url(r'^download/$', views.DraftDownloadListView.as_view(), name='download'),
diff --git a/admin/preprint_providers/urls.py b/admin/preprint_providers/urls.py
index dd2da1218ae..4d573654978 100644
--- a/admin/preprint_providers/urls.py
+++ b/admin/preprint_providers/urls.py
@@ -1,6 +1,7 @@
 from django.conf.urls import url
 from . import views
 
+app_name = 'admin'
 
 urlpatterns = [
     url(r'^$', views.PreprintProviderList.as_view(), name='list'),
diff --git a/admin/preprints/urls.py b/admin/preprints/urls.py
index fff3bba55a6..c42d40b46c1 100644
--- a/admin/preprints/urls.py
+++ b/admin/preprints/urls.py
@@ -1,6 +1,7 @@
 from django.conf.urls import url
 from admin.preprints import views
 
+app_name = 'admin'
 
 urlpatterns = [
     url(r'^$', views.PreprintFormView.as_view(), name='search'),
diff --git a/admin/spam/urls.py b/admin/spam/urls.py
index 37390437f7c..f5584350c90 100644
--- a/admin/spam/urls.py
+++ b/admin/spam/urls.py
@@ -2,6 +2,8 @@
 
 from . import views
 
+app_name = 'admin'
+
 urlpatterns = [
     url(r'^$', views.SpamList.as_view(), name='spam'),
     url(
diff --git a/admin/subjects/urls.py b/admin/subjects/urls.py
index d6ea2c921bb..bd4054a0729 100644
--- a/admin/subjects/urls.py
+++ b/admin/subjects/urls.py
@@ -4,6 +4,8 @@
 
 from admin.subjects import views
 
+app_name = 'admin'
+
 urlpatterns = [
     url(r'^$', views.SubjectListView.as_view(), name='list'),
     url(r'^(?P<pk>[0-9]+)/$', views.SubjectUpdateView.as_view(),
diff --git a/admin/users/urls.py b/admin/users/urls.py
index 0c33d60024a..e2ac48db121 100644
--- a/admin/users/urls.py
+++ b/admin/users/urls.py
@@ -2,6 +2,8 @@
 
 from . import views
 
+app_name = 'admin'
+
 urlpatterns = [
     url(r'^$', views.UserFormView.as_view(), name='search'),
     url(r'^flagged_spam$', views.UserFlaggedSpamList.as_view(), name='flagged-spam'),
diff --git a/api/actions/urls.py b/api/actions/urls.py
index 3f7357bbce1..adbd3513a5f 100644
--- a/api/actions/urls.py
+++ b/api/actions/urls.py
@@ -2,6 +2,8 @@
 
 from . import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^$', views.CreateAction.as_view(), name=views.CreateAction.view_name),
     url(r'^(?P<action_id>\w+)/$', views.ActionDetail.as_view(), name=views.ActionDetail.view_name),
diff --git a/api/addons/urls.py b/api/addons/urls.py
index 7180e07e111..8f04e68bc0b 100644
--- a/api/addons/urls.py
+++ b/api/addons/urls.py
@@ -2,6 +2,8 @@
 
 from api.addons import views
 
+app_name = 'osf'
+
 urlpatterns = [
     # Examples:
     # url(r'^$', 'api.views.home', name='home'),
diff --git a/api/applications/urls.py b/api/applications/urls.py
index 935681f058e..5d1bfe538db 100644
--- a/api/applications/urls.py
+++ b/api/applications/urls.py
@@ -2,6 +2,8 @@
 
 from api.applications import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^$', views.ApplicationList.as_view(), name=views.ApplicationList.view_name),
     url(r'^(?P<client_id>\w+)/$', views.ApplicationDetail.as_view(), name=views.ApplicationDetail.view_name),
diff --git a/api/base/urls.py b/api/base/urls.py
index dc703635001..6f250aa01c9 100644
--- a/api/base/urls.py
+++ b/api/base/urls.py
@@ -16,31 +16,31 @@
             [
                 url(r'^$', views.root, name='root'),
                 url(r'^status/', views.status_check, name='status_check'),
-                url(r'^actions/', include(('api.actions.urls', 'actions'), namespace='actions')),
-                url(r'^addons/', include(('api.addons.urls', 'addons'), namespace='addons')),
-                url(r'^applications/', include(('api.applications.urls', 'applications'), namespace='applications')),
-                url(r'^citations/', include(('api.citations.urls', 'citations'), namespace='citations')),
-                url(r'^collections/', include(('api.collections.urls', 'collections'), namespace='collections')),
-                url(r'^comments/', include(('api.comments.urls', 'comments'), namespace='comments')),
+                url(r'^actions/', include('api.actions.urls', namespace='actions')),
+                url(r'^addons/', include('api.addons.urls', namespace='addons')),
+                url(r'^applications/', include('api.applications.urls', namespace='applications')),
+                url(r'^citations/', include('api.citations.urls', namespace='citations')),
+                url(r'^collections/', include('api.collections.urls', namespace='collections')),
+                url(r'^comments/', include('api.comments.urls', namespace='comments')),
                 url(r'^docs/', RedirectView.as_view(pattern_name=views.root), name='redirect-to-root', kwargs={'version': default_version}),
-                url(r'^files/', include(('api.files.urls', 'files'), namespace='files')),
-                url(r'^guids/', include(('api.guids.urls', 'guids'), namespace='guids')),
-                url(r'^identifiers/', include(('api.identifiers.urls', 'identifiers'), namespace='identifiers')),
-                url(r'^institutions/', include(('api.institutions.urls', 'institutions'), namespace='institutions')),
-                url(r'^licenses/', include(('api.licenses.urls', 'licenses'), namespace='licenses')),
-                url(r'^logs/', include(('api.logs.urls', 'logs'), namespace='logs')),
-                url(r'^metaschemas/', include(('api.metaschemas.urls', 'metaschemas'), namespace='metaschemas')),
-                url(r'^nodes/', include(('api.nodes.urls', 'nodes'), namespace='nodes')),
-                url(r'^preprints/', include(('api.preprints.urls', 'preprints'), namespace='preprints')),
-                url(r'^preprint_providers/', include(('api.preprint_providers.urls', 'preprint_providers'), namespace='preprint_providers')),
-                url(r'^registrations/', include(('api.registrations.urls', 'registrations'), namespace='registrations')),
-                url(r'^search/', include(('api.search.urls', 'search'), namespace='search')),
-                url(r'^taxonomies/', include(('api.taxonomies.urls', 'taxonomies'), namespace='taxonomies')),
-                url(r'^test/', include(('api.test.urls', 'test'), namespace='test')),
-                url(r'^tokens/', include(('api.tokens.urls', 'tokens'), namespace='tokens')),
-                url(r'^users/', include(('api.users.urls', 'users'), namespace='users')),
-                url(r'^view_only_links/', include(('api.view_only_links.urls', 'view_only_links'), namespace='view-only-links')),
-                url(r'^wikis/', include(('api.wikis.urls', 'wikis'), namespace='wikis')),
+                url(r'^files/', include('api.files.urls', namespace='files')),
+                url(r'^guids/', include('api.guids.urls', namespace='guids')),
+                url(r'^identifiers/', include('api.identifiers.urls', namespace='identifiers')),
+                url(r'^institutions/', include('api.institutions.urls', namespace='institutions')),
+                url(r'^licenses/', include('api.licenses.urls', namespace='licenses')),
+                url(r'^logs/', include('api.logs.urls', namespace='logs')),
+                url(r'^metaschemas/', include('api.metaschemas.urls', namespace='metaschemas')),
+                url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
+                url(r'^preprints/', include('api.preprints.urls', namespace='preprints')),
+                url(r'^preprint_providers/', include('api.preprint_providers.urls', namespace='preprint_providers')),
+                url(r'^registrations/', include('api.registrations.urls', namespace='registrations')),
+                url(r'^search/', include('api.search.urls', namespace='search')),
+                url(r'^taxonomies/', include('api.taxonomies.urls', namespace='taxonomies')),
+                url(r'^test/', include('api.test.urls', namespace='test')),
+                url(r'^tokens/', include('api.tokens.urls', namespace='tokens')),
+                url(r'^users/', include('api.users.urls', namespace='users')),
+                url(r'^view_only_links/', include('api.view_only_links.urls', namespace='view-only-links')),
+                url(r'^wikis/', include('api.wikis.urls', namespace='wikis')),
             ],
         )
         ),
diff --git a/api/citations/urls.py b/api/citations/urls.py
index abb82587d5a..a60e977c268 100644
--- a/api/citations/urls.py
+++ b/api/citations/urls.py
@@ -2,6 +2,8 @@
 
 from api.citations import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^styles/$', views.CitationStyleList.as_view(), name=views.CitationStyleList.view_name),
     url(r'^styles/(?P<citation_id>\w+)/$', views.CitationStyleDetail.as_view(), name=views.CitationStyleDetail.view_name),
diff --git a/api/collections/urls.py b/api/collections/urls.py
index 1563bbbf813..9a28d175b42 100644
--- a/api/collections/urls.py
+++ b/api/collections/urls.py
@@ -2,6 +2,8 @@
 
 from api.collections import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^$', views.CollectionList.as_view(), name=views.CollectionList.view_name),
     url(r'^(?P<collection_id>\w+)/$', views.CollectionDetail.as_view(), name=views.CollectionDetail.view_name),
diff --git a/api/comments/urls.py b/api/comments/urls.py
index 65e53a4e20d..add20b8c627 100644
--- a/api/comments/urls.py
+++ b/api/comments/urls.py
@@ -1,6 +1,8 @@
 from django.conf.urls import url
 from api.comments import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^(?P<comment_id>\w+)/$', views.CommentDetail.as_view(), name=views.CommentDetail.view_name),
     url(r'^(?P<comment_id>\w+)/reports/$', views.CommentReportsList.as_view(), name=views.CommentReportsList.view_name),
diff --git a/api/files/urls.py b/api/files/urls.py
index 168a2aa60de..5444fcb87e6 100644
--- a/api/files/urls.py
+++ b/api/files/urls.py
@@ -2,6 +2,8 @@
 
 from api.files import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^(?P<file_id>\w+)/$', views.FileDetail.as_view(), name=views.FileDetail.view_name),
     url(r'^(?P<file_id>\w+)/versions/$', views.FileVersionsList.as_view(), name=views.FileVersionsList.view_name),
diff --git a/api/guids/urls.py b/api/guids/urls.py
index 6f379c33569..7d270ae80a0 100644
--- a/api/guids/urls.py
+++ b/api/guids/urls.py
@@ -2,6 +2,8 @@
 
 from api.guids import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^(?P<guids>\w+)/$', views.GuidDetail.as_view(), name=views.GuidDetail.view_name),
 ]
diff --git a/api/identifiers/urls.py b/api/identifiers/urls.py
index 81122e9d174..65d28ca069a 100644
--- a/api/identifiers/urls.py
+++ b/api/identifiers/urls.py
@@ -2,6 +2,8 @@
 
 from api.identifiers import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^(?P<identifier_id>\w+)/$', views.IdentifierDetail.as_view(), name=views.IdentifierDetail.view_name),
     url(r'^(?P<node_id>\w+)/identifiers/$', views.IdentifierList.as_view(), name=views.IdentifierList.view_name),
diff --git a/api/institutions/urls.py b/api/institutions/urls.py
index 5cf5c1dcaf4..87a4f445bc3 100644
--- a/api/institutions/urls.py
+++ b/api/institutions/urls.py
@@ -2,6 +2,8 @@
 
 from api.institutions import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^$', views.InstitutionList.as_view(), name=views.InstitutionList.view_name),
     url(r'^auth/$', views.InstitutionAuth.as_view(), name=views.InstitutionAuth.view_name),
diff --git a/api/licenses/urls.py b/api/licenses/urls.py
index 2193aaf6a35..dc89437a0fb 100644
--- a/api/licenses/urls.py
+++ b/api/licenses/urls.py
@@ -2,6 +2,8 @@
 
 from api.licenses import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^$', views.LicenseList.as_view(), name=views.LicenseList.view_name),
     url(r'^(?P<license_id>\w+)/$', views.LicenseDetail.as_view(), name=views.LicenseDetail.view_name),
diff --git a/api/logs/urls.py b/api/logs/urls.py
index e143e964400..5c0950afe96 100644
--- a/api/logs/urls.py
+++ b/api/logs/urls.py
@@ -2,6 +2,8 @@
 
 from api.logs import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
 ]
diff --git a/api/metaschemas/urls.py b/api/metaschemas/urls.py
index 8e8b758322e..a1e1a9ce7c8 100644
--- a/api/metaschemas/urls.py
+++ b/api/metaschemas/urls.py
@@ -2,6 +2,8 @@
 
 from api.metaschemas import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^$', views.MetaSchemasList.as_view(), name=views.MetaSchemasList.view_name),
     url(r'^(?P<metaschema_id>\w+)/$', views.MetaSchemaDetail.as_view(), name=views.MetaSchemaDetail.view_name)
diff --git a/api/nodes/urls.py b/api/nodes/urls.py
index 1450b47e34d..6b7ee88f6e0 100644
--- a/api/nodes/urls.py
+++ b/api/nodes/urls.py
@@ -2,6 +2,8 @@
 
 from api.nodes import views
 
+app_name = 'osf'
+
 urlpatterns = [
     # Examples:
     # url(r'^$', 'api.views.home', name='home'),
diff --git a/api/preprint_providers/urls.py b/api/preprint_providers/urls.py
index e40166cc7c4..5016745f687 100644
--- a/api/preprint_providers/urls.py
+++ b/api/preprint_providers/urls.py
@@ -3,6 +3,8 @@
 
 from api.preprint_providers import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^$', views.PreprintProviderList.as_view(), name=views.PreprintProviderList.view_name),
     url(r'^(?P<provider_id>\w+)/$', views.PreprintProviderDetail.as_view(), name=views.PreprintProviderDetail.view_name),
diff --git a/api/preprints/urls.py b/api/preprints/urls.py
index 065dbb62498..4a5b480b36c 100644
--- a/api/preprints/urls.py
+++ b/api/preprints/urls.py
@@ -2,6 +2,8 @@
 
 from . import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^$', views.PreprintList.as_view(), name=views.PreprintList.view_name),
     url(r'^(?P<preprint_id>\w+)/$', views.PreprintDetail.as_view(), name=views.PreprintDetail.view_name),
diff --git a/api/registrations/urls.py b/api/registrations/urls.py
index b68b6ab57be..ce62f56d810 100644
--- a/api/registrations/urls.py
+++ b/api/registrations/urls.py
@@ -3,6 +3,7 @@
 from api.registrations import views
 from website import settings
 
+app_name = 'osf'
 
 urlpatterns = [
     # Examples:
diff --git a/api/search/urls.py b/api/search/urls.py
index 4afc133138b..ac8eaf9c9db 100644
--- a/api/search/urls.py
+++ b/api/search/urls.py
@@ -2,6 +2,8 @@
 
 from api.search import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^$', views.Search.as_view(), name=views.Search.view_name),
     url(r'^components/$', views.SearchComponents.as_view(), name=views.SearchComponents.view_name),
diff --git a/api/taxonomies/urls.py b/api/taxonomies/urls.py
index 9890bd7a42a..920d63c67e3 100644
--- a/api/taxonomies/urls.py
+++ b/api/taxonomies/urls.py
@@ -2,6 +2,8 @@
 
 from api.taxonomies import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^$', views.TaxonomyList.as_view(), name=views.TaxonomyList.view_name),
     url(r'^(?P<taxonomy_id>\w+)/$', views.TaxonomyDetail.as_view(), name=views.TaxonomyDetail.view_name),
diff --git a/api/test/urls.py b/api/test/urls.py
index 889e25e5ef1..8d181292311 100644
--- a/api/test/urls.py
+++ b/api/test/urls.py
@@ -2,6 +2,8 @@
 
 from api.test import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^throttle/', views.test_throttling, name='test-throttling'),
 ]
diff --git a/api/tokens/urls.py b/api/tokens/urls.py
index 683e201e4fc..eb8fd9376b9 100644
--- a/api/tokens/urls.py
+++ b/api/tokens/urls.py
@@ -2,6 +2,8 @@
 
 from api.tokens import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^$', views.TokenList.as_view(), name='token-list'),
     url(r'^(?P<_id>\w+)/$', views.TokenDetail.as_view(), name='token-detail')
diff --git a/api/users/urls.py b/api/users/urls.py
index 172e8b01e7a..68a81943a2d 100644
--- a/api/users/urls.py
+++ b/api/users/urls.py
@@ -1,6 +1,7 @@
 from django.conf.urls import url
 from . import views
 
+app_name = 'osf'
 
 urlpatterns = [
     url(r'^$', views.UserList.as_view(), name=views.UserList.view_name),
diff --git a/api/view_only_links/urls.py b/api/view_only_links/urls.py
index a3f0d9d0edd..79da5d3bf83 100644
--- a/api/view_only_links/urls.py
+++ b/api/view_only_links/urls.py
@@ -2,6 +2,8 @@
 
 from api.view_only_links import views
 
+app_name = 'osf'
+
 urlpatterns = [
     url(r'^(?P<link_id>\w+)/$', views.ViewOnlyLinkDetail.as_view(), name=views.ViewOnlyLinkDetail.view_name),
     url(r'^(?P<link_id>\w+)/nodes/$', views.ViewOnlyLinkNodes.as_view(), name=views.ViewOnlyLinkNodes.view_name),
diff --git a/api/wikis/urls.py b/api/wikis/urls.py
index 537a8ba069d..e6ef311e8ab 100644
--- a/api/wikis/urls.py
+++ b/api/wikis/urls.py
@@ -1,6 +1,7 @@
 from django.conf.urls import url
 from api.wikis import views
 
+app_name = 'osf'
 
 urlpatterns = [
     url(r'^(?P<wiki_id>\w+)/$', views.WikiDetail.as_view(), name=views.WikiDetail.view_name),

From 80d277f9790547920748b536b0140d7645a2e18e Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Tue, 7 Nov 2017 11:33:47 -0500
Subject: [PATCH 087/192] Upgrade dependencies (non-breaking, major version
 changes)

---
 requirements.txt | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 6de6e6b978c..b10cdbb82f6 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -11,7 +11,6 @@ Flask==0.10.1
 gevent==1.2.2
 Mako==1.0.7
 Markdown==2.6.9
-Pygments==1.6
 WTForms==1.0.4
 beautifulsoup4==4.6.0
 celery==3.1.25
@@ -24,14 +23,14 @@ nameparser==0.5.3
 bcrypt==3.1.4
 python-dateutil==2.6.1
 python-gnupg==0.4.1
-pytz==2014.9
+pytz==2017.3
 bleach==1.4.1
-html5lib==0.999
+html5lib==0.999999999
 blinker==1.4
 furl==0.4.92
 elasticsearch==1.3.0
 google-api-python-client==1.6.4
-Babel==1.3
+Babel==2.5.1
 citeproc-py==0.4.0
 boto3==1.4.7
 # Support SSL SNI on Python < 2.7.9
@@ -56,7 +55,7 @@ keyring==9.1
 
 requests==2.18.4
 urllib3==1.22
-oauthlib==1.1.2
+oauthlib==2.0.6
 requests-oauthlib==0.8.0
 raven==5.32.0
 
@@ -75,7 +74,7 @@ django-guardian==1.4.9
 # Admin requirements
 django-webpack-loader==0.5.0
 django-nose==1.4.5
-django-password-reset==0.8.2
+django-password-reset==1.0
 sendgrid-django==2.0.0
 
 # Analytics requirements
@@ -91,7 +90,7 @@ psycopg2==2.6.2
 ujson==1.35
 sqlparse==0.2.4
 psycogreen==1.0
-django-bulk-update==1.1.10
+django-bulk-update==2.2.0
 ciso8601==1.0.5
 
 # Reviews requirements

From 1f44c667023cd49f00290e7a5c36246540e64a14 Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Wed, 6 Sep 2017 14:21:18 -0400
Subject: [PATCH 088/192] Add django_celery_beat

[#OSF-8605]
---
 api/base/settings/defaults.py      |  3 +++
 framework/celery_tasks/__init__.py |  2 +-
 requirements.txt                   | 12 +++++++-----
 website/settings/defaults.py       | 20 ++++++++++----------
 website/settings/local-dist.py     |  4 ++--
 website/settings/local-travis.py   |  5 ++++-
 6 files changed, 27 insertions(+), 19 deletions(-)

diff --git a/api/base/settings/defaults.py b/api/base/settings/defaults.py
index 187e566ef98..0d5cfd4af1c 100644
--- a/api/base/settings/defaults.py
+++ b/api/base/settings/defaults.py
@@ -84,6 +84,7 @@
     'django.contrib.admin',
 
     # 3rd party
+    'django_celery_beat',
     'rest_framework',
     'corsheaders',
     'raven.contrib.django.raven_compat',
@@ -270,3 +271,5 @@
 
 # Disable anonymous user permissions in django-guardian
 ANONYMOUS_USER_NAME = None
+
+CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'
diff --git a/framework/celery_tasks/__init__.py b/framework/celery_tasks/__init__.py
index f389437e68a..ddef4e6d4de 100644
--- a/framework/celery_tasks/__init__.py
+++ b/framework/celery_tasks/__init__.py
@@ -17,7 +17,7 @@
     client = Client(settings.SENTRY_DSN, release=settings.VERSION, tags={'App': 'celery'})
     register_signal(client)
 
-if settings.BROKER_USE_SSL:
+if settings.CELERY_BROKER_USE_SSL:
     app.setup_security()
 
 @app.task
diff --git a/requirements.txt b/requirements.txt
index 6486ff8bae4..82c2d3188e5 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -14,7 +14,9 @@ Markdown==2.4.1
 Pygments==1.6
 WTForms==1.0.4
 beautifulsoup4==4.3.2
-celery==3.1.25
+celery==4.1.0
+kombu==4.1.0
+vine==1.1.4
 httplib2==0.9
 hurry.filesize==0.9
 itsdangerous==0.24
@@ -41,10 +43,6 @@ git+https://github.com/CenterForOpenScience/modular-odm.git@0.4.0
 # Python markdown extensions for comment emails
 git+git://github.com/CenterForOpenScience/mdx_del_ins.git
 
-# Kombu with the ability to specify queue priority
-# TODO: Remove this when Kombu has a stable release including commit c20f854
-git+git://github.com/CenterForOpenScience/kombu.git@v3.0.36
-
 # Issue: certifi-2015.9.6.1 and 2015.9.6.2 fail verification (https://github.com/certifi/python-certifi/issues/26)
 # MailChimp Ticket: LTK1218902287135X, Domain: https://us9.api.mailchimp.com
 certifi==2015.4.28
@@ -65,6 +63,10 @@ Django==1.11.4
 djangorestframework==3.6.3
 django-cors-headers==1.3.1
 djangorestframework-bulk==0.2.1
+# django-celery-beat==1.0.1  # BSD 3 Clause
+# Contains a fix for handling disabled tasks that still has not been released
+git+git://github.com/celery/django-celery-beat@f014edcb954c707cb7628f4416257b6a58689523  # BSD 3 Clause
+
 pyjwt==1.4.0
 # Issue: sorry, but this version only supports 100 named groups (https://github.com/eliben/pycparser/issues/147)
 pycparser==2.13
diff --git a/website/settings/defaults.py b/website/settings/defaults.py
index 05b71a62264..0d3619555ef 100644
--- a/website/settings/defaults.py
+++ b/website/settings/defaults.py
@@ -397,7 +397,7 @@ def parent_dir(path):
 except ImportError:
     pass
 else:
-    CELERY_QUEUES = (
+    CELERY_TASK_QUEUES = (
         Queue(LOW_QUEUE, Exchange(LOW_QUEUE), routing_key=LOW_QUEUE,
               consumer_arguments={'x-priority': -1}),
         Queue(DEFAULT_QUEUE, Exchange(DEFAULT_QUEUE), routing_key=DEFAULT_QUEUE,
@@ -408,10 +408,10 @@ def parent_dir(path):
               consumer_arguments={'x-priority': 10}),
     )
 
-    CELERY_DEFAULT_EXCHANGE_TYPE = 'direct'
-    CELERY_ROUTES = ('framework.celery_tasks.routers.CeleryRouter', )
-    CELERY_IGNORE_RESULT = True
-    CELERY_STORE_ERRORS_EVEN_IF_IGNORED = True
+    CELERY_TASK_DEFAULT_EXCHANGE_TYPE = 'direct'
+    CELERY_TASK_ROUTES = ('framework.celery_tasks.routers.CeleryRouter', )
+    CELERY_TASK_IGNORE_RESULT = True
+    CELERY_TASK_STORE_ERRORS_EVEN_IF_IGNORED = True
 
 # Default RabbitMQ broker
 RABBITMQ_USERNAME = os.environ.get('RABBITMQ_USERNAME', 'guest')
@@ -420,11 +420,11 @@ def parent_dir(path):
 RABBITMQ_PORT = os.environ.get('RABBITMQ_PORT', '5672')
 RABBITMQ_VHOST = os.environ.get('RABBITMQ_VHOST', '/')
 
-BROKER_URL = os.environ.get('BROKER_URL', 'amqp://{}:{}@{}:{}/{}'.format(RABBITMQ_USERNAME, RABBITMQ_PASSWORD, RABBITMQ_HOST, RABBITMQ_PORT, RABBITMQ_VHOST))
-BROKER_USE_SSL = False
+CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'amqp://{}:{}@{}:{}/{}'.format(RABBITMQ_USERNAME, RABBITMQ_PASSWORD, RABBITMQ_HOST, RABBITMQ_PORT, RABBITMQ_VHOST))
+CELERY_BROKER_USE_SSL = False
 
 # Default RabbitMQ backend
-CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', BROKER_URL)
+CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', CELERY_BROKER_URL)
 
 # Modules to import when celery launches
 CELERY_IMPORTS = (
@@ -468,7 +468,7 @@ def parent_dir(path):
     pass
 else:
     #  Setting up a scheduler, essentially replaces an independent cron job
-    CELERYBEAT_SCHEDULE = {
+    CELERY_BEAT_SCHEDULE = {
         '5-minute-emails': {
             'task': 'website.notifications.tasks.send_users_email',
             'schedule': crontab(minute='*/5'),
@@ -549,7 +549,7 @@ def parent_dir(path):
     }
 
     # Tasks that need metrics and release requirements
-    # CELERYBEAT_SCHEDULE.update({
+    # CELERY_BEAT_SCHEDULE.update({
     #     'usage_audit': {
     #         'task': 'scripts.osfstorage.usage_audit',
     #         'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
diff --git a/website/settings/local-dist.py b/website/settings/local-dist.py
index cf0318ee974..2f7a49e5bcc 100644
--- a/website/settings/local-dist.py
+++ b/website/settings/local-dist.py
@@ -78,12 +78,12 @@
 
 ##### Celery #####
 ## Default RabbitMQ broker
-BROKER_URL = 'amqp://'
+CELERY_BROKER_URL = 'amqp://'
 
 # Celery with SSL
 # import ssl
 #
-# BROKER_USE_SSL = {
+# CELERY_BROKER_USE_SSL = {
 #     'keyfile': '/etc/ssl/private/worker.key',
 #     'certfile': '/etc/ssl/certs/worker.pem',
 #     'ca_certs': '/etc/ssl/certs/ca-chain.cert.pem',
diff --git a/website/settings/local-travis.py b/website/settings/local-travis.py
index 142b96246db..63276a3a855 100644
--- a/website/settings/local-travis.py
+++ b/website/settings/local-travis.py
@@ -5,6 +5,7 @@
 NOTE: local.py will not be added to source control.
 '''
 import inspect
+import logging
 
 from . import defaults
 import os
@@ -53,7 +54,7 @@
 
 ##### Celery #####
 ## Default RabbitMQ broker
-BROKER_URL = 'amqp://'
+CELERY_BROKER_URL = 'amqp://'
 
 # In-memory result backend
 CELERY_RESULT_BACKEND = 'cache'
@@ -90,3 +91,5 @@
 
 EZID_USERNAME = 'testfortravisnotreal'
 EZID_PASSWORD = 'testfortravisnotreal'
+
+logging.getLogger('celery.app.trace').setLevel(logging.FATAL)

From 34b6dce993514aad7c294cbff0c859e1749db53f Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Mon, 13 Nov 2017 11:25:09 -0500
Subject: [PATCH 089/192] Update django-bulk-update imports

---
 api/base/views.py                                          | 2 +-
 osf/migrations/0032_unquote_gd_nodesettings_folder_path.py | 2 +-
 osf_tests/test_guid_auto_include.py                        | 2 +-
 website/conferences/views.py                               | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/api/base/views.py b/api/base/views.py
index b8e49854073..9dd19791563 100644
--- a/api/base/views.py
+++ b/api/base/views.py
@@ -1,6 +1,6 @@
 from collections import defaultdict
 
-from bulk_update.helper import bulk_update
+from django_bulk_update.helper import bulk_update
 from django.conf import settings as django_settings
 from django.db import transaction
 from django.http import JsonResponse
diff --git a/osf/migrations/0032_unquote_gd_nodesettings_folder_path.py b/osf/migrations/0032_unquote_gd_nodesettings_folder_path.py
index 1fe43849de6..39169c239bb 100644
--- a/osf/migrations/0032_unquote_gd_nodesettings_folder_path.py
+++ b/osf/migrations/0032_unquote_gd_nodesettings_folder_path.py
@@ -3,7 +3,7 @@
 from __future__ import unicode_literals
 from urllib2 import quote, unquote
 
-from bulk_update.helper import bulk_update
+from django_bulk_update.helper import bulk_update
 from django.db import migrations
 
 
diff --git a/osf_tests/test_guid_auto_include.py b/osf_tests/test_guid_auto_include.py
index 92136675c01..1dde721f2ac 100644
--- a/osf_tests/test_guid_auto_include.py
+++ b/osf_tests/test_guid_auto_include.py
@@ -1,7 +1,7 @@
 from django.utils import timezone
 
 import pytest
-from bulk_update.helper import bulk_update
+from django_bulk_update.helper import bulk_update
 
 from osf.models import OSFUser
 from django.db.models import Max, DateTimeField
diff --git a/website/conferences/views.py b/website/conferences/views.py
index d64684b141d..0d5e8b88d2c 100644
--- a/website/conferences/views.py
+++ b/website/conferences/views.py
@@ -4,7 +4,7 @@
 import logging
 
 from django.db import transaction
-from bulk_update.helper import bulk_update
+from django_bulk_update.helper import bulk_update
 
 from addons.osfstorage.models import OsfStorageFile
 from framework.auth import get_or_create_user

From 081840dec08f9abc56921b9a2fb6f0a311f80e73 Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Fri, 10 Nov 2017 15:29:19 -0500
Subject: [PATCH 090/192] Add `deleted` DateTimeField to DraftRegistration.

- Replace `remove_one` usages with `deleted=timezone.now()`
---
 api/nodes/views.py                            |  4 +++-
 .../0066_draftregistration_deleted.py         | 21 +++++++++++++++++++
 osf/models/registrations.py                   |  3 ++-
 website/project/views/drafts.py               |  3 ++-
 4 files changed, 28 insertions(+), 3 deletions(-)
 create mode 100644 osf/migrations/0066_draftregistration_deleted.py

diff --git a/api/nodes/views.py b/api/nodes/views.py
index ae3fc7fcdd0..99c2f6e9163 100644
--- a/api/nodes/views.py
+++ b/api/nodes/views.py
@@ -2,6 +2,7 @@
 
 from django.apps import apps
 from django.db.models import Q
+from django.utils import timezone
 from rest_framework import generics, permissions as drf_permissions
 from rest_framework.exceptions import PermissionDenied, ValidationError, NotFound, MethodNotAllowed, NotAuthenticated
 from rest_framework.response import Response
@@ -1016,7 +1017,8 @@ def get_object(self):
         return self.get_draft()
 
     def perform_destroy(self, draft):
-        DraftRegistration.remove_one(draft)
+        draft.deleted = timezone.now()
+        draft.save(update_fields=['deleted'])
 
 
 class NodeRegistrationsList(JSONAPIBaseView, generics.ListCreateAPIView, NodeMixin, DraftMixin):
diff --git a/osf/migrations/0066_draftregistration_deleted.py b/osf/migrations/0066_draftregistration_deleted.py
new file mode 100644
index 00000000000..9fde4aad0ef
--- /dev/null
+++ b/osf/migrations/0066_draftregistration_deleted.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.7 on 2017-11-13 16:44
+from __future__ import unicode_literals
+
+from django.db import migrations
+import osf.utils.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('osf', '0065_preprintservice_original_publication_date'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='draftregistration',
+            name='deleted',
+            field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True),
+        ),
+    ]
diff --git a/osf/models/registrations.py b/osf/models/registrations.py
index bf607ba7333..d0857cd3d14 100644
--- a/osf/models/registrations.py
+++ b/osf/models/registrations.py
@@ -386,9 +386,10 @@ class DraftRegistration(ObjectIDMixin, BaseModel):
 
     datetime_initiated = NonNaiveDateTimeField(auto_now_add=True)
     datetime_updated = NonNaiveDateTimeField(auto_now=True)
+    deleted = NonNaiveDateTimeField(null=True, blank=True)
+
     # Original Node a draft registration is associated with
     branched_from = models.ForeignKey('Node', null=True, related_name='registered_draft')
-
     initiator = models.ForeignKey('OSFUser', null=True)
 
     # Dictionary field mapping question id to a question's comments and answer
diff --git a/website/project/views/drafts.py b/website/project/views/drafts.py
index 9421a488d8e..a9b79cad9f6 100644
--- a/website/project/views/drafts.py
+++ b/website/project/views/drafts.py
@@ -337,7 +337,8 @@ def delete_draft_registration(auth, node, draft, *args, **kwargs):
                 'message_long': 'This draft has already been registered and cannot be deleted.'
             }
         )
-    DraftRegistration.remove_one(draft)
+    draft.deleted = timezone.now()
+    draft.save(update_fields=['deleted'])
     return None, http.NO_CONTENT
 
 def get_metaschemas(*args, **kwargs):

From acdfc3d3c5819dfa398ce36bd924369c32f5544f Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Mon, 13 Nov 2017 13:26:43 -0500
Subject: [PATCH 091/192] Update views and tests to handle deleted drafts.

---
 api/base/utils.py                             |  2 +-
 api/nodes/views.py                            |  3 ++-
 .../test_node_draft_registration_detail.py    |  7 ++++++
 .../test_node_draft_registration_list.py      |  9 +++++++
 osf/models/node.py                            |  1 +
 tests/test_registrations/test_views.py        | 25 +++++++++++++------
 website/prereg/utils.py                       |  1 +
 website/project/views/drafts.py               |  2 ++
 8 files changed, 40 insertions(+), 10 deletions(-)

diff --git a/api/base/utils.py b/api/base/utils.py
index 9c602dccfdc..6a5e9ba4617 100644
--- a/api/base/utils.py
+++ b/api/base/utils.py
@@ -118,7 +118,7 @@ def get_object_or_error(model_cls, query_or_pk, request, display_name=None):
     # disabled.
     if model_cls is OSFUser and obj.is_disabled:
         raise UserGone(user=obj)
-    elif model_cls is not OSFUser and not getattr(obj, 'is_active', True) or getattr(obj, 'is_deleted', False):
+    elif model_cls is not OSFUser and not getattr(obj, 'is_active', True) or getattr(obj, 'is_deleted', False) or getattr(obj, 'deleted', False):
         if display_name is None:
             raise Gone
         else:
diff --git a/api/nodes/views.py b/api/nodes/views.py
index 99c2f6e9163..ccaffd65c53 100644
--- a/api/nodes/views.py
+++ b/api/nodes/views.py
@@ -911,7 +911,8 @@ def get_queryset(self):
         return DraftRegistration.objects.filter(
             Q(registered_node=None) |
             Q(registered_node__is_deleted=True),
-            branched_from=node
+            branched_from=node,
+            deleted__isnull=True
         )
 
     # overrides ListBulkCreateJSONAPIView
diff --git a/api_tests/nodes/views/test_node_draft_registration_detail.py b/api_tests/nodes/views/test_node_draft_registration_detail.py
index 38dac429bec..3ce90f789ae 100644
--- a/api_tests/nodes/views/test_node_draft_registration_detail.py
+++ b/api_tests/nodes/views/test_node_draft_registration_detail.py
@@ -65,6 +65,13 @@ def test_cannot_view_draft(self, app, user_write_contrib, user_read_contrib, use
         res = app.get(url_draft_registrations, expect_errors=True)
         assert res.status_code == 401
 
+    def test_cannot_view_deleted_draft(self, app, user, url_draft_registrations):
+        res = app.delete_json_api(url_draft_registrations, auth=user.auth)
+        assert res.status_code == 204
+
+        res = app.get(url_draft_registrations, auth=user.auth, expect_errors=True)
+        assert res.status_code == 410
+
     def test_draft_must_be_branched_from_node_in_kwargs(self, app, user, project_other, draft_registration):
         url = '/{}nodes/{}/draft_registrations/{}/'.format(API_BASE, project_other._id, draft_registration._id)
         res = app.get(url, auth=user.auth, expect_errors=True)
diff --git a/api_tests/nodes/views/test_node_draft_registration_list.py b/api_tests/nodes/views/test_node_draft_registration_list.py
index d0f1759141a..59fb09d1bc7 100644
--- a/api_tests/nodes/views/test_node_draft_registration_list.py
+++ b/api_tests/nodes/views/test_node_draft_registration_list.py
@@ -1,4 +1,5 @@
 import pytest
+from django.utils import timezone
 
 from api.base.settings.defaults import API_BASE
 from osf.models import MetaSchema
@@ -107,6 +108,14 @@ def test_cannot_view_draft_list(self, app, user_write_contrib, user_read_contrib
         res = app.get(url_draft_registrations, expect_errors=True)
         assert res.status_code == 401
 
+    def test_deleted_draft_registration_does_not_show_up_in_draft_list(self, app, user, draft_registration, url_draft_registrations):
+        draft_registration.deleted = timezone.now()
+        draft_registration.save()
+        res = app.get(url_draft_registrations, auth=user.auth)
+        assert res.status_code == 200
+        data = res.json['data']
+        assert len(data) == 0
+
     def test_draft_with_registered_node_does_not_show_up_in_draft_list(self, app, user, project_public, draft_registration, url_draft_registrations):
         reg = RegistrationFactory(project = project_public)
         draft_registration.registered_node = reg
diff --git a/osf/models/node.py b/osf/models/node.py
index 34f08e19cd4..29c6b76b176 100644
--- a/osf/models/node.py
+++ b/osf/models/node.py
@@ -627,6 +627,7 @@ def draft_registrations_active(self):
         DraftRegistration = apps.get_model('osf.DraftRegistration')
         return DraftRegistration.objects.filter(
             models.Q(branched_from=self) &
+            models.Q(deleted__isnull=True) &
             (models.Q(registered_node=None) | models.Q(registered_node__is_deleted=True))
         )
 
diff --git a/tests/test_registrations/test_views.py b/tests/test_registrations/test_views.py
index 61dbb151bcc..8e7b13307e2 100644
--- a/tests/test_registrations/test_views.py
+++ b/tests/test_registrations/test_views.py
@@ -271,6 +271,15 @@ def test_get_draft_registration(self):
         assert_equal(res.status_code, http.OK)
         assert_equal(res.json['pk'], self.draft._id)
 
+    def test_get_draft_registration_deleted(self):
+        self.draft.deleted = timezone.now()
+        self.draft.save()
+        self.draft.reload()
+
+        url = self.draft_api_url('get_draft_registration')
+        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
+        assert_equal(res.status_code, http.GONE)
+
     def test_get_draft_registration_invalid(self):
         url = self.node.api_url_for('get_draft_registration', draft_id='13123123')
         res = self.app.get(url, auth=self.user.auth, expect_errors=True)
@@ -401,20 +410,20 @@ def test_update_draft_registration_non_admin(self):
         assert_equal(res.status_code, http.FORBIDDEN)
 
     def test_delete_draft_registration(self):
-        assert_equal(1, DraftRegistration.find().count())
+        assert_equal(1, DraftRegistration.objects.filter(deleted__isnull=True).count())
         url = self.node.api_url_for('delete_draft_registration', draft_id=self.draft._id)
 
         res = self.app.delete(url, auth=self.user.auth)
         assert_equal(res.status_code, http.NO_CONTENT)
-        assert_equal(0, DraftRegistration.find().count())
+        assert_equal(0, DraftRegistration.objects.filter(deleted__isnull=True).count())
 
     def test_delete_draft_registration_non_admin(self):
-        assert_equal(1, DraftRegistration.find().count())
+        assert_equal(1, DraftRegistration.objects.filter(deleted__isnull=True).count())
         url = self.node.api_url_for('delete_draft_registration', draft_id=self.draft._id)
 
         res = self.app.delete(url, auth=self.non_admin.auth, expect_errors=True)
         assert_equal(res.status_code, http.FORBIDDEN)
-        assert_equal(1, DraftRegistration.find().count())
+        assert_equal(1, DraftRegistration.objects.filter(deleted__isnull=True).count())
 
     @mock.patch('website.archiver.tasks.archive')
     def test_delete_draft_registration_registered(self, mock_register_draft):
@@ -430,21 +439,21 @@ def test_delete_draft_registration_approved_and_registration_deleted(self, mock_
         self.draft.registered_node.is_deleted = True
         self.draft.registered_node.save()
 
-        assert_equal(1, DraftRegistration.find().count())
+        assert_equal(1, DraftRegistration.objects.filter(deleted__isnull=True).count())
         url = self.node.api_url_for('delete_draft_registration', draft_id=self.draft._id)
 
         res = self.app.delete(url, auth=self.user.auth)
         assert_equal(res.status_code, http.NO_CONTENT)
-        assert_equal(0, DraftRegistration.find().count())
+        assert_equal(0, DraftRegistration.objects.filter(deleted__isnull=True).count())
 
     def test_only_admin_can_delete_registration(self):
         non_admin = AuthUserFactory()
-        assert_equal(1, DraftRegistration.find().count())
+        assert_equal(1, DraftRegistration.objects.filter(deleted__isnull=True).count())
         url = self.node.api_url_for('delete_draft_registration', draft_id=self.draft._id)
 
         res = self.app.delete(url, auth=non_admin.auth, expect_errors=True)
         assert_equal(res.status_code, http.FORBIDDEN)
-        assert_equal(1, DraftRegistration.find().count())
+        assert_equal(1, DraftRegistration.objects.filter(deleted__isnull=True).count())
 
     def test_get_metaschemas(self):
         url = api_url_for('get_metaschemas')
diff --git a/website/prereg/utils.py b/website/prereg/utils.py
index 990b261eaa4..9f2fbdd6718 100644
--- a/website/prereg/utils.py
+++ b/website/prereg/utils.py
@@ -11,6 +11,7 @@ def drafts_for_user(user, campaign):
         registration_schema=PREREG_CHALLENGE_METASCHEMA,
         approval=None,
         registered_node=None,
+        deleted__isnull=True,
         branched_from__in=Node.objects.filter(
             is_deleted=False,
             contributor__admin=True,
diff --git a/website/project/views/drafts.py b/website/project/views/drafts.py
index a9b79cad9f6..26243e4dd54 100644
--- a/website/project/views/drafts.py
+++ b/website/project/views/drafts.py
@@ -43,6 +43,8 @@ def must_be_branched_from_node(func):
     def wrapper(*args, **kwargs):
         node = kwargs['node']
         draft = kwargs['draft']
+        if draft.deleted:
+            raise HTTPError(http.GONE)
         if not draft.branched_from._id == node._id:
             raise HTTPError(
                 http.BAD_REQUEST,

From ae315b6695ec9c9773565ef34516f133bbd7acca Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 13 Nov 2017 14:23:45 -0600
Subject: [PATCH 092/192] Get rid of vague check_model_cls helper, and just
 assert that model_cls is a Node or Registration.

---
 api/base/utils.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/api/base/utils.py b/api/base/utils.py
index dc1880a0321..a42977ee705 100644
--- a/api/base/utils.py
+++ b/api/base/utils.py
@@ -153,15 +153,12 @@ def waterbutler_url_for(request_type, provider, path, node_id, token, obj_args=N
     url.args.update(query)
     return url.url
 
-def check_model_cls(model_cls):
-    assert model_cls is Node or model_cls is Registration
-
 def default_node_list_queryset(model_cls):
-    check_model_cls(model_cls)
+    assert model_cls in {Node, Registration}
     return model_cls.objects.filter(is_deleted=False)
 
 def default_node_permission_queryset(user, model_cls):
-    check_model_cls(model_cls)
+    assert model_cls in {Node, Registration}
     if user.is_anonymous:
         return model_cls.objects.filter(is_public=True)
     sub_qs = Contributor.objects.filter(node=OuterRef('pk'), user__id=user.id, read=True)

From f63723b562495a644678a4bedd9cde4ce48973ba Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 13 Nov 2017 14:25:03 -0600
Subject: [PATCH 093/192] Since files_list is definitely a folder, filter
 Folders instead of type(files_list).

---
 api/nodes/views.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/api/nodes/views.py b/api/nodes/views.py
index 26f6cd6cc82..e75a1763cca 100644
--- a/api/nodes/views.py
+++ b/api/nodes/views.py
@@ -1932,7 +1932,7 @@ def get_default_queryset(self):
             # We should not have gotten a file here
             raise NotFound
 
-        sub_qs = type(files_list).objects.filter(_children=OuterRef('pk'), pk=files_list.pk)
+        sub_qs = Folder.objects.filter(_children=OuterRef('pk'), pk=files_list.pk)
         return files_list.children.annotate(folder=Exists(sub_qs)).filter(folder=True).prefetch_related('node__guids', 'versions', 'tags', 'guids')
 
     # overrides ListAPIView

From 03b122e8c09f8f26dbd4c03d84844f54cabe64e6 Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Mon, 13 Nov 2017 15:37:04 -0500
Subject: [PATCH 094/192] Remove unnecessary recursive call in register_node.

- Delete outdated comments.
---
 osf/models/node.py | 9 ---------
 1 file changed, 9 deletions(-)

diff --git a/osf/models/node.py b/osf/models/node.py
index 34f08e19cd4..e9425a90e84 100644
--- a/osf/models/node.py
+++ b/osf/models/node.py
@@ -1609,11 +1609,9 @@ def register_node(self, schema, auth, data, parent=None):
 
         :param schema: Schema object
         :param auth: All the auth information including user, API key.
-        :param template: Template name
         :param data: Form data
         :param parent Node: parent registration of registration to be created
         """
-        # TODO(lyndsysimon): "template" param is not necessary - use schema.name?
         # NOTE: Admins can register child nodes even if they don't have write access them
         if not self.can_edit(auth=auth) and not self.is_admin_parent(user=auth.user):
             raise PermissionsError(
@@ -1659,13 +1657,6 @@ def register_node(self, schema, auth, data, parent=None):
         # Copy unclaimed records to unregistered users for parent
         registered.copy_unclaimed_records()
 
-        # TODO: Do we need to recurse? .register already recurses
-        for node in registered.get_descendants_recursive():
-            node.is_public = False
-            node.save()
-            # Copy unclaimed records to unregistered users for children
-            node.copy_unclaimed_records()
-
         if parent:
             node_relation = NodeRelation.objects.get(parent=parent.registered_from, child=original)
             NodeRelation.objects.get_or_create(_order=node_relation._order, parent=parent, child=registered)

From 56bd9af87d7288d51deaaf414151c0feca371bc3 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 13 Nov 2017 14:41:05 -0600
Subject: [PATCH 095/192] Remove asserts of 'self' and 'html' in new tests for
 preprint_doi_links because these assertions are not relevant to the field
 being tested.

---
 api_tests/preprints/views/test_preprint_detail.py | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/api_tests/preprints/views/test_preprint_detail.py b/api_tests/preprints/views/test_preprint_detail.py
index f7de1a771c5..de6bab2ffbe 100644
--- a/api_tests/preprints/views/test_preprint_detail.py
+++ b/api_tests/preprints/views/test_preprint_detail.py
@@ -107,8 +107,6 @@ def test_preprint_doi_link_absent_in_unpublished_preprints(self, app, user, unpu
         res = app.get(unpublished_url, auth=user.auth)
         assert res.json['data']['id'] == unpublished_preprint._id
         assert res.json['data']['attributes']['is_published'] == False
-        assert 'self' in res.json['data']['links'].keys()
-        assert 'html' in res.json['data']['links'].keys()
         assert 'preprint_doi' not in res.json['data']['links'].keys()
         assert res.json['data']['attributes']['preprint_doi_on_datacite'] == False
 
@@ -118,8 +116,6 @@ def test_published_preprint_doi_link_returned_before_datacite_request(self, app,
         res = app.get(unpublished_url, auth=user.auth)
         assert res.json['data']['id'] == unpublished_preprint._id
         assert res.json['data']['attributes']['is_published'] == True
-        assert 'self' in res.json['data']['links'].keys()
-        assert 'html' in res.json['data']['links'].keys()
         assert 'preprint_doi' in res.json['data']['links'].keys()
         expected_doi = EZID_FORMAT.format(namespace=DOI_NAMESPACE, guid=unpublished_preprint._id).replace('doi:', '').upper()
         assert res.json['data']['links']['preprint_doi'] == 'https://dx.doi.org/{}'.format(expected_doi)
@@ -129,8 +125,6 @@ def test_published_preprint_doi_link_returned_after_datacite_request(self, app,
         res = app.get(url, auth=user.auth)
         assert res.json['data']['id'] == preprint._id
         assert res.json['data']['attributes']['is_published'] == True
-        assert 'self' in res.json['data']['links'].keys()
-        assert 'html' in res.json['data']['links'].keys()
         assert 'preprint_doi' in res.json['data']['links'].keys()
         expected_doi = EZID_FORMAT.format(namespace=DOI_NAMESPACE, guid=preprint._id).replace('doi:', '')
         assert res.json['data']['links']['preprint_doi'] == 'https://dx.doi.org/{}'.format(expected_doi)

From e9dda47b1fd1460fe9b866d3b33fd00cf2eee841 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 13 Nov 2017 14:49:45 -0600
Subject: [PATCH 096/192] Use is to compare to True and False.

---
 api_tests/preprints/views/test_preprint_detail.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/api_tests/preprints/views/test_preprint_detail.py b/api_tests/preprints/views/test_preprint_detail.py
index de6bab2ffbe..a82a9c994ad 100644
--- a/api_tests/preprints/views/test_preprint_detail.py
+++ b/api_tests/preprints/views/test_preprint_detail.py
@@ -106,29 +106,29 @@ def test_embed_contributors(self, app, user, preprint):
     def test_preprint_doi_link_absent_in_unpublished_preprints(self, app, user, unpublished_preprint, unpublished_url):
         res = app.get(unpublished_url, auth=user.auth)
         assert res.json['data']['id'] == unpublished_preprint._id
-        assert res.json['data']['attributes']['is_published'] == False
+        assert res.json['data']['attributes']['is_published'] is False
         assert 'preprint_doi' not in res.json['data']['links'].keys()
-        assert res.json['data']['attributes']['preprint_doi_on_datacite'] == False
+        assert res.json['data']['attributes']['preprint_doi_on_datacite'] is False
 
     def test_published_preprint_doi_link_returned_before_datacite_request(self, app, user, unpublished_preprint, unpublished_url):
         unpublished_preprint.is_published = True
         unpublished_preprint.save()
         res = app.get(unpublished_url, auth=user.auth)
         assert res.json['data']['id'] == unpublished_preprint._id
-        assert res.json['data']['attributes']['is_published'] == True
+        assert res.json['data']['attributes']['is_published'] is True
         assert 'preprint_doi' in res.json['data']['links'].keys()
         expected_doi = EZID_FORMAT.format(namespace=DOI_NAMESPACE, guid=unpublished_preprint._id).replace('doi:', '').upper()
         assert res.json['data']['links']['preprint_doi'] == 'https://dx.doi.org/{}'.format(expected_doi)
-        assert res.json['data']['attributes']['preprint_doi_on_datacite'] == False
+        assert res.json['data']['attributes']['preprint_doi_on_datacite'] is False
 
     def test_published_preprint_doi_link_returned_after_datacite_request(self, app, user, preprint, url):
         res = app.get(url, auth=user.auth)
         assert res.json['data']['id'] == preprint._id
-        assert res.json['data']['attributes']['is_published'] == True
+        assert res.json['data']['attributes']['is_published'] is True
         assert 'preprint_doi' in res.json['data']['links'].keys()
         expected_doi = EZID_FORMAT.format(namespace=DOI_NAMESPACE, guid=preprint._id).replace('doi:', '')
         assert res.json['data']['links']['preprint_doi'] == 'https://dx.doi.org/{}'.format(expected_doi)
-        assert res.json['data']['attributes']['preprint_doi_on_datacite'] == True
+        assert res.json['data']['attributes']['preprint_doi_on_datacite'] is True
 
 
 @pytest.mark.django_db

From 3978cb5c13e051e7b9d11971f493183178519417 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 13 Nov 2017 17:55:58 -0600
Subject: [PATCH 097/192] Use OsfStorageFolder instead of Folder to build
 subquery.

---
 api/nodes/views.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/api/nodes/views.py b/api/nodes/views.py
index e75a1763cca..c34a0d38f6d 100644
--- a/api/nodes/views.py
+++ b/api/nodes/views.py
@@ -7,6 +7,7 @@
 from rest_framework.response import Response
 from rest_framework.status import HTTP_204_NO_CONTENT
 
+from addons.osfstorage.models import OsfStorageFolder
 from api.addons.serializers import NodeAddonFolderSerializer
 from api.addons.views import AddonSettingsMixin
 from api.base import generic_bulk_views as bulk_views
@@ -1932,7 +1933,7 @@ def get_default_queryset(self):
             # We should not have gotten a file here
             raise NotFound
 
-        sub_qs = Folder.objects.filter(_children=OuterRef('pk'), pk=files_list.pk)
+        sub_qs = OsfStorageFolder.objects.filter(_children=OuterRef('pk'), pk=files_list.pk)
         return files_list.children.annotate(folder=Exists(sub_qs)).filter(folder=True).prefetch_related('node__guids', 'versions', 'tags', 'guids')
 
     # overrides ListAPIView

From 505bc7cd5ca67181ea6a7479813e1392aabe1490 Mon Sep 17 00:00:00 2001
From: "Barrett K. Harber" <barrett.harber@gmail.com>
Date: Mon, 13 Nov 2017 14:58:28 -0500
Subject: [PATCH 098/192] fix typo in settings, bump elasticsearch-py version
 to 2.4.0

---
 requirements.txt                 | 2 +-
 website/search/elastic_search.py | 2 +-
 website/settings/defaults.py     | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index d5a89ada170..81ee42c51c3 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -28,7 +28,7 @@ bleach==1.4.1
 html5lib==0.999999999
 blinker==1.4
 furl==0.4.92
-elasticsearch==1.3.0
+elasticsearch==2.4.0
 google-api-python-client==1.6.4
 Babel==2.5.1
 citeproc-py==0.4.0
diff --git a/website/search/elastic_search.py b/website/search/elastic_search.py
index 51b5e0141c4..97f227378b0 100644
--- a/website/search/elastic_search.py
+++ b/website/search/elastic_search.py
@@ -77,7 +77,7 @@ def client():
                 settings.ELASTIC_URI,
                 request_timeout=settings.ELASTIC_TIMEOUT,
                 retry_on_timeout=True,
-                **settings.ELASIC_KWARGS
+                **settings.ELASTIC_KWARGS
             )
             logging.getLogger('elasticsearch').setLevel(logging.WARN)
             logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
diff --git a/website/settings/defaults.py b/website/settings/defaults.py
index 3faf6614b6f..ebe01522473 100644
--- a/website/settings/defaults.py
+++ b/website/settings/defaults.py
@@ -98,7 +98,7 @@ def parent_dir(path):
 ELASTIC_URI = 'localhost:9200'
 ELASTIC_TIMEOUT = 10
 ELASTIC_INDEX = 'website'
-ELASIC_KWARGS = {
+ELASTIC_KWARGS = {
     # 'use_ssl': False,
     # 'verify_certs': True,
     # 'ca_certs': None,

From f6d43be820f797ead081a5329ca6106dc1a2bf15 Mon Sep 17 00:00:00 2001
From: TomBaxter <tkb608@gmail.com>
Date: Tue, 14 Nov 2017 12:05:10 -0500
Subject: [PATCH 099/192] Update linux override for several mac updates

[NO-TICKET]
---
 docker-compose.linux.yml | 70 ++++++++++++++++++++++++++++++++++++++--
 1 file changed, 67 insertions(+), 3 deletions(-)

diff --git a/docker-compose.linux.yml b/docker-compose.linux.yml
index b85a2e6aa6b..4ef2b1d989e 100644
--- a/docker-compose.linux.yml
+++ b/docker-compose.linux.yml
@@ -62,7 +62,18 @@ services:
 ##      - ../ember-osf:/ember-osf
 ##    depends_on:
 ##      - emberosf
-##    command: /bin/bash -c "cd /ember-osf && yarn link && cd /code && yarn link @centerforopenscience/ember-osf && yarn --pure-lockfile --ignore-engines && ./node_modules/bower/bin/bower install --allow-root --config.interactive=false && ./node_modules/ember-cli/bin/ember serve --host 0.0.0.0 --port 4200"
+##    command:
+##      - /bin/bash
+##      - -c
+##      - cd /ember-osf &&
+##        yarn link &&
+##        cd /code &&
+##        (rm -r node_modules || true) &&
+##        yarn --frozen-lockfile &&
+##        yarn link @centerforopenscience/ember-osf &&
+##        (rm -r bower_components || true) &&
+##        ./node_modules/.bin/bower install --allow-root --config.interactive=false &&
+##        yarn start --host 0.0.0.0 --port 4201 --live-reload-port 41954
 
 #  registries:
 #    volumes:
@@ -73,11 +84,64 @@ services:
 ##      - ../ember-osf:/ember-osf
 ##    depends_on:
 ##      - emberosf
-##    command: /bin/bash -c "cd /ember-osf && yarn link && cd /code && yarn link @centerforopenscience/ember-osf && yarn --pure-lockfile --ignore-engines && ./node_modules/bower/bin/bower install --allow-root --config.interactive=false && ./node_modules/ember-cli/bin/ember serve --host 0.0.0.0 --port 4300"
+##    command:
+##      - /bin/bash
+##      - -c
+##      - cd /ember-osf &&
+##        yarn link &&
+##        cd /code &&
+##        (rm -r node_modules || true) &&
+##        yarn --frozen-lockfile &&
+##        yarn link @centerforopenscience/ember-osf &&
+##        (rm -r bower_components || true) &&
+##        ./node_modules/.bin/bower install --allow-root --config.interactive=false &&
+##        yarn start --host 0.0.0.0 --port 4202 --live-reload-port 41955
+
+#  reviews:
+#    volumes:
+#      - reviews-sync:/code:nocopy
+#
+##      # Use this for ember-osf linked development (with docker-sync):
+##      - reviews_dist_vol:/code/dist
+##      - emberosf-sync:/ember-osf
+##    depends_on:
+##      - emberosf
+##    command:
+##      - /bin/bash
+##      - -c
+##      - cd /ember-osf &&
+##        yarn link &&
+##        cd /code &&
+##        yarn link @centerforopenscience/ember-osf &&
+##        yarn --frozen-lockfile &&
+##        yarn start --host 0.0.0.0 --port 4203 --live-reload-port 41956
 
 #  # Use this for ember-osf linked development:
 #  emberosf:
 #    build: ../ember-osf
-#    command: /bin/bash -c "yarn --pure-lockfile --ignore-engines && ./node_modules/bower/bin/bower install --allow-root --config.interactive=false"
+#    command:
+#      - /bin/bash
+#      - -c
+#      - (rm -r node_modules || true) &&
+#        yarn --frozen-lockfile --ignore-engines &&
+#        (rm -r bower_components || true) &&
+#        ./node_modules/.bin/bower install --allow-root --config.interactive=false
 #    volumes:
 #      - ../ember-osf:/code
+
+#  ####################
+#  # RabbitMQ SSL
+#  # Enable this, place the certs in ./ssl, and uncomment the BROKER_USE_SSL dictionary in local.py
+#  # Uncomment lines under worker in docker-compose
+#  #####################
+#  rabbitmq:
+#    ports:
+#      - 5671:5671
+#    environment:
+#      RABBITMQ_SSL_CERTFILE: /etc/ssl/server_certificate.pem
+#      RABBITMQ_SSL_KEYFILE: /etc/ssl/server_key.pem
+#      RABBITMQ_SSL_CACERTFILE: /etc/ssl/ca_certificate.pem
+#    volumes:
+#      - ./ssl/celery-server.cert.pem:/etc/ssl/server_certificate.pem:ro
+#      - ./ssl/celery-server.key.pem:/etc/ssl/server_key.pem:ro
+#      - ./ssl/ca-chain.cert.pem:/etc/ssl/ca_certificate.pem:ro

From bc40d1de24180df0fd0e3005c98b24c9958459c4 Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Tue, 14 Nov 2017 14:34:47 -0500
Subject: [PATCH 100/192] Remove duped Node import

---
 website/project/views/node.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/website/project/views/node.py b/website/project/views/node.py
index ba21ab5014a..132490b2da5 100644
--- a/website/project/views/node.py
+++ b/website/project/views/node.py
@@ -16,7 +16,6 @@
 from framework.auth.decorators import must_be_logged_in, collect_auth
 from framework.exceptions import HTTPError
 from osf.models.nodelog import NodeLog
-from osf.models.node import Node
 
 from website import language
 

From 8efff1600eca1c11d1805b6b931a4609efe5a1a1 Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Tue, 14 Nov 2017 15:14:06 -0500
Subject: [PATCH 101/192] Downgrade to a working werkzeug version

The latest werkzeug breaks local development
Reverts the upgrade in #7870
---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 36fbde11f80..c50213d484d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,7 +6,7 @@
 # To install release requirements: inv requirements --release
 
 invoke==0.15.0
-Werkzeug==0.12.2
+Werkzeug==0.10.4
 Flask==0.10.1
 gevent==1.2.2
 Mako==1.0.7

From b88406e5d9f3e0979be4cc25241543cf68a8bf29 Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Tue, 14 Nov 2017 11:12:48 -0500
Subject: [PATCH 102/192] Small CR changes.

- Handle folder expansion.
- Add log to sentry.
- Better naming.
- Remove unnecessary m.render()
---
 website/static/js/fangorn.js | 16 ++++++++++------
 website/util/rubeus.py       |  7 +++----
 2 files changed, 13 insertions(+), 10 deletions(-)

diff --git a/website/static/js/fangorn.js b/website/static/js/fangorn.js
index 2eebce2afab..e40eddb9ef9 100644
--- a/website/static/js/fangorn.js
+++ b/website/static/js/fangorn.js
@@ -1622,23 +1622,27 @@ function expandStateLoad(item) {
         }
     }
 
-    if (item.depth > 2 && !item.data.isAddonRoot && item.children.length === 0 && item.open) {
+    if (item.depth > 2 && !item.data.isAddonRoot && !item.data.type && item.children.length === 0 && item.open) {
+        // Displays loading indicator until request below completes
+        // Copied from toggleFolder() in Treebeard
         if (icon.get(0)) {
             m.render(icon.get(0), tbOptions.resolveRefreshIcon());
         }
         $osf.ajaxJSON(
             'GET',
             '/api/v1/project/' + item.data.nodeID + '/files/grid/'
-        ).done(function(xhr) {
-            var data = xhr.data[0].children;
+        ).done(function(response) {
+            var data = response.data[0].children;
             tb.updateFolder(data, item);
             tb.redraw();
-            if (icon.get(0)) {
-                m.render(icon.get(0), tbOptions.resolveToggle(item));
-            }
         }).fail(function(xhr) {
             item.notify.update('Unable to retrieve components.', 'danger', undefined, 3000);
             item.open = false;
+            Raven.captureMessage('Unable to retrieve components for node ' + item.data.nodeID, {
+                extra: {
+                    xhr: xhr
+                }
+            });
         });
     }
 
diff --git a/website/util/rubeus.py b/website/util/rubeus.py
index 21f092e362a..17591a759a5 100644
--- a/website/util/rubeus.py
+++ b/website/util/rubeus.py
@@ -184,7 +184,7 @@ def _get_node_name(self, node, can_view, is_pointer=False):
         return node_name
 
     def _serialize_node(self, node, parent=None, children=[]):
-        is_pointer = parent and node.linked_node
+        is_pointer = parent and node.is_linked_node
         can_view = node.can_view(auth=self.auth)
         can_edit = node.has_write_perm if hasattr(node, 'has_write_perm') else node.can_edit(auth=self.auth)
 
@@ -214,16 +214,15 @@ def _serialize_node(self, node, parent=None, children=[]):
     def _get_nodes(self, node):
         AbstractNode = apps.get_model('osf.AbstractNode')
         Contributor = apps.get_model('osf.Contributor')
-        NodeRelation = apps.get_model('osf.NodeRelation')
 
         data = []
         if node.can_view(auth=self.auth):
             serialized_addons = self._collect_addons(node)
-            linked_node_sqs = NodeRelation.objects.filter(parent=node, is_node_link=True)
+            linked_node_sqs = node.node_relations.filter(is_node_link=True)
             has_write_perm_sqs = Contributor.objects.filter(node=OuterRef('pk'), write=True, user=self.auth.user)
             children = (AbstractNode.objects
                         .filter(is_deleted=False, _parents__parent=node)
-                        .annotate(linked_node=Exists(linked_node_sqs))
+                        .annotate(is_linked_node=Exists(linked_node_sqs))
                         .annotate(has_write_perm=Exists(has_write_perm_sqs))
                         )
             serialized_children = [self._serialize_node(child, parent=node) for child in children]

From bf346ec5e5a56c2dc10f9b39f93fb7cc4e65b897 Mon Sep 17 00:00:00 2001
From: Rebecca Rosenblatt <rebecca@cos.io>
Date: Tue, 14 Nov 2017 16:12:09 -0500
Subject: [PATCH 103/192] added status page language and osfsupport twitter
 link

---
 website/templates/public/pages/support.mako | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/website/templates/public/pages/support.mako b/website/templates/public/pages/support.mako
index 041dd09c756..b15e8ebf550 100644
--- a/website/templates/public/pages/support.mako
+++ b/website/templates/public/pages/support.mako
@@ -61,12 +61,17 @@
             </div>
         </div>
         <hr>
+
         <div class="row m-b-lg">
-            <div class="col-sm-6">
+            <div class="col-sm-4">
                 <h5 class="m-t-md f-w-xl"> Do you have Prereg Challenge related questions? </h5>
                 <p>Check out our <a href="https://cos.io/prereg/">Prereg section</a> on the cos.io website. </p>
             </div>
-            <div class="col-sm-6">
+            <div class="col-sm-4">
+                <h5 class="m-t-md f-w-xl"> Are you experiencing downtime with our services? </h5>
+                <p> Check out our <a href="https://status.cos.io"> status page</a> for updates on how our services are operating.</p>
+            </div>
+            <div class="col-sm-4">
                 <h5 class="m-t-md f-w-xl"> Are you looking for statistics consultations?</h5>
                 <p>COS provides statistics consulation for free. To learn more about this service visit the <a href="https://cos.io/stats_consulting/"> COS statistics consulting page</a>.</p>
             </div>
@@ -76,7 +81,7 @@
         <div class="row m-b-lg">
             <div class="col-sm-12 text-center">
                 <h4 class="m-t-md f-w-xl"> Other ways to get help </h4>
-                <a href="https://twitter.com/OSFramework" class="btn btn-link"><i class="fa fa-twitter"></i> Ask us a question on twitter </a>
+                <a href="https://twitter.com/OSFSupport" class="btn btn-link"><i class="fa fa-twitter"></i> Ask us a question on twitter </a>
                 <a href="https://groups.google.com/forum/#!forum/openscienceframework" class="btn btn-link"><i class="fa fa-users"></i> Join our mailing list </a>
                 <a href="https://www.facebook.com/OpenScienceFramework" class="btn btn-link"><i class="fa fa-facebook"></i> Follow us on Facebook </a>
                 <a href="https://github.com/centerforopenscience" class="btn btn-link"><i class="fa fa-github"></i> Connect with COS on Github</a>

From af6fa4284539c0c6727ec74c8d772da09633183a Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Tue, 14 Nov 2017 16:33:38 -0500
Subject: [PATCH 104/192] Add pyup config

---
 .pyup.yml        | 3 +++
 requirements.txt | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)
 create mode 100644 .pyup.yml

diff --git a/.pyup.yml b/.pyup.yml
new file mode 100644
index 00000000000..4a1055406a2
--- /dev/null
+++ b/.pyup.yml
@@ -0,0 +1,3 @@
+pin: True
+# schedule: "every week"
+search: True
diff --git a/requirements.txt b/requirements.txt
index c50213d484d..b0adbae09b5 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,7 +6,7 @@
 # To install release requirements: inv requirements --release
 
 invoke==0.15.0
-Werkzeug==0.10.4
+Werkzeug==0.10.4  # pyup: ignore # newer versions break local development
 Flask==0.10.1
 gevent==1.2.2
 Mako==1.0.7

From be80ab1c009d88dc97bfad667c178dd54e9a27a0 Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Wed, 15 Nov 2017 10:45:19 -0500
Subject: [PATCH 105/192] Ensure that BEPRESS_PROVIDER is set

---
 osf/management/commands/populate_custom_taxonomies.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/osf/management/commands/populate_custom_taxonomies.py b/osf/management/commands/populate_custom_taxonomies.py
index 6138e0e63b3..12f553966e2 100644
--- a/osf/management/commands/populate_custom_taxonomies.py
+++ b/osf/management/commands/populate_custom_taxonomies.py
@@ -151,6 +151,10 @@ def map_preprints_to_custom_subjects(custom_provider, merge_dict, dry_run=False)
         logger.info('Successfully migrated preprint {}.\n\tOld hierarchy:{}\n\tNew hierarchy:{}'.format(preprint.id, old_hier, new_hier))
 
 def migrate(provider=None, share_title=None, data=None, dry_run=False, copy=False, add_missing=False):
+    # This function may be run outside of this command (e.g. in the admin app) so we
+    # need to make sure that BEPRESS_PROVIDER is set
+    global BEPRESS_PROVIDER
+    BEPRESS_PROVIDER = PreprintProvider.objects.filter(_id='osf').first()
     custom_provider = PreprintProvider.objects.filter(_id=provider).first()
     assert custom_provider, 'Unable to find specified provider: {}'.format(provider)
     assert custom_provider.id != BEPRESS_PROVIDER.id, 'Cannot add custom mapping to BePress provider'

From 26b429a73112598b2c5712faf451de0ebbce54c0 Mon Sep 17 00:00:00 2001
From: TomBaxter <tkb608@gmail.com>
Date: Wed, 27 Sep 2017 09:36:29 -0400
Subject: [PATCH 106/192] Add env variable required for some file types to
 render in docker

[#SVCS-336] B

SVCS-336 adds functionality to override DOCKER_LOCAL_HOST in certain
situations required for some file types (pdf, pdb, and video) to render.
This addendum PR makes that behavior the default in our docker development
setup.
---
 .docker-compose.mfr.env | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/.docker-compose.mfr.env b/.docker-compose.mfr.env
index 1610a7a56d9..860e3258565 100644
--- a/.docker-compose.mfr.env
+++ b/.docker-compose.mfr.env
@@ -9,4 +9,14 @@ SERVER_CONFIG_ALLOWED_PROVIDER_DOMAINS='http://192.168.168.167:5000/ http://192.
 
 UNOCONV_PORT_2002_TCP_ADDR=192.168.168.167
 
+# Related settings from MFR (mfr/extensions/settings.py).
+# These settings change the URIs used in renderer templates that download from
+# waterbutler, e.g. 192.168.168.167 -> localhost
+# LOCAL_DEVELOPMENT = config.get_bool('LOCAL_DEVELOPMENT', 0)
+# DOCKER_LOCAL_HOST = config.get('DOCKER_LOCAL_HOST', '192.168.168.167')
+# LOCAL_HOST = config.get('LOCAL_HOST', 'localhost')
+
+# Indicates use of local docker development setup
+EXTENSION_CONFIG_LOCAL_DEVELOPMENT=1
+
 #PYTHONUNBUFFERED=0 # This when set to 0 will allow print statements to be visible in the Docker logs

From 50ece71737e28e79b78cde9dc7fbd72d1bb8e3e9 Mon Sep 17 00:00:00 2001
From: "Barrett K. Harber" <barrett.harber@gmail.com>
Date: Tue, 14 Nov 2017 19:29:09 -0500
Subject: [PATCH 107/192] Use a class for celery configuration, update settings
 to use the new lowercase syntax

---
 framework/celery_tasks/__init__.py |  12 +-
 framework/celery_tasks/routers.py  |  19 +-
 website/settings/defaults.py       | 472 +++++++++++++++--------------
 website/settings/local-dist.py     |  43 +--
 website/settings/local-travis.py   |  17 +-
 5 files changed, 286 insertions(+), 277 deletions(-)

diff --git a/framework/celery_tasks/__init__.py b/framework/celery_tasks/__init__.py
index ddef4e6d4de..9c08760e3e5 100644
--- a/framework/celery_tasks/__init__.py
+++ b/framework/celery_tasks/__init__.py
@@ -6,18 +6,16 @@
 from raven import Client
 from raven.contrib.celery import register_signal
 
-from website import settings
+from website.settings import SENTRY_DSN, VERSION, CeleryConfig
 
 app = Celery()
+app.config_from_object(CeleryConfig)
 
-# TODO: Hardcoded settings module. Should be set using framework's config handler
-app.config_from_object('website.settings')
-
-if settings.SENTRY_DSN:
-    client = Client(settings.SENTRY_DSN, release=settings.VERSION, tags={'App': 'celery'})
+if SENTRY_DSN:
+    client = Client(SENTRY_DSN, release=VERSION, tags={'App': 'celery'})
     register_signal(client)
 
-if settings.CELERY_BROKER_USE_SSL:
+if CeleryConfig.broker_use_ssl:
     app.setup_security()
 
 @app.task
diff --git a/framework/celery_tasks/routers.py b/framework/celery_tasks/routers.py
index 83443d5cda9..f6eb21ad3cb 100644
--- a/framework/celery_tasks/routers.py
+++ b/framework/celery_tasks/routers.py
@@ -1,20 +1,17 @@
 # -*- coding: utf-8 -*-
-from website.settings import (
-    DEFAULT_QUEUE, LOW_QUEUE, MED_QUEUE, HIGH_QUEUE,
-    LOW_PRI_MODULES, MED_PRI_MODULES, HIGH_PRI_MODULES
-)
+from website.settings import CeleryConfig
 
 def match_by_module(task_path):
     task_parts = task_path.split('.')
     for i in range(2, len(task_parts) + 1):
         task_subpath = '.'.join(task_parts[:i])
-        if task_subpath in LOW_PRI_MODULES:
-            return LOW_QUEUE
-        if task_subpath in MED_PRI_MODULES:
-            return MED_QUEUE
-        if task_subpath in HIGH_PRI_MODULES:
-            return HIGH_QUEUE
-    return DEFAULT_QUEUE
+        if task_subpath in CeleryConfig.low_pri_modules:
+            return CeleryConfig.task_low_queue
+        if task_subpath in CeleryConfig.med_pri_modules:
+            return CeleryConfig.task_med_queue
+        if task_subpath in CeleryConfig.high_pri_modules:
+            return CeleryConfig.task_high_queue
+    return CeleryConfig.task_default_queue
 
 
 class CeleryRouter(object):
diff --git a/website/settings/defaults.py b/website/settings/defaults.py
index e70233ca6fa..6da38e9eff3 100644
--- a/website/settings/defaults.py
+++ b/website/settings/defaults.py
@@ -349,250 +349,256 @@ def parent_dir(path):
 
 ##### CELERY #####
 
-DEFAULT_QUEUE = 'celery'
-LOW_QUEUE = 'low'
-MED_QUEUE = 'med'
-HIGH_QUEUE = 'high'
+# Default RabbitMQ broker
+RABBITMQ_USERNAME = os.environ.get('RABBITMQ_USERNAME', 'guest')
+RABBITMQ_PASSWORD = os.environ.get('RABBITMQ_PASSWORD', 'guest')
+RABBITMQ_HOST = os.environ.get('RABBITMQ_HOST', 'localhost')
+RABBITMQ_PORT = os.environ.get('RABBITMQ_PORT', '5672')
+RABBITMQ_VHOST = os.environ.get('RABBITMQ_VHOST', '/')
 
 # Seconds, not an actual celery setting
 CELERY_RETRY_BACKOFF_BASE = 5
 
-LOW_PRI_MODULES = {
-    'framework.analytics.tasks',
-    'framework.celery_tasks',
-    'scripts.osfstorage.usage_audit',
-    'scripts.stuck_registration_audit',
-    'scripts.osfstorage.glacier_inventory',
-    'scripts.analytics.tasks',
-    'scripts.osfstorage.files_audit',
-    'scripts.osfstorage.glacier_audit',
-    'scripts.populate_new_and_noteworthy_projects',
-    'scripts.populate_popular_projects_and_registrations',
-    'website.search.elastic_search',
-    'scripts.generate_sitemap',
-}
+class CeleryConfig:
+    """
+    Celery Configuration
+    http://docs.celeryproject.org/en/latest/userguide/configuration.html
+    """
+
+    task_default_queue = 'celery'
+    task_low_queue = 'low'
+    task_med_queue = 'med'
+    task_high_queue = 'high'
+
+    low_pri_modules = {
+        'framework.analytics.tasks',
+        'framework.celery_tasks',
+        'scripts.osfstorage.usage_audit',
+        'scripts.stuck_registration_audit',
+        'scripts.osfstorage.glacier_inventory',
+        'scripts.analytics.tasks',
+        'scripts.osfstorage.files_audit',
+        'scripts.osfstorage.glacier_audit',
+        'scripts.populate_new_and_noteworthy_projects',
+        'scripts.populate_popular_projects_and_registrations',
+        'website.search.elastic_search',
+        'scripts.generate_sitemap',
+    }
 
-MED_PRI_MODULES = {
-    'framework.email.tasks',
-    'scripts.send_queued_mails',
-    'scripts.triggered_mails',
-    'website.mailchimp_utils',
-    'website.notifications.tasks',
-    'scripts.analytics.run_keen_summaries',
-    'scripts.analytics.run_keen_snapshots',
-    'scripts.analytics.run_keen_events',
-}
+    med_pri_modules = {
+        'framework.email.tasks',
+        'scripts.send_queued_mails',
+        'scripts.triggered_mails',
+        'website.mailchimp_utils',
+        'website.notifications.tasks',
+        'scripts.analytics.run_keen_summaries',
+        'scripts.analytics.run_keen_snapshots',
+        'scripts.analytics.run_keen_events',
+    }
 
-HIGH_PRI_MODULES = {
-    'scripts.approve_embargo_terminations',
-    'scripts.approve_registrations',
-    'scripts.embargo_registrations',
-    'scripts.premigrate_created_modified',
-    'scripts.refresh_addon_tokens',
-    'scripts.retract_registrations',
-    'website.archiver.tasks',
-}
+    high_pri_modules = {
+        'scripts.approve_embargo_terminations',
+        'scripts.approve_registrations',
+        'scripts.embargo_registrations',
+        'scripts.premigrate_created_modified',
+        'scripts.refresh_addon_tokens',
+        'scripts.retract_registrations',
+        'website.archiver.tasks',
+    }
 
-try:
-    from kombu import Queue, Exchange
-except ImportError:
-    pass
-else:
-    CELERY_TASK_QUEUES = (
-        Queue(LOW_QUEUE, Exchange(LOW_QUEUE), routing_key=LOW_QUEUE,
-              consumer_arguments={'x-priority': -1}),
-        Queue(DEFAULT_QUEUE, Exchange(DEFAULT_QUEUE), routing_key=DEFAULT_QUEUE,
-              consumer_arguments={'x-priority': 0}),
-        Queue(MED_QUEUE, Exchange(MED_QUEUE), routing_key=MED_QUEUE,
-              consumer_arguments={'x-priority': 1}),
-        Queue(HIGH_QUEUE, Exchange(HIGH_QUEUE), routing_key=HIGH_QUEUE,
-              consumer_arguments={'x-priority': 10}),
+    try:
+        from kombu import Queue, Exchange
+    except ImportError:
+        pass
+    else:
+        task_queues = (
+            Queue(task_low_queue, Exchange(task_low_queue), routing_key=task_low_queue,
+                consumer_arguments={'x-priority': -1}),
+            Queue(task_default_queue, Exchange(task_default_queue), routing_key=task_default_queue,
+                consumer_arguments={'x-priority': 0}),
+            Queue(task_med_queue, Exchange(task_med_queue), routing_key=task_med_queue,
+                consumer_arguments={'x-priority': 1}),
+            Queue(task_high_queue, Exchange(task_high_queue), routing_key=task_high_queue,
+                consumer_arguments={'x-priority': 10}),
+        )
+
+        task_default_exchange_type = 'direct'
+        task_routes = ('framework.celery_tasks.routers.CeleryRouter', )
+        task_ignore_result = True
+        task_store_errors_even_if_ignored = True
+
+    broker_url = os.environ.get('BROKER_URL', 'amqp://{}:{}@{}:{}/{}'.format(RABBITMQ_USERNAME, RABBITMQ_PASSWORD, RABBITMQ_HOST, RABBITMQ_PORT, RABBITMQ_VHOST))
+    broker_use_ssl = False
+
+    # Default RabbitMQ backend
+    result_backend = os.environ.get('CELERY_RESULT_BACKEND', broker_url)
+
+    # Modules to import when celery launches
+    imports = (
+        'framework.celery_tasks',
+        'framework.email.tasks',
+        'website.mailchimp_utils',
+        'website.notifications.tasks',
+        'website.archiver.tasks',
+        'website.search.search',
+        'website.project.tasks',
+        'scripts.populate_new_and_noteworthy_projects',
+        'scripts.populate_popular_projects_and_registrations',
+        'scripts.refresh_addon_tokens',
+        'scripts.retract_registrations',
+        'scripts.embargo_registrations',
+        'scripts.approve_registrations',
+        'scripts.approve_embargo_terminations',
+        'scripts.triggered_mails',
+        'scripts.send_queued_mails',
+        'scripts.analytics.run_keen_summaries',
+        'scripts.analytics.run_keen_snapshots',
+        'scripts.analytics.run_keen_events',
+        'scripts.generate_sitemap',
+        'scripts.premigrate_created_modified',
     )
 
-    CELERY_TASK_DEFAULT_EXCHANGE_TYPE = 'direct'
-    CELERY_TASK_ROUTES = ('framework.celery_tasks.routers.CeleryRouter', )
-    CELERY_TASK_IGNORE_RESULT = True
-    CELERY_TASK_STORE_ERRORS_EVEN_IF_IGNORED = True
-
-# Default RabbitMQ broker
-RABBITMQ_USERNAME = os.environ.get('RABBITMQ_USERNAME', 'guest')
-RABBITMQ_PASSWORD = os.environ.get('RABBITMQ_PASSWORD', 'guest')
-RABBITMQ_HOST = os.environ.get('RABBITMQ_HOST', 'localhost')
-RABBITMQ_PORT = os.environ.get('RABBITMQ_PORT', '5672')
-RABBITMQ_VHOST = os.environ.get('RABBITMQ_VHOST', '/')
-
-CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'amqp://{}:{}@{}:{}/{}'.format(RABBITMQ_USERNAME, RABBITMQ_PASSWORD, RABBITMQ_HOST, RABBITMQ_PORT, RABBITMQ_VHOST))
-CELERY_BROKER_USE_SSL = False
-
-# Default RabbitMQ backend
-CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', CELERY_BROKER_URL)
-
-# Modules to import when celery launches
-CELERY_IMPORTS = (
-    'framework.celery_tasks',
-    'framework.email.tasks',
-    'website.mailchimp_utils',
-    'website.notifications.tasks',
-    'website.archiver.tasks',
-    'website.search.search',
-    'website.project.tasks',
-    'scripts.populate_new_and_noteworthy_projects',
-    'scripts.populate_popular_projects_and_registrations',
-    'scripts.refresh_addon_tokens',
-    'scripts.retract_registrations',
-    'scripts.embargo_registrations',
-    'scripts.approve_registrations',
-    'scripts.approve_embargo_terminations',
-    'scripts.triggered_mails',
-    'scripts.send_queued_mails',
-    'scripts.analytics.run_keen_summaries',
-    'scripts.analytics.run_keen_snapshots',
-    'scripts.analytics.run_keen_events',
-    'scripts.generate_sitemap',
-    'scripts.premigrate_created_modified',
-)
-
-# Modules that need metrics and release requirements
-# CELERY_IMPORTS += (
-#     'scripts.osfstorage.glacier_inventory',
-#     'scripts.osfstorage.glacier_audit',
-#     'scripts.osfstorage.usage_audit',
-#     'scripts.stuck_registration_audit',
-#     'scripts.osfstorage.files_audit',
-#     'scripts.analytics.tasks',
-#     'scripts.analytics.upload',
-# )
-
-# celery.schedule will not be installed when running invoke requirements the first time.
-try:
-    from celery.schedules import crontab
-except ImportError:
-    pass
-else:
-    #  Setting up a scheduler, essentially replaces an independent cron job
-    CELERY_BEAT_SCHEDULE = {
-        '5-minute-emails': {
-            'task': 'website.notifications.tasks.send_users_email',
-            'schedule': crontab(minute='*/5'),
-            'args': ('email_transactional',),
-        },
-        'daily-emails': {
-            'task': 'website.notifications.tasks.send_users_email',
-            'schedule': crontab(minute=0, hour=0),
-            'args': ('email_digest',),
-        },
-        'refresh_addons': {
-            'task': 'scripts.refresh_addon_tokens',
-            'schedule': crontab(minute=0, hour=2),  # Daily 2:00 a.m
-            'kwargs': {'dry_run': False, 'addons': {
-                'box': 60,          # https://docs.box.com/docs/oauth-20#section-6-using-the-access-and-refresh-tokens
-                'googledrive': 14,  # https://developers.google.com/identity/protocols/OAuth2#expiration
-                'mendeley': 14      # http://dev.mendeley.com/reference/topics/authorization_overview.html
-            }},
-        },
-        'retract_registrations': {
-            'task': 'scripts.retract_registrations',
-            'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
-            'kwargs': {'dry_run': False},
-        },
-        'embargo_registrations': {
-            'task': 'scripts.embargo_registrations',
-            'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
-            'kwargs': {'dry_run': False},
-        },
-        'approve_registrations': {
-            'task': 'scripts.approve_registrations',
-            'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
-            'kwargs': {'dry_run': False},
-        },
-        'approve_embargo_terminations': {
-            'task': 'scripts.approve_embargo_terminations',
-            'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
-            'kwargs': {'dry_run': False},
-        },
-        'triggered_mails': {
-            'task': 'scripts.triggered_mails',
-            'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
-            'kwargs': {'dry_run': False},
-        },
-        'send_queued_mails': {
-            'task': 'scripts.send_queued_mails',
-            'schedule': crontab(minute=0, hour=12),  # Daily 12 p.m.
-            'kwargs': {'dry_run': False},
-        },
-        'new-and-noteworthy': {
-            'task': 'scripts.populate_new_and_noteworthy_projects',
-            'schedule': crontab(minute=0, hour=2, day_of_week=6),  # Saturday 2:00 a.m.
-            'kwargs': {'dry_run': False}
-        },
-        'update_popular_nodes': {
-            'task': 'scripts.populate_popular_projects_and_registrations',
-            'schedule': crontab(minute=0, hour=2),  # Daily 2:00 a.m.
-            'kwargs': {'dry_run': False}
-        },
-        'run_keen_summaries': {
-            'task': 'scripts.analytics.run_keen_summaries',
-            'schedule': crontab(minute=00, hour=1),  # Daily 1:00 a.m.
-            'kwargs': {'yesterday': True}
-        },
-        'run_keen_snapshots': {
-            'task': 'scripts.analytics.run_keen_snapshots',
-            'schedule': crontab(minute=0, hour=3),  # Daily 3:00 a.m.
-        },
-        'run_keen_events': {
-            'task': 'scripts.analytics.run_keen_events',
-            'schedule': crontab(minute=0, hour=4),  # Daily 4:00 a.m.
-            'kwargs': {'yesterday': True}
-        },
-        'generate_sitemap': {
-            'task': 'scripts.generate_sitemap',
-            'schedule': crontab(minute=0, hour=0),  # Daily 12:00 a.m.
+    # Modules that need metrics and release requirements
+    # imports += (
+    #     'scripts.osfstorage.glacier_inventory',
+    #     'scripts.osfstorage.glacier_audit',
+    #     'scripts.osfstorage.usage_audit',
+    #     'scripts.stuck_registration_audit',
+    #     'scripts.osfstorage.files_audit',
+    #     'scripts.analytics.tasks',
+    #     'scripts.analytics.upload',
+    # )
+
+    # celery.schedule will not be installed when running invoke requirements the first time.
+    try:
+        from celery.schedules import crontab
+    except ImportError:
+        pass
+    else:
+        #  Setting up a scheduler, essentially replaces an independent cron job
+        beat_schedule = {
+            '5-minute-emails': {
+                'task': 'website.notifications.tasks.send_users_email',
+                'schedule': crontab(minute='*/5'),
+                'args': ('email_transactional',),
+            },
+            'daily-emails': {
+                'task': 'website.notifications.tasks.send_users_email',
+                'schedule': crontab(minute=0, hour=0),
+                'args': ('email_digest',),
+            },
+            'refresh_addons': {
+                'task': 'scripts.refresh_addon_tokens',
+                'schedule': crontab(minute=0, hour=2),  # Daily 2:00 a.m
+                'kwargs': {'dry_run': False, 'addons': {
+                    'box': 60,          # https://docs.box.com/docs/oauth-20#section-6-using-the-access-and-refresh-tokens
+                    'googledrive': 14,  # https://developers.google.com/identity/protocols/OAuth2#expiration
+                    'mendeley': 14      # http://dev.mendeley.com/reference/topics/authorization_overview.html
+                }},
+            },
+            'retract_registrations': {
+                'task': 'scripts.retract_registrations',
+                'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
+                'kwargs': {'dry_run': False},
+            },
+            'embargo_registrations': {
+                'task': 'scripts.embargo_registrations',
+                'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
+                'kwargs': {'dry_run': False},
+            },
+            'approve_registrations': {
+                'task': 'scripts.approve_registrations',
+                'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
+                'kwargs': {'dry_run': False},
+            },
+            'approve_embargo_terminations': {
+                'task': 'scripts.approve_embargo_terminations',
+                'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
+                'kwargs': {'dry_run': False},
+            },
+            'triggered_mails': {
+                'task': 'scripts.triggered_mails',
+                'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
+                'kwargs': {'dry_run': False},
+            },
+            'send_queued_mails': {
+                'task': 'scripts.send_queued_mails',
+                'schedule': crontab(minute=0, hour=12),  # Daily 12 p.m.
+                'kwargs': {'dry_run': False},
+            },
+            'new-and-noteworthy': {
+                'task': 'scripts.populate_new_and_noteworthy_projects',
+                'schedule': crontab(minute=0, hour=2, day_of_week=6),  # Saturday 2:00 a.m.
+                'kwargs': {'dry_run': False}
+            },
+            'update_popular_nodes': {
+                'task': 'scripts.populate_popular_projects_and_registrations',
+                'schedule': crontab(minute=0, hour=2),  # Daily 2:00 a.m.
+                'kwargs': {'dry_run': False}
+            },
+            'run_keen_summaries': {
+                'task': 'scripts.analytics.run_keen_summaries',
+                'schedule': crontab(minute=00, hour=1),  # Daily 1:00 a.m.
+                'kwargs': {'yesterday': True}
+            },
+            'run_keen_snapshots': {
+                'task': 'scripts.analytics.run_keen_snapshots',
+                'schedule': crontab(minute=0, hour=3),  # Daily 3:00 a.m.
+            },
+            'run_keen_events': {
+                'task': 'scripts.analytics.run_keen_events',
+                'schedule': crontab(minute=0, hour=4),  # Daily 4:00 a.m.
+                'kwargs': {'yesterday': True}
+            },
+            'generate_sitemap': {
+                'task': 'scripts.generate_sitemap',
+                'schedule': crontab(minute=0, hour=0),  # Daily 12:00 a.m.
+            }
         }
-    }
 
-    # Tasks that need metrics and release requirements
-    # CELERY_BEAT_SCHEDULE.update({
-    #     'usage_audit': {
-    #         'task': 'scripts.osfstorage.usage_audit',
-    #         'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
-    #         'kwargs': {'send_mail': True},
-    #     },
-    #     'stuck_registration_audit': {
-    #         'task': 'scripts.stuck_registration_audit',
-    #         'schedule': crontab(minute=0, hour=6),  # Daily 6 a.m
-    #         'kwargs': {},
-    #     },
-    #     'glacier_inventory': {
-    #         'task': 'scripts.osfstorage.glacier_inventory',
-    #         'schedule': crontab(minute=0, hour= 0, day_of_week=0),  # Sunday 12:00 a.m.
-    #         'args': (),
-    #     },
-    #     'glacier_audit': {
-    #         'task': 'scripts.osfstorage.glacier_audit',
-    #         'schedule': crontab(minute=0, hour=6, day_of_week=0),  # Sunday 6:00 a.m.
-    #         'kwargs': {'dry_run': False},
-    #     },
-    #     'files_audit_0': {
-    #         'task': 'scripts.osfstorage.files_audit.0',
-    #         'schedule': crontab(minute=0, hour=2, day_of_week=0),  # Sunday 2:00 a.m.
-    #         'kwargs': {'num_of_workers': 4, 'dry_run': False},
-    #     },
-    #     'files_audit_1': {
-    #         'task': 'scripts.osfstorage.files_audit.1',
-    #         'schedule': crontab(minute=0, hour=2, day_of_week=0),  # Sunday 2:00 a.m.
-    #         'kwargs': {'num_of_workers': 4, 'dry_run': False},
-    #     },
-    #     'files_audit_2': {
-    #         'task': 'scripts.osfstorage.files_audit.2',
-    #         'schedule': crontab(minute=0, hour=2, day_of_week=0),  # Sunday 2:00 a.m.
-    #         'kwargs': {'num_of_workers': 4, 'dry_run': False},
-    #     },
-    #     'files_audit_3': {
-    #         'task': 'scripts.osfstorage.files_audit.3',
-    #         'schedule': crontab(minute=0, hour=2, day_of_week=0),  # Sunday 2:00 a.m.
-    #         'kwargs': {'num_of_workers': 4, 'dry_run': False},
-    #     },
-    # })
+        # Tasks that need metrics and release requirements
+        # beat_schedule.update({
+        #     'usage_audit': {
+        #         'task': 'scripts.osfstorage.usage_audit',
+        #         'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
+        #         'kwargs': {'send_mail': True},
+        #     },
+        #     'stuck_registration_audit': {
+        #         'task': 'scripts.stuck_registration_audit',
+        #         'schedule': crontab(minute=0, hour=6),  # Daily 6 a.m
+        #         'kwargs': {},
+        #     },
+        #     'glacier_inventory': {
+        #         'task': 'scripts.osfstorage.glacier_inventory',
+        #         'schedule': crontab(minute=0, hour= 0, day_of_week=0),  # Sunday 12:00 a.m.
+        #         'args': (),
+        #     },
+        #     'glacier_audit': {
+        #         'task': 'scripts.osfstorage.glacier_audit',
+        #         'schedule': crontab(minute=0, hour=6, day_of_week=0),  # Sunday 6:00 a.m.
+        #         'kwargs': {'dry_run': False},
+        #     },
+        #     'files_audit_0': {
+        #         'task': 'scripts.osfstorage.files_audit.0',
+        #         'schedule': crontab(minute=0, hour=2, day_of_week=0),  # Sunday 2:00 a.m.
+        #         'kwargs': {'num_of_workers': 4, 'dry_run': False},
+        #     },
+        #     'files_audit_1': {
+        #         'task': 'scripts.osfstorage.files_audit.1',
+        #         'schedule': crontab(minute=0, hour=2, day_of_week=0),  # Sunday 2:00 a.m.
+        #         'kwargs': {'num_of_workers': 4, 'dry_run': False},
+        #     },
+        #     'files_audit_2': {
+        #         'task': 'scripts.osfstorage.files_audit.2',
+        #         'schedule': crontab(minute=0, hour=2, day_of_week=0),  # Sunday 2:00 a.m.
+        #         'kwargs': {'num_of_workers': 4, 'dry_run': False},
+        #     },
+        #     'files_audit_3': {
+        #         'task': 'scripts.osfstorage.files_audit.3',
+        #         'schedule': crontab(minute=0, hour=2, day_of_week=0),  # Sunday 2:00 a.m.
+        #         'kwargs': {'num_of_workers': 4, 'dry_run': False},
+        #     },
+        # })
 
 
 WATERBUTLER_JWE_SALT = 'yusaltydough'
diff --git a/website/settings/local-dist.py b/website/settings/local-dist.py
index 2f7a49e5bcc..fbf48f314ea 100644
--- a/website/settings/local-dist.py
+++ b/website/settings/local-dist.py
@@ -56,9 +56,6 @@
 SEARCH_ENGINE = 'elastic'
 ELASTIC_TIMEOUT = 10
 
-# Comment out to use celery in development
-USE_CELERY = False
-
 # Email
 USE_EMAIL = False
 MAIL_SERVER = 'localhost:1025'  # For local testing
@@ -76,22 +73,30 @@
 OSF_SERVER_KEY = None
 OSF_SERVER_CERT = None
 
-##### Celery #####
-## Default RabbitMQ broker
-CELERY_BROKER_URL = 'amqp://'
-
-# Celery with SSL
-# import ssl
-#
-# CELERY_BROKER_USE_SSL = {
-#     'keyfile': '/etc/ssl/private/worker.key',
-#     'certfile': '/etc/ssl/certs/worker.pem',
-#     'ca_certs': '/etc/ssl/certs/ca-chain.cert.pem',
-#     'cert_reqs': ssl.CERT_REQUIRED,
-# }
-
-# Default RabbitMQ backend
-CELERY_RESULT_BACKEND = 'amqp://'
+# Comment out to use celery in development
+USE_CELERY = False
+
+class CeleryConfig(defaults.CeleryConfig):
+    """
+    Celery configuration
+    """
+    ##### Celery #####
+    ## Default RabbitMQ broker
+    # broker_url = 'amqp://'
+
+    # Celery with SSL
+    # import ssl
+    #
+    # broker_use_ssl = {
+    #     'keyfile': '/etc/ssl/private/worker.key',
+    #     'certfile': '/etc/ssl/certs/worker.pem',
+    #     'ca_certs': '/etc/ssl/certs/ca-chain.cert.pem',
+    #     'cert_reqs': ssl.CERT_REQUIRED,
+    # }
+
+    # Default RabbitMQ backend
+    # result_backend = 'amqp://'
+
 
 USE_CDN_FOR_CLIENT_LIBS = False
 
diff --git a/website/settings/local-travis.py b/website/settings/local-travis.py
index 63276a3a855..95ec2246623 100644
--- a/website/settings/local-travis.py
+++ b/website/settings/local-travis.py
@@ -52,13 +52,16 @@
 OSF_SERVER_KEY = None
 OSF_SERVER_CERT = None
 
-##### Celery #####
-## Default RabbitMQ broker
-CELERY_BROKER_URL = 'amqp://'
-
-# In-memory result backend
-CELERY_RESULT_BACKEND = 'cache'
-CELERY_CACHE_BACKEND = 'memory'
+class CeleryConfig(defaults.CeleryConfig):
+    """
+    Celery configuration
+    """
+    ## Default RabbitMQ broker
+    broker_url = 'amqp://'
+
+    # In-memory result backend
+    result_backend = 'cache'
+    cache_backend = 'memory'
 
 USE_CDN_FOR_CLIENT_LIBS = False
 

From 96c85f9734634ee7789e3d424c44dd0232ddd4ae Mon Sep 17 00:00:00 2001
From: "Barrett K. Harber" <barrett.harber@gmail.com>
Date: Thu, 16 Nov 2017 14:57:09 -0500
Subject: [PATCH 108/192] Remove sharejs, add docker-compose/docker-sync
 settings for dev [ci skip]

---
 addons/wiki/.dockerignore   |   8 --
 addons/wiki/Dockerfile      |  23 ----
 addons/wiki/package.json    |  26 ----
 addons/wiki/shareServer.js  | 251 ------------------------------------
 docker-compose.linux.yml    |  11 ++
 docker-compose.override.yml |  15 ++-
 docker-compose.yml          |   3 -
 docker-sync.yml             |   8 ++
 8 files changed, 33 insertions(+), 312 deletions(-)
 delete mode 100644 addons/wiki/.dockerignore
 delete mode 100644 addons/wiki/Dockerfile
 delete mode 100644 addons/wiki/package.json
 delete mode 100644 addons/wiki/shareServer.js

diff --git a/addons/wiki/.dockerignore b/addons/wiki/.dockerignore
deleted file mode 100644
index fddafa0f82e..00000000000
--- a/addons/wiki/.dockerignore
+++ /dev/null
@@ -1,8 +0,0 @@
-.git
-node_modules
-**/local.py
-*tests
-**/tests
-**/bower_components
-*.pyc
-**/*.pyc
diff --git a/addons/wiki/Dockerfile b/addons/wiki/Dockerfile
deleted file mode 100644
index 0e077464abc..00000000000
--- a/addons/wiki/Dockerfile
+++ /dev/null
@@ -1,23 +0,0 @@
-FROM node:0.12
-
-ENV GOSU_VERSION 1.10
-RUN gpg --keyserver pool.sks-keyservers.net --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4 \
-    && curl -o /usr/local/bin/gosu -SL "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$(dpkg --print-architecture)" \
-  	&& curl -o /usr/local/bin/gosu.asc -SL "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$(dpkg --print-architecture).asc" \
-  	&& gpg --verify /usr/local/bin/gosu.asc \
-  	&& rm /usr/local/bin/gosu.asc \
-  	&& chmod +x /usr/local/bin/gosu
-
-RUN mkdir -p /code \
-    && chown node:node /code
-
-WORKDIR /code
-
-COPY package.json ./
-
-ENV NODE_ENV production
-RUN npm install
-
-COPY shareServer.js ./
-
-CMD ["gosu", "node", "npm", "start"]
\ No newline at end of file
diff --git a/addons/wiki/package.json b/addons/wiki/package.json
deleted file mode 100644
index e534643fd39..00000000000
--- a/addons/wiki/package.json
+++ /dev/null
@@ -1,26 +0,0 @@
-{
-  "name": "sharejs-server",
-  "version": "1.0.0",
-  "description": "ShareJS for OSF Wiki",
-  "main": "shareServer.js",
-  "directories": {
-    "test": "tests"
-  },
-  "scripts": {
-    "start": "node ./shareServer.js",
-    "test": "echo \"Error: no test specified\" && exit 1"
-  },
-  "author": "Center for Open Science",
-  "license": "Apache-2.0",
-  "dependencies": {
-    "async": "^0.9.0",
-    "body-parser": "~1.12.0",
-    "express": "~4.10.0",
-    "livedb": "~0.4.8",
-    "livedb-mongo": "~0.4.1",
-    "morgan": "^1.5.1",
-    "raven": "^0.7.2",
-    "share": "0.7.27",
-    "ws": "~0.4.32"
-  }
-}
diff --git a/addons/wiki/shareServer.js b/addons/wiki/shareServer.js
deleted file mode 100644
index aa1986759ec..00000000000
--- a/addons/wiki/shareServer.js
+++ /dev/null
@@ -1,251 +0,0 @@
-// Library imports
-var util = require('util');
-var http = require('http');
-var raven = require('raven');
-var sharejs = require('share');
-var livedb = require('livedb');
-var Duplex = require('stream').Duplex;
-var WebSocketServer = require('ws').Server;
-var express = require('express');
-var bodyParser = require('body-parser');
-var morgan = require('morgan');
-var async = require('async');
-
-var settings = {
-    debug: process.env.SHAREJS_DEBUG ? process.env.SHAREJS_DEBUG === 'true' : true,
-    // Server Options
-    host: process.env.SHAREJS_SERVER_HOST || 'localhost',
-    port: process.env.SHAREJS_SERVER_PORT || 7007,
-    corsAllowOrigin: process.env.SHAREJS_CORS_ALLOW_ORIGIN || 'http://localhost:5000',
-    // Mongo options
-    dbUrl: process.env.SHAREJS_DB_URL || 'mongodb://localhost:27017/sharejs',
-    // Raven client
-    sentryDSN: process.env.SHAREJS_SENTRY_DSN
-};
-
-var client = new raven.Client(settings.sentryDSN);
-
-if (!settings.debug) {
-    client.patchGlobal(function() {
-        // It is highly discouraged to leave the process running after a
-        // global uncaught exception has occurred.
-        //
-        // https://github.com/getsentry/raven-node#catching-global-errors
-        // http://nodejs.org/api/process.html#process_event_uncaughtexception
-        //
-        console.log('Uncaught Exception process exiting');
-        process.exit(1);
-    });
-}
-
-// Server setup
-var mongo = require('livedb-mongo')(settings.dbUrl, {safe:true});
-var backend = livedb.client(mongo);
-var share = sharejs.server.createClient({backend: backend});
-var app = express();
-var jsonParser = bodyParser.json();
-var server = http.createServer(app);
-var wss = new WebSocketServer({server: server});
-
-// Local variables
-var docs = {};  // TODO: Should this be stored in mongo?
-var locked = {};
-
-// Allow X-Forwarded-For headers
-app.set('trust proxy');
-
-// Raven Express Middleware
-app.use(raven.middleware.express(settings.sentryDSN));
-app.use(morgan('common'));
-
-// Allow CORS
-app.use(function(req, res, next) {
-    res.header('Access-Control-Allow-Origin', settings.corsAllowOrigin);
-    res.header('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept');
-    next();
-});
-
-// Serve static sharejs files
-app.use(express.static(sharejs.scriptsDir));
-
-// Broadcasts message to all clients connected to that doc
-// TODO: Can we access the relevant list without iterating over every client?
-wss.broadcast = function(docId, message) {
-    async.each(this.clients, function (client, cb) {
-        if (client.userMeta && client.userMeta.docId === docId) {
-            try {
-                client.send(message);
-            } catch (e) {
-                // ignore errors - connection should be handled by share.js library
-            }
-        }
-
-        cb();
-    });
-};
-
-wss.on('connection', function(client) {
-    var stream = new Duplex({objectMode: true});
-
-    stream._read = function() {};
-    stream._write = function(chunk, encoding, callback) {
-        if (client.state !== 'closed') {
-            try {
-                client.send(JSON.stringify(chunk));
-            } catch (e) {
-                // ignore errors - connection should be handled by share.js library
-            }
-        }
-        callback();
-    };
-
-    stream.headers = client.upgradeReq.headers;
-    stream.remoteAddress = client.upgradeReq.connection.remoteAddress;
-
-    client.on('message', function(data) {
-        if (client.userMeta && locked[client.userMeta.docId]) {
-            wss.broadcast(client.userMeta.docId, JSON.stringify({type: 'lock'}));
-            return;
-        }
-
-        try {
-            data = JSON.parse(data);
-        } catch (e) {
-            client.captureMessage('Could not parse message data as json', {message: message});
-            return;
-        }
-
-        // Handle our custom messages separately
-        if (data.registration) {
-            console.info('[User Registered] docId: %s, userId: %s', data.docId, data.userId);
-            var docId = data.docId;
-            var userId = data.userId;
-
-            // Create a metadata entry for this document
-            if (!docs[docId]) {
-                docs[docId] = {};
-            }
-
-            // Add user to metadata
-            if (!docs[docId][userId]) {
-                docs[docId][userId] = {
-                    name: data.userName,
-                    url: data.userUrl,
-                    count: 1,
-                    gravatar: data.userGravatar
-                };
-            } else {
-                docs[docId][userId].count++;
-            }
-
-            // Attach metadata to the client object
-            client.userMeta = data;
-            wss.broadcast(docId, JSON.stringify({type: 'meta', users: docs[docId]}));
-
-            // Lock client if doc is locked
-            if (locked[docId]) {
-                try {
-                    client.send(JSON.stringify({type: 'lock'}));
-                } catch (e) {
-                    // ignore errors - connection should be handled by share.js library
-                }
-            }
-        } else {
-            stream.push(data);
-        }
-    });
-
-    client.on('close', function(reason) {
-        if (client.userMeta) {
-            console.info('[Connection Closed] docId: %s, userId: %s, reason: %s', client.userMeta.docId, client.userMeta.userId, reason);
-        } else {
-            console.info('[Connection Closed] reason: %s', reason);
-        }
-
-        if (client.userMeta) {
-            var docId = client.userMeta.docId;
-            var userId = client.userMeta.userId;
-
-            if (docs[docId] && docs[docId][userId]) {
-                docs[docId][userId].count--;
-                if (docs[docId][userId].count === 0) {
-                    delete docs[docId][userId];
-
-                    if (!Object.keys(docs[docId]).length) {
-                        delete docs[docId];
-                    }
-                }
-            }
-
-            wss.broadcast(docId, JSON.stringify({type: 'meta', users: docs[docId]}));
-        }
-
-        stream.push(null);
-        stream.emit('close');
-    });
-
-    stream.on('error', function(msg) {
-        client.captureMessage('Could not parse message data as json', {msg: msg});
-        client.close(msg);
-    });
-
-    stream.on('end', function() {
-        client.close();
-    });
-
-    // Give the stream to sharejs
-    return share.listen(stream);
-});
-
-// Update a document from storage
-app.post('/reload/:id', jsonParser, function (req, res, next) {
-    wss.broadcast(req.params.id, JSON.stringify({
-        type: 'reload',
-        contributors: req.body // All contributors to be updated
-    }));
-    console.info('[Document reloaded from storage] docId: %s', req.params.id);
-    res.send(util.format('%s was reloaded.', req.params.id));
-});
-
-// Lock a document
-app.post('/lock/:id', function (req, res, next) {
-    locked[req.params.id] = true;
-    wss.broadcast(req.params.id, JSON.stringify({type: 'lock'}));
-    console.info('[Document Locked] docId: %s', req.params.id);
-    res.send(util.format('%s was locked.', req.params.id));
-});
-
-// Unlock a document
-app.post('/unlock/:id', jsonParser, function (req, res, next) {
-    delete locked[req.params.id];
-    wss.broadcast(req.params.id, JSON.stringify({
-        type: 'unlock',
-        contributors: req.body // Contributors with write permission
-    }));
-    console.info('[Document Unlocked] docId: %s', req.params.id);
-    res.send(util.format('%s was unlocked.', req.params.id));
-});
-
-// Redirect from a document
-app.post('/redirect/:id/:redirect', function (req, res, next) {
-    wss.broadcast(req.params.id, JSON.stringify({
-        type: 'redirect',
-        redirect: req.params.redirect
-    }));
-    console.info('[Document Redirect] docId: %s, redirect: %s', req.params.id, req.params.redirect);
-    res.send(util.format('%s was redirected to %s', req.params.id, req.params.redirect));
-});
-
-// Redirect from a deleted document
-app.post('/delete/:id/:redirect', function (req, res, next) {
-    wss.broadcast(req.params.id, JSON.stringify({
-        type: 'delete',
-        redirect: req.params.redirect
-    }));
-    console.info('[Document Delete] docId: %s, redirect: %s', req.params.id, req.params.redirect);
-    res.send(util.format('%s was deleted and redirected to %s', req.params.id, req.params.redirect));
-});
-
-server.listen(settings.port, settings.host, function() {
-    console.log('Server running at http://%s:%s', settings.host, settings.port);
-});
diff --git a/docker-compose.linux.yml b/docker-compose.linux.yml
index 4ef2b1d989e..23b66955623 100644
--- a/docker-compose.linux.yml
+++ b/docker-compose.linux.yml
@@ -145,3 +145,14 @@ services:
 #      - ./ssl/celery-server.cert.pem:/etc/ssl/server_certificate.pem:ro
 #      - ./ssl/celery-server.key.pem:/etc/ssl/server_key.pem:ro
 #      - ./ssl/ca-chain.cert.pem:/etc/ssl/ca_certificate.pem:ro
+
+#  sharejs:
+#    volumes:
+#      - sharejs-sync:/code:nocopy
+#
+#    command:
+#      - /bin/sh
+#      - -c 
+#      - |-
+#        npm install
+#        gosu node npm start
diff --git a/docker-compose.override.yml b/docker-compose.override.yml
index 37a082d80b2..4531a2b2414 100644
--- a/docker-compose.override.yml
+++ b/docker-compose.override.yml
@@ -116,7 +116,6 @@ services:
 ##        yarn --frozen-lockfile &&
 ##        yarn start --host 0.0.0.0 --port 4203 --live-reload-port 41956
 
-
 #  # Use this for ember-osf linked development (with docker-sync):
 #  emberosf:
 #    build: ../ember-osf
@@ -147,6 +146,17 @@ services:
 #      - ./ssl/celery-server.key.pem:/etc/ssl/server_key.pem:ro
 #      - ./ssl/ca-chain.cert.pem:/etc/ssl/ca_certificate.pem:ro
 
+#  sharejs:
+#    volumes:
+#      - sharejs-sync:/code:nocopy
+#
+#    command:
+#      - /bin/sh
+#      - -c 
+#      - |-
+#        npm install
+#        gosu node npm start
+
 volumes:
   osf-sync:
     external: true
@@ -168,3 +178,6 @@ volumes:
 
 #  reviews-sync:
 #    external: true
+
+#  sharejs-sync:
+#    external: true
diff --git a/docker-compose.yml b/docker-compose.yml
index 72bef186e9d..35a0f9fbaee 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -369,9 +369,6 @@ services:
       - web
     env_file:
       - .docker-compose.sharejs.env
-    volumes:
-      - ./addons/wiki/package.json:/code/package.json
-      - ./addons/wiki/shareServer.js:/code/shareServer.js
     stdin_open: true
 
 #  beat:
diff --git a/docker-sync.yml b/docker-sync.yml
index 2c4cd47151a..f3558b0c7cb 100644
--- a/docker-sync.yml
+++ b/docker-sync.yml
@@ -56,6 +56,14 @@ syncs:
 #    sync_excludes: ['.DS_Store', '*.map', '*.pyc', '*.tmp', '.git', '.idea', 'bower_components', 'node_modules', 'tmp', 'dist']
 #    watch_excludes: ['.*\.DS_Store', '.*\.map', '.*\.pyc', '.*\.tmp', '.*/\.git', '.*/\.idea', '.*/bower_components', '.*/node_modules', '.*/tmp', '.*/dist']
 
+#  sharejs-sync:
+#    src: '../sharejs'
+#    sync_strategy: 'native_osx'
+#    sync_args: [ '-prefer newer' ]
+#    sync_excludes_type: 'Name'
+#    sync_excludes: ['.DS_Store', '*.map', '*.pyc', '*.tmp', '.git', '.idea', 'node_modules']
+#    watch_excludes: ['.*\.DS_Store', '.*\.map', '.*\.pyc', '.*\.tmp', '.*/\.git', '.*/\.idea', '.*/node_modules']
+
   osf-sync:
     src: './'
     sync_strategy: 'native_osx'

From d7c1be40b00eb2f3e0f410efd8fa2dab1176d396 Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Fri, 17 Nov 2017 10:28:05 -0500
Subject: [PATCH 109/192] Update elasticsearch in travis

---
 .travis.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.travis.yml b/.travis.yml
index da64d4aab57..c12aaf21d12 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -23,7 +23,7 @@ env:
     - WHEELHOUSE="$HOME/.cache/wheelhouse"
     - LIBXML2_DEB="libxml2-dbg_2.9.1+dfsg1-3ubuntu4.9_amd64.deb"
     - POSTGRES_DEB="postgresql-9.6_9.6.3-1.pgdg12.4+1_amd64.deb"
-    - ELASTICSEARCH_ARCHIVE="elasticsearch-1.5.0.tar.gz"
+    - ELASTICSEARCH_ARCHIVE="elasticsearch-2.4.5.tar.gz"
     - LIBJEMALLOC_DEB="libjemalloc1_3.5.1-2_amd64.deb"
     - LIBPCRE_DEB="libpcre3_8.31-2ubuntu2.3_amd64.deb"
     # - VARNISH_DEB="varnish_4.1.0-1~trusty_amd64.deb"

From f5f006235a218b04a7494a120ac8da561803ece5 Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Fri, 17 Nov 2017 11:16:57 -0500
Subject: [PATCH 110/192] Make pyup conditionally ignore elasticsearch and
 Django

[skip ci]
---
 requirements.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index b0adbae09b5..23fd61884a2 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -30,7 +30,7 @@ bleach==1.4.1
 html5lib==0.999999999
 blinker==1.4
 furl==0.4.92
-elasticsearch==2.4.0
+elasticsearch==2.4.0  # pyup: >=2.4,<3.0 # Major version must be same as ES version
 google-api-python-client==1.6.4
 Babel==2.5.1
 citeproc-py==0.4.0
@@ -58,7 +58,7 @@ requests-oauthlib==0.8.0
 raven==5.32.0
 
 # API requirements
-Django==1.11.7
+Django==1.11.7  # pyup: <2.0 # Remove this when we're on Py3
 djangorestframework==3.6.4
 django-cors-headers==1.3.1
 djangorestframework-bulk==0.2.1

From d1271f513719943625338749363f224a7d1a6a2c Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Fri, 17 Nov 2017 15:00:36 -0500
Subject: [PATCH 111/192] Send ids to celery tasks - not complex objects

---
 api_tests/nodes/views/test_node_detail.py |  4 ++--
 osf/models/node.py                        |  2 +-
 osf/models/preprint_service.py            |  2 +-
 reviews/models/mixins.py                  |  2 +-
 website/identifiers/listeners.py          |  4 ++--
 website/preprints/tasks.py                | 10 +++++++---
 6 files changed, 14 insertions(+), 10 deletions(-)

diff --git a/api_tests/nodes/views/test_node_detail.py b/api_tests/nodes/views/test_node_detail.py
index c08e2f41d85..39cf6f9c125 100644
--- a/api_tests/nodes/views/test_node_detail.py
+++ b/api_tests/nodes/views/test_node_detail.py
@@ -835,7 +835,7 @@ def test_set_node_private_updates_ezid(self, mock_update_ezid_metadata, app, use
         assert res.status_code == 200
         project_public.reload()
         assert not project_public.is_public
-        mock_update_ezid_metadata.assert_called_with(project_public, status='unavailable')
+        mock_update_ezid_metadata.assert_called_with(project_public._id, status='unavailable')
 
     @mock.patch('website.preprints.tasks.update_ezid_metadata_on_change')
     def test_set_node_with_preprint_private_updates_ezid(self, mock_update_ezid_metadata, app, user, project_public, url_public, make_node_payload):
@@ -845,7 +845,7 @@ def test_set_node_with_preprint_private_updates_ezid(self, mock_update_ezid_meta
         assert res.status_code == 200
         project_public.reload()
         assert not project_public.is_public
-        mock_update_ezid_metadata.assert_called_with(target_object, status='unavailable')
+        mock_update_ezid_metadata.assert_called_with(target_object._id, status='unavailable')
 
 
 @pytest.mark.django_db
diff --git a/osf/models/node.py b/osf/models/node.py
index bc4085b1f1e..d67bf71fd39 100644
--- a/osf/models/node.py
+++ b/osf/models/node.py
@@ -1510,7 +1510,7 @@ def set_privacy(self, permissions, auth=None, log=True, save=True, meeting_creat
         # Update existing identifiers
         if self.get_identifier('doi'):
             doi_status = 'unavailable' if permissions == 'private' else 'public'
-            enqueue_task(update_ezid_metadata_on_change.s(self, status=doi_status))
+            enqueue_task(update_ezid_metadata_on_change.s(self._id, status=doi_status))
 
         if log:
             action = NodeLog.MADE_PUBLIC if permissions == 'public' else NodeLog.MADE_PRIVATE
diff --git a/osf/models/preprint_service.py b/osf/models/preprint_service.py
index 39d4306dec9..b1c982a24c6 100644
--- a/osf/models/preprint_service.py
+++ b/osf/models/preprint_service.py
@@ -205,7 +205,7 @@ def set_published(self, published, auth, save=False):
                 )
 
             # This should be called after all fields for EZID metadta have been set
-            enqueue_postcommit_task(get_and_set_preprint_identifiers, (), {'preprint': self}, celery=True)
+            enqueue_postcommit_task(get_and_set_preprint_identifiers, (), {'preprint_id': self._id}, celery=True)
 
             self._send_preprint_confirmation(auth)
 
diff --git a/reviews/models/mixins.py b/reviews/models/mixins.py
index 57cb25e6a81..47abbb10a18 100644
--- a/reviews/models/mixins.py
+++ b/reviews/models/mixins.py
@@ -186,7 +186,7 @@ def save_changes(self, ev):
                 raise ValueError('Preprint must have at least one subject to be published.')
             self.reviewable.date_published = now
             self.reviewable.is_published = True
-            enqueue_postcommit_task(get_and_set_preprint_identifiers, (), {'preprint': self.reviewable}, celery=True)
+            enqueue_postcommit_task(get_and_set_preprint_identifiers, (), {'preprint_id': self.reviewable._id}, celery=True)
         elif not should_publish and self.reviewable.is_published:
             self.reviewable.is_published = False
         self.reviewable.save()
diff --git a/website/identifiers/listeners.py b/website/identifiers/listeners.py
index fe8b45c22f1..2c262c81105 100644
--- a/website/identifiers/listeners.py
+++ b/website/identifiers/listeners.py
@@ -7,7 +7,7 @@ def update_status_on_delete(node):
     from website.preprints.tasks import update_ezid_metadata_on_change
 
     for preprint in node.preprints.all():
-        enqueue_task(update_ezid_metadata_on_change.s(preprint, status='unavailable'))
+        enqueue_task(update_ezid_metadata_on_change.s(preprint._id, status='unavailable'))
 
     if node.get_identifier('doi'):
-        enqueue_task(update_ezid_metadata_on_change.s(node, status='unavailable'))
+        enqueue_task(update_ezid_metadata_on_change.s(node._id, status='unavailable'))
diff --git a/website/preprints/tasks.py b/website/preprints/tasks.py
index 1e24a71655e..53d8b980233 100644
--- a/website/preprints/tasks.py
+++ b/website/preprints/tasks.py
@@ -28,7 +28,7 @@ def on_preprint_updated(preprint_id, update_share=True, share_type=None, old_sub
     if preprint.node:
         status = 'public' if preprint.verified_publishable else 'unavailable'
         try:
-            update_ezid_metadata_on_change(preprint, status=status)
+            update_ezid_metadata_on_change(preprint._id, status=status)
         except HTTPError as err:
             sentry.log_exception()
             sentry.log_message(err.args[0])
@@ -178,7 +178,9 @@ def format_preprint(preprint, share_type, old_subjects=None):
 
 
 @celery_app.task(ignore_results=True)
-def get_and_set_preprint_identifiers(preprint):
+def get_and_set_preprint_identifiers(preprint_id):
+    PreprintService = apps.get_model('osf.PreprintService')
+    preprint = PreprintService.load(preprint_id)
     ezid_response = request_identifiers_from_ezid(preprint)
     if ezid_response is None:
         return
@@ -187,7 +189,9 @@ def get_and_set_preprint_identifiers(preprint):
 
 
 @celery_app.task(ignore_results=True)
-def update_ezid_metadata_on_change(target_object, status):
+def update_ezid_metadata_on_change(target_guid, status):
+    Guid = apps.get_model('osf.Guid')
+    target_object = Guid.load(target_guid).referent
     if (settings.EZID_USERNAME and settings.EZID_PASSWORD) and target_object.get_identifier('doi'):
         client = get_ezid_client()
 

From a745b43b4291e0cf6e9a36f616125bc61993aa43 Mon Sep 17 00:00:00 2001
From: "Barrett K. Harber" <barrett.harber@gmail.com>
Date: Fri, 17 Nov 2017 15:35:40 -0500
Subject: [PATCH 112/192] Update syntax for docker-compose unoconv

---
 docker-compose.yml | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index 35a0f9fbaee..9c8def8425a 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -146,10 +146,11 @@ services:
 
   unoconv:
     image: centerforopenscience/unoconv
-    command: /bin/bash -c "
-      /opt/libreoffice4.4/program/python -u /usr/bin/unoconv --listener --server=0.0.0.0 --port=2002 -vvv &&
-      chmod -R 777 /tmp/mfrlocalcache
-      "
+    command: 
+      - /bin/bash
+      - -c
+      - /opt/libreoffice4.4/program/python -u /usr/bin/unoconv --listener --server=0.0.0.0 --port=2002 -vvv &&
+        chmod -R 777 /tmp/mfrlocalcache
     restart: unless-stopped
     ports:
       - 2002:2002

From 9eace9c5ec858756d03d92ceafa2348d504cfeb6 Mon Sep 17 00:00:00 2001
From: Alex Schiller <alexschiller@gmail.com>
Date: Fri, 17 Nov 2017 16:20:59 -0500
Subject: [PATCH 113/192] Handle search_phase_execution_exception

---
 website/search/elastic_search.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/website/search/elastic_search.py b/website/search/elastic_search.py
index 97f227378b0..7c00393b4b1 100644
--- a/website/search/elastic_search.py
+++ b/website/search/elastic_search.py
@@ -112,6 +112,8 @@ def wrapped(*args, **kwargs):
             except NotFoundError as e:
                 raise exceptions.IndexNotFoundError(e.error)
             except RequestError as e:
+                if e.error == 'search_phase_execution_exception':
+                    raise exceptions.MalformedQueryError('Failed to parse query')
                 if 'ParseException' in e.error:  # ES 1.5
                     raise exceptions.MalformedQueryError(e.error)
                 if type(e.error) == dict:  # ES 2.0

From a556a5866141c1d025130d56c1df1fc1caf693b4 Mon Sep 17 00:00:00 2001
From: Alex Schiller <alexschiller@gmail.com>
Date: Mon, 20 Nov 2017 14:01:39 -0500
Subject: [PATCH 114/192] Fix arg order

---
 website/search_migration/migrate.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/website/search_migration/migrate.py b/website/search_migration/migrate.py
index 73e1f3bf6c7..afb68556295 100644
--- a/website/search_migration/migrate.py
+++ b/website/search_migration/migrate.py
@@ -94,7 +94,7 @@ def set_up_index(idx):
         helpers.reindex(es_client(), idx, index)
         logger.info('Deleting {} index'.format(idx))
         es_client().indices.delete(index=idx)
-        es_client().indices.put_alias(idx, index)
+        es_client().indices.put_alias(index=index, name=idx)
     else:
         # Increment version
         version = int(alias.keys()[0].split('_v')[1]) + 1
@@ -111,7 +111,7 @@ def set_up_alias(old_index, index):
         logger.info('Removing old aliases to {}'.format(old_index))
         es_client().indices.delete_alias(index=old_index, name='_all', ignore=404)
     logger.info('Creating new alias from {0} to {1}'.format(old_index, index))
-    es_client().indices.put_alias(old_index, index)
+    es_client().indices.put_alias(index=index, name=old_index)
 
 
 def delete_old(index):

From ecb3c0c8aafa84438419b40b4da0c6bc9fb7db8c Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Fri, 27 Oct 2017 15:47:03 -0400
Subject: [PATCH 115/192] Reorganize Reviews code

[#OSF-8576]
---
 api/actions/permissions.py                    |  57 ++++
 api/actions/serializers.py                    |  21 +-
 api/actions/views.py                          |  11 +-
 api/base/filters.py                           |   4 +-
 api/base/settings/defaults.py                 |   1 -
 .../preprint_providers}/permissions.py        |  64 +---
 api/preprint_providers/serializers.py         |   3 +-
 api/preprint_providers/views.py               |  20 +-
 api/preprint_providers/workflows.py           |  29 ++
 api/preprints/permissions.py                  |   4 +-
 api/preprints/views.py                        |   4 +-
 .../views/test_preprint_provider_detail.py    |   2 +-
 api_tests/preprints/filters/test_filters.py   |   4 +-
 .../preprints/views/test_preprint_actions.py  |   3 +-
 .../preprints/views/test_preprint_detail.py   |  15 +-
 .../preprints/views/test_preprint_list.py     |  10 +-
 api_tests/reviews/mixins/comment_settings.py  |   5 +-
 api_tests/reviews/mixins/filter_mixins.py     |   2 +-
 api_tests/users/views/test_user_actions.py    |   5 +-
 osf/exceptions.py                             |   7 +
 .../commands/create_fake_preprint_actions.py  |   6 +-
 osf/management/commands/update_auth_groups.py |   4 +-
 osf/migrations/0060_reviews.py                |   2 +-
 osf/migrations/0062_accept_preprints.py       |   6 +-
 osf/models/action.py                          |  10 +-
 osf/models/mixins.py                          | 111 ++++++-
 osf/models/preprint_provider.py               |  10 +-
 osf/models/preprint_service.py                |   7 +-
 osf/utils/machines.py                         | 128 ++++++++
 osf/utils/workflows.py                        |  64 ++++
 osf_tests/factories.py                        |   9 +-
 osf_tests/test_reviewable.py                  |  18 +-
 reviews/__init__.py                           |   0
 reviews/apps.py                               |   8 -
 reviews/exceptions.py                         |   6 -
 reviews/models/__init__.py                    |   2 -
 reviews/models/mixins.py                      | 277 ------------------
 reviews/test/.gitkeep                         |   0
 reviews/workflow.py                           |  90 ------
 tests/test_notifications.py                   |   6 +-
 website/reviews/listeners.py                  |  39 +++
 41 files changed, 516 insertions(+), 558 deletions(-)
 create mode 100644 api/actions/permissions.py
 rename {reviews => api/preprint_providers}/permissions.py (60%)
 create mode 100644 api/preprint_providers/workflows.py
 create mode 100644 osf/utils/machines.py
 create mode 100644 osf/utils/workflows.py
 delete mode 100644 reviews/__init__.py
 delete mode 100644 reviews/apps.py
 delete mode 100644 reviews/exceptions.py
 delete mode 100644 reviews/models/__init__.py
 delete mode 100644 reviews/models/mixins.py
 delete mode 100644 reviews/test/.gitkeep
 delete mode 100644 reviews/workflow.py
 create mode 100644 website/reviews/listeners.py

diff --git a/api/actions/permissions.py b/api/actions/permissions.py
new file mode 100644
index 00000000000..ee0d8a2a99d
--- /dev/null
+++ b/api/actions/permissions.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from rest_framework import permissions as drf_permissions
+
+from api.base.utils import get_user_auth
+from osf.models.action import Action
+from osf.models.mixins import ReviewableMixin, ReviewProviderMixin
+from osf.utils.workflows import DefaultTriggers
+from website.util import permissions as osf_permissions
+
+# Required permission to perform each action. `None` means no permissions required.
+TRIGGER_PERMISSIONS = {
+    DefaultTriggers.SUBMIT.value: None,
+    DefaultTriggers.ACCEPT.value: 'accept_submissions',
+    DefaultTriggers.REJECT.value: 'reject_submissions',
+    DefaultTriggers.EDIT_COMMENT.value: 'edit_review_comments',
+}
+
+
+class ActionPermission(drf_permissions.BasePermission):
+    def has_object_permission(self, request, view, obj):
+        auth = get_user_auth(request)
+        if auth.user is None:
+            return False
+
+        target = None
+        provider = None
+        if isinstance(obj, Action):
+            target = obj.target
+            provider = target.provider
+        elif isinstance(obj, ReviewableMixin):
+            target = obj
+            provider = target.provider
+        elif isinstance(obj, ReviewProviderMixin):
+            provider = obj
+        else:
+            raise ValueError('Not a reviews-related model: {}'.format(obj))
+
+        serializer = view.get_serializer()
+
+        if request.method in drf_permissions.SAFE_METHODS:
+            # Moderators and node contributors can view actions
+            is_node_contributor = target is not None and target.node.has_permission(auth.user, osf_permissions.READ)
+            return is_node_contributor or auth.user.has_perm('view_actions', provider)
+        else:
+            # Moderators and node admins can trigger state changes.
+            is_node_admin = target is not None and target.node.has_permission(auth.user, osf_permissions.ADMIN)
+            if not (is_node_admin or auth.user.has_perm('view_submissions', provider)):
+                return False
+
+            # User can trigger state changes on this reviewable, but can they use this trigger in particular?
+            serializer = view.get_serializer(data=request.data)
+            serializer.is_valid(raise_exception=True)
+            trigger = serializer.validated_data.get('trigger')
+            permission = TRIGGER_PERMISSIONS[trigger]
+            return permission is None or request.user.has_perm(permission, target.provider)
diff --git a/api/actions/serializers.py b/api/actions/serializers.py
index c3a5fc5267b..f3c8a75c71f 100644
--- a/api/actions/serializers.py
+++ b/api/actions/serializers.py
@@ -12,12 +12,9 @@
 from api.base.serializers import RelationshipField
 from api.base.serializers import HideIfProviderCommentsAnonymous
 from api.base.serializers import HideIfProviderCommentsPrivate
-
+from osf.exceptions import InvalidTriggerError
 from osf.models import PreprintService
-
-from reviews.exceptions import InvalidTriggerError
-from reviews.workflow import Triggers
-from reviews.workflow import States
+from osf.utils.workflows import DefaultStates, DefaultTriggers
 
 
 class ReviewableCountsRelationshipField(RelationshipField):
@@ -73,12 +70,12 @@ class ActionSerializer(JSONAPISerializer):
 
     id = ser.CharField(source='_id', read_only=True)
 
-    trigger = ser.ChoiceField(choices=Triggers.choices())
+    trigger = ser.ChoiceField(choices=DefaultTriggers.choices())
 
     comment = HideIfProviderCommentsPrivate(ser.CharField(max_length=65535, required=False))
 
-    from_state = ser.ChoiceField(choices=States.choices(), read_only=True)
-    to_state = ser.ChoiceField(choices=States.choices(), read_only=True)
+    from_state = ser.ChoiceField(choices=DefaultStates.choices(), read_only=True)
+    to_state = ser.ChoiceField(choices=DefaultStates.choices(), read_only=True)
 
     date_created = ser.DateTimeField(read_only=True)
     date_modified = ser.DateTimeField(read_only=True)
@@ -124,13 +121,13 @@ def create(self, validated_data):
         target = validated_data.pop('target')
         comment = validated_data.pop('comment', '')
         try:
-            if trigger == Triggers.ACCEPT.value:
+            if trigger == DefaultTriggers.ACCEPT.value:
                 return target.reviews_accept(user, comment)
-            if trigger == Triggers.REJECT.value:
+            if trigger == DefaultTriggers.REJECT.value:
                 return target.reviews_reject(user, comment)
-            if trigger == Triggers.EDIT_COMMENT.value:
+            if trigger == DefaultTriggers.EDIT_COMMENT.value:
                 return target.reviews_edit_comment(user, comment)
-            if trigger == Triggers.SUBMIT.value:
+            if trigger == DefaultTriggers.SUBMIT.value:
                 return target.reviews_submit(user)
         except InvalidTriggerError as e:
             # Invalid transition from the current state
diff --git a/api/actions/views.py b/api/actions/views.py
index 6645e45802f..ad3fd515ee6 100644
--- a/api/actions/views.py
+++ b/api/actions/views.py
@@ -5,10 +5,7 @@
 from rest_framework import generics
 from rest_framework import permissions
 
-from framework.auth.oauth_scopes import CoreScopes
-from osf.models import Action
-from reviews import permissions as reviews_permissions
-
+from api.actions.permissions import ActionPermission
 from api.actions.serializers import ActionSerializer
 from api.base.exceptions import Conflict
 from api.base.parsers import (
@@ -18,6 +15,8 @@
 from api.base.utils import absolute_reverse
 from api.base.views import JSONAPIBaseView
 from api.base import permissions as base_permissions
+from framework.auth.oauth_scopes import CoreScopes
+from osf.models import Action
 
 
 def get_actions_queryset():
@@ -63,7 +62,7 @@ class ActionDetail(JSONAPIBaseView, generics.RetrieveAPIView):
     permission_classes = (
         permissions.IsAuthenticatedOrReadOnly,
         base_permissions.TokenHasScope,
-        reviews_permissions.ActionPermission,
+        ActionPermission,
     )
 
     required_read_scopes = [CoreScopes.ACTIONS_READ]
@@ -150,7 +149,7 @@ class CreateAction(JSONAPIBaseView, generics.ListCreateAPIView):
     permission_classes = (
         permissions.IsAuthenticatedOrReadOnly,
         base_permissions.TokenHasScope,
-        reviews_permissions.ActionPermission,
+        ActionPermission,
     )
 
     required_read_scopes = [CoreScopes.NULL]
diff --git a/api/base/filters.py b/api/base/filters.py
index 0c448d87ced..dd8e66d67c4 100644
--- a/api/base/filters.py
+++ b/api/base/filters.py
@@ -19,7 +19,7 @@
 from rest_framework.filters import OrderingFilter
 from osf.models import Subject, PreprintProvider, Node
 from osf.models.base import GuidMixin
-from reviews.workflow import States
+from osf.utils.workflows import DefaultStates
 
 
 def lowercase(lower):
@@ -504,7 +504,7 @@ def preprints_queryset(self, base_queryset, auth_user, allow_contribs=True):
             admin_user_query = Q(node__contributor__user_id=auth_user.id, node__contributor__admin=True)
             reviews_user_query = Q(node__is_public=True, provider__in=get_objects_for_user(auth_user, 'view_submissions', PreprintProvider))
             if allow_contribs:
-                contrib_user_query = ~Q(reviews_state=States.INITIAL.value) & Q(node__contributor__user_id=auth_user.id, node__contributor__read=True)
+                contrib_user_query = ~Q(reviews_state=DefaultStates.INITIAL.value) & Q(node__contributor__user_id=auth_user.id, node__contributor__read=True)
                 query = (no_user_query | contrib_user_query | admin_user_query | reviews_user_query)
             else:
                 query = (no_user_query | admin_user_query | reviews_user_query)
diff --git a/api/base/settings/defaults.py b/api/base/settings/defaults.py
index e6a54ad8a6e..0f213392752 100644
--- a/api/base/settings/defaults.py
+++ b/api/base/settings/defaults.py
@@ -93,7 +93,6 @@
 
     # OSF
     'osf',
-    'reviews',
 
     # Addons
     'addons.osfstorage',
diff --git a/reviews/permissions.py b/api/preprint_providers/permissions.py
similarity index 60%
rename from reviews/permissions.py
rename to api/preprint_providers/permissions.py
index f9c5fd61a2d..8bd87d1fe6a 100644
--- a/reviews/permissions.py
+++ b/api/preprint_providers/permissions.py
@@ -1,25 +1,13 @@
 # -*- coding: utf-8 -*-
 from __future__ import unicode_literals
 
-import logging
-
+from django.contrib.auth.models import Group
 from guardian.shortcuts import assign_perm
 from guardian.shortcuts import get_perms
 from guardian.shortcuts import remove_perm
 from rest_framework import permissions as drf_permissions
 
-from django.contrib.auth.models import Group
-
 from api.base.utils import get_user_auth
-from osf.models.action import Action
-from website.util import permissions as osf_permissions
-
-from reviews.models import ReviewableMixin, ReviewProviderMixin
-from reviews.workflow import Triggers
-
-
-logger = logging.getLogger(__name__)
-
 
 # Object-level permissions for providers.
 # Prefer assigning object permissions to groups and adding users to groups, over assigning permissions to users.
@@ -53,16 +41,6 @@
     # 'reviewer': (),  # TODO Implement reviewers
 }
 
-
-# Required permission to perform each action. `None` means no permissions required.
-TRIGGER_PERMISSIONS = {
-    Triggers.SUBMIT.value: None,
-    Triggers.ACCEPT.value: 'accept_submissions',
-    Triggers.REJECT.value: 'reject_submissions',
-    Triggers.EDIT_COMMENT.value: 'edit_review_comments',
-}
-
-
 class GroupHelper(object):
     """Helper for managing permission groups for a given provider.
     """
@@ -90,46 +68,6 @@ def update_provider_auth_groups(self):
     def get_permissions(self, user):
         return [p for p in get_perms(user, self.provider) if p in PERMISSIONS]
 
-
-class ActionPermission(drf_permissions.BasePermission):
-    def has_object_permission(self, request, view, obj):
-        auth = get_user_auth(request)
-        if auth.user is None:
-            return False
-
-        target = None
-        provider = None
-        if isinstance(obj, Action):
-            target = obj.target
-            provider = target.provider
-        elif isinstance(obj, ReviewableMixin):
-            target = obj
-            provider = target.provider
-        elif isinstance(obj, ReviewProviderMixin):
-            provider = obj
-        else:
-            raise ValueError('Not a reviews-related model: {}'.format(obj))
-
-        serializer = view.get_serializer()
-
-        if request.method in drf_permissions.SAFE_METHODS:
-            # Moderators and node contributors can view actions
-            is_node_contributor = target is not None and target.node.has_permission(auth.user, osf_permissions.READ)
-            return is_node_contributor or auth.user.has_perm('view_actions', provider)
-        else:
-            # Moderators and node admins can trigger state changes.
-            is_node_admin = target is not None and target.node.has_permission(auth.user, osf_permissions.ADMIN)
-            if not (is_node_admin or auth.user.has_perm('view_submissions', provider)):
-                return False
-
-            # User can trigger state changes on this reviewable, but can they use this trigger in particular?
-            serializer = view.get_serializer(data=request.data)
-            serializer.is_valid(raise_exception=True)
-            trigger = serializer.validated_data.get('trigger')
-            permission = TRIGGER_PERMISSIONS[trigger]
-            return permission is None or request.user.has_perm(permission, target.provider)
-
-
 class CanSetUpProvider(drf_permissions.BasePermission):
     def has_object_permission(self, request, view, obj):
         if request.method in drf_permissions.SAFE_METHODS:
diff --git a/api/preprint_providers/serializers.py b/api/preprint_providers/serializers.py
index 8bddfd91478..7e576316d07 100644
--- a/api/preprint_providers/serializers.py
+++ b/api/preprint_providers/serializers.py
@@ -2,11 +2,10 @@
 from rest_framework import serializers as ser
 from rest_framework.exceptions import ValidationError
 
-from reviews.workflow import Workflows
-
 from api.actions.serializers import ReviewableCountsRelationshipField
 from api.base.utils import absolute_reverse, get_user_auth
 from api.base.serializers import JSONAPISerializer, LinksField, RelationshipField, ShowIfVersion
+from api.preprint_providers.workflows import Workflows
 
 
 class PreprintProviderSerializer(JSONAPISerializer):
diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py
index 2fb9c67c1d2..a55c29f0c72 100644
--- a/api/preprint_providers/views.py
+++ b/api/preprint_providers/views.py
@@ -1,16 +1,10 @@
-from guardian.shortcuts import get_objects_for_user
-from django.db.models import Q
 
+from django.db.models import Q
+from guardian.shortcuts import get_objects_for_user
 from rest_framework import generics
 from rest_framework import permissions as drf_permissions
 from rest_framework.exceptions import NotAuthenticated
 
-from framework.auth.oauth_scopes import CoreScopes
-
-from osf.models import AbstractNode, Subject, PreprintProvider
-
-from reviews import permissions as reviews_permissions
-
 from api.base import permissions as base_permissions
 from api.base.exceptions import InvalidFilterValue, InvalidFilterOperator, Conflict
 from api.base.filters import PreprintFilterMixin, ListFilterMixin
@@ -20,9 +14,11 @@
 from api.licenses.views import LicenseList
 from api.taxonomies.serializers import TaxonomySerializer
 from api.preprint_providers.serializers import PreprintProviderSerializer
+from api.preprint_providers.permissions import CanSetUpProvider, PERMISSIONS
 from api.preprints.serializers import PreprintSerializer
-
 from api.preprints.permissions import PreprintPublishedOrAdmin
+from framework.auth.oauth_scopes import CoreScopes
+from osf.models import AbstractNode, Subject, PreprintProvider
 
 class PreprintProviderList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
     """
@@ -99,8 +95,8 @@ def build_query_from_field(self, field_name, operation):
                 raise NotAuthenticated()
             value = operation['value'].lstrip('[').rstrip(']')
             permissions = [v.strip() for v in value.split(',')]
-            if any(p not in reviews_permissions.PERMISSIONS for p in permissions):
-                valid_permissions = ', '.join(reviews_permissions.PERMISSIONS.keys())
+            if any(p not in PERMISSIONS for p in permissions):
+                valid_permissions = ', '.join(PERMISSIONS.keys())
                 raise InvalidFilterValue('Invalid permission! Valid values are: {}'.format(valid_permissions))
             return Q(id__in=get_objects_for_user(auth_user, permissions, PreprintProvider, any_perm=True))
 
@@ -179,7 +175,7 @@ class PreprintProviderDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView):
     permission_classes = (
         drf_permissions.IsAuthenticatedOrReadOnly,
         base_permissions.TokenHasScope,
-        reviews_permissions.CanSetUpProvider,
+        CanSetUpProvider,
     )
 
     required_read_scopes = [CoreScopes.ALWAYS_PUBLIC]
diff --git a/api/preprint_providers/workflows.py b/api/preprint_providers/workflows.py
new file mode 100644
index 00000000000..a643b0d57e6
--- /dev/null
+++ b/api/preprint_providers/workflows.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from enum import unique
+
+from osf.utils.workflows import ChoiceEnum, DefaultStates
+
+
+@unique
+class Workflows(ChoiceEnum):
+    NONE = None
+    PRE_MODERATION = 'pre-moderation'
+    POST_MODERATION = 'post-moderation'
+
+PUBLIC_STATES = {
+    Workflows.NONE.value: (
+        DefaultStates.INITIAL.value,
+        DefaultStates.PENDING.value,
+        DefaultStates.ACCEPTED.value,
+        DefaultStates.REJECTED.value,
+    ),
+    Workflows.PRE_MODERATION.value: (
+        DefaultStates.ACCEPTED.value,
+    ),
+    Workflows.POST_MODERATION.value: (
+        DefaultStates.PENDING.value,
+        DefaultStates.ACCEPTED.value,
+    )
+}
diff --git a/api/preprints/permissions.py b/api/preprints/permissions.py
index 2a6f4fdde76..ee8d4da2a91 100644
--- a/api/preprints/permissions.py
+++ b/api/preprints/permissions.py
@@ -4,8 +4,8 @@
 
 from api.base.utils import get_user_auth
 from osf.models import PreprintService
+from osf.utils.workflows import DefaultStates
 from website.util import permissions as osf_permissions
-from reviews.workflow import States
 
 
 class PreprintPublishedOrAdmin(permissions.BasePermission):
@@ -21,7 +21,7 @@ def has_object_permission(self, request, view, obj):
                 user_has_permissions = (obj.verified_publishable or
                     (node.is_public and auth.user.has_perm('view_submissions', obj.provider)) or
                     node.has_permission(auth.user, osf_permissions.ADMIN) or
-                    (node.is_contributor(auth.user) and obj.reviews_state != States.INITIAL.value)
+                    (node.is_contributor(auth.user) and obj.reviews_state != DefaultStates.INITIAL.value)
                 )
                 return user_has_permissions
         else:
diff --git a/api/preprints/views.py b/api/preprints/views.py
index 0ebda342966..22f174a2b19 100644
--- a/api/preprints/views.py
+++ b/api/preprints/views.py
@@ -7,8 +7,8 @@
 from framework.auth.oauth_scopes import CoreScopes
 from osf.models import Action, PreprintService
 from osf.utils.requests import check_select_for_update
-from reviews import permissions as reviews_permissions
 
+from api.actions.permissions import ActionPermission
 from api.actions.serializers import ActionSerializer
 from api.actions.views import get_actions_queryset
 from api.base.exceptions import Conflict
@@ -448,7 +448,7 @@ class PreprintActionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin,
     permission_classes = (
         drf_permissions.IsAuthenticatedOrReadOnly,
         base_permissions.TokenHasScope,
-        reviews_permissions.ActionPermission,
+        ActionPermission,
     )
 
     required_read_scopes = [CoreScopes.ACTIONS_READ]
diff --git a/api_tests/preprint_providers/views/test_preprint_provider_detail.py b/api_tests/preprint_providers/views/test_preprint_provider_detail.py
index 18da85ae498..975bc94202c 100644
--- a/api_tests/preprint_providers/views/test_preprint_provider_detail.py
+++ b/api_tests/preprint_providers/views/test_preprint_provider_detail.py
@@ -1,11 +1,11 @@
 import pytest
 
 from api.base.settings.defaults import API_BASE
+from api.preprint_providers.permissions import GroupHelper
 from osf_tests.factories import (
     PreprintProviderFactory,
     AuthUserFactory,
 )
-from reviews.permissions import GroupHelper
 
 @pytest.mark.django_db
 class TestPreprintProviderExists:
diff --git a/api_tests/preprints/filters/test_filters.py b/api_tests/preprints/filters/test_filters.py
index 85457847a44..26a87e19c33 100644
--- a/api_tests/preprints/filters/test_filters.py
+++ b/api_tests/preprints/filters/test_filters.py
@@ -1,13 +1,11 @@
 import pytest
 
-from framework.auth.core import Auth
+from api.preprint_providers.permissions import GroupHelper
 from osf_tests.factories import (
     PreprintFactory,
     AuthUserFactory,
     SubjectFactory,
-    PreprintProviderFactory
 )
-from reviews.permissions import GroupHelper
 
 
 @pytest.mark.django_db
diff --git a/api_tests/preprints/views/test_preprint_actions.py b/api_tests/preprints/views/test_preprint_actions.py
index c2ee5d5f4bf..a824fc66535 100644
--- a/api_tests/preprints/views/test_preprint_actions.py
+++ b/api_tests/preprints/views/test_preprint_actions.py
@@ -1,11 +1,10 @@
 import pytest
 
 from api.base.settings.defaults import API_BASE
+from api.preprint_providers.permissions import GroupHelper
 from osf_tests.factories import (
-    PreprintFactory,
     AuthUserFactory,
 )
-from reviews.permissions import GroupHelper
 from website.util import permissions as osf_permissions
 
 from api_tests.reviews.mixins.filter_mixins import ActionFilterMixin
diff --git a/api_tests/preprints/views/test_preprint_detail.py b/api_tests/preprints/views/test_preprint_detail.py
index 2369476e073..8fb62df9646 100644
--- a/api_tests/preprints/views/test_preprint_detail.py
+++ b/api_tests/preprints/views/test_preprint_detail.py
@@ -1,12 +1,11 @@
-import functools
 import mock
-from django.db.models import Q
 import pytest
 
 from api.base.settings.defaults import API_BASE
 from api_tests import utils as test_utils
 from framework.auth.core import Auth
 from osf.models import PreprintService, NodeLicense
+from osf.utils.workflows import DefaultStates
 from osf_tests.factories import (
     PreprintFactory,
     AuthUserFactory,
@@ -15,10 +14,6 @@
     PreprintProviderFactory,
 )
 from rest_framework import exceptions
-from tests.base import fake, capture_signals
-from website.project.signals import contributor_added
-from website.identifiers.utils import build_ezid_metadata
-from reviews.workflow import States
 
 
 def build_preprint_update_payload(node_id, attributes=None, relationships=None):
@@ -69,7 +64,7 @@ def test_preprint_detail(self, app, user, preprint, url, res, data):
 
     #   test contributors in preprint data
         assert data['relationships'].get('contributors', None)
-        assert data['relationships']['contributors'].get('data', None) == None
+        assert data['relationships']['contributors'].get('data', None) is None
 
     #   test node type and id in preprint data
         assert data['relationships']['node']['data'].get('id', None) == preprint.node._id
@@ -1014,15 +1009,15 @@ def file_one_private_project(self, admin, private_project):
 
     @pytest.fixture()
     def unpublished_reviews_preprint(self, admin, reviews_provider, subject, public_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=reviews_provider, subjects=[[subject._id]], project=public_project, is_published=False, reviews_state=States.PENDING.value)
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=reviews_provider, subjects=[[subject._id]], project=public_project, is_published=False, reviews_state=DefaultStates.PENDING.value)
 
     @pytest.fixture()
     def unpublished_reviews_initial_preprint(self, admin, reviews_provider, subject, public_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=reviews_provider, subjects=[[subject._id]], project=public_project, is_published=False, reviews_state=States.INITIAL.value)
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=reviews_provider, subjects=[[subject._id]], project=public_project, is_published=False, reviews_state=DefaultStates.INITIAL.value)
 
     @pytest.fixture()
     def private_reviews_preprint(self, admin, reviews_provider, subject, private_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunsets.pdf', provider=reviews_provider, subjects=[[subject._id]], project=private_project, is_published=False, reviews_state=States.PENDING.value)
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunsets.pdf', provider=reviews_provider, subjects=[[subject._id]], project=private_project, is_published=False, reviews_state=DefaultStates.PENDING.value)
 
     @pytest.fixture()
     def unpublished_url(self, unpublished_reviews_preprint):
diff --git a/api_tests/preprints/views/test_preprint_list.py b/api_tests/preprints/views/test_preprint_list.py
index e38b3140192..45805fee66f 100644
--- a/api_tests/preprints/views/test_preprint_list.py
+++ b/api_tests/preprints/views/test_preprint_list.py
@@ -14,6 +14,7 @@
 from api_tests.reviews.mixins.filter_mixins import ReviewableFilterMixin
 from framework.auth.core import Auth
 from osf.models import PreprintService, Node
+from osf.utils.workflows import DefaultStates
 from osf_tests.factories import (
     ProjectFactory,
     PreprintFactory,
@@ -24,7 +25,6 @@
 from tests.base import ApiTestCase, capture_signals
 from website.project import signals as project_signals
 from website.util import permissions
-from reviews.workflow import States
 
 def build_preprint_create_payload(node_id=None, provider_id=None, file_id=None, attrs={}):
     payload = {
@@ -505,7 +505,7 @@ def url(self):
 
     @pytest.fixture()
     def preprint_unpublished(self, user_admin_contrib, provider_one, project_public, subject):
-        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=States.PENDING.value)
+        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=DefaultStates.PENDING.value)
 
     def test_unpublished_visible_to_admins(self, app, user_admin_contrib, preprint_unpublished, preprint_published, url):
         res = app.get(url, auth=user_admin_contrib.auth)
@@ -552,7 +552,7 @@ def url(self):
 
     @pytest.fixture()
     def preprint_unpublished(self, user_admin_contrib, provider_one, project_public, subject):
-        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=States.INITIAL.value)
+        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=DefaultStates.INITIAL.value)
 
     def test_unpublished_visible_to_admins(self, app, user_admin_contrib, preprint_unpublished, preprint_published, url):
         res = app.get(url, auth=user_admin_contrib.auth)
@@ -648,7 +648,7 @@ def project_public(self, user_admin_contrib, user_write_contrib):
 
     @pytest.fixture()
     def preprint_unpublished(self, user_admin_contrib, provider_one, project_public, subject):
-        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=States.INITIAL.value)
+        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=DefaultStates.INITIAL.value)
 
     @pytest.fixture()
     def list_url(self):
@@ -701,7 +701,7 @@ def project_public(self, user_admin_contrib, user_write_contrib):
 
     @pytest.fixture()
     def preprint_unpublished(self, user_admin_contrib, provider_one, project_public, subject):
-        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=States.PENDING.value)
+        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=DefaultStates.PENDING.value)
 
     @pytest.fixture()
     def list_url(self):
diff --git a/api_tests/reviews/mixins/comment_settings.py b/api_tests/reviews/mixins/comment_settings.py
index 8826da5b8be..23e006dc19b 100644
--- a/api_tests/reviews/mixins/comment_settings.py
+++ b/api_tests/reviews/mixins/comment_settings.py
@@ -1,15 +1,12 @@
-from datetime import timedelta
-
 import pytest
-from furl import furl
 
+from api.preprint_providers.permissions import GroupHelper
 from osf_tests.factories import (
     ActionFactory,
     AuthUserFactory,
     PreprintFactory,
     PreprintProviderFactory,
 )
-from reviews.permissions import GroupHelper
 from website.util import permissions as osf_permissions
 
 
diff --git a/api_tests/reviews/mixins/filter_mixins.py b/api_tests/reviews/mixins/filter_mixins.py
index e1cca87a4eb..082811d3982 100644
--- a/api_tests/reviews/mixins/filter_mixins.py
+++ b/api_tests/reviews/mixins/filter_mixins.py
@@ -3,6 +3,7 @@
 import pytest
 from furl import furl
 
+from api.preprint_providers.permissions import GroupHelper
 from osf_tests.factories import (
     ActionFactory,
     AuthUserFactory,
@@ -10,7 +11,6 @@
     PreprintProviderFactory,
     ProjectFactory,
 )
-from reviews.permissions import GroupHelper
 
 
 def get_actual(app, url, user=None, sort=None, expect_errors=False, **filters):
diff --git a/api_tests/users/views/test_user_actions.py b/api_tests/users/views/test_user_actions.py
index 1286e666279..4e09dd84dda 100644
--- a/api_tests/users/views/test_user_actions.py
+++ b/api_tests/users/views/test_user_actions.py
@@ -2,17 +2,14 @@
 import mock
 
 from api.base.settings.defaults import API_BASE
-
+from api.preprint_providers.permissions import GroupHelper
 from osf_tests.factories import (
     PreprintFactory,
     AuthUserFactory,
     PreprintProviderFactory,
 )
-
 from website.util import permissions as osf_permissions
 
-from reviews.permissions import GroupHelper
-
 from api_tests.reviews.mixins.filter_mixins import ActionFilterMixin
 
 
diff --git a/osf/exceptions.py b/osf/exceptions.py
index 3533fdf89ac..ab93cbcc7f2 100644
--- a/osf/exceptions.py
+++ b/osf/exceptions.py
@@ -114,3 +114,10 @@ class ValidationTypeError(ValidationError, MODMValidationTypeError):
 
 class NaiveDatetimeException(Exception):
     pass
+
+class InvalidTriggerError(Exception):
+    def __init__(self, trigger, state, valid_triggers):
+        self.trigger = trigger
+        self.state = state
+        self.valid_triggers = valid_triggers
+        self.message = 'Cannot trigger "{}" from state "{}". Valid triggers: {}'.format(trigger, state, valid_triggers)
diff --git a/osf/management/commands/create_fake_preprint_actions.py b/osf/management/commands/create_fake_preprint_actions.py
index a517097e382..3643fcecb6e 100644
--- a/osf/management/commands/create_fake_preprint_actions.py
+++ b/osf/management/commands/create_fake_preprint_actions.py
@@ -8,8 +8,8 @@
 
 from django.core.management.base import BaseCommand
 
-from reviews import workflow
 from osf.models import Action, PreprintService, OSFUser
+from osf.utils.workflows import DefaultStates, DefaultTriggers
 
 logger = logging.getLogger(__name__)
 
@@ -44,8 +44,8 @@ def handle(self, *args, **options):
             user = OSFUser.objects.get(guids___id=user_guid)
 
         fake = Faker()
-        triggers = [a.value for a in workflow.Triggers]
-        states = [s.value for s in workflow.States]
+        triggers = [a.value for a in DefaultTriggers]
+        states = [s.value for s in DefaultStates]
         for preprint in PreprintService.objects.filter(actions__isnull=True):
             for i in range(num_actions):
                 action = Action(
diff --git a/osf/management/commands/update_auth_groups.py b/osf/management/commands/update_auth_groups.py
index 30ea165915b..0670396a3ee 100644
--- a/osf/management/commands/update_auth_groups.py
+++ b/osf/management/commands/update_auth_groups.py
@@ -6,8 +6,8 @@
 from django.core.management.base import BaseCommand
 from django.db import transaction
 
-from reviews.models import ReviewProviderMixin
-from reviews.permissions import GroupHelper
+from api.preprint_providers.permissions import GroupHelper
+from osf.models.mixins import ReviewProviderMixin
 
 logger = logging.getLogger(__name__)
 
diff --git a/osf/migrations/0060_reviews.py b/osf/migrations/0060_reviews.py
index b95512c3218..845a568eab1 100644
--- a/osf/migrations/0060_reviews.py
+++ b/osf/migrations/0060_reviews.py
@@ -6,9 +6,9 @@
 from django.core.management.sql import emit_post_migrate_signal
 from django.db import migrations, models
 import django.db.models.deletion
+from api.preprint_providers.permissions import GroupHelper
 import osf.models.base
 import osf.utils.fields
-from reviews.permissions import GroupHelper
 
 
 def create_provider_auth_groups(apps, schema_editor):
diff --git a/osf/migrations/0062_accept_preprints.py b/osf/migrations/0062_accept_preprints.py
index 2e93be4e382..7eeec8c1cb9 100644
--- a/osf/migrations/0062_accept_preprints.py
+++ b/osf/migrations/0062_accept_preprints.py
@@ -5,15 +5,15 @@
 from django.db import migrations
 from django.db.models import F
 
-from reviews.workflow import States
+from osf.utils.workflows import DefaultStates
 
 
 # When a preprint provider is set up with a reviews/moderation workflow,
 # make sure all existing preprints will be in a public state.
 def accept_all_published_preprints(apps, schema_editor):
     Preprint = apps.get_model('osf', 'PreprintService')
-    published_preprints = Preprint.objects.filter(is_published=True, reviews_state=States.INITIAL.value)
-    published_preprints.update(reviews_state=States.ACCEPTED.value, date_last_transitioned=F('date_published'))
+    published_preprints = Preprint.objects.filter(is_published=True, reviews_state=DefaultStates.INITIAL.value)
+    published_preprints.update(reviews_state=DefaultStates.ACCEPTED.value, date_last_transitioned=F('date_published'))
 
 
 class Migration(migrations.Migration):
diff --git a/osf/models/action.py b/osf/models/action.py
index f3c712bf93d..af125606664 100644
--- a/osf/models/action.py
+++ b/osf/models/action.py
@@ -5,11 +5,9 @@
 
 from include import IncludeManager
 
-from reviews.workflow import Triggers
-from reviews.workflow import States
-
 from osf.models.base import BaseModel, ObjectIDMixin
 from osf.utils.fields import NonNaiveDateTimeField
+from osf.utils.workflows import DefaultStates, DefaultTriggers
 
 
 class Action(ObjectIDMixin, BaseModel):
@@ -19,9 +17,9 @@ class Action(ObjectIDMixin, BaseModel):
     target = models.ForeignKey('PreprintService', related_name='actions', on_delete=models.CASCADE)
     creator = models.ForeignKey('OSFUser', related_name='+', on_delete=models.CASCADE)
 
-    trigger = models.CharField(max_length=31, choices=Triggers.choices())
-    from_state = models.CharField(max_length=31, choices=States.choices())
-    to_state = models.CharField(max_length=31, choices=States.choices())
+    trigger = models.CharField(max_length=31, choices=DefaultTriggers.choices())
+    from_state = models.CharField(max_length=31, choices=DefaultStates.choices())
+    to_state = models.CharField(max_length=31, choices=DefaultStates.choices())
 
     comment = models.TextField(blank=True)
 
diff --git a/osf/models/mixins.py b/osf/models/mixins.py
index b2f31d5ca94..83c77fecbb8 100644
--- a/osf/models/mixins.py
+++ b/osf/models/mixins.py
@@ -1,11 +1,18 @@
 import pytz
+
 from django.apps import apps
-from django.db import models
+from django.db import models, transaction
 from django.core.exceptions import ObjectDoesNotExist
+from include import IncludeQuerySet
+
+from api.preprint_providers.workflows import Workflows, PUBLIC_STATES
 from framework.analytics import increment_user_activity_counters
+from osf.exceptions import InvalidTriggerError
 from osf.models.node_relation import NodeRelation
 from osf.models.nodelog import NodeLog
 from osf.models.tag import Tag
+from osf.utils.machines import ReviewsMachine
+from osf.utils.workflows import DefaultStates, DefaultTriggers
 from website.exceptions import NodeStateError
 from website import settings
 
@@ -456,3 +463,105 @@ def get_extra_log_params(self, comment):
         """Return extra data to pass as `params` to `Node.add_log` when a new comment is
         created, edited, deleted or restored."""
         return {}
+
+
+class ReviewProviderMixin(models.Model):
+    """A reviewed/moderated collection of objects.
+    """
+
+    REVIEWABLE_RELATION_NAME = None
+
+    class Meta:
+        abstract = True
+
+    reviews_workflow = models.CharField(null=True, blank=True, max_length=15, choices=Workflows.choices())
+    reviews_comments_private = models.NullBooleanField()
+    reviews_comments_anonymous = models.NullBooleanField()
+
+    @property
+    def is_reviewed(self):
+        return self.reviews_workflow is not None
+
+    def get_reviewable_state_counts(self):
+        assert self.REVIEWABLE_RELATION_NAME, 'REVIEWABLE_RELATION_NAME must be set to compute state counts'
+        qs = getattr(self, self.REVIEWABLE_RELATION_NAME)
+        if isinstance(qs, IncludeQuerySet):
+            qs = qs.include(None)
+        qs = qs.filter(node__isnull=False, node__is_deleted=False, node__is_public=True).values('reviews_state').annotate(count=models.Count('*'))
+        counts = {state.value: 0 for state in DefaultStates}
+        counts.update({row['reviews_state']: row['count'] for row in qs if row['reviews_state'] in counts})
+        return counts
+
+    def add_admin(self, user):
+        from api.preprint_providers.permissions import GroupHelper
+        return GroupHelper(self).get_group('admin').user_set.add(user)
+
+    def add_moderator(self, user):
+        from api.preprint_providers.permissions import GroupHelper
+        return GroupHelper(self).get_group('moderator').user_set.add(user)
+
+
+class ReviewableMixin(models.Model):
+    """Something that may be included in a reviewed collection and is subject to a reviews workflow.
+    """
+
+    class Meta:
+        abstract = True
+
+    # NOTE: reviews_state should rarely/never be modified directly -- use the state transition methods below
+    reviews_state = models.CharField(max_length=15, db_index=True, choices=DefaultStates.choices(), default=DefaultStates.INITIAL.value)
+
+    date_last_transitioned = models.DateTimeField(null=True, blank=True, db_index=True)
+
+    @property
+    def in_public_reviews_state(self):
+        public_states = PUBLIC_STATES.get(self.provider.reviews_workflow)
+        if not public_states:
+            return False
+        return self.reviews_state in public_states
+
+    def reviews_submit(self, user):
+        """Run the 'submit' state transition and create a corresponding Action.
+
+        Params:
+            user: The user triggering this transition.
+        """
+        return self.__run_transition(DefaultTriggers.SUBMIT.value, user=user)
+
+    def reviews_accept(self, user, comment):
+        """Run the 'accept' state transition and create a corresponding Action.
+
+        Params:
+            user: The user triggering this transition.
+            comment: Text describing why.
+        """
+        return self.__run_transition(DefaultTriggers.ACCEPT.value, user=user, comment=comment)
+
+    def reviews_reject(self, user, comment):
+        """Run the 'reject' state transition and create a corresponding Action.
+
+        Params:
+            user: The user triggering this transition.
+            comment: Text describing why.
+        """
+        return self.__run_transition(DefaultTriggers.REJECT.value, user=user, comment=comment)
+
+    def reviews_edit_comment(self, user, comment):
+        """Run the 'edit_comment' state transition and create a corresponding Action.
+
+        Params:
+            user: The user triggering this transition.
+            comment: New comment text.
+        """
+        return self.__run_transition(DefaultTriggers.EDIT_COMMENT.value, user=user, comment=comment)
+
+    def __run_transition(self, trigger, **kwargs):
+        reviews_machine = ReviewsMachine(self, 'reviews_state')
+        trigger_fn = getattr(reviews_machine, trigger)
+        with transaction.atomic():
+            result = trigger_fn(**kwargs)
+            action = reviews_machine.action
+            if not result or action is None:
+                valid_triggers = reviews_machine.get_triggers(self.reviews_state)
+                raise InvalidTriggerError(trigger, self.reviews_state, valid_triggers)
+            return action
diff --git a/osf/models/preprint_provider.py b/osf/models/preprint_provider.py
index e6ae473ea7f..665ac813cfb 100644
--- a/osf/models/preprint_provider.py
+++ b/osf/models/preprint_provider.py
@@ -4,15 +4,13 @@
 from django.db.models.signals import post_save
 from django.dispatch import receiver
 
+from api.preprint_providers.permissions import GroupHelper, PERMISSIONS
 from osf.models.base import BaseModel, ObjectIDMixin
 from osf.models.licenses import NodeLicense
+from osf.models.mixins import ReviewProviderMixin
 from osf.models.subject import Subject
 from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
 from osf.utils.fields import EncryptedTextField
-
-from reviews import permissions as reviews_permissions
-from reviews.models import ReviewProviderMixin
-
 from website import settings
 from website.util import api_v2_url
 
@@ -62,7 +60,7 @@ class PreprintProvider(ObjectIDMixin, ReviewProviderMixin, BaseModel):
                                         null=True, blank=True, on_delete=models.CASCADE)
 
     class Meta:
-        permissions = tuple(reviews_permissions.PERMISSIONS.items()) + (
+        permissions = tuple(PERMISSIONS.items()) + (
             # custom permissions for use in the OSF Admin App
             ('view_preprintprovider', 'Can view preprint provider details'),
         )
@@ -129,4 +127,4 @@ def rules_to_subjects(rules):
 @receiver(post_save, sender=PreprintProvider)
 def create_provider_auth_groups(sender, instance, created, **kwargs):
     if created:
-        reviews_permissions.GroupHelper(instance).update_provider_auth_groups()
+        GroupHelper(instance).update_provider_auth_groups()
diff --git a/osf/models/preprint_service.py b/osf/models/preprint_service.py
index b1c982a24c6..0d92af861a4 100644
--- a/osf/models/preprint_service.py
+++ b/osf/models/preprint_service.py
@@ -10,17 +10,16 @@
 from framework.postcommit_tasks.handlers import enqueue_postcommit_task
 from framework.exceptions import PermissionsError
 from osf.models import NodeLog, Subject
+from osf.models.mixins import ReviewableMixin
 from osf.models.validators import validate_subject_hierarchy
 from osf.utils.fields import NonNaiveDateTimeField
+from osf.utils.workflows import DefaultStates
 from website.preprints.tasks import on_preprint_updated, get_and_set_preprint_identifiers
 from website.project.licenses import set_license
 from website.util import api_v2_url
 from website.util.permissions import ADMIN
 from website import settings, mails
 
-from reviews.models.mixins import ReviewableMixin
-from reviews.workflow import States
-
 from osf.models.base import BaseModel, GuidMixin
 from osf.models.identifiers import IdentifierMixin, Identifier
 
@@ -185,7 +184,7 @@ def set_published(self, published, auth, save=False):
             self.node._has_abandoned_preprint = False
 
             # In case this provider is ever set up to use a reviews workflow, put this preprint in a sensible state
-            self.reviews_state = States.ACCEPTED.value
+            self.reviews_state = DefaultStates.ACCEPTED.value
             self.date_last_transitioned = self.date_published
 
             self.node.add_log(
diff --git a/osf/utils/machines.py b/osf/utils/machines.py
new file mode 100644
index 00000000000..4c2d87ec0e2
--- /dev/null
+++ b/osf/utils/machines.py
@@ -0,0 +1,128 @@
+
+from django.utils import timezone
+from transitions import Machine
+
+from api.preprint_providers.workflows import Workflows
+from framework.auth import Auth
+from framework.postcommit_tasks.handlers import enqueue_postcommit_task
+from osf.models.action import Action
+from osf.models.nodelog import NodeLog
+from osf.utils.workflows import DefaultStates, DEFAULT_TRANSITIONS
+from website.preprints.tasks import get_and_set_preprint_identifiers
+from website.reviews import signals as reviews_signals
+from website.settings import DOMAIN
+
+class ReviewsMachine(Machine):
+
+    action = None
+    from_state = None
+
+    def __init__(self, reviewable, state_attr):
+        self.reviewable = reviewable
+        self.__state_attr = state_attr
+
+        super(ReviewsMachine, self).__init__(
+            states=[s.value for s in DefaultStates],
+            transitions=DEFAULT_TRANSITIONS,
+            initial=self.state,
+            send_event=True,
+            prepare_event=['initialize_machine'],
+            ignore_invalid_triggers=True,
+        )
+
+    @property
+    def state(self):
+        return getattr(self.reviewable, self.__state_attr)
+
+    @state.setter
+    def state(self, value):
+        setattr(self.reviewable, self.__state_attr, value)
+
+    def initialize_machine(self, ev):
+        self.action = None
+        self.from_state = ev.state
+
+    def save_action(self, ev):
+        user = ev.kwargs.get('user')
+        self.action = Action.objects.create(
+            target=self.reviewable,
+            creator=user,
+            trigger=ev.event.name,
+            from_state=self.from_state.name,
+            to_state=ev.state.name,
+            comment=ev.kwargs.get('comment', ''),
+        )
+
+    def update_last_transitioned(self, ev):
+        now = self.action.date_created if self.action is not None else timezone.now()
+        self.reviewable.date_last_transitioned = now
+
+    def save_changes(self, ev):
+        node = self.reviewable.node
+        node._has_abandoned_preprint = False
+        now = self.action.date_created if self.action is not None else timezone.now()
+        should_publish = self.reviewable.in_public_reviews_state
+        if should_publish and not self.reviewable.is_published:
+            if not (self.reviewable.node.preprint_file and self.reviewable.node.preprint_file.node == self.reviewable.node):
+                raise ValueError('Preprint node is not a valid preprint; cannot publish.')
+            if not self.reviewable.provider:
+                raise ValueError('Preprint provider not specified; cannot publish.')
+            if not self.reviewable.subjects.exists():
+                raise ValueError('Preprint must have at least one subject to be published.')
+            self.reviewable.date_published = now
+            self.reviewable.is_published = True
+            enqueue_postcommit_task(get_and_set_preprint_identifiers, (), {'preprint': self.reviewable}, celery=True)
+        elif not should_publish and self.reviewable.is_published:
+            self.reviewable.is_published = False
+        self.reviewable.save()
+        node.save()
+
+    def resubmission_allowed(self, ev):
+        return self.reviewable.provider.reviews_workflow == Workflows.PRE_MODERATION.value
+
+    def notify_submit(self, ev):
+        context = self.get_context()
+        context['referrer'] = ev.kwargs.get('user')
+        user = ev.kwargs.get('user')
+        auth = Auth(user)
+        self.reviewable.node.add_log(
+            action=NodeLog.PREPRINT_INITIATED,
+            params={
+                'preprint': self.reviewable._id
+            },
+            auth=auth,
+            save=False,
+        )
+        recipients = list(self.reviewable.node.contributors)
+        reviews_signals.reviews_email_submit.send(context=context, recipients=recipients)
+
+    def notify_resubmit(self, ev):
+        context = self.get_context()
+        reviews_signals.reviews_email.send(creator=ev.kwargs.get('user'), context=context,
+                                           template='reviews_resubmission_confirmation',
+                                           action=self.action)
+
+    def notify_accept_reject(self, ev):
+        context = self.get_context()
+        context['notify_comment'] = not self.reviewable.provider.reviews_comments_private and self.action.comment
+        context['is_rejected'] = self.action.to_state == DefaultStates.REJECTED.value
+        context['was_pending'] = self.action.from_state == DefaultStates.PENDING.value
+        reviews_signals.reviews_email.send(creator=ev.kwargs.get('user'), context=context,
+                                           template='reviews_submission_status',
+                                           action=self.action)
+    def notify_edit_comment(self, ev):
+        context = self.get_context()
+        if not self.reviewable.provider.reviews_comments_private and self.action.comment:
+            reviews_signals.reviews_email.send(creator=ev.kwargs.get('user'), context=context,
+                                               template='reviews_update_comment',
+                                               action=self.action)
+
+    def get_context(self):
+        return {
+            'domain': DOMAIN,
+            'reviewable': self.reviewable,
+            'workflow': self.reviewable.provider.reviews_workflow,
+            'provider_url': self.reviewable.provider.domain or '{domain}preprints/{provider_id}'.format(domain=DOMAIN, provider_id=self.reviewable.provider._id),
+            'provider_contact_email': self.reviewable.provider.email_contact or 'contact@osf.io',
+            'provider_support_email': self.reviewable.provider.email_support or 'support@osf.io',
+        }
diff --git a/osf/utils/workflows.py b/osf/utils/workflows.py
new file mode 100644
index 00000000000..595c3a20847
--- /dev/null
+++ b/osf/utils/workflows.py
@@ -0,0 +1,64 @@
+
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from enum import Enum
+from enum import unique
+
+class ChoiceEnum(Enum):
+    @classmethod
+    def choices(cls):
+        return tuple((v, unicode(v).title()) for v in cls.values())
+
+    @classmethod
+    def values(cls):
+        return tuple(c.value for c in cls)
+
+@unique
+class DefaultStates(ChoiceEnum):
+    INITIAL = 'initial'
+    PENDING = 'pending'
+    ACCEPTED = 'accepted'
+    REJECTED = 'rejected'
+
+
+@unique
+class DefaultTriggers(ChoiceEnum):
+    SUBMIT = 'submit'
+    ACCEPT = 'accept'
+    REJECT = 'reject'
+    EDIT_COMMENT = 'edit_comment'
+
+DEFAULT_TRANSITIONS = [
+    {
+        'trigger': DefaultTriggers.SUBMIT.value,
+        'source': [DefaultStates.INITIAL.value],
+        'dest': DefaultStates.PENDING.value,
+        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_submit'],
+    },
+    {
+        'trigger': DefaultTriggers.SUBMIT.value,
+        'source': [DefaultStates.PENDING.value, DefaultStates.REJECTED.value],
+        'conditions': 'resubmission_allowed',
+        'dest': DefaultStates.PENDING.value,
+        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_resubmit'],
+    },
+    {
+        'trigger': DefaultTriggers.ACCEPT.value,
+        'source': [DefaultStates.PENDING.value, DefaultStates.REJECTED.value],
+        'dest': DefaultStates.ACCEPTED.value,
+        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_accept_reject'],
+    },
+    {
+        'trigger': DefaultTriggers.REJECT.value,
+        'source': [DefaultStates.PENDING.value, DefaultStates.ACCEPTED.value],
+        'dest': DefaultStates.REJECTED.value,
+        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_accept_reject'],
+    },
+    {
+        'trigger': DefaultTriggers.EDIT_COMMENT.value,
+        'source': [DefaultStates.PENDING.value, DefaultStates.REJECTED.value, DefaultStates.ACCEPTED.value],
+        'dest': '=',
+        'after': ['save_action', 'save_changes', 'notify_edit_comment'],
+    },
+]
diff --git a/osf_tests/factories.py b/osf_tests/factories.py
index be74dfc0a81..6cfcd0110b8 100644
--- a/osf_tests/factories.py
+++ b/osf_tests/factories.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-import functools
 import time
 
 import datetime
@@ -15,7 +14,6 @@
 from django.db.utils import IntegrityError
 from faker import Factory
 
-from reviews import workflow
 from website import settings
 from website.notifications.constants import NOTIFICATION_TYPES
 from website.util import permissions
@@ -27,6 +25,7 @@
 from osf import models
 from osf.models.sanctions import Sanction
 from osf.utils.names import impute_names_model
+from osf.utils.workflows import DefaultStates, DefaultTriggers
 from addons.osfstorage.models import OsfStorageFile
 
 fake = Factory.create()
@@ -807,10 +806,10 @@ class ActionFactory(DjangoModelFactory):
     class Meta:
         model = models.Action
 
-    trigger = FuzzyChoice(choices=workflow.Triggers.values())
+    trigger = FuzzyChoice(choices=DefaultTriggers.values())
     comment = factory.Faker('text')
-    from_state = FuzzyChoice(choices=workflow.States.values())
-    to_state = FuzzyChoice(choices=workflow.States.values())
+    from_state = FuzzyChoice(choices=DefaultStates.values())
+    to_state = FuzzyChoice(choices=DefaultStates.values())
 
     target = factory.SubFactory(PreprintFactory)
     creator = factory.SubFactory(AuthUserFactory)
diff --git a/osf_tests/test_reviewable.py b/osf_tests/test_reviewable.py
index f493dbcf20c..b7e5c6e1d22 100644
--- a/osf_tests/test_reviewable.py
+++ b/osf_tests/test_reviewable.py
@@ -1,8 +1,8 @@
 import pytest
 
 from osf.models import PreprintService
+from osf.utils.workflows import DefaultStates
 from osf_tests.factories import PreprintFactory, AuthUserFactory
-from reviews.workflow import States
 
 @pytest.mark.django_db
 class TestReviewable:
@@ -10,22 +10,22 @@ class TestReviewable:
     def test_state_changes(self):
         user = AuthUserFactory()
         preprint = PreprintFactory(provider__reviews_workflow='pre-moderation', is_published=False)
-        assert preprint.reviews_state == States.INITIAL.value
+        assert preprint.reviews_state == DefaultStates.INITIAL.value
 
         preprint.reviews_submit(user)
-        assert preprint.reviews_state == States.PENDING.value
+        assert preprint.reviews_state == DefaultStates.PENDING.value
 
         preprint.reviews_accept(user, 'comment')
-        assert preprint.reviews_state == States.ACCEPTED.value
+        assert preprint.reviews_state == DefaultStates.ACCEPTED.value
         from_db = PreprintService.objects.get(id=preprint.id)
-        assert from_db.reviews_state == States.ACCEPTED.value
+        assert from_db.reviews_state == DefaultStates.ACCEPTED.value
 
         preprint.reviews_reject(user, 'comment')
-        assert preprint.reviews_state == States.REJECTED.value
+        assert preprint.reviews_state == DefaultStates.REJECTED.value
         from_db.refresh_from_db()
-        assert from_db.reviews_state == States.REJECTED.value
+        assert from_db.reviews_state == DefaultStates.REJECTED.value
 
         preprint.reviews_accept(user, 'comment')
-        assert preprint.reviews_state == States.ACCEPTED.value
+        assert preprint.reviews_state == DefaultStates.ACCEPTED.value
         from_db.refresh_from_db()
-        assert from_db.reviews_state == States.ACCEPTED.value
+        assert from_db.reviews_state == DefaultStates.ACCEPTED.value
diff --git a/reviews/__init__.py b/reviews/__init__.py
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/reviews/apps.py b/reviews/apps.py
deleted file mode 100644
index 09ab21d5a6e..00000000000
--- a/reviews/apps.py
+++ /dev/null
@@ -1,8 +0,0 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from django.apps import AppConfig
-
-
-class ReviewsConfig(AppConfig):
-    name = 'reviews'
diff --git a/reviews/exceptions.py b/reviews/exceptions.py
deleted file mode 100644
index d155f961743..00000000000
--- a/reviews/exceptions.py
+++ /dev/null
@@ -1,6 +0,0 @@
-class InvalidTriggerError(Exception):
-    def __init__(self, trigger, state, valid_triggers):
-        self.trigger = trigger
-        self.state = state
-        self.valid_triggers = valid_triggers
-        self.message = 'Cannot trigger "{}" from state "{}". Valid triggers: {}'.format(trigger, state, valid_triggers)
diff --git a/reviews/models/__init__.py b/reviews/models/__init__.py
deleted file mode 100644
index 8fd9a1ce1c0..00000000000
--- a/reviews/models/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .mixins import ReviewProviderMixin  # noqa
-from .mixins import ReviewableMixin  # noqa
diff --git a/reviews/models/mixins.py b/reviews/models/mixins.py
deleted file mode 100644
index 47abbb10a18..00000000000
--- a/reviews/models/mixins.py
+++ /dev/null
@@ -1,277 +0,0 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from include import IncludeQuerySet
-from transitions import Machine
-from framework.auth import Auth
-from framework.postcommit_tasks.handlers import enqueue_postcommit_task
-
-from django.db import models
-from django.db import transaction
-from django.utils import timezone
-
-from osf.models.action import Action
-from osf.models import NodeLog
-from reviews import workflow
-from reviews.exceptions import InvalidTriggerError
-from website.preprints.tasks import get_and_set_preprint_identifiers
-
-from website import settings
-
-from website.mails import mails
-from website.notifications.emails import get_user_subscriptions
-from website.notifications import utils
-from website.notifications import emails
-from website.reviews import signals as reviews_signals
-
-
-class ReviewProviderMixin(models.Model):
-    """A reviewed/moderated collection of objects.
-    """
-
-    REVIEWABLE_RELATION_NAME = None
-
-    class Meta:
-        abstract = True
-
-    reviews_workflow = models.CharField(null=True, blank=True, max_length=15, choices=workflow.Workflows.choices())
-    reviews_comments_private = models.NullBooleanField()
-    reviews_comments_anonymous = models.NullBooleanField()
-
-    @property
-    def is_reviewed(self):
-        return self.reviews_workflow is not None
-
-    def get_reviewable_state_counts(self):
-        assert self.REVIEWABLE_RELATION_NAME, 'REVIEWABLE_RELATION_NAME must be set to compute state counts'
-        qs = getattr(self, self.REVIEWABLE_RELATION_NAME)
-        if isinstance(qs, IncludeQuerySet):
-            qs = qs.include(None)
-        qs = qs.filter(node__isnull=False, node__is_deleted=False, node__is_public=True).values('reviews_state').annotate(count=models.Count('*'))
-        counts = {state.value: 0 for state in workflow.States}
-        counts.update({row['reviews_state']: row['count'] for row in qs if row['reviews_state'] in counts})
-        return counts
-
-    def add_admin(self, user):
-        from reviews.permissions import GroupHelper
-        return GroupHelper(self).get_group('admin').user_set.add(user)
-
-    def add_moderator(self, user):
-        from reviews.permissions import GroupHelper
-        return GroupHelper(self).get_group('moderator').user_set.add(user)
-
-
-class ReviewableMixin(models.Model):
-    """Something that may be included in a reviewed collection and is subject to a reviews workflow.
-    """
-
-    class Meta:
-        abstract = True
-
-    # NOTE: reviews_state should rarely/never be modified directly -- use the state transition methods below
-    reviews_state = models.CharField(max_length=15, db_index=True, choices=workflow.States.choices(), default=workflow.States.INITIAL.value)
-
-    date_last_transitioned = models.DateTimeField(null=True, blank=True, db_index=True)
-
-    @property
-    def in_public_reviews_state(self):
-        public_states = workflow.PUBLIC_STATES.get(self.provider.reviews_workflow)
-        if not public_states:
-            return False
-        return self.reviews_state in public_states
-
-    def reviews_submit(self, user):
-        """Run the 'submit' state transition and create a corresponding Action.
-
-        Params:
-            user: The user triggering this transition.
-        """
-        return self.__run_transition(workflow.Triggers.SUBMIT.value, user=user)
-
-    def reviews_accept(self, user, comment):
-        """Run the 'accept' state transition and create a corresponding Action.
-
-        Params:
-            user: The user triggering this transition.
-            comment: Text describing why.
-        """
-        return self.__run_transition(workflow.Triggers.ACCEPT.value, user=user, comment=comment)
-
-    def reviews_reject(self, user, comment):
-        """Run the 'reject' state transition and create a corresponding Action.
-
-        Params:
-            user: The user triggering this transition.
-            comment: Text describing why.
-        """
-        return self.__run_transition(workflow.Triggers.REJECT.value, user=user, comment=comment)
-
-    def reviews_edit_comment(self, user, comment):
-        """Run the 'edit_comment' state transition and create a corresponding Action.
-
-        Params:
-            user: The user triggering this transition.
-            comment: New comment text.
-        """
-        return self.__run_transition(workflow.Triggers.EDIT_COMMENT.value, user=user, comment=comment)
-
-    def __run_transition(self, trigger, **kwargs):
-        reviews_machine = ReviewsMachine(self, 'reviews_state')
-        trigger_fn = getattr(reviews_machine, trigger)
-        with transaction.atomic():
-            result = trigger_fn(**kwargs)
-            action = reviews_machine.action
-            if not result or action is None:
-                valid_triggers = reviews_machine.get_triggers(self.reviews_state)
-                raise InvalidTriggerError(trigger, self.reviews_state, valid_triggers)
-            return action
-
-
-class ReviewsMachine(Machine):
-
-    action = None
-    from_state = None
-
-    def __init__(self, reviewable, state_attr):
-        self.reviewable = reviewable
-        self.__state_attr = state_attr
-
-        super(ReviewsMachine, self).__init__(
-            states=[s.value for s in workflow.States],
-            transitions=workflow.TRANSITIONS,
-            initial=self.state,
-            send_event=True,
-            prepare_event=['initialize_machine'],
-            ignore_invalid_triggers=True,
-        )
-
-    @property
-    def state(self):
-        return getattr(self.reviewable, self.__state_attr)
-
-    @state.setter
-    def state(self, value):
-        setattr(self.reviewable, self.__state_attr, value)
-
-    def initialize_machine(self, ev):
-        self.action = None
-        self.from_state = ev.state
-
-    def save_action(self, ev):
-        user = ev.kwargs.get('user')
-        self.action = Action.objects.create(
-            target=self.reviewable,
-            creator=user,
-            trigger=ev.event.name,
-            from_state=self.from_state.name,
-            to_state=ev.state.name,
-            comment=ev.kwargs.get('comment', ''),
-        )
-
-    def update_last_transitioned(self, ev):
-        now = self.action.date_created if self.action is not None else timezone.now()
-        self.reviewable.date_last_transitioned = now
-
-    def save_changes(self, ev):
-        node = self.reviewable.node
-        node._has_abandoned_preprint = False
-        now = self.action.date_created if self.action is not None else timezone.now()
-        should_publish = self.reviewable.in_public_reviews_state
-        if should_publish and not self.reviewable.is_published:
-            if not (self.reviewable.node.preprint_file and self.reviewable.node.preprint_file.node == self.reviewable.node):
-                raise ValueError('Preprint node is not a valid preprint; cannot publish.')
-            if not self.reviewable.provider:
-                raise ValueError('Preprint provider not specified; cannot publish.')
-            if not self.reviewable.subjects.exists():
-                raise ValueError('Preprint must have at least one subject to be published.')
-            self.reviewable.date_published = now
-            self.reviewable.is_published = True
-            enqueue_postcommit_task(get_and_set_preprint_identifiers, (), {'preprint_id': self.reviewable._id}, celery=True)
-        elif not should_publish and self.reviewable.is_published:
-            self.reviewable.is_published = False
-        self.reviewable.save()
-        node.save()
-
-    def resubmission_allowed(self, ev):
-        return self.reviewable.provider.reviews_workflow == workflow.Workflows.PRE_MODERATION.value
-
-    def notify_submit(self, ev):
-        context = self.get_context()
-        context['referrer'] = ev.kwargs.get('user')
-        user = ev.kwargs.get('user')
-        auth = Auth(user)
-        self.reviewable.node.add_log(
-            action=NodeLog.PREPRINT_INITIATED,
-            params={
-                'preprint': self.reviewable._id
-            },
-            auth=auth,
-            save=False,
-        )
-        recipients = list(self.reviewable.node.contributors)
-        reviews_signals.reviews_email_submit.send(context=context, recipients=recipients)
-
-    def notify_resubmit(self, ev):
-        context = self.get_context()
-        reviews_signals.reviews_email.send(creator=ev.kwargs.get('user'), context=context,
-                                           template='reviews_resubmission_confirmation',
-                                           action=self.action)
-
-    def notify_accept_reject(self, ev):
-        context = self.get_context()
-        context['notify_comment'] = not self.reviewable.provider.reviews_comments_private and self.action.comment
-        context['is_rejected'] = self.action.to_state == workflow.States.REJECTED.value
-        context['was_pending'] = self.action.from_state == workflow.States.PENDING.value
-        reviews_signals.reviews_email.send(creator=ev.kwargs.get('user'), context=context,
-                                           template='reviews_submission_status',
-                                           action=self.action)
-
-    def notify_edit_comment(self, ev):
-        context = self.get_context()
-        if not self.reviewable.provider.reviews_comments_private and self.action.comment:
-            reviews_signals.reviews_email.send(creator=ev.kwargs.get('user'), context=context,
-                                               template='reviews_update_comment',
-                                               action=self.action)
-
-    def get_context(self):
-        return {
-            'domain': settings.DOMAIN,
-            'reviewable': self.reviewable,
-            'workflow': self.reviewable.provider.reviews_workflow,
-            'provider_url': self.reviewable.provider.domain or '{domain}preprints/{provider_id}'.format(domain=settings.DOMAIN, provider_id=self.reviewable.provider._id),
-            'provider_contact_email': self.reviewable.provider.email_contact or 'contact@osf.io',
-            'provider_support_email': self.reviewable.provider.email_support or 'support@osf.io',
-        }
-
-# Handle email notifications including: update comment, accept, and reject of submission.
-@reviews_signals.reviews_email.connect
-def reviews_notification(self, creator, template, context, action):
-    recipients = list(action.target.node.contributors)
-    time_now = action.date_created if action is not None else timezone.now()
-    node = action.target.node
-    emails.notify_global_event(
-        event='global_reviews',
-        sender_user=creator,
-        node=node,
-        timestamp=time_now,
-        recipients=recipients,
-        template=template,
-        context=context
-    )
-
-# Handle email notifications for a new submission.
-@reviews_signals.reviews_email_submit.connect
-def reviews_submit_notification(self, recipients, context):
-    event_type = utils.find_subscription_type('global_reviews')
-    for recipient in recipients:
-        user_subscriptions = get_user_subscriptions(recipient, event_type)
-        context['no_future_emails'] = user_subscriptions['none']
-        context['is_creator'] = recipient == context['reviewable'].node.creator
-        context['provider_name'] = context['reviewable'].provider.name
-        mails.send_mail(
-            recipient.username,
-            mails.REVIEWS_SUBMISSION_CONFIRMATION,
-            mimetype='html',
-            user=recipient,
-            **context
-        )
diff --git a/reviews/test/.gitkeep b/reviews/test/.gitkeep
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/reviews/workflow.py b/reviews/workflow.py
deleted file mode 100644
index 6c6eeff0170..00000000000
--- a/reviews/workflow.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from enum import Enum
-from enum import unique
-
-
-class ChoiceEnum(Enum):
-    @classmethod
-    def choices(cls):
-        return tuple((v, unicode(v).title()) for v in cls.values())
-
-    @classmethod
-    def values(cls):
-        return tuple(c.value for c in cls)
-
-
-@unique
-class Workflows(ChoiceEnum):
-    NONE = None
-    PRE_MODERATION = 'pre-moderation'
-    POST_MODERATION = 'post-moderation'
-
-
-@unique
-class States(ChoiceEnum):
-    INITIAL = 'initial'
-    PENDING = 'pending'
-    ACCEPTED = 'accepted'
-    REJECTED = 'rejected'
-
-
-@unique
-class Triggers(ChoiceEnum):
-    SUBMIT = 'submit'
-    ACCEPT = 'accept'
-    REJECT = 'reject'
-    EDIT_COMMENT = 'edit_comment'
-
-
-PUBLIC_STATES = {
-    Workflows.NONE.value: (
-        States.INITIAL.value,
-        States.PENDING.value,
-        States.ACCEPTED.value,
-        States.REJECTED.value,
-    ),
-    Workflows.PRE_MODERATION.value: (
-        States.ACCEPTED.value,
-    ),
-    Workflows.POST_MODERATION.value: (
-        States.PENDING.value,
-        States.ACCEPTED.value,
-    )
-}
-
-
-TRANSITIONS = [
-    {
-        'trigger': Triggers.SUBMIT.value,
-        'source': [States.INITIAL.value],
-        'dest': States.PENDING.value,
-        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_submit'],
-    },
-    {
-        'trigger': Triggers.SUBMIT.value,
-        'source': [States.PENDING.value, States.REJECTED.value],
-        'conditions': 'resubmission_allowed',
-        'dest': States.PENDING.value,
-        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_resubmit'],
-    },
-    {
-        'trigger': Triggers.ACCEPT.value,
-        'source': [States.PENDING.value, States.REJECTED.value],
-        'dest': States.ACCEPTED.value,
-        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_accept_reject'],
-    },
-    {
-        'trigger': Triggers.REJECT.value,
-        'source': [States.PENDING.value, States.ACCEPTED.value],
-        'dest': States.REJECTED.value,
-        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_accept_reject'],
-    },
-    {
-        'trigger': Triggers.EDIT_COMMENT.value,
-        'source': [States.PENDING.value, States.REJECTED.value, States.ACCEPTED.value],
-        'dest': '=',
-        'after': ['save_action', 'save_changes', 'notify_edit_comment'],
-    },
-]
diff --git a/tests/test_notifications.py b/tests/test_notifications.py
index 54780474bcf..b7ad700c82d 100644
--- a/tests/test_notifications.py
+++ b/tests/test_notifications.py
@@ -15,6 +15,7 @@
 from website.notifications import utils
 from website import mails, settings
 from website.project.signals import contributor_removed, node_deleted
+from website.reviews import listeners
 from website.util import api_url_for
 from website.util import web_url_for
 
@@ -22,7 +23,6 @@
 from tests.base import capture_signals
 from tests.base import OsfTestCase, NotificationTestCase
 
-from reviews.models import mixins
 
 
 class TestNotificationsModels(OsfTestCase):
@@ -1842,10 +1842,10 @@ def test_reviews_base_notification(self):
 
     @mock.patch('website.mails.mails.send_mail')
     def test_reviews_submit_notification(self, mock_send_email):
-        mixins.reviews_submit_notification(self, context=self.context_info, recipients=[self.sender, self.user])
+        listeners.reviews_submit_notification(self, context=self.context_info, recipients=[self.sender, self.user])
         assert_true(mock_send_email.called)
 
     @mock.patch('website.notifications.emails.notify_global_event')
     def test_reviews_notification(self, mock_notify):
-        mixins.reviews_notification(self, creator=self.sender, context=self.context_info, action=self.action, template='test.html.mako')
+        listeners.reviews_notification(self, creator=self.sender, context=self.context_info, action=self.action, template='test.html.mako')
         assert_true(mock_notify.called)
diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py
new file mode 100644
index 00000000000..07aad47cfd5
--- /dev/null
+++ b/website/reviews/listeners.py
@@ -0,0 +1,39 @@
+
+from django.utils import timezone
+
+from website.mails import mails
+from website.notifications import emails, utils
+from website.reviews import signals as reviews_signals
+
+# Handle email notifications including: update comment, accept, and reject of submission.
+@reviews_signals.reviews_email.connect
+def reviews_notification(self, creator, template, context, action):
+    recipients = list(action.target.node.contributors)
+    time_now = action.date_created if action is not None else timezone.now()
+    node = action.target.node
+    emails.notify_global_event(
+        event='global_reviews',
+        sender_user=creator,
+        node=node,
+        timestamp=time_now,
+        recipients=recipients,
+        template=template,
+        context=context
+    )
+
+# Handle email notifications for a new submission.
+@reviews_signals.reviews_email_submit.connect
+def reviews_submit_notification(self, recipients, context):
+    event_type = utils.find_subscription_type('global_reviews')
+    for recipient in recipients:
+        user_subscriptions = emails.get_user_subscriptions(recipient, event_type)
+        context['no_future_emails'] = user_subscriptions['none']
+        context['is_creator'] = recipient == context['reviewable'].node.creator
+        context['provider_name'] = context['reviewable'].provider.name
+        mails.send_mail(
+            recipient.username,
+            mails.REVIEWS_SUBMISSION_CONFIRMATION,
+            mimetype='html',
+            user=recipient,
+            **context
+        )

From 4eb3dea7a0f89a503c251f1423373641dc282b42 Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Mon, 30 Oct 2017 13:25:37 -0400
Subject: [PATCH 116/192] Generalize shareable mixin/machine code

---
 api/actions/serializers.py                    |   8 +-
 api/base/filters.py                           |   2 +-
 api/preprints/permissions.py                  |   2 +-
 api/preprints/serializers.py                  |   2 +-
 .../test_preprint_provider_preprints_list.py  |   6 +-
 .../preprints/views/test_preprint_detail.py   |   6 +-
 .../preprints/views/test_preprint_list.py     |  18 +--
 api_tests/reviews/mixins/filter_mixins.py     |   4 +-
 api_tests/users/views/test_user_actions.py    |  14 +-
 osf/exceptions.py                             |   4 +
 osf/models/mixins.py                          | 123 ++++++++++--------
 osf/models/preprint_service.py                |   2 +-
 osf/utils/machines.py                         |  78 ++++++-----
 osf_tests/factories.py                        |   2 +-
 osf_tests/test_reviewable.py                  |  24 ++--
 15 files changed, 161 insertions(+), 134 deletions(-)

diff --git a/api/actions/serializers.py b/api/actions/serializers.py
index f3c8a75c71f..5ff09a58d8d 100644
--- a/api/actions/serializers.py
+++ b/api/actions/serializers.py
@@ -122,13 +122,13 @@ def create(self, validated_data):
         comment = validated_data.pop('comment', '')
         try:
             if trigger == DefaultTriggers.ACCEPT.value:
-                return target.reviews_accept(user, comment)
+                return target.run_accept(user, comment)
             if trigger == DefaultTriggers.REJECT.value:
-                return target.reviews_reject(user, comment)
+                return target.run_reject(user, comment)
             if trigger == DefaultTriggers.EDIT_COMMENT.value:
-                return target.reviews_edit_comment(user, comment)
+                return target.run_edit_comment(user, comment)
             if trigger == DefaultTriggers.SUBMIT.value:
-                return target.reviews_submit(user)
+                return target.run_submit(user)
         except InvalidTriggerError as e:
             # Invalid transition from the current state
             raise Conflict(e.message)
diff --git a/api/base/filters.py b/api/base/filters.py
index dd8e66d67c4..b8b8c5b1764 100644
--- a/api/base/filters.py
+++ b/api/base/filters.py
@@ -504,7 +504,7 @@ def preprints_queryset(self, base_queryset, auth_user, allow_contribs=True):
             admin_user_query = Q(node__contributor__user_id=auth_user.id, node__contributor__admin=True)
             reviews_user_query = Q(node__is_public=True, provider__in=get_objects_for_user(auth_user, 'view_submissions', PreprintProvider))
             if allow_contribs:
-                contrib_user_query = ~Q(reviews_state=DefaultStates.INITIAL.value) & Q(node__contributor__user_id=auth_user.id, node__contributor__read=True)
+                contrib_user_query = ~Q(machine_state=DefaultStates.INITIAL.value) & Q(node__contributor__user_id=auth_user.id, node__contributor__read=True)
                 query = (no_user_query | contrib_user_query | admin_user_query | reviews_user_query)
             else:
                 query = (no_user_query | admin_user_query | reviews_user_query)
diff --git a/api/preprints/permissions.py b/api/preprints/permissions.py
index ee8d4da2a91..eb4bf812510 100644
--- a/api/preprints/permissions.py
+++ b/api/preprints/permissions.py
@@ -21,7 +21,7 @@ def has_object_permission(self, request, view, obj):
                 user_has_permissions = (obj.verified_publishable or
                     (node.is_public and auth.user.has_perm('view_submissions', obj.provider)) or
                     node.has_permission(auth.user, osf_permissions.ADMIN) or
-                    (node.is_contributor(auth.user) and obj.reviews_state != DefaultStates.INITIAL.value)
+                    (node.is_contributor(auth.user) and obj.machine_state != DefaultStates.INITIAL.value)
                 )
                 return user_has_permissions
         else:
diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py
index dfc387f235e..1718165c215 100644
--- a/api/preprints/serializers.py
+++ b/api/preprints/serializers.py
@@ -89,7 +89,7 @@ class PreprintSerializer(JSONAPISerializer):
         related_view_kwargs={'node_id': '<node._id>'},
     )
 
-    reviews_state = ser.CharField(read_only=True, max_length=15)
+    reviews_state = ser.CharField(source='machine_state', read_only=True, max_length=15)
     date_last_transitioned = DateByVersion(read_only=True)
 
     citation = RelationshipField(
diff --git a/api_tests/preprint_providers/views/test_preprint_provider_preprints_list.py b/api_tests/preprint_providers/views/test_preprint_provider_preprints_list.py
index 05f370a7b39..5976b484d9e 100644
--- a/api_tests/preprint_providers/views/test_preprint_provider_preprints_list.py
+++ b/api_tests/preprint_providers/views/test_preprint_provider_preprints_list.py
@@ -56,11 +56,11 @@ def test_provider_filter_equals_returns_multiple(self, app, user, provider_one,
 
     def test_reviews_state_counts(self, app, user, provider_one, preprint_one, preprint_two, preprint_three, url):
         url = '{}meta[reviews_state_counts]=true'.format(url)
-        preprint_one.reviews_state = 'pending'
+        preprint_one.machine_state = 'pending'
         preprint_one.save()
-        preprint_two.reviews_state = 'pending'
+        preprint_two.machine_state = 'pending'
         preprint_two.save()
-        preprint_three.reviews_state = 'accepted'
+        preprint_three.machine_state = 'accepted'
         preprint_three.save()
 
         expected = {
diff --git a/api_tests/preprints/views/test_preprint_detail.py b/api_tests/preprints/views/test_preprint_detail.py
index 8fb62df9646..9ace5a18328 100644
--- a/api_tests/preprints/views/test_preprint_detail.py
+++ b/api_tests/preprints/views/test_preprint_detail.py
@@ -1009,15 +1009,15 @@ def file_one_private_project(self, admin, private_project):
 
     @pytest.fixture()
     def unpublished_reviews_preprint(self, admin, reviews_provider, subject, public_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=reviews_provider, subjects=[[subject._id]], project=public_project, is_published=False, reviews_state=DefaultStates.PENDING.value)
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=reviews_provider, subjects=[[subject._id]], project=public_project, is_published=False, machine_state=DefaultStates.PENDING.value)
 
     @pytest.fixture()
     def unpublished_reviews_initial_preprint(self, admin, reviews_provider, subject, public_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=reviews_provider, subjects=[[subject._id]], project=public_project, is_published=False, reviews_state=DefaultStates.INITIAL.value)
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=reviews_provider, subjects=[[subject._id]], project=public_project, is_published=False, machine_state=DefaultStates.INITIAL.value)
 
     @pytest.fixture()
     def private_reviews_preprint(self, admin, reviews_provider, subject, private_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunsets.pdf', provider=reviews_provider, subjects=[[subject._id]], project=private_project, is_published=False, reviews_state=DefaultStates.PENDING.value)
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunsets.pdf', provider=reviews_provider, subjects=[[subject._id]], project=private_project, is_published=False, machine_state=DefaultStates.PENDING.value)
 
     @pytest.fixture()
     def unpublished_url(self, unpublished_reviews_preprint):
diff --git a/api_tests/preprints/views/test_preprint_list.py b/api_tests/preprints/views/test_preprint_list.py
index 45805fee66f..7a40cffc192 100644
--- a/api_tests/preprints/views/test_preprint_list.py
+++ b/api_tests/preprints/views/test_preprint_list.py
@@ -197,11 +197,11 @@ def expected_reviewables(self, user):
             PreprintFactory(is_published=False, project=ProjectFactory(is_public=True)),
             PreprintFactory(is_published=False, project=ProjectFactory(is_public=True)),
         ]
-        preprints[0].reviews_submit(user)
-        preprints[0].reviews_accept(user, 'comment')
-        preprints[1].reviews_submit(user)
-        preprints[1].reviews_reject(user, 'comment')
-        preprints[2].reviews_submit(user)
+        preprints[0].run_submit(user)
+        preprints[0].run_accept(user, 'comment')
+        preprints[1].run_submit(user)
+        preprints[1].run_reject(user, 'comment')
+        preprints[2].run_submit(user)
         return preprints
 
     @pytest.fixture
@@ -505,7 +505,7 @@ def url(self):
 
     @pytest.fixture()
     def preprint_unpublished(self, user_admin_contrib, provider_one, project_public, subject):
-        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=DefaultStates.PENDING.value)
+        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, machine_state=DefaultStates.PENDING.value)
 
     def test_unpublished_visible_to_admins(self, app, user_admin_contrib, preprint_unpublished, preprint_published, url):
         res = app.get(url, auth=user_admin_contrib.auth)
@@ -552,7 +552,7 @@ def url(self):
 
     @pytest.fixture()
     def preprint_unpublished(self, user_admin_contrib, provider_one, project_public, subject):
-        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=DefaultStates.INITIAL.value)
+        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, machine_state=DefaultStates.INITIAL.value)
 
     def test_unpublished_visible_to_admins(self, app, user_admin_contrib, preprint_unpublished, preprint_published, url):
         res = app.get(url, auth=user_admin_contrib.auth)
@@ -648,7 +648,7 @@ def project_public(self, user_admin_contrib, user_write_contrib):
 
     @pytest.fixture()
     def preprint_unpublished(self, user_admin_contrib, provider_one, project_public, subject):
-        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=DefaultStates.INITIAL.value)
+        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, machine_state=DefaultStates.INITIAL.value)
 
     @pytest.fixture()
     def list_url(self):
@@ -701,7 +701,7 @@ def project_public(self, user_admin_contrib, user_write_contrib):
 
     @pytest.fixture()
     def preprint_unpublished(self, user_admin_contrib, provider_one, project_public, subject):
-        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=DefaultStates.PENDING.value)
+        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, machine_state=DefaultStates.PENDING.value)
 
     @pytest.fixture()
     def list_url(self):
diff --git a/api_tests/reviews/mixins/filter_mixins.py b/api_tests/reviews/mixins/filter_mixins.py
index 082811d3982..0f10005a3ab 100644
--- a/api_tests/reviews/mixins/filter_mixins.py
+++ b/api_tests/reviews/mixins/filter_mixins.py
@@ -164,8 +164,8 @@ def test_reviewable_filters(self, app, url, user, expected_reviewables):
         reviewable = expected_reviewables[0]
 
         # filter by reviews_state
-        expected = set([r._id for r in expected_reviewables if r.reviews_state == reviewable.reviews_state])
-        actual = get_actual(app, url, user, reviews_state=reviewable.reviews_state)
+        expected = set([r._id for r in expected_reviewables if r.machine_state == reviewable.machine_state])
+        actual = get_actual(app, url, user, reviews_state=reviewable.machine_state)
         assert expected == actual
 
         # order by date_last_transitioned
diff --git a/api_tests/users/views/test_user_actions.py b/api_tests/users/views/test_user_actions.py
index 4e09dd84dda..ca506afcd50 100644
--- a/api_tests/users/views/test_user_actions.py
+++ b/api_tests/users/views/test_user_actions.py
@@ -80,7 +80,7 @@ def moderator(self, provider):
 
     @mock.patch('website.preprints.tasks.get_and_set_preprint_identifiers.si')
     def test_create_permissions(self, mock_ezid, app, url, preprint, node_admin, moderator):
-        assert preprint.reviews_state == 'initial'
+        assert preprint.machine_state == 'initial'
 
         submit_payload = self.create_payload(preprint._id, trigger='submit')
 
@@ -97,7 +97,7 @@ def test_create_permissions(self, mock_ezid, app, url, preprint, node_admin, mod
         res = app.post_json_api(url, submit_payload, auth=node_admin.auth)
         assert res.status_code == 201
         preprint.refresh_from_db()
-        assert preprint.reviews_state == 'pending'
+        assert preprint.machine_state == 'pending'
         assert not preprint.is_published
 
         accept_payload = self.create_payload(preprint._id, trigger='accept', comment='This is good.')
@@ -122,14 +122,14 @@ def test_create_permissions(self, mock_ezid, app, url, preprint, node_admin, mod
 
         # Still unchanged after all those tries
         preprint.refresh_from_db()
-        assert preprint.reviews_state == 'pending'
+        assert preprint.machine_state == 'pending'
         assert not preprint.is_published
 
         # Moderator can accept
         res = app.post_json_api(url, accept_payload, auth=moderator.auth)
         assert res.status_code == 201
         preprint.refresh_from_db()
-        assert preprint.reviews_state == 'accepted'
+        assert preprint.machine_state == 'accepted'
         assert preprint.is_published
 
         # Check if "get_and_set_preprint_identifiers" is called once.
@@ -167,7 +167,7 @@ def test_bad_requests(self, app, url, preprint, provider, moderator):
             provider.reviews_workflow = workflow
             provider.save()
             for state, trigger in transitions:
-                preprint.reviews_state = state
+                preprint.machine_state = state
                 preprint.save()
                 bad_payload = self.create_payload(preprint._id, trigger=trigger)
                 res = app.post_json_api(url, bad_payload, auth=moderator.auth, expect_errors=True)
@@ -213,7 +213,7 @@ def test_valid_transitions(self, mock_ezid, app, url, preprint, provider, modera
             provider.reviews_workflow = workflow
             provider.save()
             for from_state, trigger, to_state in transitions:
-                preprint.reviews_state = from_state
+                preprint.machine_state = from_state
                 preprint.is_published = False
                 preprint.date_published = None
                 preprint.date_last_transitioned = None
@@ -226,7 +226,7 @@ def test_valid_transitions(self, mock_ezid, app, url, preprint, provider, modera
                 assert action.trigger == trigger
 
                 preprint.refresh_from_db()
-                assert preprint.reviews_state == to_state
+                assert preprint.machine_state == to_state
                 if preprint.in_public_reviews_state:
                     assert preprint.is_published
                     assert preprint.date_published == action.date_created
diff --git a/osf/exceptions.py b/osf/exceptions.py
index ab93cbcc7f2..c1c87489ebf 100644
--- a/osf/exceptions.py
+++ b/osf/exceptions.py
@@ -121,3 +121,7 @@ def __init__(self, trigger, state, valid_triggers):
         self.state = state
         self.valid_triggers = valid_triggers
         self.message = 'Cannot trigger "{}" from state "{}". Valid triggers: {}'.format(trigger, state, valid_triggers)
+
+class InvalidTransitionError(Exception):
+    def __init__(self, machine, transition):
+        self.message = 'Machine "{}" received invalid transition: "{}" is expected but not defined'.format(machine, transition)
diff --git a/osf/models/mixins.py b/osf/models/mixins.py
index 83c77fecbb8..e33da3a7afb 100644
--- a/osf/models/mixins.py
+++ b/osf/models/mixins.py
@@ -465,62 +465,20 @@ def get_extra_log_params(self, comment):
         return {}
 
 
-class ReviewProviderMixin(models.Model):
-    """A reviewed/moderated collection of objects.
-    """
-
-    REVIEWABLE_RELATION_NAME = None
-
+class MachineableMixin(models.Model):
     class Meta:
         abstract = True
 
-    reviews_workflow = models.CharField(null=True, blank=True, max_length=15, choices=Workflows.choices())
-    reviews_comments_private = models.NullBooleanField()
-    reviews_comments_anonymous = models.NullBooleanField()
-
-    @property
-    def is_reviewed(self):
-        return self.reviews_workflow is not None
-
-    def get_reviewable_state_counts(self):
-        assert self.REVIEWABLE_RELATION_NAME, 'REVIEWABLE_RELATION_NAME must be set to compute state counts'
-        qs = getattr(self, self.REVIEWABLE_RELATION_NAME)
-        if isinstance(qs, IncludeQuerySet):
-            qs = qs.include(None)
-        qs = qs.filter(node__isnull=False, node__is_deleted=False, node__is_public=True).values('reviews_state').annotate(count=models.Count('*'))
-        counts = {state.value: 0 for state in DefaultStates}
-        counts.update({row['reviews_state']: row['count'] for row in qs if row['reviews_state'] in counts})
-        return counts
-
-    def add_admin(self, user):
-        from api.preprint_providers.permissions import GroupHelper
-        return GroupHelper(self).get_group('admin').user_set.add(user)
-
-    def add_moderator(self, user):
-        from api.preprint_providers.permissions import GroupHelper
-        return GroupHelper(self).get_group('moderator').user_set.add(user)
-
-
-class ReviewableMixin(models.Model):
-    """Something that may be included in a reviewed collection and is subject to a reviews workflow.
-    """
-
-    class Meta:
-        abstract = True
-
-    # NOTE: reviews_state should rarely/never be modified directly -- use the state transition methods below
-    reviews_state = models.CharField(max_length=15, db_index=True, choices=DefaultStates.choices(), default=DefaultStates.INITIAL.value)
+    # NOTE: machine_state should rarely/never be modified directly -- use the state transition methods below
+    machine_state = models.CharField(max_length=15, db_index=True, choices=DefaultStates.choices(), default=DefaultStates.INITIAL.value)
 
     date_last_transitioned = models.DateTimeField(null=True, blank=True, db_index=True)
 
     @property
-    def in_public_reviews_state(self):
-        public_states = PUBLIC_STATES.get(self.provider.reviews_workflow)
-        if not public_states:
-            return False
-        return self.reviews_state in public_states
+    def MachineClass(self):
+        raise NotImplementedError()
 
-    def reviews_submit(self, user):
+    def run_submit(self, user):
         """Run the 'submit' state transition and create a corresponding Action.
 
         Params:
@@ -528,7 +486,7 @@ def reviews_submit(self, user):
         """
         return self.__run_transition(DefaultTriggers.SUBMIT.value, user=user)
 
-    def reviews_accept(self, user, comment):
+    def run_accept(self, user, comment):
         """Run the 'accept' state transition and create a corresponding Action.
 
         Params:
@@ -537,7 +495,7 @@ def reviews_accept(self, user, comment):
         """
         return self.__run_transition(DefaultTriggers.ACCEPT.value, user=user, comment=comment)
 
-    def reviews_reject(self, user, comment):
+    def run_reject(self, user, comment):
         """Run the 'reject' state transition and create a corresponding Action.
 
         Params:
@@ -546,7 +504,7 @@ def reviews_reject(self, user, comment):
         """
         return self.__run_transition(DefaultTriggers.REJECT.value, user=user, comment=comment)
 
-    def reviews_edit_comment(self, user, comment):
+    def run_edit_comment(self, user, comment):
         """Run the 'edit_comment' state transition and create a corresponding Action.
 
         Params:
@@ -556,12 +514,65 @@ def reviews_edit_comment(self, user, comment):
         return self.__run_transition(DefaultTriggers.EDIT_COMMENT.value, user=user, comment=comment)
 
     def __run_transition(self, trigger, **kwargs):
-        reviews_machine = ReviewsMachine(self, 'reviews_state')
-        trigger_fn = getattr(reviews_machine, trigger)
+        machine = self.MachineClass(self, 'machine_state')
+        trigger_fn = getattr(machine, trigger)
         with transaction.atomic():
             result = trigger_fn(**kwargs)
-            action = reviews_machine.action
+            action = machine.action
             if not result or action is None:
-                valid_triggers = reviews_machine.get_triggers(self.reviews_state)
-                raise InvalidTriggerError(trigger, self.reviews_state, valid_triggers)
+                valid_triggers = machine.get_triggers(self.machine_state)
+                raise InvalidTriggerError(trigger, self.machine_state, valid_triggers)
             return action
+
+
+class ReviewableMixin(MachineableMixin):
+    """Something that may be included in a reviewed collection and is subject to a reviews workflow.
+    """
+
+    class Meta:
+        abstract = True
+
+    MachineClass = ReviewsMachine
+
+    @property
+    def in_public_reviews_state(self):
+        public_states = PUBLIC_STATES.get(self.provider.reviews_workflow)
+        if not public_states:
+            return False
+        return self.machine_state in public_states
+
+
+class ReviewProviderMixin(models.Model):
+    """A reviewed/moderated collection of objects.
+    """
+
+    REVIEWABLE_RELATION_NAME = None
+
+    class Meta:
+        abstract = True
+
+    reviews_workflow = models.CharField(null=True, blank=True, max_length=15, choices=Workflows.choices())
+    reviews_comments_private = models.NullBooleanField()
+    reviews_comments_anonymous = models.NullBooleanField()
+
+    @property
+    def is_reviewed(self):
+        return self.reviews_workflow is not None
+
+    def get_reviewable_state_counts(self):
+        assert self.REVIEWABLE_RELATION_NAME, 'REVIEWABLE_RELATION_NAME must be set to compute state counts'
+        qs = getattr(self, self.REVIEWABLE_RELATION_NAME)
+        if isinstance(qs, IncludeQuerySet):
+            qs = qs.include(None)
+        qs = qs.filter(node__isnull=False, node__is_deleted=False, node__is_public=True).values('machine_state').annotate(count=models.Count('*'))
+        counts = {state.value: 0 for state in DefaultStates}
+        counts.update({row['machine_state']: row['count'] for row in qs if row['machine_state'] in counts})
+        return counts
+
+    def add_admin(self, user):
+        from api.preprint_providers.permissions import GroupHelper
+        return GroupHelper(self).get_group('admin').user_set.add(user)
+
+    def add_moderator(self, user):
+        from api.preprint_providers.permissions import GroupHelper
+        return GroupHelper(self).get_group('moderator').user_set.add(user)
diff --git a/osf/models/preprint_service.py b/osf/models/preprint_service.py
index 0d92af861a4..394c73cac57 100644
--- a/osf/models/preprint_service.py
+++ b/osf/models/preprint_service.py
@@ -184,7 +184,7 @@ def set_published(self, published, auth, save=False):
             self.node._has_abandoned_preprint = False
 
             # In case this provider is ever set up to use a reviews workflow, put this preprint in a sensible state
-            self.reviews_state = DefaultStates.ACCEPTED.value
+            self.machine_state = DefaultStates.ACCEPTED.value
             self.date_last_transitioned = self.date_published
 
             self.node.add_log(
diff --git a/osf/utils/machines.py b/osf/utils/machines.py
index 4c2d87ec0e2..eaace2e3450 100644
--- a/osf/utils/machines.py
+++ b/osf/utils/machines.py
@@ -5,6 +5,7 @@
 from api.preprint_providers.workflows import Workflows
 from framework.auth import Auth
 from framework.postcommit_tasks.handlers import enqueue_postcommit_task
+from osf.exceptions import InvalidTransitionError
 from osf.models.action import Action
 from osf.models.nodelog import NodeLog
 from osf.utils.workflows import DefaultStates, DEFAULT_TRANSITIONS
@@ -12,18 +13,22 @@
 from website.reviews import signals as reviews_signals
 from website.settings import DOMAIN
 
-class ReviewsMachine(Machine):
+
+class BaseMachine(Machine):
 
     action = None
     from_state = None
 
-    def __init__(self, reviewable, state_attr):
-        self.reviewable = reviewable
+    def __init__(self, machineable, state_attr, **kwargs):
+        self.machineable = machineable
         self.__state_attr = state_attr
+        states = kwargs.get('states', [s.value for s in DefaultStates])
+        transitions = kwargs.get('transitions', DEFAULT_TRANSITIONS)
+        self._validate_transitions(transitions)
 
-        super(ReviewsMachine, self).__init__(
-            states=[s.value for s in DefaultStates],
-            transitions=DEFAULT_TRANSITIONS,
+        super(BaseMachine, self).__init__(
+            states=states,
+            transitions=transitions,
             initial=self.state,
             send_event=True,
             prepare_event=['initialize_machine'],
@@ -32,11 +37,16 @@ def __init__(self, reviewable, state_attr):
 
     @property
     def state(self):
-        return getattr(self.reviewable, self.__state_attr)
+        return getattr(self.machineable, self.__state_attr)
 
     @state.setter
     def state(self, value):
-        setattr(self.reviewable, self.__state_attr, value)
+        setattr(self.machineable, self.__state_attr, value)
+
+    def _validate_transitions(self, transitions):
+        for transition in set(sum([t['after'] for t in transitions], [])):
+            if not hasattr(self, transition):
+                raise InvalidTransitionError(self, transition)
 
     def initialize_machine(self, ev):
         self.action = None
@@ -45,7 +55,7 @@ def initialize_machine(self, ev):
     def save_action(self, ev):
         user = ev.kwargs.get('user')
         self.action = Action.objects.create(
-            target=self.reviewable,
+            target=self.machineable,
             creator=user,
             trigger=ev.event.name,
             from_state=self.from_state.name,
@@ -55,45 +65,47 @@ def save_action(self, ev):
 
     def update_last_transitioned(self, ev):
         now = self.action.date_created if self.action is not None else timezone.now()
-        self.reviewable.date_last_transitioned = now
+        self.machineable.date_last_transitioned = now
+
+class ReviewsMachine(BaseMachine):
 
     def save_changes(self, ev):
-        node = self.reviewable.node
+        node = self.machineable.node
         node._has_abandoned_preprint = False
         now = self.action.date_created if self.action is not None else timezone.now()
-        should_publish = self.reviewable.in_public_reviews_state
-        if should_publish and not self.reviewable.is_published:
-            if not (self.reviewable.node.preprint_file and self.reviewable.node.preprint_file.node == self.reviewable.node):
+        should_publish = self.machineable.in_public_reviews_state
+        if should_publish and not self.machineable.is_published:
+            if not (self.machineable.node.preprint_file and self.machineable.node.preprint_file.node == self.machineable.node):
                 raise ValueError('Preprint node is not a valid preprint; cannot publish.')
-            if not self.reviewable.provider:
+            if not self.machineable.provider:
                 raise ValueError('Preprint provider not specified; cannot publish.')
-            if not self.reviewable.subjects.exists():
+            if not self.machineable.subjects.exists():
                 raise ValueError('Preprint must have at least one subject to be published.')
-            self.reviewable.date_published = now
-            self.reviewable.is_published = True
-            enqueue_postcommit_task(get_and_set_preprint_identifiers, (), {'preprint': self.reviewable}, celery=True)
-        elif not should_publish and self.reviewable.is_published:
-            self.reviewable.is_published = False
-        self.reviewable.save()
+            self.machineable.date_published = now
+            self.machineable.is_published = True
+            enqueue_postcommit_task(get_and_set_preprint_identifiers, (), {'preprint_id': self.machineable._id}, celery=True)
+        elif not should_publish and self.machineable.is_published:
+            self.machineable.is_published = False
+        self.machineable.save()
         node.save()
 
     def resubmission_allowed(self, ev):
-        return self.reviewable.provider.reviews_workflow == Workflows.PRE_MODERATION.value
+        return self.machineable.provider.reviews_workflow == Workflows.PRE_MODERATION.value
 
     def notify_submit(self, ev):
         context = self.get_context()
         context['referrer'] = ev.kwargs.get('user')
         user = ev.kwargs.get('user')
         auth = Auth(user)
-        self.reviewable.node.add_log(
+        self.machineable.node.add_log(
             action=NodeLog.PREPRINT_INITIATED,
             params={
-                'preprint': self.reviewable._id
+                'preprint': self.machineable._id
             },
             auth=auth,
             save=False,
         )
-        recipients = list(self.reviewable.node.contributors)
+        recipients = list(self.machineable.node.contributors)
         reviews_signals.reviews_email_submit.send(context=context, recipients=recipients)
 
     def notify_resubmit(self, ev):
@@ -104,7 +116,7 @@ def notify_resubmit(self, ev):
 
     def notify_accept_reject(self, ev):
         context = self.get_context()
-        context['notify_comment'] = not self.reviewable.provider.reviews_comments_private and self.action.comment
+        context['notify_comment'] = not self.machineable.provider.reviews_comments_private and self.action.comment
         context['is_rejected'] = self.action.to_state == DefaultStates.REJECTED.value
         context['was_pending'] = self.action.from_state == DefaultStates.PENDING.value
         reviews_signals.reviews_email.send(creator=ev.kwargs.get('user'), context=context,
@@ -112,7 +124,7 @@ def notify_accept_reject(self, ev):
                                            action=self.action)
     def notify_edit_comment(self, ev):
         context = self.get_context()
-        if not self.reviewable.provider.reviews_comments_private and self.action.comment:
+        if not self.machineable.provider.reviews_comments_private and self.action.comment:
             reviews_signals.reviews_email.send(creator=ev.kwargs.get('user'), context=context,
                                                template='reviews_update_comment',
                                                action=self.action)
@@ -120,9 +132,9 @@ def notify_edit_comment(self, ev):
     def get_context(self):
         return {
             'domain': DOMAIN,
-            'reviewable': self.reviewable,
-            'workflow': self.reviewable.provider.reviews_workflow,
-            'provider_url': self.reviewable.provider.domain or '{domain}preprints/{provider_id}'.format(domain=DOMAIN, provider_id=self.reviewable.provider._id),
-            'provider_contact_email': self.reviewable.provider.email_contact or 'contact@osf.io',
-            'provider_support_email': self.reviewable.provider.email_support or 'support@osf.io',
+            'reviewable': self.machineable,
+            'workflow': self.machineable.provider.reviews_workflow,
+            'provider_url': self.machineable.provider.domain or '{domain}preprints/{provider_id}'.format(domain=DOMAIN, provider_id=self.machineable.provider._id),
+            'provider_contact_email': self.machineable.provider.email_contact or 'contact@osf.io',
+            'provider_support_email': self.machineable.provider.email_support or 'support@osf.io',
         }
diff --git a/osf_tests/factories.py b/osf_tests/factories.py
index 6cfcd0110b8..2c58fa925c2 100644
--- a/osf_tests/factories.py
+++ b/osf_tests/factories.py
@@ -582,7 +582,7 @@ def _create(cls, target_class, *args, **kwargs):
         subjects = kwargs.pop('subjects', None) or [[SubjectFactory()._id]]
         instance.node.preprint_article_doi = doi
 
-        instance.reviews_state = kwargs.pop('reviews_state', 'initial')
+        instance.machine_state = kwargs.pop('machine_state', 'initial')
 
         user = kwargs.pop('creator', None) or instance.node.creator
         if not instance.node.is_contributor(user):
diff --git a/osf_tests/test_reviewable.py b/osf_tests/test_reviewable.py
index b7e5c6e1d22..7c8754f4a1b 100644
--- a/osf_tests/test_reviewable.py
+++ b/osf_tests/test_reviewable.py
@@ -10,22 +10,22 @@ class TestReviewable:
     def test_state_changes(self):
         user = AuthUserFactory()
         preprint = PreprintFactory(provider__reviews_workflow='pre-moderation', is_published=False)
-        assert preprint.reviews_state == DefaultStates.INITIAL.value
+        assert preprint.machine_state == DefaultStates.INITIAL.value
 
-        preprint.reviews_submit(user)
-        assert preprint.reviews_state == DefaultStates.PENDING.value
+        preprint.run_submit(user)
+        assert preprint.machine_state == DefaultStates.PENDING.value
 
-        preprint.reviews_accept(user, 'comment')
-        assert preprint.reviews_state == DefaultStates.ACCEPTED.value
+        preprint.run_accept(user, 'comment')
+        assert preprint.machine_state == DefaultStates.ACCEPTED.value
         from_db = PreprintService.objects.get(id=preprint.id)
-        assert from_db.reviews_state == DefaultStates.ACCEPTED.value
+        assert from_db.machine_state == DefaultStates.ACCEPTED.value
 
-        preprint.reviews_reject(user, 'comment')
-        assert preprint.reviews_state == DefaultStates.REJECTED.value
+        preprint.run_reject(user, 'comment')
+        assert preprint.machine_state == DefaultStates.REJECTED.value
         from_db.refresh_from_db()
-        assert from_db.reviews_state == DefaultStates.REJECTED.value
+        assert from_db.machine_state == DefaultStates.REJECTED.value
 
-        preprint.reviews_accept(user, 'comment')
-        assert preprint.reviews_state == DefaultStates.ACCEPTED.value
+        preprint.run_accept(user, 'comment')
+        assert preprint.machine_state == DefaultStates.ACCEPTED.value
         from_db.refresh_from_db()
-        assert from_db.reviews_state == DefaultStates.ACCEPTED.value
+        assert from_db.machine_state == DefaultStates.ACCEPTED.value

From be26204ff31d87a012d2af351a2665f4ca93ee18 Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Mon, 30 Oct 2017 15:57:56 -0400
Subject: [PATCH 117/192] Generalize Action model   - add migration

---
 api/actions/permissions.py                    |   6 +-
 api/actions/serializers.py                    |  92 ++++++++++-----
 api/actions/urls.py                           |   2 +-
 api/actions/views.py                          | 106 ++++++------------
 api/preprints/serializers.py                  |   9 +-
 api/preprints/views.py                        |  39 +++++--
 api/users/serializers.py                      |   5 -
 api/users/urls.py                             |   1 -
 api/users/views.py                            |  73 +-----------
 .../test_preprint_provider_preprints_list.py  |   8 +-
 .../preprints/views/test_preprint_actions.py  |   8 +-
 .../preprints/views/test_preprint_detail.py   |   8 +-
 api_tests/reviews/mixins/comment_settings.py  |   6 +-
 api_tests/reviews/mixins/filter_mixins.py     |   6 +-
 api_tests/users/views/test_user_actions.py    |  14 +--
 framework/auth/oauth_scopes.py                |   4 +-
 .../commands/create_fake_preprint_actions.py  |   4 +-
 osf/migrations/0066_auto_20171031_1409.py     |  24 ++++
 osf/models/__init__.py                        |   2 +-
 osf/models/action.py                          |  12 +-
 osf/models/mixins.py                          |   2 +-
 osf/utils/machines.py                         |   9 +-
 osf_tests/factories.py                        |   4 +-
 tests/test_notifications.py                   |   2 +-
 24 files changed, 212 insertions(+), 234 deletions(-)
 create mode 100644 osf/migrations/0066_auto_20171031_1409.py

diff --git a/api/actions/permissions.py b/api/actions/permissions.py
index ee0d8a2a99d..302ef66fb53 100644
--- a/api/actions/permissions.py
+++ b/api/actions/permissions.py
@@ -4,7 +4,7 @@
 from rest_framework import permissions as drf_permissions
 
 from api.base.utils import get_user_auth
-from osf.models.action import Action
+from osf.models.action import ReviewAction
 from osf.models.mixins import ReviewableMixin, ReviewProviderMixin
 from osf.utils.workflows import DefaultTriggers
 from website.util import permissions as osf_permissions
@@ -18,7 +18,7 @@
 }
 
 
-class ActionPermission(drf_permissions.BasePermission):
+class ReviewActionPermission(drf_permissions.BasePermission):
     def has_object_permission(self, request, view, obj):
         auth = get_user_auth(request)
         if auth.user is None:
@@ -26,7 +26,7 @@ def has_object_permission(self, request, view, obj):
 
         target = None
         provider = None
-        if isinstance(obj, Action):
+        if isinstance(obj, ReviewAction):
             target = obj.target
             provider = target.provider
         elif isinstance(obj, ReviewableMixin):
diff --git a/api/actions/serializers.py b/api/actions/serializers.py
index 5ff09a58d8d..1ec72408ee2 100644
--- a/api/actions/serializers.py
+++ b/api/actions/serializers.py
@@ -48,15 +48,27 @@ def get_meta_information(self, metadata, provider):
 
 
 class TargetRelationshipField(RelationshipField):
-    def get_object(self, preprint_id):
-        return PreprintService.objects.get(guids___id=preprint_id)
+    _target_class = None
+
+    def __init__(self, *args, **kwargs):
+        self._target_class = kwargs.pop('target_class', None)
+        super(TargetRelationshipField, self).__init__(*args, **kwargs)
+
+    @property
+    def TargetClass(self):
+        if self._target_class:
+            return self._target_class
+        raise NotImplementedError()
+
+    def get_object(self, object_id):
+        return self.TargetClass.load(object_id)
 
     def to_internal_value(self, data):
-        preprint = self.get_object(data)
-        return {'target': preprint}
+        target = self.get_object(data)
+        return {'target': target}
 
 
-class ActionSerializer(JSONAPISerializer):
+class BaseActionSerializer(JSONAPISerializer):
     filterable_fields = frozenset([
         'id',
         'trigger',
@@ -64,7 +76,6 @@ class ActionSerializer(JSONAPISerializer):
         'to_state',
         'date_created',
         'date_modified',
-        'provider',
         'target',
     ])
 
@@ -80,28 +91,13 @@ class ActionSerializer(JSONAPISerializer):
     date_created = ser.DateTimeField(read_only=True)
     date_modified = ser.DateTimeField(read_only=True)
 
-    provider = RelationshipField(
-        read_only=True,
-        related_view='preprint_providers:preprint_provider-detail',
-        related_view_kwargs={'provider_id': '<target.provider._id>'},
-        filter_key='target__provider___id',
-    )
-
-    target = TargetRelationshipField(
-        read_only=False,
-        required=True,
-        related_view='preprints:preprint-detail',
-        related_view_kwargs={'preprint_id': '<target._id>'},
-        filter_key='target__guids___id',
-    )
-
-    creator = HideIfProviderCommentsAnonymous(RelationshipField(
+    creator = RelationshipField(
         read_only=True,
         related_view='users:user-detail',
         related_view_kwargs={'user_id': '<creator._id>'},
         filter_key='creator__guids___id',
         always_embed=True,
-    ))
+    )
 
     links = LinksField(
         {
@@ -109,12 +105,13 @@ class ActionSerializer(JSONAPISerializer):
         }
     )
 
+    @property
+    def get_action_url(self):
+        raise NotImplementedError()
+
     def get_absolute_url(self, obj):
         return self.get_action_url(obj)
 
-    def get_action_url(self, obj):
-        return utils.absolute_reverse('actions:action-detail', kwargs={'action_id': obj._id, 'version': self.context['request'].parser_context['kwargs']['version']})
-
     def create(self, validated_data):
         trigger = validated_data.pop('trigger')
         user = validated_data.pop('user')
@@ -137,3 +134,46 @@ def create(self, validated_data):
 
     class Meta:
         type_ = 'actions'
+        abstract = True
+
+class ReviewActionSerializer(BaseActionSerializer):
+    class Meta:
+        type_ = 'review-actions'
+
+    filterable_fields = frozenset([
+        'id',
+        'trigger',
+        'from_state',
+        'to_state',
+        'date_created',
+        'date_modified',
+        'provider',
+        'target',
+    ])
+
+    provider = RelationshipField(
+        read_only=True,
+        related_view='preprint_providers:preprint_provider-detail',
+        related_view_kwargs={'provider_id': '<target.provider._id>'},
+        filter_key='target__provider___id',
+    )
+
+    creator = HideIfProviderCommentsAnonymous(RelationshipField(
+        read_only=True,
+        related_view='users:user-detail',
+        related_view_kwargs={'user_id': '<creator._id>'},
+        filter_key='creator__guids___id',
+        always_embed=True,
+    ))
+
+    target = TargetRelationshipField(
+        target_class=PreprintService,
+        read_only=False,
+        required=True,
+        related_view='preprints:preprint-detail',
+        related_view_kwargs={'preprint_id': '<target._id>'},
+        filter_key='target__guids___id',
+    )
+
+    def get_action_url(self, obj):
+        return utils.absolute_reverse('actions:action-detail', kwargs={'action_id': obj._id, 'version': self.context['request'].parser_context['kwargs']['version']})
diff --git a/api/actions/urls.py b/api/actions/urls.py
index adbd3513a5f..7c1fe88d2f2 100644
--- a/api/actions/urls.py
+++ b/api/actions/urls.py
@@ -5,6 +5,6 @@
 app_name = 'osf'
 
 urlpatterns = [
-    url(r'^$', views.CreateAction.as_view(), name=views.CreateAction.view_name),
+    url(r'^reviews/$', views.ReviewActionList.as_view(), name=views.ReviewActionList.view_name),
     url(r'^(?P<action_id>\w+)/$', views.ActionDetail.as_view(), name=views.ActionDetail.view_name),
 ]
diff --git a/api/actions/views.py b/api/actions/views.py
index ad3fd515ee6..55487dda430 100644
--- a/api/actions/views.py
+++ b/api/actions/views.py
@@ -2,25 +2,22 @@
 from __future__ import unicode_literals
 
 from django.shortcuts import get_object_or_404
+from guardian.shortcuts import get_objects_for_user
 from rest_framework import generics
 from rest_framework import permissions
+from rest_framework.exceptions import NotFound
 
-from api.actions.permissions import ActionPermission
-from api.actions.serializers import ActionSerializer
-from api.base.exceptions import Conflict
-from api.base.parsers import (
-    JSONAPIMultipleRelationshipsParser,
-    JSONAPIMultipleRelationshipsParserForRegularJSON,
-)
-from api.base.utils import absolute_reverse
+from api.actions.permissions import ReviewActionPermission
+from api.actions.serializers import ReviewActionSerializer
+from api.base.filters import ListFilterMixin
 from api.base.views import JSONAPIBaseView
 from api.base import permissions as base_permissions
 from framework.auth.oauth_scopes import CoreScopes
-from osf.models import Action
+from osf.models import PreprintProvider, ReviewAction
 
 
-def get_actions_queryset():
-    return Action.objects.include(
+def get_review_actions_queryset():
+    return ReviewAction.objects.include(
         'creator',
         'creator__guids',
         'target',
@@ -32,7 +29,7 @@ def get_actions_queryset():
 class ActionDetail(JSONAPIBaseView, generics.RetrieveAPIView):
     """Action Detail
 
-    Actions represent state changes and/or comments on a reviewable object (e.g. a preprint)
+    Actions represent state changes and/or comments on any actionable object (e.g. preprints, noderequests)
 
     ##Action Attributes
 
@@ -62,29 +59,30 @@ class ActionDetail(JSONAPIBaseView, generics.RetrieveAPIView):
     permission_classes = (
         permissions.IsAuthenticatedOrReadOnly,
         base_permissions.TokenHasScope,
-        ActionPermission,
+        ReviewActionPermission,
     )
 
     required_read_scopes = [CoreScopes.ACTIONS_READ]
     required_write_scopes = [CoreScopes.ACTIONS_WRITE]
 
-    serializer_class = ActionSerializer
+    serializer_class = ReviewActionSerializer
     view_category = 'actions'
     view_name = 'action-detail'
 
     def get_object(self):
-        action = get_object_or_404(get_actions_queryset(), _id=self.kwargs['action_id'])
+        action = None
+        if ReviewAction.objects.filter(_id=self.kwargs['action_id']):
+            action = get_object_or_404(get_review_actions_queryset(), _id=self.kwargs['action_id'])
+        if not action:
+            raise NotFound('Unable to find specified Action')
         self.check_object_permissions(self.request, action)
         return action
 
 
-class CreateAction(JSONAPIBaseView, generics.ListCreateAPIView):
-    """Create Actions *Write-only*
-
-    Use this endpoint to create a new Action and thereby trigger a state change on a preprint.
+class ReviewActionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
+    """List of review actions viewable by this user *Read-only*
 
-    GETting from this endpoint will always return an empty list.
-    Use `/user/me/actions/` or `/preprints/<guid>/actions/` to read lists of actions.
+    Actions represent state changes and/or comments on a reviewable object (e.g. a preprint)
 
     ##Action Attributes
 
@@ -118,66 +116,28 @@ class CreateAction(JSONAPIBaseView, generics.ListCreateAPIView):
     + `filter[<fieldname>]=<Str>` -- fields and values to filter the search results on.
 
     Actions may be filtered by their `id`, `from_state`, `to_state`, `date_created`, `date_modified`, `creator`, `provider`, `target`
-
-    ###Creating New Actions
-
-    Create a new Action by POSTing to `/actions/`, including the target preprint and the action trigger.
-
-    Valid triggers are: `submit`, `accept`, `reject`, and `edit_comment`
-
-        Method:        POST
-        URL:           /actions/
-        Query Params:  <none>
-        Body (JSON):   {
-                        "data": {
-                            "attributes": {
-                                "trigger": {trigger},           # required
-                                "comment": {comment},
-                            },
-                            "relationships": {
-                                "target": {                     # required
-                                    "data": {
-                                        "type": "preprints",
-                                        "id": {preprint_id}
-                                    }
-                                },
-                            }
-                        }
-                    }
-        Success:       201 CREATED + action representation
     """
+    # Permissions handled in get_default_django_query
     permission_classes = (
-        permissions.IsAuthenticatedOrReadOnly,
+        permissions.IsAuthenticated,
         base_permissions.TokenHasScope,
-        ActionPermission,
     )
 
-    required_read_scopes = [CoreScopes.NULL]
-    required_write_scopes = [CoreScopes.ACTIONS_WRITE]
-
-    parser_classes = (JSONAPIMultipleRelationshipsParser, JSONAPIMultipleRelationshipsParserForRegularJSON,)
+    required_read_scopes = [CoreScopes.ACTIONS_READ]
+    required_write_scopes = [CoreScopes.NULL]
 
-    serializer_class = ActionSerializer
+    serializer_class = ReviewActionSerializer
+    model_class = ReviewAction
 
+    ordering = ('-date_created',)
     view_category = 'actions'
-    view_name = 'create-action'
-
-    # overrides ListCreateAPIView
-    def perform_create(self, serializer):
-        target = serializer.validated_data['target']
-        self.check_object_permissions(self.request, target)
-
-        if not target.provider.is_reviewed:
-            raise Conflict('{} is an unmoderated provider. If you are an admin, set up moderation by setting `reviews_workflow` at {}'.format(
-                target.provider.name,
-                absolute_reverse('preprint_providers:preprint_provider-detail', kwargs={
-                    'provider_id': target.provider._id,
-                    'version': self.request.parser_context['kwargs']['version']
-                })
-            ))
+    view_name = 'review-action-list'
 
-        serializer.save(user=self.request.user)
+    # overrides ListFilterMixin
+    def get_default_queryset(self):
+        provider_queryset = get_objects_for_user(self.request.user, 'view_actions', PreprintProvider)
+        return get_review_actions_queryset().filter(target__node__is_public=True, target__provider__in=provider_queryset)
 
-    # overrides ListCreateAPIView
+    # overrides ListAPIView
     def get_queryset(self):
-        return Action.objects.none()
+        return self.get_queryset_from_request()
diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py
index 1718165c215..203e89f464e 100644
--- a/api/preprints/serializers.py
+++ b/api/preprints/serializers.py
@@ -132,8 +132,8 @@ class PreprintSerializer(JSONAPISerializer):
         read_only=False
     )
 
-    actions = RelationshipField(
-        related_view='preprints:preprint-action-list',
+    review_actions = RelationshipField(
+        related_view='preprints:preprint-review-action-list',
         related_view_kwargs={'preprint_id': '<_id>'}
     )
 
@@ -189,8 +189,9 @@ def update(self, preprint, validated_data):
         if published and preprint.provider.is_reviewed:
             raise Conflict('{} uses a moderation workflow, so preprints must be submitted for review instead of published directly. Submit a preprint by creating a `submit` Action at {}'.format(
                 preprint.provider.name,
-                absolute_reverse('actions:create-action', kwargs={
-                    'version': self.context['request'].parser_context['kwargs']['version']
+                absolute_reverse('preprints:preprint-review-action-list', kwargs={
+                    'version': self.context['request'].parser_context['kwargs']['version'],
+                    'preprint_id': preprint._id
                 })
             ))
 
diff --git a/api/preprints/views.py b/api/preprints/views.py
index 22f174a2b19..68d58f49416 100644
--- a/api/preprints/views.py
+++ b/api/preprints/views.py
@@ -5,12 +5,12 @@
 from rest_framework import permissions as drf_permissions
 
 from framework.auth.oauth_scopes import CoreScopes
-from osf.models import Action, PreprintService
+from osf.models import ReviewAction, PreprintService
 from osf.utils.requests import check_select_for_update
 
-from api.actions.permissions import ActionPermission
-from api.actions.serializers import ActionSerializer
-from api.actions.views import get_actions_queryset
+from api.actions.permissions import ReviewActionPermission
+from api.actions.serializers import ReviewActionSerializer
+from api.actions.views import get_review_actions_queryset
 from api.base.exceptions import Conflict
 from api.base.views import JSONAPIBaseView, WaterButlerMixin
 from api.base.filters import ListFilterMixin, PreprintFilterMixin
@@ -18,7 +18,7 @@
     JSONAPIMultipleRelationshipsParser,
     JSONAPIMultipleRelationshipsParserForRegularJSON,
 )
-from api.base.utils import get_user_auth
+from api.base.utils import absolute_reverse, get_user_auth
 from api.base import permissions as base_permissions
 from api.citations.utils import render_citation, preprint_csl
 from api.preprints.serializers import (
@@ -407,7 +407,7 @@ def create(self, request, *args, **kwargs):
         return super(PreprintContributorsList, self).create(request, *args, **kwargs)
 
 
-class PreprintActionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin, PreprintMixin):
+class PreprintActionList(JSONAPIBaseView, generics.ListCreateAPIView, ListFilterMixin, PreprintMixin):
     """Action List *Read-only*
 
     Actions represent state changes and/or comments on a reviewable object (e.g. a preprint)
@@ -448,22 +448,39 @@ class PreprintActionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin,
     permission_classes = (
         drf_permissions.IsAuthenticatedOrReadOnly,
         base_permissions.TokenHasScope,
-        ActionPermission,
+        ReviewActionPermission,
     )
 
     required_read_scopes = [CoreScopes.ACTIONS_READ]
     required_write_scopes = [CoreScopes.ACTIONS_WRITE]
 
-    serializer_class = ActionSerializer
-    model_class = Action
+    parser_classes = (JSONAPIMultipleRelationshipsParser, JSONAPIMultipleRelationshipsParserForRegularJSON,)
+    serializer_class = ReviewActionSerializer
+    model_class = ReviewAction
 
     ordering = ('-date_created',)
     view_category = 'preprints'
-    view_name = 'preprint-action-list'
+    view_name = 'preprint-review-action-list'
+
+    # overrides ListCreateAPIView
+    def perform_create(self, serializer):
+        target = serializer.validated_data['target']
+        self.check_object_permissions(self.request, target)
+
+        if not target.provider.is_reviewed:
+            raise Conflict('{} is an unmoderated provider. If you are an admin, set up moderation by setting `reviews_workflow` at {}'.format(
+                target.provider.name,
+                absolute_reverse('preprint_providers:preprint_provider-detail', kwargs={
+                    'provider_id': target.provider._id,
+                    'version': self.request.parser_context['kwargs']['version']
+                })
+            ))
+
+        serializer.save(user=self.request.user)
 
     # overrides ListFilterMixin
     def get_default_queryset(self):
-        return get_actions_queryset().filter(target_id=self.get_preprint().id)
+        return get_review_actions_queryset().filter(target_id=self.get_preprint().id)
 
     # overrides ListAPIView
     def get_queryset(self):
diff --git a/api/users/serializers.py b/api/users/serializers.py
index 9613573c0a4..2e2a2127e94 100644
--- a/api/users/serializers.py
+++ b/api/users/serializers.py
@@ -86,11 +86,6 @@ class UserSerializer(JSONAPISerializer):
         self_view_kwargs={'user_id': '<_id>'},
     ))
 
-    actions = ShowIfCurrentUser(RelationshipField(
-        related_view='users:user-action-list',
-        related_view_kwargs={'user_id': '<_id>'},
-    ))
-
     class Meta:
         type_ = 'users'
 
diff --git a/api/users/urls.py b/api/users/urls.py
index 68a81943a2d..ab72931e6ff 100644
--- a/api/users/urls.py
+++ b/api/users/urls.py
@@ -6,7 +6,6 @@
 urlpatterns = [
     url(r'^$', views.UserList.as_view(), name=views.UserList.view_name),
     url(r'^(?P<user_id>\w+)/$', views.UserDetail.as_view(), name=views.UserDetail.view_name),
-    url(r'^(?P<user_id>\w+)/actions/$', views.UserActionList.as_view(), name=views.UserActionList.view_name),
     url(r'^(?P<user_id>\w+)/addons/$', views.UserAddonList.as_view(), name=views.UserAddonList.view_name),
     url(r'^(?P<user_id>\w+)/addons/(?P<provider>\w+)/$', views.UserAddonDetail.as_view(), name=views.UserAddonDetail.view_name),
     url(r'^(?P<user_id>\w+)/addons/(?P<provider>\w+)/accounts/$', views.UserAddonAccountList.as_view(), name=views.UserAddonAccountList.view_name),
diff --git a/api/users/views.py b/api/users/views.py
index 162ad81f358..0c7a7cd0010 100644
--- a/api/users/views.py
+++ b/api/users/views.py
@@ -1,10 +1,6 @@
 from django.apps import apps
 
-from guardian.shortcuts import get_objects_for_user
-
 from api.addons.views import AddonSettingsMixin
-from api.actions.views import get_actions_queryset
-from api.actions.serializers import ActionSerializer
 from api.base import permissions as base_permissions
 from api.base.exceptions import Conflict, UserGone
 from api.base.filters import ListFilterMixin, PreprintFilterMixin
@@ -41,9 +37,7 @@
                         PreprintService,
                         Node,
                         Registration,
-                        OSFUser,
-                        PreprintProvider,
-                        Action,)
+                        OSFUser)
 
 
 class UserMixin(object):
@@ -786,68 +780,3 @@ def perform_destroy(self, instance):
             if val['id'] in current_institutions:
                 user.remove_institution(val['id'])
         user.save()
-
-
-class UserActionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin, UserMixin):
-    """List of actions viewable by this user *Read-only*
-
-    Actions represent state changes and/or comments on a reviewable object (e.g. a preprint)
-
-    ##Action Attributes
-
-        name                            type                                description
-        ====================================================================================
-        date_created                    iso8601 timestamp                   timestamp that the action was created
-        date_modified                   iso8601 timestamp                   timestamp that the action was last modified
-        from_state                      string                              state of the reviewable before this action was created
-        to_state                        string                              state of the reviewable after this action was created
-        comment                         string                              comment explaining the state change
-        trigger                         string                              name of the trigger for this action
-
-    ##Relationships
-
-    ###Target
-    Link to the object (e.g. preprint) this action acts on
-
-    ###Provider
-    Link to detail for the target object's provider
-
-    ###Creator
-    Link to the user that created this action
-
-    ##Links
-    - `self` -- Detail page for the current action
-
-    ##Query Params
-
-    + `page=<Int>` -- page number of results to view, default 1
-
-    + `filter[<fieldname>]=<Str>` -- fields and values to filter the search results on.
-
-    Actions may be filtered by their `id`, `from_state`, `to_state`, `date_created`, `date_modified`, `creator`, `provider`, `target`
-    """
-    # Permissions handled in get_default_django_query
-    permission_classes = (
-        drf_permissions.IsAuthenticated,
-        base_permissions.TokenHasScope,
-        CurrentUser,
-    )
-
-    required_read_scopes = [CoreScopes.ACTIONS_READ]
-    required_write_scopes = [CoreScopes.NULL]
-
-    serializer_class = ActionSerializer
-    model_class = Action
-
-    ordering = ('-date_created',)
-    view_category = 'users'
-    view_name = 'user-action-list'
-
-    # overrides ListFilterMixin
-    def get_default_queryset(self):
-        provider_queryset = get_objects_for_user(self.get_user(), 'view_actions', PreprintProvider)
-        return get_actions_queryset().filter(target__node__is_public=True, target__provider__in=provider_queryset)
-
-    # overrides ListAPIView
-    def get_queryset(self):
-        return self.get_queryset_from_request()
diff --git a/api_tests/preprint_providers/views/test_preprint_provider_preprints_list.py b/api_tests/preprint_providers/views/test_preprint_provider_preprints_list.py
index 5976b484d9e..38b217fef9a 100644
--- a/api_tests/preprint_providers/views/test_preprint_provider_preprints_list.py
+++ b/api_tests/preprint_providers/views/test_preprint_provider_preprints_list.py
@@ -114,10 +114,10 @@ def expected_reviewables(self, provider, user):
             PreprintFactory(is_published=False, provider=provider, project=ProjectFactory(is_public=True)),
             PreprintFactory(is_published=False, provider=provider, project=ProjectFactory(is_public=True)),
         ]
-        preprints[0].reviews_submit(user)
-        preprints[0].reviews_accept(user, 'comment')
-        preprints[1].reviews_submit(user)
-        preprints[2].reviews_submit(user)
+        preprints[0].run_submit(user)
+        preprints[0].run_accept(user, 'comment')
+        preprints[1].run_submit(user)
+        preprints[2].run_submit(user)
         return preprints
 
     @pytest.fixture
diff --git a/api_tests/preprints/views/test_preprint_actions.py b/api_tests/preprints/views/test_preprint_actions.py
index a824fc66535..4d465a69494 100644
--- a/api_tests/preprints/views/test_preprint_actions.py
+++ b/api_tests/preprints/views/test_preprint_actions.py
@@ -7,11 +7,11 @@
 )
 from website.util import permissions as osf_permissions
 
-from api_tests.reviews.mixins.filter_mixins import ActionFilterMixin
-from api_tests.reviews.mixins.comment_settings import ActionCommentSettingsMixin
+from api_tests.reviews.mixins.filter_mixins import ReviewActionFilterMixin
+from api_tests.reviews.mixins.comment_settings import ReviewActionCommentSettingsMixin
 
 
-class TestPreprintActionFilters(ActionFilterMixin):
+class TestPreprintActionFilters(ReviewActionFilterMixin):
 
     @pytest.fixture()
     def preprint(self, all_actions):
@@ -43,7 +43,7 @@ def test_unauthorized_user(self, app, url):
         assert res.status_code == 403
 
 
-class TestActionSettings(ActionCommentSettingsMixin):
+class TestReviewActionSettings(ReviewActionCommentSettingsMixin):
     @pytest.fixture()
     def url(self, preprint):
         return '/{}preprints/{}/actions/'.format(API_BASE, preprint._id)
diff --git a/api_tests/preprints/views/test_preprint_detail.py b/api_tests/preprints/views/test_preprint_detail.py
index 9ace5a18328..0ea047810df 100644
--- a/api_tests/preprints/views/test_preprint_detail.py
+++ b/api_tests/preprints/views/test_preprint_detail.py
@@ -863,19 +863,19 @@ def file_one_public_project(self, admin, public_project):
 
     @pytest.fixture()
     def unpublished_preprint(self, admin, provider, subject, public_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=public_project, is_published=False, reviews_state='initial')
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=public_project, is_published=False, machine_state='initial')
 
     @pytest.fixture()
     def private_preprint(self, admin, provider, subject, private_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=private_project, is_published=False, reviews_state='accepted')
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=private_project, is_published=False, machine_state='accepted')
 
     @pytest.fixture()
     def abandoned_private_preprint(self, admin, provider, subject, private_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=private_project, is_published=False, reviews_state='initial')
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=private_project, is_published=False, machine_state='initial')
 
     @pytest.fixture()
     def abandoned_public_preprint(self, admin, provider, subject, public_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=public_project, is_published=False, reviews_state='initial')
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=public_project, is_published=False, machine_state='initial')
 
     @pytest.fixture()
     def abandoned_private_url(self, abandoned_private_preprint):
diff --git a/api_tests/reviews/mixins/comment_settings.py b/api_tests/reviews/mixins/comment_settings.py
index 23e006dc19b..baa280959e2 100644
--- a/api_tests/reviews/mixins/comment_settings.py
+++ b/api_tests/reviews/mixins/comment_settings.py
@@ -2,7 +2,7 @@
 
 from api.preprint_providers.permissions import GroupHelper
 from osf_tests.factories import (
-    ActionFactory,
+    ReviewActionFactory,
     AuthUserFactory,
     PreprintFactory,
     PreprintProviderFactory,
@@ -11,7 +11,7 @@
 
 
 @pytest.mark.django_db
-class ActionCommentSettingsMixin(object):
+class ReviewActionCommentSettingsMixin(object):
 
     @pytest.fixture()
     def url(self):
@@ -27,7 +27,7 @@ def preprint(self, provider):
 
     @pytest.fixture()
     def actions(self, preprint):
-        return [ActionFactory(target=preprint) for _ in range(5)]
+        return [ReviewActionFactory(target=preprint) for _ in range(5)]
 
     @pytest.fixture()
     def provider_admin(self, provider):
diff --git a/api_tests/reviews/mixins/filter_mixins.py b/api_tests/reviews/mixins/filter_mixins.py
index 0f10005a3ab..ae0d1569695 100644
--- a/api_tests/reviews/mixins/filter_mixins.py
+++ b/api_tests/reviews/mixins/filter_mixins.py
@@ -5,7 +5,7 @@
 
 from api.preprint_providers.permissions import GroupHelper
 from osf_tests.factories import (
-    ActionFactory,
+    ReviewActionFactory,
     AuthUserFactory,
     PreprintFactory,
     PreprintProviderFactory,
@@ -42,7 +42,7 @@ def get_actual(app, url, user=None, sort=None, expect_errors=False, **filters):
 
 
 @pytest.mark.django_db
-class ActionFilterMixin(object):
+class ReviewActionFilterMixin(object):
 
     @pytest.fixture()
     def url(self):
@@ -58,7 +58,7 @@ def all_actions(self, providers):
         for provider in providers:
             preprint = PreprintFactory(provider=provider, project=ProjectFactory(is_public=True))
             for _ in range(5):
-                actions.append(ActionFactory(target=preprint))
+                actions.append(ReviewActionFactory(target=preprint))
         return actions
 
     @pytest.fixture()
diff --git a/api_tests/users/views/test_user_actions.py b/api_tests/users/views/test_user_actions.py
index ca506afcd50..60b83c847f2 100644
--- a/api_tests/users/views/test_user_actions.py
+++ b/api_tests/users/views/test_user_actions.py
@@ -10,17 +10,17 @@
 )
 from website.util import permissions as osf_permissions
 
-from api_tests.reviews.mixins.filter_mixins import ActionFilterMixin
+from api_tests.reviews.mixins.filter_mixins import ReviewActionFilterMixin
 
 
-class TestActionFilters(ActionFilterMixin):
+class TestReviewActionFilters(ReviewActionFilterMixin):
     @pytest.fixture()
     def url(self):
-        return '/{}users/me/actions/'.format(API_BASE)
+        return '/{}actions/reviews/'.format(API_BASE)
 
     @pytest.fixture()
     def expected_actions(self, all_actions, allowed_providers):
-        actions = super(TestActionFilters, self).expected_actions(all_actions, allowed_providers)
+        actions = super(TestReviewActionFilters, self).expected_actions(all_actions, allowed_providers)
         node = actions[0].target.node
         node.is_public = False
         node.save()
@@ -36,7 +36,7 @@ def test_no_permission(self, app, url, expected_actions):
 
 
 @pytest.mark.django_db
-class TestActionCreate(object):
+class TestReviewActionCreate(object):
     def create_payload(self, reviewable_id=None, **attrs):
         payload = {
             'data': {
@@ -55,8 +55,8 @@ def create_payload(self, reviewable_id=None, **attrs):
         return payload
 
     @pytest.fixture()
-    def url(self):
-        return '/{}actions/'.format(API_BASE)
+    def url(self, preprint):
+        return '/{}preprints/{}/actions/'.format(API_BASE, preprint._id)
 
     @pytest.fixture()
     def provider(self):
diff --git a/framework/auth/oauth_scopes.py b/framework/auth/oauth_scopes.py
index 8490554f128..d3bed583f4f 100644
--- a/framework/auth/oauth_scopes.py
+++ b/framework/auth/oauth_scopes.py
@@ -95,8 +95,8 @@ class CoreScopes(object):
 
     SEARCH = 'search_read'
 
-    ACTIONS_READ = 'review_logs_read'
-    ACTIONS_WRITE = 'review_logs_write'
+    ACTIONS_READ = 'actions_read'
+    ACTIONS_WRITE = 'actions_write'
 
     PROVIDERS_WRITE = 'providers_write'
 
diff --git a/osf/management/commands/create_fake_preprint_actions.py b/osf/management/commands/create_fake_preprint_actions.py
index 3643fcecb6e..64f2630f606 100644
--- a/osf/management/commands/create_fake_preprint_actions.py
+++ b/osf/management/commands/create_fake_preprint_actions.py
@@ -8,7 +8,7 @@
 
 from django.core.management.base import BaseCommand
 
-from osf.models import Action, PreprintService, OSFUser
+from osf.models import ReviewAction, PreprintService, OSFUser
 from osf.utils.workflows import DefaultStates, DefaultTriggers
 
 logger = logging.getLogger(__name__)
@@ -48,7 +48,7 @@ def handle(self, *args, **options):
         states = [s.value for s in DefaultStates]
         for preprint in PreprintService.objects.filter(actions__isnull=True):
             for i in range(num_actions):
-                action = Action(
+                action = ReviewAction(
                     target=preprint,
                     creator=user,
                     trigger=random.choice(triggers),
diff --git a/osf/migrations/0066_auto_20171031_1409.py b/osf/migrations/0066_auto_20171031_1409.py
new file mode 100644
index 00000000000..5cc7b04e48d
--- /dev/null
+++ b/osf/migrations/0066_auto_20171031_1409.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.4 on 2017-10-31 19:09
+from __future__ import unicode_literals
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('osf', '0065_preprintservice_original_publication_date'),
+    ]
+
+    operations = [
+        migrations.RenameModel(
+            old_name='Action',
+            new_name='ReviewAction',
+        ),
+        migrations.RenameField(
+            model_name='preprintservice',
+            old_name='reviews_state',
+            new_name='machine_state',
+        ),
+    ]
diff --git a/osf/models/__init__.py b/osf/models/__init__.py
index 828aee2ff9f..0a83b8e3e17 100644
--- a/osf/models/__init__.py
+++ b/osf/models/__init__.py
@@ -35,4 +35,4 @@
 from osf.models.admin_log_entry import AdminLogEntry  # noqa
 from osf.models.maintenance_state import MaintenanceState  # noqa
 from osf.models.quickfiles import QuickFilesNode  # noqa
-from osf.models.action import Action  # noqa
+from osf.models.action import ReviewAction  # noqa
diff --git a/osf/models/action.py b/osf/models/action.py
index af125606664..0e12eaf5fa7 100644
--- a/osf/models/action.py
+++ b/osf/models/action.py
@@ -10,11 +10,12 @@
 from osf.utils.workflows import DefaultStates, DefaultTriggers
 
 
-class Action(ObjectIDMixin, BaseModel):
+class BaseAction(ObjectIDMixin, BaseModel):
+    class Meta:
+        abstract = True
 
     objects = IncludeManager()
 
-    target = models.ForeignKey('PreprintService', related_name='actions', on_delete=models.CASCADE)
     creator = models.ForeignKey('OSFUser', related_name='+', on_delete=models.CASCADE)
 
     trigger = models.CharField(max_length=31, choices=DefaultTriggers.choices())
@@ -26,3 +27,10 @@ class Action(ObjectIDMixin, BaseModel):
     is_deleted = models.BooleanField(default=False)
     date_created = NonNaiveDateTimeField(auto_now_add=True)
     date_modified = NonNaiveDateTimeField(auto_now=True)
+
+    @property
+    def target(self):
+        raise NotImplementedError()
+
+class ReviewAction(BaseAction):
+    target = models.ForeignKey('PreprintService', related_name='actions', on_delete=models.CASCADE)
diff --git a/osf/models/mixins.py b/osf/models/mixins.py
index e33da3a7afb..75efda5046c 100644
--- a/osf/models/mixins.py
+++ b/osf/models/mixins.py
@@ -1,8 +1,8 @@
 import pytz
 
 from django.apps import apps
-from django.db import models, transaction
 from django.core.exceptions import ObjectDoesNotExist
+from django.db import models, transaction
 from include import IncludeQuerySet
 
 from api.preprint_providers.workflows import Workflows, PUBLIC_STATES
diff --git a/osf/utils/machines.py b/osf/utils/machines.py
index eaace2e3450..6ec7fdc62ac 100644
--- a/osf/utils/machines.py
+++ b/osf/utils/machines.py
@@ -6,7 +6,7 @@
 from framework.auth import Auth
 from framework.postcommit_tasks.handlers import enqueue_postcommit_task
 from osf.exceptions import InvalidTransitionError
-from osf.models.action import Action
+from osf.models.action import ReviewAction
 from osf.models.nodelog import NodeLog
 from osf.utils.workflows import DefaultStates, DEFAULT_TRANSITIONS
 from website.preprints.tasks import get_and_set_preprint_identifiers
@@ -43,6 +43,10 @@ def state(self):
     def state(self, value):
         setattr(self.machineable, self.__state_attr, value)
 
+    @property
+    def ActionClass(self):
+        raise NotImplementedError()
+
     def _validate_transitions(self, transitions):
         for transition in set(sum([t['after'] for t in transitions], [])):
             if not hasattr(self, transition):
@@ -54,7 +58,7 @@ def initialize_machine(self, ev):
 
     def save_action(self, ev):
         user = ev.kwargs.get('user')
-        self.action = Action.objects.create(
+        self.action = self.ActionClass.objects.create(
             target=self.machineable,
             creator=user,
             trigger=ev.event.name,
@@ -68,6 +72,7 @@ def update_last_transitioned(self, ev):
         self.machineable.date_last_transitioned = now
 
 class ReviewsMachine(BaseMachine):
+    ActionClass = ReviewAction
 
     def save_changes(self, ev):
         node = self.machineable.node
diff --git a/osf_tests/factories.py b/osf_tests/factories.py
index 2c58fa925c2..b55b95fbabd 100644
--- a/osf_tests/factories.py
+++ b/osf_tests/factories.py
@@ -802,9 +802,9 @@ class Meta:
         model = models.ArchiveJob
 
 
-class ActionFactory(DjangoModelFactory):
+class ReviewActionFactory(DjangoModelFactory):
     class Meta:
-        model = models.Action
+        model = models.ReviewAction
 
     trigger = FuzzyChoice(choices=DefaultTriggers.values())
     comment = factory.Faker('text')
diff --git a/tests/test_notifications.py b/tests/test_notifications.py
index b7ad700c82d..17228be1862 100644
--- a/tests/test_notifications.py
+++ b/tests/test_notifications.py
@@ -1816,7 +1816,7 @@ def setUp(self):
             'provider_contact_email': 'contact@osf.io',
             'provider_support_email': 'support@osf.io',
         }
-        self.action = factories.ActionFactory()
+        self.action = factories.ReviewActionFactory()
         factories.NotificationSubscriptionFactory(
             _id=self.user._id + '_' + 'global_comments',
             user=self.user,

From f923f8c7ba39f5be42e4e4bc1c45a3aa21b72b5b Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Tue, 21 Nov 2017 11:33:09 -0500
Subject: [PATCH 118/192] Remove duplicate entry in addons.json

---
 addons.json | 1 -
 1 file changed, 1 deletion(-)

diff --git a/addons.json b/addons.json
index 043d0f08a96..a37225dae40 100644
--- a/addons.json
+++ b/addons.json
@@ -28,7 +28,6 @@
         "github": "partial",
         "gitlab": "partial",
         "owncloud": "partial",
-        "gitlab": "partial",
         "s3": "partial",
         "wiki": "full",
         "bitbucket": "partial"

From 21994a126c00d42b4a2aa158e5f63467f7791611 Mon Sep 17 00:00:00 2001
From: Dan Sterlace <dsterlace@growingtechnologies.com>
Date: Thu, 5 Nov 2015 10:53:44 -0800
Subject: [PATCH 119/192] Initial OneDrive commit   - 1/3

---
 addons.json                                   |   2 +
 framework/addons/data/addons.json             |  30 ++
 website/addons/onedrive/README.md             |   8 +
 website/addons/onedrive/__init__.py           |  48 ++
 website/addons/onedrive/client.py             |  92 ++++
 website/addons/onedrive/model.py              | 250 ++++++++++
 website/addons/onedrive/requirements.txt      |   1 +
 website/addons/onedrive/routes.py             |  74 +++
 website/addons/onedrive/serializer.py         | 109 +++++
 website/addons/onedrive/settings/__init__.py  |   9 +
 website/addons/onedrive/settings/defaults.py  |  10 +
 .../addons/onedrive/settings/local-dist.py    |   7 +
 website/addons/onedrive/static/comicon.png    | Bin 0 -> 15288 bytes
 website/addons/onedrive/static/node-cfg.js    |   7 +
 website/addons/onedrive/static/onedrive.css   |  39 ++
 .../onedrive/templates/log_templates.mako     |  53 ++
 website/addons/onedrive/tests/__init__.py     |   0
 website/addons/onedrive/tests/factories.py    |  27 ++
 website/addons/onedrive/tests/test_client.py  |  39 ++
 website/addons/onedrive/tests/test_models.py  | 333 +++++++++++++
 website/addons/onedrive/tests/test_utils.py   | 129 +++++
 website/addons/onedrive/tests/test_views.py   | 453 ++++++++++++++++++
 .../addons/onedrive/tests/test_webtests.py    |  33 ++
 website/addons/onedrive/tests/utils.py        | 168 +++++++
 website/addons/onedrive/utils.py              |  97 ++++
 website/addons/onedrive/views.py              | 216 +++++++++
 26 files changed, 2234 insertions(+)
 create mode 100644 website/addons/onedrive/README.md
 create mode 100644 website/addons/onedrive/__init__.py
 create mode 100644 website/addons/onedrive/client.py
 create mode 100644 website/addons/onedrive/model.py
 create mode 100644 website/addons/onedrive/requirements.txt
 create mode 100644 website/addons/onedrive/routes.py
 create mode 100644 website/addons/onedrive/serializer.py
 create mode 100644 website/addons/onedrive/settings/__init__.py
 create mode 100644 website/addons/onedrive/settings/defaults.py
 create mode 100644 website/addons/onedrive/settings/local-dist.py
 create mode 100644 website/addons/onedrive/static/comicon.png
 create mode 100644 website/addons/onedrive/static/node-cfg.js
 create mode 100644 website/addons/onedrive/static/onedrive.css
 create mode 100644 website/addons/onedrive/templates/log_templates.mako
 create mode 100644 website/addons/onedrive/tests/__init__.py
 create mode 100644 website/addons/onedrive/tests/factories.py
 create mode 100644 website/addons/onedrive/tests/test_client.py
 create mode 100644 website/addons/onedrive/tests/test_models.py
 create mode 100644 website/addons/onedrive/tests/test_utils.py
 create mode 100644 website/addons/onedrive/tests/test_views.py
 create mode 100644 website/addons/onedrive/tests/test_webtests.py
 create mode 100644 website/addons/onedrive/tests/utils.py
 create mode 100644 website/addons/onedrive/utils.py
 create mode 100644 website/addons/onedrive/views.py

diff --git a/addons.json b/addons.json
index a37225dae40..8617f88f69b 100644
--- a/addons.json
+++ b/addons.json
@@ -11,6 +11,7 @@
         "zotero",
         "osfstorage",
         "owncloud",
+        "onedrive",
         "s3",
         "twofactor",
         "wiki",
@@ -25,6 +26,7 @@
         "figshare": "partial",
         "forward": "full",
         "googledrive": "partial",
+        "onedrive": "partial",
         "github": "partial",
         "gitlab": "partial",
         "owncloud": "partial",
diff --git a/framework/addons/data/addons.json b/framework/addons/data/addons.json
index 0c5dddcb3cc..5a4b02c3c42 100644
--- a/framework/addons/data/addons.json
+++ b/framework/addons/data/addons.json
@@ -150,6 +150,36 @@
                 "text": "Dropbox content will be registered, but version history will not be copied to the registration."
             }
         },
+        "OneDrive": {
+            "Permissions": {
+                "status": "partial",
+                "text": "Making an OSF project public or private is independent of OneDrive privacy. The OSF does not alter the permissions of linked OneDrive folders."
+            },
+            "View / download file versions": {
+                "status": "full",
+                "text": "OneDrive files and their versions can be viewed/downloaded via OSF."
+            },
+            "Add / update files": {
+                "status": "full",
+                "text": "Adding/updating files in the project via OSF will be reflected in OneDrive."
+            },
+            "Delete files": {
+                "status": "full",
+                "text": "Files deleted via OSF will be deleted in OneDrive."
+            },
+            "Logs": {
+                "status": "partial",
+                "text": "OSF keeps track of changes you make to your OneDrive content through OSF, but not for changes made using OneDrive directly."
+            },
+            "Forking": {
+                "status": "partial",
+                "text": "Forking a project or component does not copy OneDrive authorization unless the user forking the project is the same user who authorized the OneDrive add-on in the source project being forked."
+            },
+            "Registering": {
+                "status": "partial",
+                "text": "OneDrive content will be registered, but version history will not be copied to the registration."
+            }
+        },
         "Dataverse": {
             "Permissions": {
                 "status": "partial",
diff --git a/website/addons/onedrive/README.md b/website/addons/onedrive/README.md
new file mode 100644
index 00000000000..ee88d887c1d
--- /dev/null
+++ b/website/addons/onedrive/README.md
@@ -0,0 +1,8 @@
+# OSF OneDrive Addon
+
+Enabling the addon for development
+
+1. In `website/settings/local.py` add, `"onedrive"` to the `ADDONS_REQUESTED` list.
+2. If `website/addons/onedrive/settings/local.py` does not yet exist, create a local box settings file with `cp website/addons/onedrive/settings/local-dist.py website/addons/onedrive/settings/local.py`
+...
+?. Enter your OneDrive `client_id` and `client_secret` as `ONEDRIVE_KEY` and `ONEDRIVE_SECRET` in `website/addons/onedrive/settings/local.py`. 
diff --git a/website/addons/onedrive/__init__.py b/website/addons/onedrive/__init__.py
new file mode 100644
index 00000000000..bbb02d03a84
--- /dev/null
+++ b/website/addons/onedrive/__init__.py
@@ -0,0 +1,48 @@
+import os
+
+from website.addons.onedrive import model, routes, utils
+
+
+MODELS = [
+    model.OnedriveUserSettings,
+    model.OnedriveNodeSettings,
+]
+
+USER_SETTINGS_MODEL = model.OnedriveUserSettings
+NODE_SETTINGS_MODEL = model.OnedriveNodeSettings
+
+ROUTES = [routes.api_routes]
+
+SHORT_NAME = 'onedrive'
+FULL_NAME = 'OneDrive'
+
+OWNERS = ['user', 'node']
+
+ADDED_DEFAULT = []
+ADDED_MANDATORY = []
+
+VIEWS = []
+CONFIGS = ['accounts', 'node']
+
+CATEGORIES = ['storage']
+
+# TODO: Deprecate in favor of webpack/CommonJS bundles
+INCLUDE_JS = {
+    'widget': [],
+    'page': [],
+    'files': []
+}
+
+INCLUDE_CSS = {
+    'widget': [],
+    'page': [],
+}
+
+HAS_HGRID_FILES = True
+GET_HGRID_DATA = utils.onedrive_addon_folder
+
+MAX_FILE_SIZE = 250  # MB
+
+HERE = os.path.dirname(os.path.abspath(__file__))
+NODE_SETTINGS_TEMPLATE = None  # use default node settings template
+USER_SETTINGS_TEMPLATE = None  # use default user settings template
diff --git a/website/addons/onedrive/client.py b/website/addons/onedrive/client.py
new file mode 100644
index 00000000000..4dde53e4273
--- /dev/null
+++ b/website/addons/onedrive/client.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+import logging
+
+#import requests #TODO: remove this after determining onedrive connection issues w/make_request
+
+from requests_oauthlib import OAuth2Session
+from oauthlib.oauth2 import InvalidGrantError
+
+from framework.exceptions import HTTPError
+
+from website.util.client import BaseClient
+#from website.addons.googledrive import settings
+from website.addons.googledrive import exceptions
+from website.addons.onedrive import settings
+
+logger = logging.getLogger(__name__)
+
+logging.getLogger('onedrive1').setLevel(logging.WARNING)
+
+
+class OneDriveAuthClient(BaseClient):
+
+    def refresh(self, access_token, refresh_token):
+        client = OAuth2Session(
+            settings.ONEDRIVE_KEY,
+            token={
+                'access_token': access_token,
+                'refresh_token': refresh_token,
+                'token_type': 'Bearer',
+                'expires_in': '-30',
+            }
+        )
+ 
+        extra = {
+            'client_id': settings.ONEDRIVE_KEY,
+            'client_secret': settings.ONEDRIVE_SECRET,
+        }
+ 
+        try:
+            return client.refresh_token(
+                self._build_url(settings.ONEDRIVE_OAUTH_TOKEN_ENDPOINT),
+                # ('love')
+                **extra
+            )
+        except InvalidGrantError:
+            raise exceptions.ExpiredAuthError()
+ 
+    def userinfo(self, access_token):
+        return self._make_request(
+            'GET',
+            self._build_url(settings.MSLIVE_API_URL, 'oauth2', 'v3', 'userinfo'),
+            params={'access_token': access_token},
+            expects=(200, ),
+            throws=HTTPError(401)
+        ).json()
+
+
+class OneDriveClient(BaseClient):
+
+    def __init__(self, access_token=None):
+        self.access_token = access_token
+
+    @property
+    def _default_headers(self):
+        if self.access_token:
+            return {'Authorization': 'bearer {}'.format(self.access_token)}
+        return {}
+
+    def about(self):
+        return self._make_request(
+            'GET',
+            self._build_url(settings.ONEDRIVE_API_URL, 'drive', 'v2', 'about', ),
+            expects=(200, ),
+            throws=HTTPError(401)
+        ).json()
+
+    def folders(self, folder_id='root/children'):
+#         query = ' and '.join([
+#             "'{0}' in parents".format(folder_id),
+#             'trashed = false',
+#             "mimeType = 'application/vnd.google-apps.folder'",
+#         ])
+        logger.debug('folders::made it1')
+        res = self._make_request(
+            'GET',
+            self._build_url(settings.ONEDRIVE_API_URL, 'drive/', folder_id),
+            params={}, #'q': query
+            expects=(200, ),
+            throws=HTTPError(401)
+        )
+        logger.debug('res::' + repr(res))
+        return res.json()['value']
diff --git a/website/addons/onedrive/model.py b/website/addons/onedrive/model.py
new file mode 100644
index 00000000000..fa373d31448
--- /dev/null
+++ b/website/addons/onedrive/model.py
@@ -0,0 +1,250 @@
+# -*- coding: utf-8 -*-
+import logging
+import requests
+
+from flask import abort, request
+from datetime import datetime
+
+
+# import onedrivesdk
+# from onedrivesdk.helpers import GetAuthCodeServer
+
+#from onedrivesdk import CredentialsV2, OnedriveClient
+#from onedrivesdk.client import OnedriveClientException
+from modularodm import fields
+
+from framework.auth import Auth
+from framework.exceptions import HTTPError
+
+from website.addons.base import exceptions
+from website.addons.base import AddonOAuthUserSettingsBase, AddonOAuthNodeSettingsBase
+from website.addons.base import StorageAddonBase
+
+from website.addons.onedrive import settings
+from website.addons.onedrive.utils import OnedriveNodeLogger
+from website.addons.onedrive.serializer import OnedriveSerializer
+from website.addons.onedrive.client import OneDriveAuthClient
+from website.addons.onedrive.client import OneDriveClient
+
+from website.oauth.models import ExternalProvider
+
+logger = logging.getLogger(__name__)
+
+logging.getLogger('onedrive1').setLevel(logging.WARNING)
+
+class Onedrive(ExternalProvider):
+    name = 'Onedrive'
+    short_name = 'onedrive'
+
+    client_id = settings.ONEDRIVE_KEY
+    client_secret = settings.ONEDRIVE_SECRET
+
+    auth_url_base = settings.ONEDRIVE_OAUTH_AUTH_ENDPOINT
+    callback_url = settings.ONEDRIVE_OAUTH_TOKEN_ENDPOINT
+    auto_refresh_url = settings.ONEDRIVE_OAUTH_TOKEN_ENDPOINT
+    default_scopes = ['wl.basic wl.signin onedrive.readwrite wl.offline_access']
+
+    _auth_client = OneDriveAuthClient()
+    _drive_client = OneDriveClient()
+
+    def handle_callback(self, response):        
+        """View called when the Oauth flow is completed. Adds a new OnedriveUserSettings
+        record to the user and saves the user's access token and account info.
+        """
+        
+        userInfoRequest = requests.get(("{}me?access_token={}").format(settings.MSLIVE_API_URL, response['access_token']))
+        
+        logger.debug("userInfoRequest:: %s", repr(userInfoRequest))
+        
+        userInfo = userInfoRequest.json()
+        logger.debug("userInfo:: %s", repr(userInfo))
+
+        return {
+            'provider_id': userInfo['id'],
+            'display_name': userInfo['name'],
+            'profile_url': userInfo['link']
+        }
+        
+    def _refresh_token(self, access_token, refresh_token):
+        """ Handles the actual request to refresh tokens
+
+        :param str access_token: Access token (oauth key) associated with this account
+        :param str refresh_token: Refresh token used to request a new access token
+        :return dict token: New set of tokens
+        """
+        client = self._auth_client
+        if refresh_token:
+            token = client.refresh(access_token, refresh_token)
+            return token
+        else:
+            return False
+        
+    def fetch_access_token(self, force_refresh=False):
+        self.refresh_access_token(force=force_refresh)
+        return self.account.oauth_key
+
+    def refresh_access_token(self, force=False):
+        """ If the token has expired or will soon, handles refreshing and the storage of new tokens
+
+        :param bool force: Indicates whether or not to force the refreshing process, for the purpose of ensuring that authorization has not been unexpectedly removed.
+        """
+        if self._needs_refresh() or force:
+            token = self._refresh_token(self.account.oauth_key, self.account.refresh_token)
+            self.account.oauth_key = token['access_token']
+            self.account.refresh_token = token['refresh_token']
+            self.account.expires_at = datetime.utcfromtimestamp(token['expires_at'])
+            self.account.save()
+
+    def _needs_refresh(self):
+        if self.account.expires_at is None:
+            return False
+        return (self.account.expires_at - datetime.utcnow()).total_seconds() < settings.REFRESH_TIME
+
+class OnedriveUserSettings(AddonOAuthUserSettingsBase):
+    """Stores user-specific onedrive information
+    """
+    oauth_provider = Onedrive
+    serializer = OnedriveSerializer
+#     myBase = AddonOAuthUserSettingsBase
+
+
+class OnedriveNodeSettings(StorageAddonBase, AddonOAuthNodeSettingsBase):
+
+    oauth_provider = Onedrive
+    serializer = OnedriveSerializer
+
+    foreign_user_settings = fields.ForeignField(
+        'onedriveusersettings', backref='authorized'
+    )
+    folder_id = fields.StringField(default=None)
+    folder_name = fields.StringField()
+    folder_path = fields.StringField()
+
+    _folder_data = None
+
+    _api = None
+
+    @property
+    def api(self):
+        """authenticated ExternalProvider instance"""
+        if self._api is None:
+            self._api = Onedrive(self.external_account)
+        return self._api
+
+    @property
+    def display_name(self):
+        return '{0}: {1}'.format(self.config.full_name, self.folder_id)
+
+    @property
+    def has_auth(self):
+        """Whether an access token is associated with this node."""
+        return bool(self.user_settings and self.user_settings.has_auth)
+
+    @property
+    def complete(self):
+        return bool(self.has_auth and self.user_settings.verify_oauth_access(
+            node=self.owner,
+            external_account=self.external_account,
+        ))
+
+    def fetch_folder_name(self):
+        self._update_folder_data()
+        return self.folder_name.replace('All Files', '/ (Full Onedrive)')
+
+    def fetch_full_folder_path(self):
+        self._update_folder_data()
+        return self.folder_path
+
+    def _update_folder_data(self):
+        if self.folder_id is None:
+            return None
+
+        if not self._folder_data:
+
+            self.folder_name = self._folder_data['name']
+            self.folder_path = '/'.join(
+                [x['name'] for x in self._folder_data['path_collection']['entries']]
+                + [self._folder_data['name']]
+            )
+            self.save()
+
+    def set_folder(self, folder_id, auth):
+        self.folder_id = str(folder_id)
+        self._update_folder_data()
+        self.save()
+
+        if not self.complete:
+            self.user_settings.grant_oauth_access(
+                node=self.owner,
+                external_account=self.external_account,
+                metadata={'folder': self.folder_id}
+            )
+            self.user_settings.save()
+
+        # Add log to node
+        nodelogger = OnedriveNodeLogger(node=self.owner, auth=auth)
+        nodelogger.log(action="folder_selected", save=True)
+
+    def set_user_auth(self, user_settings):
+        """Import a user's Onedrive authentication and create a NodeLog.
+
+        :param OnedriveUserSettings user_settings: The user settings to link.
+        """
+        self.user_settings = user_settings
+        nodelogger = OnedriveNodeLogger(node=self.owner, auth=Auth(user_settings.owner))
+        nodelogger.log(action="node_authorized", save=True)
+
+    def deauthorize(self, auth=None, add_log=True):
+        """Remove user authorization from this node and log the event."""
+        node = self.owner
+
+        if add_log:
+            extra = {'folder_id': self.folder_id}
+            nodelogger = OnedriveNodeLogger(node=node, auth=auth)
+            nodelogger.log(action="node_deauthorized", extra=extra, save=True)
+
+        self.folder_id = None
+        self._update_folder_data()
+        self.user_settings = None
+        self.clear_auth()
+
+        self.save()
+
+    def serialize_waterbutler_credentials(self):
+        if not self.has_auth:
+            raise exceptions.AddonError('Addon is not authorized')
+
+    def serialize_waterbutler_settings(self):
+        if self.folder_id is None:
+            raise exceptions.AddonError('Folder is not configured')
+        return {'folder': self.folder_id}
+
+    def create_waterbutler_log(self, auth, action, metadata):
+        self.owner.add_log(
+            'onedrive_{0}'.format(action),
+            auth=auth,
+            params={
+                'path': metadata['materialized'],
+                'project': self.owner.parent_id,
+                'node': self.owner._id,
+                'folder': self.folder_id,
+                'urls': {
+                    'view': self.owner.web_url_for('addon_view_or_download_file', provider='onedrive', action='view', path=metadata['path']),
+                    'download': self.owner.web_url_for('addon_view_or_download_file', provider='onedrive', action='download', path=metadata['path']),
+                },
+            },
+        )
+
+    def fetch_access_token(self):
+        return self.api.fetch_access_token()
+
+
+    ##### Callback overrides #####
+    def after_delete(self, node=None, user=None):
+        self.deauthorize(Auth(user=user), add_log=True)
+        self.save()
+
+    def on_delete(self):
+        self.deauthorize(add_log=False)
+        self.clear_auth()
+        self.save()
diff --git a/website/addons/onedrive/requirements.txt b/website/addons/onedrive/requirements.txt
new file mode 100644
index 00000000000..e14dc8cba55
--- /dev/null
+++ b/website/addons/onedrive/requirements.txt
@@ -0,0 +1 @@
+onedrivesdk==1.0.1
\ No newline at end of file
diff --git a/website/addons/onedrive/routes.py b/website/addons/onedrive/routes.py
new file mode 100644
index 00000000000..299e0504e50
--- /dev/null
+++ b/website/addons/onedrive/routes.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+"""OneDrive addon routes."""
+from framework.routing import Rule, json_renderer
+
+from website.addons.onedrive import views
+
+
+api_routes = {
+    'rules': [
+        Rule(
+            [
+                '/settings/onedrive/accounts/',
+            ],
+            'get',
+            views.onedrive_get_user_settings,
+            json_renderer,
+        ),
+        Rule(
+            [
+                '/project/<pid>/onedrive/settings/',
+                '/project/<pid>/node/<nid>/onedrive/settings/'
+            ],
+            'get',
+            views.onedrive_get_config,
+            json_renderer,
+        ),
+        Rule(
+            [
+                '/project/<pid>/onedrive/settings/',
+                '/project/<pid>/node/<nid>/onedrive/settings/'
+            ],
+            'put',
+            views.onedrive_set_config,
+            json_renderer,
+        ),
+        Rule(
+            [
+                '/project/<pid>/onedrive/user_auth/',
+                '/project/<pid>/node/<nid>/onedrive/user_auth/'
+            ],
+            'put',
+            views.onedrive_add_user_auth,
+            json_renderer,
+        ),
+        Rule(
+            [
+                '/project/<pid>/onedrive/user_auth/',
+                '/project/<pid>/node/<nid>/onedrive/user_auth/'
+            ],
+            'delete',
+            views.onedrive_remove_user_auth,
+            json_renderer,
+        ),
+        Rule(
+            [
+                '/project/<pid>/onedrive/config/share/',
+                '/project/<pid>/node/<nid>/onedrive/config/share/'
+            ],
+            'get',
+            views.onedrive_get_share_emails,
+            json_renderer,
+        ),
+        Rule(
+            [
+                '/project/<pid>/onedrive/folders/',
+                '/project/<pid>/node/<nid>/onedrive/folders/',
+            ],
+            'get',
+            views.onedrive_folder_list,
+            json_renderer,
+        ),
+    ],
+    'prefix': '/api/v1'
+}
diff --git a/website/addons/onedrive/serializer.py b/website/addons/onedrive/serializer.py
new file mode 100644
index 00000000000..79e45bbab02
--- /dev/null
+++ b/website/addons/onedrive/serializer.py
@@ -0,0 +1,109 @@
+import logging
+
+from website.addons.base.serializer import OAuthAddonSerializer
+
+from website.util import api_url_for, web_url_for
+
+# from OneDriveSDK
+import onedrivesdk
+from onedrivesdk.helpers import GetAuthCodeServer
+
+
+logger = logging.getLogger(__name__)
+
+logging.getLogger('onedrive1').setLevel(logging.WARNING)
+
+class OnedriveSerializer(OAuthAddonSerializer):
+
+    def credentials_owner(self, user_settings=None):
+        return user_settings.owner or self.user_settings.owner
+
+    @property
+    def user_is_owner(self):
+        if self.user_settings is None or self.node_settings is None:
+            return False
+
+        user_accounts = self.user_settings.external_accounts
+        return bool(
+            (
+                self.node_settings.has_auth and
+                (self.node_settings.external_account in user_accounts)
+            ) or len(user_accounts)
+        )
+
+    @property
+    def serialized_urls(self):
+        
+        logger.error('serialized_urls-1')
+        
+        ret = self.addon_serialized_urls
+        ret.update({'settings': web_url_for('user_addons')})
+        return ret
+
+    @property
+    def addon_serialized_urls(self):
+        logger.error('addon_serialized_urls-1')
+        node = self.node_settings.owner
+        
+        return {
+            #'auth': api_url_for('oauth_connect',
+             #                   service_name='onedrive'),
+            'importAuth': node.api_url_for('onedrive_add_user_auth'),
+            'files': node.web_url_for('collect_file_trees'),
+            'folders': node.api_url_for('onedrive_folder_list'),
+            'config': node.api_url_for('onedrive_set_config'),
+            #'emails': node.api_url_for('onedrive_get_share_emails'),
+            #'share': 'https://app.onedrive.com/files/0/f/{0}'.format(self.node_settings.folder_id),
+            'deauthorize': node.api_url_for('onedrive_remove_user_auth'),
+            'accounts': node.api_url_for('onedrive_get_user_settings'),
+        }
+
+    def serialize_settings(self, node_settings, current_user, client=None):
+        """View helper that returns a dictionary representation of a
+        OnedriveNodeSettings record. Provides the return value for the
+        onedrive config endpoints.
+        """
+        
+        logger.error('addon_serialized_settings-1')
+        #TODO: review onedrive serilaized settings to determine if we need to add more
+        valid_credentials = True
+        user_settings = node_settings.user_settings
+        self.node_settings = node_settings
+        current_user_settings = current_user.get_addon('onedrive')
+        user_is_owner = user_settings is not None and user_settings.owner == current_user
+
+#        if user_settings:
+#            try:
+#                client = client or OnedriveClient(user_settings.external_accounts[0].oauth_key)
+#                client.get_user_info()
+#            except (OnedriveClientException, IndexError):
+#                valid_credentials = False
+
+        result = {
+            'userIsOwner': user_is_owner,
+            'nodeHasAuth': node_settings.has_auth,
+            'urls': self.addon_serialized_urls,
+            'validCredentials': valid_credentials,
+            'userHasAuth': current_user_settings is not None and current_user_settings.has_auth,
+        }
+
+        if node_settings.has_auth:
+            # Add owner's profile URL
+            result['urls']['owner'] = web_url_for(
+                'profile_view_id',
+                uid=user_settings.owner._id
+            )
+            result['ownerName'] = user_settings.owner.fullname
+            # Show available folders
+            # path = node_settings.folder
+
+            if node_settings.folder_id is None:
+                result['folder'] = {'name': None, 'path': None}
+            elif valid_credentials:
+                path = node_settings.fetch_full_folder_path()
+
+                result['folder'] = {
+                    'path': path,
+                    'name': path.replace('All Files', '', 1) if path != 'All Files' else '/ (Full Onedrive)'
+                }
+        return result
diff --git a/website/addons/onedrive/settings/__init__.py b/website/addons/onedrive/settings/__init__.py
new file mode 100644
index 00000000000..c26f460775d
--- /dev/null
+++ b/website/addons/onedrive/settings/__init__.py
@@ -0,0 +1,9 @@
+import logging
+from .defaults import *  # noqa
+
+logger = logging.getLogger(__name__)
+
+try:
+    from .local import *  # noqa
+except ImportError as error:
+    logger.warn('No local.py settings file found')
diff --git a/website/addons/onedrive/settings/defaults.py b/website/addons/onedrive/settings/defaults.py
new file mode 100644
index 00000000000..3f425bb5ec8
--- /dev/null
+++ b/website/addons/onedrive/settings/defaults.py
@@ -0,0 +1,10 @@
+# OAuth app keys
+ONEDRIVE_KEY = None
+ONEDRIVE_SECRET = None
+
+REFRESH_TIME = 5 * 60  # 5 minutes
+
+ONEDRIVE_OAUTH_TOKEN_ENDPOINT = 'https://login.live.com/oauth20_token.srf?'
+ONEDRIVE_OAUTH_AUTH_ENDPOINT = 'https://login.live.com/oauth20_authorize.srf?'
+MSLIVE_API_URL = 'https://apis.live.net/v5.0/' #https://graph.microsoft.com/v1.0/me
+ONEDRIVE_API_URL = 'https://api.onedrive.com/v1.0'
\ No newline at end of file
diff --git a/website/addons/onedrive/settings/local-dist.py b/website/addons/onedrive/settings/local-dist.py
new file mode 100644
index 00000000000..9d72644bffd
--- /dev/null
+++ b/website/addons/onedrive/settings/local-dist.py
@@ -0,0 +1,7 @@
+# -*- coding: utf-8 -*-
+"""Example OneDrive local settings file. Copy this file to local.py and change
+these settings.
+"""
+# Get an app key and secret at https://account.live.com/developers/applications
+ONEDRIVE_KEY = '000000004416C3D3'
+ONEDRIVE_SECRET = 'JFvPrqEnHuepkaDjZXCcnChQuyTjnk0Z'
diff --git a/website/addons/onedrive/static/comicon.png b/website/addons/onedrive/static/comicon.png
new file mode 100644
index 0000000000000000000000000000000000000000..fc65b2a62610977a01ee023b566aa3f19b2e92b3
GIT binary patch
literal 15288
zcmeI3e^e7!7RP6GU4_bWimhwcUm+->OEO7F2$_aZBp?V;ehH`tVVF!{N|K4mV8TyV
z#VV`rinMB#_6V(Tq>H+&S}FqK546gPt4H1TsO=GJ0a>-x(yi<%JClU)fZ()y`sd`3
z<mSEaz4y)M-uIY)rYwHh5--o+dqNQ86%(yaq`!Tb_mtnzpD9=VoJxOrn4(u&An2uO
z%==f+zRDR86f%!UPNUK^aR^2lgs7Ix!i6@2iB?09GR$T|v0R+uW#QR`F_izfsg=(o
zw4wY|nMR~BMc_F^bb%R9Dp;0`735+HEk8`<sk9+<00T~;Jexspv>>)nzCA8N+srV`
z=h-DxZYV#TNytmn#PcFZGtQF<g9Vr<Sk99xgkqUoE|mxJBqFg476rjUB7r0bk%|zh
zgy($mRi3n^G;4K8qB_c%j{X(O&!H$20>f6TRcH+olICn!tWYRmkpz}V1hhh6$v0A{
zO<=V6If4wuQR5cOOqeKwH1e3ZXcn1Ah4T4KBG=1#T?UgYk<sF0M^l7ts0kJeMX)<3
zE#~qu<(c*NrD-u3*W(7<NLgq<v0Fb=4oQ)g9CDnLuIX_T(>2j(T)Ew@*I;m)+CoLH
zqY*ffy7jaq=bLai5x0<eW(<#9M_-DcyQ`-N-EeV6L}F&fT{~_g#&Kh29B%eja8yW1
zUn>$}#!-qiCzGUJ<><2bA%wh$2zyuY0#XU1mb6+HD&f&C!}T7HhO1EuS26u45R2)*
zWU&kpD-cnLKqNy%A|QxEIVy&R)DpUUH!&m<gzicp2A~`rgWf+{ltO<}6i^Gq(PD^>
zG#gM#MHtX*95xxVmGJN&pmcQ=5<%)oGrcTa6{LjS4Fg{GPD5gh778_Dc#K*_4+sfD
ziztF6iXgE<CqOYdCXk0nwSo{CO8-<yWm2hFo0TO+?GQB@(0xp3HHqahyTut=`!hoW
z?qfnPHxu+phw1?hv-+Q#!cCYlsiFxB-JkhD-_qS{Kg*KvwIfG|CiH~;OfsQn3(oAg
zQ2x-Ca~)!?Gm7!C?_31MnA1#!F&hHcD&Y~M!^?7cI^Br>w}6iLfcyA4tU0*xMH|Wy
z%B~teh=tTqR@97#XVX3LQ}((e>PWZI;gv9R`s<02g>a0Htj|!#j;*JXIn)rk7wC1N
z_8N$l@R+gT%np4TQ&WAOT`!l2=oc(zKAo=M^k~<WFLni|N4pMtt}qgmO5)5mY8dch
zPF}~ckVM}k;HqGmEJ&t=M-Pwoh$n2gK21%~``5yBr8vatGkU_U<I3?IfeELhqg24Z
zb9*Hnqx8)JeV+omZd67V*|iaVVPp7;e&N7iI4%iDgVpA6u_6Km94?RstIgqJMFa{s
zTp$fro5RJ52o!L*KpLzzhl>>vDBy5`G+1p87b_xAz~KUEu-Y6hRz#qH!v)e{wK-g@
zh(G~{3#7qnbGTR$fdUQ}NQ2epaIqo+1spDr2CL2CVnqZBI9wnNR-41ciU<^NxIh}L
zHiwH95h&nrfizfc4i_sTP{82=X|UQHE>=XKfWrmSV6{11tcXAXhYO^^YIC?)5rF~@
z7f6HE=5Vnh0tFl{kOr&G;bKJu3OHOK4OW}O#fk_NaJWDkto93Wc@97Nha2hV{jBtZ
zean-HQu?7l9u}RbfuMr<5cJ1Q5cI<!{oMmWc_Ikv&4eIiF9iLL+?8=Y3W6s7Hbxzu
zZ2P?9dU{;a<QeVXOg?k--tEoupjfG|zcEV^uUoY(tt#>}LiVm-?CeJqD&7?JN=q^)
z?fEO;Gj5CST5-UHlmo{?Yq$7rJy=xsk?m;piYAZhZ8Lp))on*+tnCzb?9+ex=+u*Y
z*6;iW?|)(o_c=0QP3z8ww`ZKHT4MT6b-MjAj6E$0(@$QF9%<U(a|=p&=e^~{gGpcC
zJvXbf|M(VROW~<5?_(J=FLbtsw)Z^r{-C<*<d?<w=kCb7<^At}rS@Dv2d4x^UOFk4
z+@5yz^0~!jNqb^iUrC9S-g*=ok@KII-(KC7V0v%nrr7)`>+gF6hdj^jSmnEI;mv8g
zd|SVUj(KetFO6#mw$3lQ>l0pGarMUEJ|vV?QNFWgcP6}S^IS5J^hV2*Ul*TUU)AvR
zO5pJ}P0pRtHT(xR|JhjI6i^$|sQfr!=bon81q-Yh7bWxW1Z90z-`DN=MSI)s!T|5M
z%)vt*2N2QLic6wv!FhAfSFUO~-nZc1*@BX+u+_%beS-PNH(YHjyT3~JYUQDYPos7P
zHoZM7ml)`)yOQwAK-tEq35HG9^3`=6C56QGOnpss_w7m@(ega~`sruZhW$UP-?|v1
zd-t2znPn7=ZC}H$->}!Z{SQmeWc&%e7v1?1uhswC*AgpB6XrdeQ*)RcIC&KL@B99+
z2`cdJ`p1s#@=p^Mh3ihO^q-#p+Wwxe{v!DJ%f3c0-kr;h=Y25BtK-D?NZ*CS-3RIy
zOm9^5gq5$Kq>*$@kF6xid$*-etk^KMd~H;D+PrJ}haIsePCVcFpjdI?+2D_#`27$P
zmNvUP>QHt!_F+g_=_1d~&0ocSTewJ6yL(Z1&C(|cf{S$KnqI%TE6&8eSrMnbRC+gZ
z>JHV}I&yK$<<^|P&+$3?`K8m3gKGB{$uX}v?SaI+hJ7B*aoVkw`u+q#bONc1DXrQ4
zaMqp^)z+M;uh*@Q++6f(dRuHxvuLAW%fOA?mSz=su(=TJPf307RznKbpW1fp+>P+`
zn^#Ke+ota$k8XaD8{B`;b5gP8o&6si>H5C4?&=p4p>^F*Mp1n$y)z&vCUTkj@Z!vk
F{{uC?Cr1DP

literal 0
HcmV?d00001

diff --git a/website/addons/onedrive/static/node-cfg.js b/website/addons/onedrive/static/node-cfg.js
new file mode 100644
index 00000000000..c634a4ccb75
--- /dev/null
+++ b/website/addons/onedrive/static/node-cfg.js
@@ -0,0 +1,7 @@
+'use strict';
+
+require('./onedrive.css');
+var OauthAddonNodeConfig = require('js/oauthAddonNodeConfig').OauthAddonNodeConfig;
+
+var url = window.contextVars.node.urls.api + 'onedrive/settings/';
+new OauthAddonNodeConfig('Onedrive', '#onedriveScope', url, '#onedriveGrid');
diff --git a/website/addons/onedrive/static/onedrive.css b/website/addons/onedrive/static/onedrive.css
new file mode 100644
index 00000000000..6becf911ec4
--- /dev/null
+++ b/website/addons/onedrive/static/onedrive.css
@@ -0,0 +1,39 @@
+.onedrive-confirm-selection {
+    padding-top: 10px;
+}
+.onedrive-folder-picker {
+    margin-top: 10px;
+}
+
+.selected-folder {
+    margin: 12px;
+    font-size: 1.3em;
+}
+
+.onedrive-loading-text {
+    padding-top: 20px;
+}
+
+
+.btn-onedrive {
+    color: #333;
+    background-color: #fff;
+    border-color: #ccc;
+}
+
+.btn-onedrive:hover,
+.btn-onedrive:focus,
+.btn-onedrive:active,
+.btn-onedrive.active,
+.open .dropdown-toggle.btn-onedrive {
+    color: #333;
+    background-color: #ebebeb;
+    border-color: #adadad;
+}
+
+.onedrive-folderpicker-odd {
+    background-color: #f5f5f5;
+}
+.onedrive-folderpicker-even {
+    background-color: #fff;
+}
\ No newline at end of file
diff --git a/website/addons/onedrive/templates/log_templates.mako b/website/addons/onedrive/templates/log_templates.mako
new file mode 100644
index 00000000000..b4611237bb0
--- /dev/null
+++ b/website/addons/onedrive/templates/log_templates.mako
@@ -0,0 +1,53 @@
+<script type="text/html" id="onedrive_file_added">
+added file
+<a class="overflow log-file-link" data-bind="click: NodeActions.addonFileRedirect">
+    {{ stripSlash(params.path) }}</a> to
+Onedrive in
+<a class="log-node-title-link overflow" data-bind="attr: {href: nodeUrl}">{{ nodeTitle }}</a>
+</script>
+
+<script type="text/html" id="onedrive_folder_created">
+created folder
+<span class="overflow log-folder">{{ stripSlash(params.path) }}</span> in
+Onedrive in
+<a class="log-node-title-link overflow" data-bind="attr: {href: nodeUrl}">{{ nodeTitle }}</a>
+</script>
+
+<script type="text/html" id="onedrive_file_updated">
+updated file
+<a class="overflow log-file-link" data-bind="click: NodeActions.addonFileRedirect">
+    {{ stripSlash(params.path) }}</a> to
+Onedrive in
+<a class="log-node-title-link overflow" data-bind="attr: {href: nodeUrl}">{{ nodeTitle }}</a>
+</script>
+
+
+<script type="text/html" id="onedrive_file_removed">
+removed {{ pathType(params.path) }} <span class="overflow">
+    {{ stripSlash(params.path) }}</span> from
+Onedrive in
+<a class="log-node-title-link overflow" data-bind="attr: {href: nodeUrl}">{{ nodeTitle }}</a>
+</script>
+
+
+<script type="text/html" id="onedrive_folder_selected">
+linked Onedrive folder
+<span class="overflow">
+    {{ params.folder === 'All Files' ? '/ (Full Onedrive)' : (params.folder || '').replace('All Files','')}}
+</span> to
+<a class="log-node-title-link overflow" data-bind="attr: {href: nodeUrl}">{{ nodeTitle }}</a>
+</script>
+
+
+<script type="text/html" id="onedrive_node_deauthorized">
+deauthorized the Onedrive addon for
+<a class="log-node-title-link overflow"
+    data-bind="attr: {href: nodeUrl}">{{ nodeTitle }}</a>
+</script>
+
+
+<script type="text/html" id="onedrive_node_authorized">
+authorized the Onedrive addon for
+<a class="log-node-title-link overflow"
+    data-bind="attr: {href: nodeUrl}">{{ nodeTitle }}</a>
+</script>
diff --git a/website/addons/onedrive/tests/__init__.py b/website/addons/onedrive/tests/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/website/addons/onedrive/tests/factories.py b/website/addons/onedrive/tests/factories.py
new file mode 100644
index 00000000000..71cd638028f
--- /dev/null
+++ b/website/addons/onedrive/tests/factories.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+"""Factory boy factories for the OneDrive addon."""
+
+from framework.auth import Auth
+
+from factory import SubFactory, Sequence, post_generation
+from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory
+
+from website.addons.onedrive.model import (
+    OneDriveUserSettings, OneDriveNodeSettings
+)
+
+
+# TODO(sloria): make an abstract UserSettingsFactory that just includes the owner field
+class OneDriveUserSettingsFactory(ModularOdmFactory):
+    FACTORY_FOR = OneDriveUserSettings
+
+    owner = SubFactory(UserFactory)
+    access_token = Sequence(lambda n: 'abcdef{0}'.format(n))
+
+
+class OneDriveNodeSettingsFactory(ModularOdmFactory):
+    FACTORY_FOR = OneDriveNodeSettings
+
+    owner = SubFactory(ProjectFactory)
+    user_settings = SubFactory(OneDriveUserSettingsFactory)
+    folder = 'Camera Uploads'
diff --git a/website/addons/onedrive/tests/test_client.py b/website/addons/onedrive/tests/test_client.py
new file mode 100644
index 00000000000..57e31310e65
--- /dev/null
+++ b/website/addons/onedrive/tests/test_client.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+
+from nose.tools import *  # noqa (PEP8 asserts)
+#from onedrive.client import OneDriveClient
+
+from tests.base import OsfTestCase
+from tests.factories import UserFactory
+
+from website.addons.base.exceptions import AddonError
+from website.addons.onedrive.model import OneDriveUserSettings
+from website.addons.onedrive.tests.factories import (
+    OneDriveNodeSettingsFactory,
+    OneDriveUserSettingsFactory
+)
+from website.addons.onedrive.client import (
+    get_client, get_node_addon_client, get_node_client,
+    get_client_from_user_settings
+)
+
+
+class TestCore(OsfTestCase):
+
+    def setUp(self):
+
+        super(TestCore, self).setUp()
+
+        self.user = UserFactory()
+        self.user.add_addon('onedrive')
+        self.user.save()
+
+        self.settings = self.user.get_addon('onedrive')
+        self.settings.access_token = '12345'
+        self.settings.save()
+
+    def test_get_addon_returns_onedrive_user_settings(self):
+        result = self.user.get_addon('onedrive')
+        assert_true(isinstance(result, OneDriveUserSettings))
+
+
diff --git a/website/addons/onedrive/tests/test_models.py b/website/addons/onedrive/tests/test_models.py
new file mode 100644
index 00000000000..afe430f91fa
--- /dev/null
+++ b/website/addons/onedrive/tests/test_models.py
@@ -0,0 +1,333 @@
+# -*- coding: utf-8 -*-
+import mock
+
+from nose.tools import *  # noqa (PEP8 asserts)
+
+from framework.auth import Auth
+from website.addons.onedrive.model import (
+    OneDriveUserSettings, OneDriveNodeSettings
+)
+from tests.base import OsfTestCase
+from tests.factories import UserFactory, ProjectFactory
+from website.addons.onedrive.tests.factories import (
+    OneDriveUserSettingsFactory, OneDriveNodeSettingsFactory,
+)
+from website.addons.base import exceptions
+
+
+class TestUserSettingsModel(OsfTestCase):
+
+    def setUp(self):
+        super(TestUserSettingsModel, self).setUp()
+        self.user = UserFactory()
+
+    def test_fields(self):
+        user_settings = OneDriveUserSettings(
+            access_token='12345',
+            onedrive_id='abc',
+            owner=self.user)
+        user_settings.save()
+        retrieved = OneDriveUserSettings.load(user_settings._primary_key)
+        assert_true(retrieved.access_token)
+        assert_true(retrieved.onedrive_id)
+        assert_true(retrieved.owner)
+
+    def test_has_auth(self):
+        user_settings = OneDriveUserSettingsFactory(access_token=None)
+        assert_false(user_settings.has_auth)
+        user_settings.access_token = '12345'
+        user_settings.save()
+        assert_true(user_settings.has_auth)
+
+    def test_clear_clears_associated_node_settings(self):
+        node_settings = OneDriveNodeSettingsFactory.build()
+        user_settings = OneDriveUserSettingsFactory()
+        node_settings.user_settings = user_settings
+        node_settings.save()
+
+        user_settings.clear()
+        user_settings.save()
+
+        # Node settings no longer associated with user settings
+        assert_is(node_settings.user_settings, None)
+        assert_is(node_settings.folder, None)
+
+    def test_clear(self):
+        node_settings = OneDriveNodeSettingsFactory.build()
+        user_settings = OneDriveUserSettingsFactory(access_token='abcde',
+            onedrive_id='abc')
+        node_settings.user_settings = user_settings
+        node_settings.save()
+
+        assert_true(user_settings.access_token)
+        user_settings.clear()
+        user_settings.save()
+        assert_false(user_settings.access_token)
+        assert_false(user_settings.onedrive_id)
+
+    def test_delete(self):
+        user_settings = OneDriveUserSettingsFactory()
+        assert_true(user_settings.has_auth)
+        user_settings.delete()
+        user_settings.save()
+        assert_false(user_settings.access_token)
+        assert_false(user_settings.onedrive_id)
+        assert_true(user_settings.deleted)
+
+    def test_delete_clears_associated_node_settings(self):
+        node_settings = OneDriveNodeSettingsFactory.build()
+        user_settings = OneDriveUserSettingsFactory()
+        node_settings.user_settings = user_settings
+        node_settings.save()
+
+        user_settings.delete()
+        user_settings.save()
+
+        # Node settings no longer associated with user settings
+        assert_is(node_settings.user_settings, None)
+        assert_is(node_settings.folder, None)
+        assert_false(node_settings.deleted)
+
+    def test_to_json(self):
+        user_settings = OneDriveUserSettingsFactory()
+        result = user_settings.to_json()
+        assert_equal(result['has_auth'], user_settings.has_auth)
+
+
+class TestOneDriveNodeSettingsModel(OsfTestCase):
+
+    def setUp(self):
+        super(TestOneDriveNodeSettingsModel, self).setUp()
+        self.user = UserFactory()
+        self.user.add_addon('onedrive')
+        self.user.save()
+        self.user_settings = self.user.get_addon('onedrive')
+        self.project = ProjectFactory()
+        self.node_settings = OneDriveNodeSettingsFactory(
+            user_settings=self.user_settings,
+            owner=self.project
+        )
+
+    def test_complete_true(self):
+        self.node_settings.user_settings.access_token = 'seems legit'
+
+        assert_true(self.node_settings.has_auth)
+        assert_true(self.node_settings.complete)
+
+    def test_complete_false(self):
+        self.node_settings.user_settings.access_token = 'seems legit'
+        self.node_settings.folder = None
+
+        assert_true(self.node_settings.has_auth)
+        assert_false(self.node_settings.complete)
+
+    def test_complete_auth_false(self):
+        self.node_settings.user_settings = None
+
+        assert_false(self.node_settings.has_auth)
+        assert_false(self.node_settings.complete)
+
+    def test_fields(self):
+        node_settings = OneDriveNodeSettings(user_settings=self.user_settings)
+        node_settings.save()
+        assert_true(node_settings.user_settings)
+        assert_equal(node_settings.user_settings.owner, self.user)
+        assert_true(hasattr(node_settings, 'folder'))
+        assert_true(hasattr(node_settings, 'registration_data'))
+
+    def test_folder_defaults_to_none(self):
+        node_settings = OneDriveNodeSettings(user_settings=self.user_settings)
+        node_settings.save()
+        assert_is_none(node_settings.folder)
+
+    def test_has_auth(self):
+        settings = OneDriveNodeSettings(user_settings=self.user_settings)
+        settings.save()
+        assert_false(settings.has_auth)
+
+        settings.user_settings.access_token = '123abc'
+        settings.user_settings.save()
+        assert_true(settings.has_auth)
+
+    def test_to_json(self):
+        settings = self.node_settings
+        user = UserFactory()
+        result = settings.to_json(user)
+        assert_equal(result['addon_short_name'], 'onedrive')
+
+    def test_delete(self):
+        assert_true(self.node_settings.user_settings)
+        assert_true(self.node_settings.folder)
+        old_logs = self.project.logs
+        self.node_settings.delete()
+        self.node_settings.save()
+        assert_is(self.node_settings.user_settings, None)
+        assert_is(self.node_settings.folder, None)
+        assert_true(self.node_settings.deleted)
+        assert_equal(self.project.logs, old_logs)
+
+    def test_deauthorize(self):
+        assert_true(self.node_settings.user_settings)
+        assert_true(self.node_settings.folder)
+        self.node_settings.deauthorize(auth=Auth(self.user))
+        self.node_settings.save()
+        assert_is(self.node_settings.user_settings, None)
+        assert_is(self.node_settings.folder, None)
+
+        last_log = self.project.logs[-1]
+        assert_equal(last_log.action, 'onedrive_node_deauthorized')
+        params = last_log.params
+        assert_in('node', params)
+        assert_in('project', params)
+        assert_in('folder', params)
+
+    def test_set_folder(self):
+        folder_name = 'queen/freddie'
+        self.node_settings.set_folder(folder_name, auth=Auth(self.user))
+        self.node_settings.save()
+        # Folder was set
+        assert_equal(self.node_settings.folder, folder_name)
+        # Log was saved
+        last_log = self.project.logs[-1]
+        assert_equal(last_log.action, 'onedrive_folder_selected')
+
+    def test_set_user_auth(self):
+        node_settings = OneDriveNodeSettingsFactory()
+        user_settings = OneDriveUserSettingsFactory()
+
+        node_settings.set_user_auth(user_settings)
+        node_settings.save()
+
+        assert_true(node_settings.has_auth)
+        assert_equal(node_settings.user_settings, user_settings)
+        # A log was saved
+        last_log = node_settings.owner.logs[-1]
+        assert_equal(last_log.action, 'onedrive_node_authorized')
+        log_params = last_log.params
+        assert_equal(log_params['folder'], node_settings.folder)
+        assert_equal(log_params['node'], node_settings.owner._primary_key)
+        assert_equal(last_log.user, user_settings.owner)
+
+    def test_serialize_credentials(self):
+        self.user_settings.access_token = 'secret'
+        self.user_settings.save()
+        credentials = self.node_settings.serialize_waterbutler_credentials()
+        expected = {'token': self.node_settings.user_settings.access_token}
+        assert_equal(credentials, expected)
+
+    def test_serialize_credentials_not_authorized(self):
+        self.node_settings.user_settings = None
+        self.node_settings.save()
+        with assert_raises(exceptions.AddonError):
+            self.node_settings.serialize_waterbutler_credentials()
+
+    def test_serialize_settings(self):
+        settings = self.node_settings.serialize_waterbutler_settings()
+        expected = {'folder': self.node_settings.folder}
+        assert_equal(settings, expected)
+
+    def test_serialize_settings_not_configured(self):
+        self.node_settings.folder = None
+        self.node_settings.save()
+        with assert_raises(exceptions.AddonError):
+            self.node_settings.serialize_waterbutler_settings()
+
+    def test_create_log(self):
+        action = 'file_added'
+        path = 'pizza.nii'
+        self.node_settings.folder = '/SomeOddPath'
+        self.node_settings.save()
+        nlog = len(self.project.logs)
+        self.node_settings.create_waterbutler_log(
+            auth=Auth(user=self.user),
+            action=action,
+            metadata={'path': path},
+        )
+        self.project.reload()
+        assert_equal(len(self.project.logs), nlog + 1)
+        assert_equal(
+            self.project.logs[-1].action,
+            'onedrive_{0}'.format(action),
+        )
+        assert_equal(
+            self.project.logs[-1].params['path'],
+            path,
+        )
+
+    @mock.patch('website.archiver.tasks.archive')
+    def test_does_not_get_copied_to_registrations(self, mock_archive):
+        registration = self.project.register_node(
+            schema=None,
+            auth=Auth(user=self.project.creator),
+            template='Template1',
+            data='hodor'
+        )
+        assert_false(registration.has_addon('onedrive'))
+
+
+class TestNodeSettingsCallbacks(OsfTestCase):
+
+    def setUp(self):
+        super(TestNodeSettingsCallbacks, self).setUp()
+        # Create node settings with auth
+        self.user_settings = OneDriveUserSettingsFactory(access_token='123abc')
+        self.node_settings = OneDriveNodeSettingsFactory(
+            user_settings=self.user_settings,
+            folder='',
+        )
+
+        self.project = self.node_settings.owner
+        self.user = self.user_settings.owner
+
+    def test_after_fork_by_authorized_onedrive_user(self):
+        fork = ProjectFactory()
+        clone, message = self.node_settings.after_fork(
+            node=self.project, fork=fork, user=self.user_settings.owner
+        )
+        assert_equal(clone.user_settings, self.user_settings)
+
+    def test_after_fork_by_unauthorized_onedrive_user(self):
+        fork = ProjectFactory()
+        user = UserFactory()
+        clone, message = self.node_settings.after_fork(
+            node=self.project, fork=fork, user=user,
+            save=True
+        )
+        # need request context for url_for
+        assert_is(clone.user_settings, None)
+
+    def test_before_fork(self):
+        node = ProjectFactory()
+        message = self.node_settings.before_fork(node, self.user)
+        assert_true(message)
+
+    def test_before_remove_contributor_message(self):
+        message = self.node_settings.before_remove_contributor(
+            self.project, self.user)
+        assert_true(message)
+        assert_in(self.user.fullname, message)
+        assert_in(self.project.project_or_component, message)
+
+    def test_after_remove_authorized_onedrive_user_self(self):
+        auth = Auth(user=self.user_settings.owner)
+        message = self.node_settings.after_remove_contributor(
+            self.project, self.user_settings.owner, auth)
+        self.node_settings.save()
+        assert_is_none(self.node_settings.user_settings)
+        assert_true(message)
+        assert_not_in("You can re-authenticate", message)
+
+    def test_after_remove_authorized_onedrive_user_not_self(self):
+        message = self.node_settings.after_remove_contributor(
+            node=self.project, removed=self.user_settings.owner)
+        self.node_settings.save()
+        assert_is_none(self.node_settings.user_settings)
+        assert_true(message)
+        assert_in("You can re-authenticate", message)
+
+    def test_after_delete(self):
+        self.project.remove_node(Auth(user=self.project.creator))
+        # Ensure that changes to node settings have been saved
+        self.node_settings.reload()
+        assert_true(self.node_settings.user_settings is None)
+        assert_true(self.node_settings.folder is None)
diff --git a/website/addons/onedrive/tests/test_utils.py b/website/addons/onedrive/tests/test_utils.py
new file mode 100644
index 00000000000..8a5f1cef786
--- /dev/null
+++ b/website/addons/onedrive/tests/test_utils.py
@@ -0,0 +1,129 @@
+# -*- coding: utf-8 -*-
+"""Tests for website.addons.onedrive.utils."""
+import os
+
+from nose.tools import *  # noqa (PEP8 asserts)
+
+from framework.auth import Auth
+from website.project.model import NodeLog
+
+from tests.base import OsfTestCase
+from tests.factories import ProjectFactory
+
+from website.addons.onedrive.tests.utils import OneDriveAddonTestCase
+from website.addons.onedrive import utils
+from website.addons.onedrive.views.config import serialize_folder
+
+
+class TestNodeLogger(OneDriveAddonTestCase):
+
+    def test_log_file_added(self):
+        logger = utils.OneDriveNodeLogger(
+            node=self.project,
+            auth=Auth(self.user),
+        )
+        logger.log(NodeLog.FILE_ADDED, save=True)
+
+        last_log = self.project.logs[-1]
+
+        assert_equal(last_log.action, "onedrive_{0}".format(NodeLog.FILE_ADDED))
+
+    # Regression test for https://github.com/CenterForOpenScience/osf.io/issues/1557
+    def test_log_deauthorized_when_node_settings_are_deleted(self):
+        project = ProjectFactory()
+        project.add_addon('onedrive', auth=Auth(project.creator))
+        dbox_settings = project.get_addon('onedrive')
+        dbox_settings.delete(save=True)
+        # sanity check
+        assert_true(dbox_settings.deleted)
+
+        logger = utils.OneDriveNodeLogger(node=project, auth=Auth(self.user))
+        logger.log(action='node_deauthorized', save=True)
+
+        last_log = project.logs[-1]
+        assert_equal(last_log.action, 'onedrive_node_deauthorized')
+
+
+def test_get_file_name():
+    assert_equal(utils.get_file_name('foo/bar/baz.txt'), 'baz.txt')
+    assert_equal(utils.get_file_name('/foo/bar/baz.txt'), 'baz.txt')
+    assert_equal(utils.get_file_name('/foo/bar/baz.txt/'), 'baz.txt')
+
+
+def test_is_subdir():
+    assert_true(utils.is_subdir('foo/bar', 'foo'))
+    assert_true(utils.is_subdir('foo', 'foo'))
+    assert_true(utils.is_subdir('foo/bar baz', 'foo'))
+    assert_true(utils.is_subdir('bar baz/foo', 'bar baz'))
+    assert_true(utils.is_subdir('foo', '/'))
+    assert_true(utils.is_subdir('/', '/'))
+
+    assert_false(utils.is_subdir('foo/bar', 'baz'))
+    assert_false(utils.is_subdir('foo/bar', 'bar'))
+    assert_false(utils.is_subdir('foo', 'foo/bar'))
+    assert_false(utils.is_subdir('', 'foo'))
+    assert_false(utils.is_subdir('foo', ''))
+    assert_false(utils.is_subdir('foo', None))
+    assert_false(utils.is_subdir(None, 'foo'))
+    assert_false(utils.is_subdir(None, None))
+    assert_false(utils.is_subdir('', ''))
+
+    assert_true(utils.is_subdir('foo/bar', 'Foo/bar'))
+    assert_true(utils.is_subdir('Foo/bar', 'foo/bar'))
+
+
+def test_clean_path():
+    assert_equal(utils.clean_path('/'), '/')
+    assert_equal(utils.clean_path('/foo/bar/baz/'), 'foo/bar/baz')
+    assert_equal(utils.clean_path(None), '')
+
+
+def test_get_share_folder_uri():
+    expected = 'https://onedrive.com/home/foo?shareoptions=1&share_subfolder=0&share=1'
+    assert_equal(utils.get_share_folder_uri('/foo/'), expected)
+    assert_equal(utils.get_share_folder_uri('foo'), expected)
+
+
+def test_serialize_folder():
+    metadata = {
+        u'bytes': 0,
+        u'icon': u'folder',
+        u'is_dir': True,
+        u'modified': u'Sat, 22 Mar 2014 05:40:29 +0000',
+        u'path': u'/datasets/New Folder',
+        u'rev': u'3fed51f002c12fc',
+        u'revision': 67032351,
+        u'root': u'onedrive',
+        u'size': u'0 bytes',
+        u'thumb_exists': False
+    }
+    result = serialize_folder(metadata)
+    assert_equal(result['path'], metadata['path'])
+    assert_equal(result['name'], 'OneDrive' + metadata['path'])
+
+
+class TestMetadataSerialization(OsfTestCase):
+
+    def test_metadata_to_hgrid(self):
+        metadata = {
+            u'bytes': 123,
+            u'icon': u'file',
+            u'is_dir': False,
+            u'modified': u'Sat, 22 Mar 2014 05:40:29 +0000',
+            u'path': u'/foo/bar/baz.mp3',
+            u'rev': u'3fed51f002c12fc',
+            u'revision': 67032351,
+            u'root': u'onedrive',
+            u'size': u'0 bytes',
+            u'thumb_exists': False,
+            u'mime_type': u'audio/mpeg',
+        }
+        node = ProjectFactory()
+        permissions = {'view': True, 'edit': False}
+        result = utils.metadata_to_hgrid(metadata, node, permissions)
+        assert_equal(result['addon'], 'onedrive')
+        assert_equal(result['permissions'], permissions)
+        filename = utils.get_file_name(metadata['path'])
+        assert_equal(result['name'], filename)
+        assert_equal(result['path'], metadata['path'])
+        assert_equal(result['ext'], os.path.splitext(filename)[1])
diff --git a/website/addons/onedrive/tests/test_views.py b/website/addons/onedrive/tests/test_views.py
new file mode 100644
index 00000000000..ef383e5c7f0
--- /dev/null
+++ b/website/addons/onedrive/tests/test_views.py
@@ -0,0 +1,453 @@
+# -*- coding: utf-8 -*-
+"""Views tests for the OneDrive addon."""
+import os
+import unittest
+from nose.tools import *  # noqa (PEP8 asserts)
+import mock
+import httplib
+
+from framework.auth import Auth
+from website.util import api_url_for, web_url_for
+from onedrive.rest import ErrorResponse
+from onedrive.client import OneDriveOAuth2Flow
+
+from urllib3.exceptions import MaxRetryError
+
+from tests.base import OsfTestCase, assert_is_redirect
+from tests.factories import AuthUserFactory, ProjectFactory
+
+from website.addons.onedrive.tests.utils import (
+    OneDriveAddonTestCase, mock_responses, MockOneDrive, patch_client
+)
+from website.addons.onedrive.views.config import serialize_settings
+from website.addons.onedrive.views.hgrid import onedrive_addon_folder
+from website.addons.onedrive import utils
+
+mock_client = MockOneDrive()
+
+
+class TestAuthViews(OsfTestCase):
+
+    def setUp(self):
+        super(TestAuthViews, self).setUp()
+        self.user = AuthUserFactory()
+        # Log user in
+        self.app.authenticate(*self.user.auth)
+
+    def test_onedrive_oauth_start(self):
+        url = api_url_for('onedrive_oauth_start_user')
+        res = self.app.get(url)
+        assert_is_redirect(res)
+        assert_in('&force_reapprove=true', res.location)
+
+    @mock.patch('website.addons.onedrive.views.auth.OneDriveOAuth2Flow.finish')
+    @mock.patch('website.addons.onedrive.views.auth.get_client_from_user_settings')
+    def test_onedrive_oauth_finish(self, mock_get, mock_finish):
+        mock_client = mock.MagicMock()
+        mock_client.account_info.return_value = {'display_name': 'Mr. Drop Box'}
+        mock_get.return_value = mock_client
+        mock_finish.return_value = ('mytoken123', 'myonedriveid', 'done')
+        url = api_url_for('onedrive_oauth_finish')
+        res = self.app.get(url)
+        assert_is_redirect(res)
+
+    @mock.patch('website.addons.onedrive.views.auth.session')
+    @mock.patch('website.addons.onedrive.views.auth.OneDriveOAuth2Flow.finish')
+    def test_onedrive_oauth_finish_cancelled(self, mock_finish, mock_session):
+        node = ProjectFactory(creator=self.user)
+        mock_session.data = {'onedrive_auth_nid': node._id}
+        mock_response = mock.Mock()
+        mock_response.status = 404
+        mock_finish.side_effect = OneDriveOAuth2Flow.NotApprovedException
+        settings = self.user.get_addon('onedrive')
+        url = api_url_for('onedrive_oauth_finish')
+        res = self.app.get(url)
+
+        assert_is_redirect(res)
+        assert_in(node._id, res.headers["location"])
+        assert_false(settings)
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.disable_access_token')
+    def test_onedrive_oauth_delete_user(self, mock_disable_access_token):
+        self.user.add_addon('onedrive')
+        settings = self.user.get_addon('onedrive')
+        settings.access_token = '12345abc'
+        settings.save()
+        assert_true(settings.has_auth)
+        self.user.save()
+        url = api_url_for('onedrive_oauth_delete_user')
+        self.app.delete(url)
+        settings.reload()
+        assert_false(settings.has_auth)
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.disable_access_token')
+    def test_onedrive_oauth_delete_user_with_invalid_credentials(self, mock_disable_access_token):
+        self.user.add_addon('onedrive')
+        settings = self.user.get_addon('onedrive')
+        settings.access_token = '12345abc'
+        settings.save()
+        assert_true(settings.has_auth)
+
+        mock_response = mock.Mock()
+        mock_response.status = 401
+        mock_disable_access_token.side_effect = ErrorResponse(mock_response, "The given OAuth 2 access token doesn't exist or has expired.")
+
+        self.user.save()
+        url = api_url_for('onedrive_oauth_delete_user')
+        self.app.delete(url)
+        settings.reload()
+        assert_false(settings.has_auth)
+
+
+class TestConfigViews(OneDriveAddonTestCase):
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
+    def test_onedrive_user_config_get_has_auth_info(self, mock_account_info):
+        mock_account_info.return_value = {'display_name': 'Mr. Drop Box'}
+        url = api_url_for('onedrive_user_config_get')
+        res = self.app.get(url, auth=self.user.auth)
+        assert_equal(res.status_code, 200)
+        # The JSON result
+        result = res.json['result']
+        assert_true(result['userHasAuth'])
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
+    def test_onedrive_user_config_get_has_valid_credentials(self, mock_account_info):
+        mock_account_info.return_value = {'display_name': 'Mr. Drop Box'}
+        url = api_url_for('onedrive_user_config_get')
+        res = self.app.get(url, auth=self.user.auth)
+        assert_equal(res.status_code, 200)
+        # The JSON result
+        result = res.json['result']
+        assert_true(result['validCredentials'])
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
+    def test_onedrive_user_config_get_has_invalid_credentials(self, mock_account_info):
+        mock_response = mock.Mock()
+        mock_response.status = 401
+        mock_account_info.side_effect = ErrorResponse(mock_response, "The given OAuth 2 access token doesn't exist or has expired.")
+        url = api_url_for('onedrive_user_config_get')
+        res = self.app.get(url, auth=self.user.auth)
+        assert_equal(res.status_code, 200)
+        # The JSON result
+        result = res.json['result']
+        assert_false(result['validCredentials'])
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
+    def test_onedrive_user_config_get_returns_correct_urls(self, mock_account_info):
+        mock_account_info.return_value = {'display_name': 'Mr. Drop Box'}
+        url = api_url_for('onedrive_user_config_get')
+        res = self.app.get(url, auth=self.user.auth)
+        assert_equal(res.status_code, 200)
+        # The JSONified URLs result
+        urls = res.json['result']['urls']
+        assert_equal(urls['delete'], api_url_for('onedrive_oauth_delete_user'))
+        assert_equal(urls['create'], api_url_for('onedrive_oauth_start_user'))
+
+    def test_serialize_settings_helper_returns_correct_urls(self):
+        result = serialize_settings(self.node_settings, self.user, client=mock_client)
+        urls = result['urls']
+
+        assert_equal(urls['config'], self.project.api_url_for('onedrive_config_put'))
+        assert_equal(urls['deauthorize'], self.project.api_url_for('onedrive_deauthorize'))
+        assert_equal(urls['auth'], self.project.api_url_for('onedrive_oauth_start'))
+        assert_equal(urls['importAuth'], self.project.api_url_for('onedrive_import_user_auth'))
+        assert_equal(urls['files'], self.project.web_url_for('collect_file_trees'))
+        # Includes endpoint for fetching folders only
+        # NOTE: Querystring params are in camelCase
+        assert_equal(urls['folders'],
+            self.project.api_url_for('onedrive_hgrid_data_contents', root=1))
+        assert_equal(urls['settings'], web_url_for('user_addons'))
+
+    def test_serialize_settings_helper_returns_correct_auth_info(self):
+        result = serialize_settings(self.node_settings, self.user, client=mock_client)
+        assert_equal(result['nodeHasAuth'], self.node_settings.has_auth)
+        assert_true(result['userHasAuth'])
+        assert_true(result['userIsOwner'])
+
+    def test_serialize_settings_for_user_no_auth(self):
+        no_addon_user = AuthUserFactory()
+        result = serialize_settings(self.node_settings, no_addon_user, client=mock_client)
+        assert_false(result['userIsOwner'])
+        assert_false(result['userHasAuth'])
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
+    def test_serialize_settings_valid_credentials(self, mock_account_info):
+        mock_account_info.return_value = {'display_name': 'Mr. One Drive'}
+        result = serialize_settings(self.node_settings, self.user, client=mock_client)
+        assert_true(result['validCredentials'])
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
+    def test_serialize_settings_invalid_credentials(self, mock_account_info):
+        mock_response = mock.Mock()
+        mock_response.status = 401
+        mock_account_info.side_effect = ErrorResponse(mock_response, "The given OAuth 2 access token doesn't exist or has expired.")
+        result = serialize_settings(self.node_settings, self.user)
+        assert_false(result['validCredentials'])
+
+    def test_serialize_settings_helper_returns_correct_folder_info(self):
+        result = serialize_settings(self.node_settings, self.user, client=mock_client)
+        folder = result['folder']
+        assert_equal(folder['name'], self.node_settings.folder)
+        assert_equal(folder['path'], self.node_settings.folder)
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
+    def test_onedrive_config_get(self, mock_account_info):
+        mock_account_info.return_value = {'display_name': 'Mr. One Drive'}
+        self.user_settings.save()
+
+        url = self.project.api_url_for('onedrive_config_get')
+
+        res = self.app.get(url, auth=self.user.auth)
+        assert_equal(res.status_code, 200)
+        result = res.json['result']
+        assert_equal(result['ownerName'], self.user_settings.owner.fullname)
+
+        assert_equal(
+            result['urls']['config'],
+            self.project.api_url_for('onedrive_config_put'),
+        )
+
+    def test_onedrive_config_put(self):
+        url = self.project.api_url_for('onedrive_config_put')
+        # Can set folder through API call
+        res = self.app.put_json(url, {'selected': {'path': 'My test folder',
+            'name': 'OneDrive/My test folder'}},
+            auth=self.user.auth)
+        assert_equal(res.status_code, 200)
+        self.node_settings.reload()
+        self.project.reload()
+
+        # Folder was set
+        assert_equal(self.node_settings.folder, 'My test folder')
+        # A log event was created
+        last_log = self.project.logs[-1]
+        assert_equal(last_log.action, 'onedrive_folder_selected')
+        params = last_log.params
+        assert_equal(params['folder'], 'My test folder')
+
+    def test_onedrive_deauthorize(self):
+        url = self.project.api_url_for('onedrive_deauthorize')
+        saved_folder = self.node_settings.folder
+        self.app.delete(url, auth=self.user.auth)
+        self.project.reload()
+        self.node_settings.reload()
+
+        assert_false(self.node_settings.has_auth)
+        assert_is(self.node_settings.user_settings, None)
+        assert_is(self.node_settings.folder, None)
+
+        # A log event was saved
+        last_log = self.project.logs[-1]
+        assert_equal(last_log.action, 'onedrive_node_deauthorized')
+        log_params = last_log.params
+        assert_equal(log_params['node'], self.project._primary_key)
+        assert_equal(log_params['folder'], saved_folder)
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
+    def test_onedrive_import_user_auth_returns_serialized_settings(self, mock_account_info):
+        mock_account_info.return_value = {'display_name': 'Mr. One Drive'}
+        # Node does not have user settings
+        self.node_settings.user_settings = None
+        self.node_settings.save()
+        url = self.project.api_url_for('onedrive_import_user_auth')
+        res = self.app.put(url, auth=self.user.auth)
+        self.project.reload()
+        self.node_settings.reload()
+
+        expected_result = serialize_settings(self.node_settings, self.user,
+                                             client=mock_client)
+        result = res.json['result']
+        assert_equal(result, expected_result)
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
+    def test_onedrive_import_user_auth_adds_a_log(self, mock_account_info):
+        mock_account_info.return_value = {'display_name': 'Mr. One Drive'}
+        # Node does not have user settings
+        self.node_settings.user_settings = None
+        self.node_settings.save()
+        url = self.project.api_url_for('onedrive_import_user_auth')
+        self.app.put(url, auth=self.user.auth)
+        self.project.reload()
+        self.node_settings.reload()
+        last_log = self.project.logs[-1]
+
+        assert_equal(last_log.action, 'onedrive_node_authorized')
+        log_params = last_log.params
+        assert_equal(log_params['node'], self.project._primary_key)
+        assert_equal(last_log.user, self.user)
+
+    def test_onedrive_get_share_emails(self):
+        # project has some contributors
+        contrib = AuthUserFactory()
+        self.project.add_contributor(contrib, auth=Auth(self.user))
+        self.project.save()
+        url = self.project.api_url_for('onedrive_get_share_emails')
+        res = self.app.get(url, auth=self.user.auth)
+        result = res.json['result']
+        assert_equal(result['emails'], [u.username for u in self.project.contributors
+                                        if u != self.user])
+        assert_equal(result['url'], utils.get_share_folder_uri(self.node_settings.folder))
+
+    def test_onedrive_get_share_emails_returns_error_if_not_authorizer(self):
+        contrib = AuthUserFactory()
+        contrib.add_addon('onedrive')
+        contrib.save()
+        self.project.add_contributor(contrib, auth=Auth(self.user))
+        self.project.save()
+        url = self.project.api_url_for('onedrive_get_share_emails')
+        # Non-authorizing contributor sends request
+        res = self.app.get(url, auth=contrib.auth, expect_errors=True)
+        assert_equal(res.status_code, httplib.FORBIDDEN)
+
+    def test_onedrive_get_share_emails_requires_user_addon(self):
+        # Node doesn't have auth
+        self.node_settings.user_settings = None
+        self.node_settings.save()
+        url = self.project.api_url_for('onedrive_get_share_emails')
+        # Authorizing user sends request, but the node has no user settings
+        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
+        assert_equal(res.status_code, httplib.BAD_REQUEST)
+
+
+class TestFilebrowserViews(OneDriveAddonTestCase):
+
+    def test_onedrive_hgrid_data_contents(self):
+        with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
+            url = self.project.api_url_for(
+                'onedrive_hgrid_data_contents',
+                path=self.node_settings.folder,
+            )
+            res = self.app.get(url, auth=self.user.auth)
+            contents = [x for x in mock_client.metadata('', list=True)['contents'] if x['is_dir']]
+            assert_equal(len(res.json), len(contents))
+            first = res.json[0]
+            assert_in('kind', first)
+            assert_equal(first['path'], contents[0]['path'])
+
+    def test_onedrive_hgrid_data_contents_if_folder_is_none_and_folders_only(self):
+        with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
+            self.node_settings.folder = None
+            self.node_settings.save()
+            url = self.project.api_url_for('onedrive_hgrid_data_contents', foldersOnly=True)
+            res = self.app.get(url, auth=self.user.auth)
+            contents = mock_client.metadata('', list=True)['contents']
+            expected = [each for each in contents if each['is_dir']]
+            assert_equal(len(res.json), len(expected))
+
+    def test_onedrive_hgrid_data_contents_folders_only(self):
+        with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
+            url = self.project.api_url_for('onedrive_hgrid_data_contents', foldersOnly=True)
+            res = self.app.get(url, auth=self.user.auth)
+            contents = mock_client.metadata('', list=True)['contents']
+            expected = [each for each in contents if each['is_dir']]
+            assert_equal(len(res.json), len(expected))
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.metadata')
+    def test_onedrive_hgrid_data_contents_include_root(self, mock_metadata):
+        with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
+            url = self.project.api_url_for('onedrive_hgrid_data_contents', root=1)
+
+            res = self.app.get(url, auth=self.user.auth)
+            contents = mock_client.metadata('', list=True)['contents']
+            assert_equal(len(res.json), 1)
+            assert_not_equal(len(res.json), len(contents))
+            first_elem = res.json[0]
+            assert_equal(first_elem['path'], '/')
+
+    @unittest.skip('finish this')
+    def test_onedrive_addon_folder(self):
+        assert 0, 'finish me'
+
+    def test_onedrive_addon_folder_if_folder_is_none(self):
+        # Something is returned on normal circumstances
+        root = onedrive_addon_folder(
+            node_settings=self.node_settings, auth=self.user.auth)
+        assert_true(root)
+
+        # Nothing is returned when there is no folder linked
+        self.node_settings.folder = None
+        self.node_settings.save()
+        root = onedrive_addon_folder(
+            node_settings=self.node_settings, auth=self.user.auth)
+        assert_is_none(root)
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.metadata')
+    def test_onedrive_hgrid_data_contents_deleted(self, mock_metadata):
+        # Example metadata for a deleted folder
+        mock_metadata.return_value = {
+            u'bytes': 0,
+            u'contents': [],
+            u'hash': u'e3c62eb85bc50dfa1107b4ca8047812b',
+            u'icon': u'folder_gray',
+            u'is_deleted': True,
+            u'is_dir': True,
+            u'modified': u'Sat, 29 Mar 2014 20:11:49 +0000',
+            u'path': u'/tests',
+            u'rev': u'3fed844002c12fc',
+            u'revision': 67033156,
+            u'root': u'onedrive',
+            u'size': u'0 bytes',
+            u'thumb_exists': False
+        }
+        url = self.project.api_url_for('onedrive_hgrid_data_contents')
+        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
+        assert_equal(res.status_code, httplib.NOT_FOUND)
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.metadata')
+    def test_onedrive_hgrid_data_contents_returns_error_if_invalid_path(self, mock_metadata):
+        mock_response = mock.Mock()
+        mock_metadata.side_effect = ErrorResponse(mock_response, body='File not found')
+        url = self.project.api_url_for('onedrive_hgrid_data_contents')
+        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
+        assert_equal(res.status_code, httplib.NOT_FOUND)
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.metadata')
+    def test_onedrive_hgrid_data_contents_handles_max_retry_error(self, mock_metadata):
+        mock_response = mock.Mock()
+        url = self.project.api_url_for('onedrive_hgrid_data_contents')
+        mock_metadata.side_effect = MaxRetryError(mock_response, url)
+        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
+        assert_equal(res.status_code, httplib.REQUEST_TIMEOUT)
+
+
+class TestRestrictions(OneDriveAddonTestCase):
+
+    def setUp(self):
+        super(OneDriveAddonTestCase, self).setUp()
+
+        # Nasty contributor who will try to access folders that he shouldn't have
+        # access to
+        self.contrib = AuthUserFactory()
+        self.project.add_contributor(self.contrib, auth=Auth(self.user))
+        self.project.save()
+
+        # Set shared folder
+        self.node_settings.folder = 'foo bar/bar'
+        self.node_settings.save()
+
+    @mock.patch('website.addons.onedrive.client.OneDriveClient.metadata')
+    def test_restricted_hgrid_data_contents(self, mock_metadata):
+        mock_metadata.return_value = mock_responses['metadata_list']
+
+        # tries to access a parent folder
+        url = self.project.api_url_for('onedrive_hgrid_data_contents',
+            path='foo bar')
+        res = self.app.get(url, auth=self.contrib.auth, expect_errors=True)
+        assert_equal(res.status_code, httplib.FORBIDDEN)
+
+    def test_restricted_config_contrib_no_addon(self):
+        url = self.project.api_url_for('onedrive_config_put')
+        res = self.app.put_json(url, {'selected': {'path': 'foo'}},
+            auth=self.contrib.auth, expect_errors=True)
+        assert_equal(res.status_code, httplib.BAD_REQUEST)
+
+    def test_restricted_config_contrib_not_owner(self):
+        # Contributor has onedrive auth, but is not the node authorizer
+        self.contrib.add_addon('onedrive')
+        self.contrib.save()
+
+        url = self.project.api_url_for('onedrive_config_put')
+        res = self.app.put_json(url, {'selected': {'path': 'foo'}},
+            auth=self.contrib.auth, expect_errors=True)
+        assert_equal(res.status_code, httplib.FORBIDDEN)
diff --git a/website/addons/onedrive/tests/test_webtests.py b/website/addons/onedrive/tests/test_webtests.py
new file mode 100644
index 00000000000..66ad4d2c383
--- /dev/null
+++ b/website/addons/onedrive/tests/test_webtests.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+from nose.tools import *  # noqa (PEP8 asserts)
+
+from website.util import api_url_for, web_url_for
+from tests.base import OsfTestCase
+from tests.factories import AuthUserFactory
+
+
+class TestOneDriveIntegration(OsfTestCase):
+
+    def setUp(self):
+        super(TestOneDriveIntegration, self).setUp()
+        self.user = AuthUserFactory()
+        # User is logged in
+        self.app.authenticate(*self.user.auth)
+
+    def test_cant_start_oauth_if_already_authorized(self):
+        # User already has onedrive authorized
+        self.user.add_addon('onedrive')
+        self.user.save()
+        settings = self.user.get_addon('onedrive')
+        settings.access_token = 'abc123foobarbaz'
+        settings.save()
+        assert_true(self.user.get_addon('onedrive').has_auth)
+        # Tries to start oauth again
+        url = api_url_for('onedrive_oauth_start_user')
+        res = self.app.get(url).follow()
+
+        # Is redirected back to settings page
+        assert_equal(
+            res.request.path,
+            web_url_for('user_addons')
+        )
diff --git a/website/addons/onedrive/tests/utils.py b/website/addons/onedrive/tests/utils.py
new file mode 100644
index 00000000000..775afc8b179
--- /dev/null
+++ b/website/addons/onedrive/tests/utils.py
@@ -0,0 +1,168 @@
+# -*- coding: utf-8 -*-
+import mock
+from contextlib import contextmanager
+
+from modularodm import storage
+
+from framework.mongo import set_up_storage
+
+from website.addons.base.testing import AddonTestCase
+from website.addons.onedrive import MODELS
+
+
+def init_storage():
+    set_up_storage(MODELS, storage_class=storage.MongoStorage)
+
+
+class OneDriveAddonTestCase(AddonTestCase):
+
+    ADDON_SHORT_NAME = 'onedrive'
+
+    def set_user_settings(self, settings):
+        settings.access_token = '12345abc'
+        settings.onedrive_id = 'myonedriveid'
+
+    def set_node_settings(self, settings):
+        settings.folder = 'foo'
+
+
+mock_responses = {
+    'put_file': {
+        'bytes': 77,
+        'icon': 'page_white_text',
+        'is_dir': False,
+        'mime_type': 'text/plain',
+        'modified': 'Wed, 20 Jul 2011 22:04:50 +0000',
+        'path': '/magnum-opus.txt',
+        'rev': '362e2029684fe',
+        'revision': 221922,
+        'root': 'onedrive',
+        'size': '77 bytes',
+        'thumb_exists': False
+    },
+    'metadata_list': {
+        "size": "0 bytes",
+        "hash": "37eb1ba1849d4b0fb0b28caf7ef3af52",
+        "bytes": 0,
+        "thumb_exists": False,
+        "rev": "714f029684fe",
+        "modified": "Wed, 27 Apr 2011 22:18:51 +0000",
+        "path": "/Public",
+        "is_dir": True,
+        "icon": "folder_public",
+        "root": "onedrive",
+        "contents": [
+            {
+                "size": "0 bytes",
+                "rev": "35c1f029684fe",
+                "thumb_exists": False,
+                "bytes": 0,
+                "modified": "Mon, 18 Jul 2011 20:13:43 +0000",
+                "client_mtime": "Wed, 20 Apr 2011 16:20:19 +0000",
+                "path": "/Public/latest.txt",
+                "is_dir": False,
+                "icon": "page_white_text",
+                "root": "onedrive",
+                "mime_type": "text/plain",
+                "revision": 220191
+            },
+            {
+                u'bytes': 0,
+                u'icon': u'folder',
+                u'is_dir': True,
+                u'modified': u'Sat, 22 Mar 2014 05:40:29 +0000',
+                u'path': u'/datasets/New Folder',
+                u'rev': u'3fed51f002c12fc',
+                u'revision': 67032351,
+                u'root': u'onedrive',
+                u'size': u'0 bytes',
+                u'thumb_exists': False
+            }
+        ],
+        "revision": 29007
+    },
+    'metadata_single': {
+        u'bytes': 74,
+        u'client_mtime': u'Mon, 13 Jan 2014 20:24:15 +0000',
+        u'icon': u'page_white',
+        u'is_dir': False,
+        u'mime_type': u'text/csv',
+        u'modified': u'Fri, 21 Mar 2014 05:46:36 +0000',
+        u'path': '/datasets/foo.txt',
+        u'rev': u'a2149fb64',
+        u'revision': 10,
+        u'root': u'app_folder',
+        u'size': u'74 bytes',
+        u'thumb_exists': False
+    },
+    'revisions': [{u'bytes': 0,
+        u'client_mtime': u'Wed, 31 Dec 1969 23:59:59 +0000',
+        u'icon': u'page_white_picture',
+        u'is_deleted': True,
+        u'is_dir': False,
+        u'mime_type': u'image/png',
+        u'modified': u'Tue, 25 Mar 2014 03:39:13 +0000',
+        u'path': u'/svs-v-barks.png',
+        u'rev': u'3fed741002c12fc',
+        u'revision': 67032897,
+        u'root': u'onedrive',
+        u'size': u'0 bytes',
+        u'thumb_exists': True},
+        {u'bytes': 151164,
+        u'client_mtime': u'Sat, 13 Apr 2013 21:56:36 +0000',
+        u'icon': u'page_white_picture',
+        u'is_dir': False,
+        u'mime_type': u'image/png',
+        u'modified': u'Tue, 25 Mar 2014 01:45:51 +0000',
+        u'path': u'/svs-v-barks.png',
+        u'rev': u'3fed61a002c12fc',
+        u'revision': 67032602,
+        u'root': u'onedrive',
+        u'size': u'147.6 KB',
+        u'thumb_exists': True}]
+}
+
+
+class MockOneDrive(object):
+
+    def put_file(self, full_path, file_obj, overwrite=False, parent_rev=None):
+        return mock_responses['put_file']
+
+    def metadata(self, path, list=True, file_limit=25000, hash=None, rev=None,
+            include_deleted=False):
+        if list:
+            ret = mock_responses['metadata_list']
+        else:
+            ret = mock_responses['metadata_single']
+            ret['path'] = path
+        return ret
+
+    def get_file_and_metadata(*args, **kwargs):
+        pass
+
+    def file_delete(self, path):
+        return mock_responses['metadata_single']
+
+    def revisions(self, path):
+        ret = mock_responses['revisions']
+        for each in ret:
+            each['path'] = path
+        return ret
+
+    def account_info(self):
+        return {'display_name': 'Mr. One Drive'}
+
+@contextmanager
+def patch_client(target, mock_client=None):
+    """Patches a function that returns a OneDriveClient, returning an instance
+    of MockOneDrive instead.
+
+    Usage: ::
+
+        with patch_client('website.addons.onedrive.views.config.get_client') as client:
+            # test view that uses the onedrive client.
+    """
+    with mock.patch(target) as client_getter:
+        client = mock_client or MockOneDrive()
+        client_getter.return_value = client
+        yield client
diff --git a/website/addons/onedrive/utils.py b/website/addons/onedrive/utils.py
new file mode 100644
index 00000000000..3b572e3ed61
--- /dev/null
+++ b/website/addons/onedrive/utils.py
@@ -0,0 +1,97 @@
+# -*- coding: utf-8 -*-
+import time
+import logging
+from datetime import datetime
+
+# from OneDriveSDK
+import onedrivesdk
+from onedrivesdk.helpers import GetAuthCodeServer
+
+from website.util import rubeus
+
+from website.addons.onedrive import settings
+
+
+logger = logging.getLogger(__name__)
+
+
+class OnedriveNodeLogger(object):
+    """Helper class for adding correctly-formatted Onedrive logs to nodes.
+
+    Usage: ::
+
+        from website.project.model import NodeLog
+
+        node = ...
+        auth = ...
+        nodelogger = OnedriveNodeLogger(node, auth)
+        nodelogger.log(NodeLog.FILE_REMOVED, save=True)
+
+
+    :param Node node: The node to add logs to
+    :param Auth auth: Authorization of the person who did the action.
+    """
+    def __init__(self, node, auth, path=None):
+        self.node = node
+        self.auth = auth
+        self.path = path
+
+    def log(self, action, extra=None, save=False):
+        """Log an event. Wraps the Node#add_log method, automatically adding
+        relevant parameters and prefixing log events with `"onedrive_"`.
+
+        :param str action: Log action. Should be a class constant from NodeLog.
+        :param dict extra: Extra parameters to add to the ``params`` dict of the
+            new NodeLog.
+        """
+        params = {
+            'project': self.node.parent_id,
+            'node': self.node._primary_key,
+            'folder_id': self.node.get_addon('onedrive', deleted=True).folder_id,
+            # it used to be "folder": self.node.get_addon('onedrive', deleted=True).folder_name
+            # changed to folder_path to make log show the complete folder path "/folder/subfolder"
+            # instead of just showing the subfolder's name "/subfolder"
+            'folder_name': self.node.get_addon('onedrive', deleted=True).folder_name,
+            'folder': self.node.get_addon('onedrive', deleted=True).folder_path
+        }
+        # If logging a file-related action, add the file's view and download URLs
+        if self.path:
+            params.update({
+                'urls': {
+                    'view': self.node.web_url_for('addon_view_or_download_file', path=self.path, provider='onedrive'),
+                    'download': self.node.web_url_for(
+                        'addon_view_or_download_file',
+                        path=self.path,
+                        provider='onedrive'
+                    )
+                },
+                'path': self.path,
+            })
+        if extra:
+            params.update(extra)
+        # Prefix the action with onedrive_
+        self.node.add_log(
+            action="onedrive_{0}".format(action),
+            params=params,
+            auth=self.auth
+        )
+        if save:
+            self.node.save()
+
+def onedrive_addon_folder(node_settings, auth, **kwargs):
+    """Return the Rubeus/HGrid-formatted response for the root folder only."""
+    # Quit if node settings does not have authentication
+    if not node_settings.has_auth or not node_settings.folder_id:
+        return None
+
+    node = node_settings.owner
+
+    root = rubeus.build_addon_root(
+        node_settings=node_settings,
+        name=node_settings.fetch_folder_name(),
+        permissions=auth,
+        nodeUrl=node.url,
+        nodeApiUrl=node.api_url,
+    )
+
+    return [root]
diff --git a/website/addons/onedrive/views.py b/website/addons/onedrive/views.py
new file mode 100644
index 00000000000..742177019af
--- /dev/null
+++ b/website/addons/onedrive/views.py
@@ -0,0 +1,216 @@
+"""Views for the node settings page."""
+# -*- coding: utf-8 -*-
+import os
+import httplib as http
+
+import logging
+
+from flask import request
+# from OneDriveSDK
+# import onedrivesdk
+# from onedrivesdk.helpers import GetAuthCodeServer
+from website.addons.onedrive.client import OneDriveClient
+from urllib3.exceptions import MaxRetryError
+
+from framework.exceptions import HTTPError, PermissionsError
+from framework.auth.decorators import must_be_logged_in
+
+from website.oauth.models import ExternalAccount
+
+from website.util import permissions
+from website.project.decorators import (
+    must_have_addon, must_be_addon_authorizer,
+    must_have_permission, must_not_be_registration,
+)
+
+from website.addons.onedrive.serializer import OnedriveSerializer
+
+logger = logging.getLogger(__name__)
+
+logging.getLogger('onedrive1').setLevel(logging.WARNING)
+
+@must_be_logged_in
+def onedrive_get_user_settings(auth):
+    """ Returns the list of all of the current user's authorized Onedrive accounts """
+    serializer = OnedriveSerializer(user_settings=auth.user.get_addon('onedrive'))
+    return serializer.serialized_user_settings
+
+
+@must_have_addon('onedrive', 'node')
+@must_have_permission(permissions.WRITE)
+def onedrive_get_config(node_addon, auth, **kwargs):
+    """API that returns the serialized node settings."""
+    return {
+        'result': OnedriveSerializer().serialize_settings(node_addon, auth.user),
+    }
+
+
+@must_not_be_registration
+@must_have_addon('onedrive', 'user')
+@must_have_addon('onedrive', 'node')
+@must_be_addon_authorizer('onedrive')
+@must_have_permission(permissions.WRITE)
+def onedrive_set_config(node_addon, user_addon, auth, **kwargs):
+    """View for changing a node's linked onedrive folder."""
+    folder = request.json.get('selected')
+    serializer = OnedriveSerializer(node_settings=node_addon)
+
+    uid = folder['id']
+    path = folder['path']
+
+    node_addon.set_folder(uid, auth=auth)
+
+    return {
+        'result': {
+            'folder': {
+                'name': path.replace('All Files', '') if path != 'All Files' else '/ (Full Onedrive)',
+                'path': path,
+            },
+            'urls': serializer.addon_serialized_urls,
+        },
+        'message': 'Successfully updated settings.',
+    }
+
+
+@must_have_addon('onedrive', 'user')
+@must_have_addon('onedrive', 'node')
+@must_have_permission(permissions.WRITE)
+def onedrive_add_user_auth(auth, node_addon, user_addon, **kwargs):
+    """Import onedrive credentials from the currently logged-in user to a node.
+    """
+    external_account = ExternalAccount.load(
+        request.json['external_account_id']
+    )
+
+    if external_account not in user_addon.external_accounts:
+        raise HTTPError(http.FORBIDDEN)
+
+    try:
+        node_addon.set_auth(external_account, user_addon.owner)
+    except PermissionsError:
+        raise HTTPError(http.FORBIDDEN)
+
+    node_addon.set_user_auth(user_addon)
+    node_addon.save()
+
+    return {
+        'result': OnedriveSerializer().serialize_settings(node_addon, auth.user),
+        'message': 'Successfully imported access token from profile.',
+    }
+
+
+@must_not_be_registration
+@must_have_addon('onedrive', 'node')
+@must_have_permission(permissions.WRITE)
+def onedrive_remove_user_auth(auth, node_addon, **kwargs):
+    node_addon.deauthorize(auth=auth)
+    node_addon.save()
+
+
+@must_have_addon('onedrive', 'user')
+@must_have_addon('onedrive', 'node')
+@must_have_permission(permissions.WRITE)
+def onedrive_get_share_emails(auth, user_addon, node_addon, **kwargs):
+    """Return a list of emails of the contributors on a project.
+
+    The current user MUST be the user who authenticated Onedrive for the node.
+    """
+    if not node_addon.user_settings:
+        raise HTTPError(http.BAD_REQUEST)
+    # Current user must be the user who authorized the addon
+    if node_addon.user_settings.owner != auth.user:
+        raise HTTPError(http.FORBIDDEN)
+
+    return {
+        'result': {
+            'emails': [
+                contrib.username
+                for contrib in node_addon.owner.contributors
+                if contrib != auth.user
+            ],
+        }
+    }
+
+
+@must_have_addon('onedrive', 'node')
+@must_be_addon_authorizer('onedrive')
+def onedrive_folder_list(node_addon, **kwargs):
+    """Returns a list of folders in Onedrive"""
+    if not node_addon.has_auth:
+        raise HTTPError(http.FORBIDDEN)
+
+    node = node_addon.owner
+    folder_id = request.args.get('folderId')
+    logger.debug('oauth_provider::' +  repr(node_addon.oauth_provider))
+    logger.debug('fetch_access_token::' +  repr(node_addon))
+    logger.debug('node_addon.external_account::' +  repr(node_addon.external_account))
+    logger.debug('node_addon.external_account::oauth_key' +  repr(node_addon.external_account.oauth_key))
+#     logger.debug('node_addon.external_account::access_token' +  repr(node_addon.external_account.access_token)) #exception - no access token
+    logger.debug('node_addon.external_account::refresh_token' +  repr(node_addon.external_account.refresh_token))
+    logger.debug('node_addon.external_account::expires_at' +  repr(node_addon.external_account.expires_at)) #
+#     raise ValueError('node_addon.external_account::oauth_key' +  repr(node_addon.external_account.oauth_key))
+    
+
+    if folder_id is None:
+        return [{
+            'id': '0',
+            'path': 'All Files',
+            'addon': 'onedrive',
+            'kind': 'folder',
+            'name': '/ (Full Onedrive)',
+            'urls': {
+                'folders': node.api_url_for('onedrive_folder_list', folderId=0),
+            }
+        }]
+
+#    TODO: must refresh token https://dev.onedrive.com/auth/msa_oauth.htm#step-3-get-a-new-access-token-or-refresh-token
+    
+    access_token = node_addon.fetch_access_token()
+    logger.debug('access_token::' +  repr(access_token))
+    
+    oneDriveClient = OneDriveClient(access_token)#node_addon.external_account.refresh_token)
+    items = oneDriveClient.folders()
+    logger.debug('folders::' +  repr(items))
+    
+#     return folders
+    
+#     raise ValueError('made it past onedrive api call::' + repr(folders))
+    
+#    try:
+#        refresh_oauth_key(node_addon.external_account)
+#     client = OnedriveClient(node_addon.external_account.oauth_key)
+#    except OnedriveClientException:
+#        raise HTTPError(http.FORBIDDEN)
+
+#    try:
+#        metadata = client.get_folder(folder_id)
+#    except OnedriveClientException:
+#        raise HTTPError(http.NOT_FOUND)
+#    except MaxRetryError:
+#        raise HTTPError(http.BAD_REQUEST)
+
+    # Raise error if folder was deleted
+#     if metadata.get('is_deleted'):
+#         raise HTTPError(http.NOT_FOUND)
+
+#     folder_path = '/'.join(
+#         [
+#             x['name']
+#             for x in items['path_collection']['entries']
+#         ] + [items['name']]
+#     )
+
+    return [
+        {
+            'addon': 'onedrive',
+            'kind': 'folder',
+            'id': item['id'],
+            'name': item['name'],
+            'path': item['name'], #os.path.join(folder_path, item['name']),
+            'urls': {
+                'folders': node.api_url_for('onedrive_folder_list', folderId=item['id']),
+            }
+        }
+        for item in items
+        #if item['id'] == 'folder' #TODO ADD FOLDER FILTER
+    ]

From 12fdc768fc5f11a780f254e35d5fe59f8386bc2c Mon Sep 17 00:00:00 2001
From: Ryan Casey <rcasey@growingtechnologies.com>
Date: Mon, 23 Nov 2015 07:28:11 -0800
Subject: [PATCH 120/192] Initial OneDrive commit   - 2/3

---
 website/addons/onedrive/README.md             |   7 +-
 website/addons/onedrive/__init__.py           |   8 +-
 website/addons/onedrive/client.py             |  36 +-
 website/addons/onedrive/model.py              |  88 ++-
 website/addons/onedrive/requirements.txt      |   1 -
 website/addons/onedrive/serializer.py         |  40 +-
 website/addons/onedrive/settings/defaults.py  |   4 +-
 .../addons/onedrive/settings/local-dist.py    |   4 +-
 website/addons/onedrive/tests/factories.py    |  32 +-
 website/addons/onedrive/tests/test_client.py  |   8 +-
 website/addons/onedrive/tests/test_models.py  | 255 +--------
 website/addons/onedrive/tests/test_utils.py   |  89 +--
 website/addons/onedrive/tests/test_views.py   | 541 +++++-------------
 .../addons/onedrive/tests/test_webtests.py    |  33 --
 website/addons/onedrive/utils.py              |  15 +-
 website/addons/onedrive/views.py              |  97 ++--
 website/static/storageAddons.json             |   4 +
 17 files changed, 301 insertions(+), 961 deletions(-)
 delete mode 100644 website/addons/onedrive/tests/test_webtests.py

diff --git a/website/addons/onedrive/README.md b/website/addons/onedrive/README.md
index ee88d887c1d..f6a79db405a 100644
--- a/website/addons/onedrive/README.md
+++ b/website/addons/onedrive/README.md
@@ -2,7 +2,6 @@
 
 Enabling the addon for development
 
-1. In `website/settings/local.py` add, `"onedrive"` to the `ADDONS_REQUESTED` list.
-2. If `website/addons/onedrive/settings/local.py` does not yet exist, create a local box settings file with `cp website/addons/onedrive/settings/local-dist.py website/addons/onedrive/settings/local.py`
-...
-?. Enter your OneDrive `client_id` and `client_secret` as `ONEDRIVE_KEY` and `ONEDRIVE_SECRET` in `website/addons/onedrive/settings/local.py`. 
+1. If `website/addons/onedrive/settings/local.py` does not yet exist, create a local onedrive settings file with `cp website/addons/onedrive/settings/local-dist.py website/addons/onedrive/settings/local.py`
+2. Register the addon with Microsoft (https://account.live.com/developers/applications/index) and enter http://localhost:5000/oauth/callback/onedrive/ as the Redirect URL.
+3. Enter your OneDrive `client_id` and `client_secret` as `ONEDRIVE_KEY` and `ONEDRIVE_SECRET` in `website/addons/onedrive/settings/local.py`.
\ No newline at end of file
diff --git a/website/addons/onedrive/__init__.py b/website/addons/onedrive/__init__.py
index bbb02d03a84..b94ef5e1f98 100644
--- a/website/addons/onedrive/__init__.py
+++ b/website/addons/onedrive/__init__.py
@@ -4,12 +4,12 @@
 
 
 MODELS = [
-    model.OnedriveUserSettings,
-    model.OnedriveNodeSettings,
+    model.OneDriveUserSettings,
+    model.OneDriveNodeSettings,
 ]
 
-USER_SETTINGS_MODEL = model.OnedriveUserSettings
-NODE_SETTINGS_MODEL = model.OnedriveNodeSettings
+USER_SETTINGS_MODEL = model.OneDriveUserSettings
+NODE_SETTINGS_MODEL = model.OneDriveNodeSettings
 
 ROUTES = [routes.api_routes]
 
diff --git a/website/addons/onedrive/client.py b/website/addons/onedrive/client.py
index 4dde53e4273..09ff586a355 100644
--- a/website/addons/onedrive/client.py
+++ b/website/addons/onedrive/client.py
@@ -9,14 +9,11 @@
 from framework.exceptions import HTTPError
 
 from website.util.client import BaseClient
-#from website.addons.googledrive import settings
-from website.addons.googledrive import exceptions
+from website.addons.base import exceptions
 from website.addons.onedrive import settings
 
 logger = logging.getLogger(__name__)
 
-logging.getLogger('onedrive1').setLevel(logging.WARNING)
-
 
 class OneDriveAuthClient(BaseClient):
 
@@ -30,12 +27,12 @@ def refresh(self, access_token, refresh_token):
                 'expires_in': '-30',
             }
         )
- 
+
         extra = {
             'client_id': settings.ONEDRIVE_KEY,
             'client_secret': settings.ONEDRIVE_SECRET,
         }
- 
+
         try:
             return client.refresh_token(
                 self._build_url(settings.ONEDRIVE_OAUTH_TOKEN_ENDPOINT),
@@ -43,12 +40,12 @@ def refresh(self, access_token, refresh_token):
                 **extra
             )
         except InvalidGrantError:
-            raise exceptions.ExpiredAuthError()
- 
-    def userinfo(self, access_token):
+            raise exceptions.InvalidAuthError()
+
+    def user_info(self, access_token):
         return self._make_request(
             'GET',
-            self._build_url(settings.MSLIVE_API_URL, 'oauth2', 'v3', 'userinfo'),
+            self._build_url(settings.MSLIVE_API_URL, 'me'),
             params={'access_token': access_token},
             expects=(200, ),
             throws=HTTPError(401)
@@ -74,19 +71,22 @@ def about(self):
             throws=HTTPError(401)
         ).json()
 
-    def folders(self, folder_id='root/children'):
-#         query = ' and '.join([
-#             "'{0}' in parents".format(folder_id),
-#             'trashed = false',
-#             "mimeType = 'application/vnd.google-apps.folder'",
-#         ])
+    def folders(self, folder_id='root/'):
+
+        query = 'folder ne null'
+
+        if folder_id != 'root':
+            folder_id = "items/{}".format(folder_id)
+
         logger.debug('folders::made it1')
+        logger.debug('URLs:' + self._build_url(settings.ONEDRIVE_API_URL, 'drive/', folder_id, '/children/'))
         res = self._make_request(
             'GET',
-            self._build_url(settings.ONEDRIVE_API_URL, 'drive/', folder_id),
-            params={}, #'q': query
+            self._build_url(settings.ONEDRIVE_API_URL, 'drive/', folder_id, '/children/'),
+            params={'filter': query},
             expects=(200, ),
             throws=HTTPError(401)
         )
+        logger.debug('folder_id::' + repr(folder_id))
         logger.debug('res::' + repr(res))
         return res.json()['value']
diff --git a/website/addons/onedrive/model.py b/website/addons/onedrive/model.py
index fa373d31448..525a390a7a8 100644
--- a/website/addons/onedrive/model.py
+++ b/website/addons/onedrive/model.py
@@ -1,28 +1,19 @@
 # -*- coding: utf-8 -*-
 import logging
-import requests
 
-from flask import abort, request
 from datetime import datetime
 
-
-# import onedrivesdk
-# from onedrivesdk.helpers import GetAuthCodeServer
-
-#from onedrivesdk import CredentialsV2, OnedriveClient
-#from onedrivesdk.client import OnedriveClientException
 from modularodm import fields
 
 from framework.auth import Auth
-from framework.exceptions import HTTPError
 
 from website.addons.base import exceptions
 from website.addons.base import AddonOAuthUserSettingsBase, AddonOAuthNodeSettingsBase
 from website.addons.base import StorageAddonBase
 
 from website.addons.onedrive import settings
-from website.addons.onedrive.utils import OnedriveNodeLogger
-from website.addons.onedrive.serializer import OnedriveSerializer
+from website.addons.onedrive.utils import OneDriveNodeLogger
+from website.addons.onedrive.serializer import OneDriveSerializer
 from website.addons.onedrive.client import OneDriveAuthClient
 from website.addons.onedrive.client import OneDriveClient
 
@@ -32,8 +23,9 @@
 
 logging.getLogger('onedrive1').setLevel(logging.WARNING)
 
-class Onedrive(ExternalProvider):
-    name = 'Onedrive'
+
+class OneDrive(ExternalProvider):
+    name = 'onedrive'
     short_name = 'onedrive'
 
     client_id = settings.ONEDRIVE_KEY
@@ -47,16 +39,12 @@ class Onedrive(ExternalProvider):
     _auth_client = OneDriveAuthClient()
     _drive_client = OneDriveClient()
 
-    def handle_callback(self, response):        
-        """View called when the Oauth flow is completed. Adds a new OnedriveUserSettings
+    def handle_callback(self, response):
+        """View called when the Oauth flow is completed. Adds a new OneDriveUserSettings
         record to the user and saves the user's access token and account info.
         """
-        
-        userInfoRequest = requests.get(("{}me?access_token={}").format(settings.MSLIVE_API_URL, response['access_token']))
-        
-        logger.debug("userInfoRequest:: %s", repr(userInfoRequest))
-        
-        userInfo = userInfoRequest.json()
+        userInfo = self._auth_client.user_info(response['access_token'])
+        #  userInfo = userInfoRequest.json()
         logger.debug("userInfo:: %s", repr(userInfo))
 
         return {
@@ -64,7 +52,7 @@ def handle_callback(self, response):
             'display_name': userInfo['name'],
             'profile_url': userInfo['link']
         }
-        
+
     def _refresh_token(self, access_token, refresh_token):
         """ Handles the actual request to refresh tokens
 
@@ -78,7 +66,7 @@ def _refresh_token(self, access_token, refresh_token):
             return token
         else:
             return False
-        
+
     def fetch_access_token(self, force_refresh=False):
         self.refresh_access_token(force=force_refresh)
         return self.account.oauth_key
@@ -100,23 +88,23 @@ def _needs_refresh(self):
             return False
         return (self.account.expires_at - datetime.utcnow()).total_seconds() < settings.REFRESH_TIME
 
-class OnedriveUserSettings(AddonOAuthUserSettingsBase):
+class OneDriveUserSettings(AddonOAuthUserSettingsBase):
     """Stores user-specific onedrive information
     """
-    oauth_provider = Onedrive
-    serializer = OnedriveSerializer
-#     myBase = AddonOAuthUserSettingsBase
+    oauth_provider = OneDrive
+    serializer = OneDriveSerializer
 
 
-class OnedriveNodeSettings(StorageAddonBase, AddonOAuthNodeSettingsBase):
+class OneDriveNodeSettings(StorageAddonBase, AddonOAuthNodeSettingsBase):
 
-    oauth_provider = Onedrive
-    serializer = OnedriveSerializer
+    oauth_provider = OneDrive
+    serializer = OneDriveSerializer
 
     foreign_user_settings = fields.ForeignField(
         'onedriveusersettings', backref='authorized'
     )
     folder_id = fields.StringField(default=None)
+    onedrive_id = fields.StringField(default=None)
     folder_name = fields.StringField()
     folder_path = fields.StringField()
 
@@ -128,12 +116,12 @@ class OnedriveNodeSettings(StorageAddonBase, AddonOAuthNodeSettingsBase):
     def api(self):
         """authenticated ExternalProvider instance"""
         if self._api is None:
-            self._api = Onedrive(self.external_account)
+            self._api = OneDrive(self.external_account)
         return self._api
 
     @property
     def display_name(self):
-        return '{0}: {1}'.format(self.config.full_name, self.folder_id)
+        return '{0}: {1}'.format(self.config.full_name, self.folder_name)
 
     @property
     def has_auth(self):
@@ -149,7 +137,7 @@ def complete(self):
 
     def fetch_folder_name(self):
         self._update_folder_data()
-        return self.folder_name.replace('All Files', '/ (Full Onedrive)')
+        return self.folder_name.replace('All Files', '/ (Full OneDrive)')
 
     def fetch_full_folder_path(self):
         self._update_folder_data()
@@ -159,18 +147,17 @@ def _update_folder_data(self):
         if self.folder_id is None:
             return None
 
-        if not self._folder_data:
+        logger.debug('self::' + repr(self))
+        #request.json.get('selected')
 
-            self.folder_name = self._folder_data['name']
-            self.folder_path = '/'.join(
-                [x['name'] for x in self._folder_data['path_collection']['entries']]
-                + [self._folder_data['name']]
-            )
+        if not self._folder_data:
+            self.path = self.folder_name
             self.save()
 
-    def set_folder(self, folder_id, auth):
-        self.folder_id = str(folder_id)
-        self._update_folder_data()
+    def set_folder(self, folder, auth):
+        self.folder_id = folder['name']
+        self.onedrive_id = folder['id']
+        self.folder_name = folder['name']
         self.save()
 
         if not self.complete:
@@ -182,16 +169,16 @@ def set_folder(self, folder_id, auth):
             self.user_settings.save()
 
         # Add log to node
-        nodelogger = OnedriveNodeLogger(node=self.owner, auth=auth)
+        nodelogger = OneDriveNodeLogger(node=self.owner, auth=auth)  # AddonOAuthNodeSettingsBase.nodelogger(self)
         nodelogger.log(action="folder_selected", save=True)
 
     def set_user_auth(self, user_settings):
-        """Import a user's Onedrive authentication and create a NodeLog.
+        """Import a user's OneDrive authentication and create a NodeLog.
 
-        :param OnedriveUserSettings user_settings: The user settings to link.
+        :param OneDriveUserSettings user_settings: The user settings to link.
         """
         self.user_settings = user_settings
-        nodelogger = OnedriveNodeLogger(node=self.owner, auth=Auth(user_settings.owner))
+        nodelogger = OneDriveNodeLogger(node=self.owner, auth=Auth(user_settings.owner))
         nodelogger.log(action="node_authorized", save=True)
 
     def deauthorize(self, auth=None, add_log=True):
@@ -200,7 +187,7 @@ def deauthorize(self, auth=None, add_log=True):
 
         if add_log:
             extra = {'folder_id': self.folder_id}
-            nodelogger = OnedriveNodeLogger(node=node, auth=auth)
+            nodelogger = OneDriveNodeLogger(node=node, auth=auth)
             nodelogger.log(action="node_deauthorized", extra=extra, save=True)
 
         self.folder_id = None
@@ -211,13 +198,17 @@ def deauthorize(self, auth=None, add_log=True):
         self.save()
 
     def serialize_waterbutler_credentials(self):
+        logger.debug("in serialize_waterbutler_credentials:: %s", repr(self))
         if not self.has_auth:
             raise exceptions.AddonError('Addon is not authorized')
+        return {'token': self.fetch_access_token()}
 
     def serialize_waterbutler_settings(self):
+        logger.debug("in serialize_waterbutler_settings:: {}".format(repr(self)))
+        logger.debug('folder_id::{}'.format(self.folder_id))
         if self.folder_id is None:
             raise exceptions.AddonError('Folder is not configured')
-        return {'folder': self.folder_id}
+        return {'folder': self.onedrive_id}
 
     def create_waterbutler_log(self, auth, action, metadata):
         self.owner.add_log(
@@ -238,7 +229,6 @@ def create_waterbutler_log(self, auth, action, metadata):
     def fetch_access_token(self):
         return self.api.fetch_access_token()
 
-
     ##### Callback overrides #####
     def after_delete(self, node=None, user=None):
         self.deauthorize(Auth(user=user), add_log=True)
diff --git a/website/addons/onedrive/requirements.txt b/website/addons/onedrive/requirements.txt
index e14dc8cba55..e69de29bb2d 100644
--- a/website/addons/onedrive/requirements.txt
+++ b/website/addons/onedrive/requirements.txt
@@ -1 +0,0 @@
-onedrivesdk==1.0.1
\ No newline at end of file
diff --git a/website/addons/onedrive/serializer.py b/website/addons/onedrive/serializer.py
index 79e45bbab02..2e092f572cc 100644
--- a/website/addons/onedrive/serializer.py
+++ b/website/addons/onedrive/serializer.py
@@ -4,20 +4,17 @@
 
 from website.util import api_url_for, web_url_for
 
-# from OneDriveSDK
-import onedrivesdk
-from onedrivesdk.helpers import GetAuthCodeServer
-
-
 logger = logging.getLogger(__name__)
 
-logging.getLogger('onedrive1').setLevel(logging.WARNING)
-
-class OnedriveSerializer(OAuthAddonSerializer):
 
+class OneDriveSerializer(OAuthAddonSerializer):
     def credentials_owner(self, user_settings=None):
         return user_settings.owner or self.user_settings.owner
 
+    @property
+    def addon_short_name(self):
+        return 'onedrive'
+
     @property
     def user_is_owner(self):
         if self.user_settings is None or self.node_settings is None:
@@ -33,39 +30,30 @@ def user_is_owner(self):
 
     @property
     def serialized_urls(self):
-        
-        logger.error('serialized_urls-1')
-        
         ret = self.addon_serialized_urls
         ret.update({'settings': web_url_for('user_addons')})
         return ret
 
     @property
     def addon_serialized_urls(self):
-        logger.error('addon_serialized_urls-1')
         node = self.node_settings.owner
-        
+
         return {
-            #'auth': api_url_for('oauth_connect',
-             #                   service_name='onedrive'),
+            'auth': api_url_for('oauth_connect',
+                               service_name='onedrive'),
             'importAuth': node.api_url_for('onedrive_add_user_auth'),
             'files': node.web_url_for('collect_file_trees'),
             'folders': node.api_url_for('onedrive_folder_list'),
             'config': node.api_url_for('onedrive_set_config'),
-            #'emails': node.api_url_for('onedrive_get_share_emails'),
-            #'share': 'https://app.onedrive.com/files/0/f/{0}'.format(self.node_settings.folder_id),
             'deauthorize': node.api_url_for('onedrive_remove_user_auth'),
             'accounts': node.api_url_for('onedrive_get_user_settings'),
         }
 
     def serialize_settings(self, node_settings, current_user, client=None):
         """View helper that returns a dictionary representation of a
-        OnedriveNodeSettings record. Provides the return value for the
+        OneDriveNodeSettings record. Provides the return value for the
         onedrive config endpoints.
         """
-        
-        logger.error('addon_serialized_settings-1')
-        #TODO: review onedrive serilaized settings to determine if we need to add more
         valid_credentials = True
         user_settings = node_settings.user_settings
         self.node_settings = node_settings
@@ -74,9 +62,9 @@ def serialize_settings(self, node_settings, current_user, client=None):
 
 #        if user_settings:
 #            try:
-#                client = client or OnedriveClient(user_settings.external_accounts[0].oauth_key)
+#                client = client or OneDriveClient(user_settings.external_accounts[0].oauth_key)
 #                client.get_user_info()
-#            except (OnedriveClientException, IndexError):
+#            except (OneDriveClientException, IndexError):
 #                valid_credentials = False
 
         result = {
@@ -100,10 +88,10 @@ def serialize_settings(self, node_settings, current_user, client=None):
             if node_settings.folder_id is None:
                 result['folder'] = {'name': None, 'path': None}
             elif valid_credentials:
-                path = node_settings.fetch_full_folder_path()
-
+                #path = node_settings.fetch_full_folder_path()
+                path = node_settings.folder_id
                 result['folder'] = {
                     'path': path,
-                    'name': path.replace('All Files', '', 1) if path != 'All Files' else '/ (Full Onedrive)'
+                    'name': path.replace('All Files', '', 1) if path != 'All Files' else '/ (Full OneDrive)'
                 }
         return result
diff --git a/website/addons/onedrive/settings/defaults.py b/website/addons/onedrive/settings/defaults.py
index 3f425bb5ec8..e98b5babb98 100644
--- a/website/addons/onedrive/settings/defaults.py
+++ b/website/addons/onedrive/settings/defaults.py
@@ -6,5 +6,5 @@
 
 ONEDRIVE_OAUTH_TOKEN_ENDPOINT = 'https://login.live.com/oauth20_token.srf?'
 ONEDRIVE_OAUTH_AUTH_ENDPOINT = 'https://login.live.com/oauth20_authorize.srf?'
-MSLIVE_API_URL = 'https://apis.live.net/v5.0/' #https://graph.microsoft.com/v1.0/me
-ONEDRIVE_API_URL = 'https://api.onedrive.com/v1.0'
\ No newline at end of file
+MSLIVE_API_URL = 'https://apis.live.net/v5.0/'
+ONEDRIVE_API_URL = 'https://api.onedrive.com/v1.0'
diff --git a/website/addons/onedrive/settings/local-dist.py b/website/addons/onedrive/settings/local-dist.py
index 9d72644bffd..914943f5a4e 100644
--- a/website/addons/onedrive/settings/local-dist.py
+++ b/website/addons/onedrive/settings/local-dist.py
@@ -3,5 +3,5 @@
 these settings.
 """
 # Get an app key and secret at https://account.live.com/developers/applications
-ONEDRIVE_KEY = '000000004416C3D3'
-ONEDRIVE_SECRET = 'JFvPrqEnHuepkaDjZXCcnChQuyTjnk0Z'
+ONEDRIVE_KEY = None
+ONEDRIVE_SECRET = None
diff --git a/website/addons/onedrive/tests/factories.py b/website/addons/onedrive/tests/factories.py
index 71cd638028f..445d8be8aae 100644
--- a/website/addons/onedrive/tests/factories.py
+++ b/website/addons/onedrive/tests/factories.py
@@ -1,27 +1,41 @@
 # -*- coding: utf-8 -*-
 """Factory boy factories for the OneDrive addon."""
+import datetime
 
-from framework.auth import Auth
+from dateutil.relativedelta import relativedelta
 
-from factory import SubFactory, Sequence, post_generation
-from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory
+from factory import SubFactory, Sequence
+from tests.factories import (
+    ModularOdmFactory,
+    UserFactory,
+    ProjectFactory,
+    ExternalAccountFactory)
 
 from website.addons.onedrive.model import (
-    OneDriveUserSettings, OneDriveNodeSettings
+    OneDriveUserSettings,
+    OneDriveNodeSettings,
 )
 
 
-# TODO(sloria): make an abstract UserSettingsFactory that just includes the owner field
+class OneDriveAccountFactory(ExternalAccountFactory):
+    provider = 'onedrive'
+    provider_id = Sequence(lambda n: 'id-{0}'.format(n))
+    oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
+    oauth_secret = Sequence(lambda n: 'secret-{0}'.format(n))
+    expires_at = datetime.datetime.now() + relativedelta(days=1)
+
 class OneDriveUserSettingsFactory(ModularOdmFactory):
-    FACTORY_FOR = OneDriveUserSettings
+    class Meta:
+        model = OneDriveUserSettings
 
     owner = SubFactory(UserFactory)
-    access_token = Sequence(lambda n: 'abcdef{0}'.format(n))
 
 
 class OneDriveNodeSettingsFactory(ModularOdmFactory):
-    FACTORY_FOR = OneDriveNodeSettings
+    class Meta:
+        model = OneDriveNodeSettings
 
     owner = SubFactory(ProjectFactory)
     user_settings = SubFactory(OneDriveUserSettingsFactory)
-    folder = 'Camera Uploads'
+    folder_id = '1234567890'
+    folder_path = 'Drive/Camera Uploads'
diff --git a/website/addons/onedrive/tests/test_client.py b/website/addons/onedrive/tests/test_client.py
index 57e31310e65..0df2b37078e 100644
--- a/website/addons/onedrive/tests/test_client.py
+++ b/website/addons/onedrive/tests/test_client.py
@@ -12,10 +12,10 @@
     OneDriveNodeSettingsFactory,
     OneDriveUserSettingsFactory
 )
-from website.addons.onedrive.client import (
-    get_client, get_node_addon_client, get_node_client,
-    get_client_from_user_settings
-)
+#  from website.addons.onedrive.client import (
+#      get_client, get_node_addon_client, get_node_client,
+#      get_client_from_user_settings
+#  )
 
 
 class TestCore(OsfTestCase):
diff --git a/website/addons/onedrive/tests/test_models.py b/website/addons/onedrive/tests/test_models.py
index afe430f91fa..6118ea3ed39 100644
--- a/website/addons/onedrive/tests/test_models.py
+++ b/website/addons/onedrive/tests/test_models.py
@@ -21,79 +21,20 @@ def setUp(self):
         super(TestUserSettingsModel, self).setUp()
         self.user = UserFactory()
 
-    def test_fields(self):
-        user_settings = OneDriveUserSettings(
-            access_token='12345',
-            onedrive_id='abc',
-            owner=self.user)
-        user_settings.save()
-        retrieved = OneDriveUserSettings.load(user_settings._primary_key)
-        assert_true(retrieved.access_token)
-        assert_true(retrieved.onedrive_id)
-        assert_true(retrieved.owner)
-
-    def test_has_auth(self):
-        user_settings = OneDriveUserSettingsFactory(access_token=None)
-        assert_false(user_settings.has_auth)
-        user_settings.access_token = '12345'
-        user_settings.save()
-        assert_true(user_settings.has_auth)
-
-    def test_clear_clears_associated_node_settings(self):
-        node_settings = OneDriveNodeSettingsFactory.build()
-        user_settings = OneDriveUserSettingsFactory()
-        node_settings.user_settings = user_settings
-        node_settings.save()
-
-        user_settings.clear()
-        user_settings.save()
-
-        # Node settings no longer associated with user settings
-        assert_is(node_settings.user_settings, None)
-        assert_is(node_settings.folder, None)
-
-    def test_clear(self):
-        node_settings = OneDriveNodeSettingsFactory.build()
-        user_settings = OneDriveUserSettingsFactory(access_token='abcde',
-            onedrive_id='abc')
-        node_settings.user_settings = user_settings
-        node_settings.save()
-
-        assert_true(user_settings.access_token)
-        user_settings.clear()
-        user_settings.save()
-        assert_false(user_settings.access_token)
-        assert_false(user_settings.onedrive_id)
-
+#      def test_has_auth(self):
+#          user_settings = OneDriveUserSettingsFactory(access_token=None)
+#          assert_false(user_settings.has_auth)
+#          user_settings.access_token = '12345'
+#          user_settings.save()
+#          assert_true(user_settings.has_auth)
+#
     def test_delete(self):
         user_settings = OneDriveUserSettingsFactory()
-        assert_true(user_settings.has_auth)
+        user_settings.access_token = "122"
         user_settings.delete()
         user_settings.save()
-        assert_false(user_settings.access_token)
-        assert_false(user_settings.onedrive_id)
         assert_true(user_settings.deleted)
 
-    def test_delete_clears_associated_node_settings(self):
-        node_settings = OneDriveNodeSettingsFactory.build()
-        user_settings = OneDriveUserSettingsFactory()
-        node_settings.user_settings = user_settings
-        node_settings.save()
-
-        user_settings.delete()
-        user_settings.save()
-
-        # Node settings no longer associated with user settings
-        assert_is(node_settings.user_settings, None)
-        assert_is(node_settings.folder, None)
-        assert_false(node_settings.deleted)
-
-    def test_to_json(self):
-        user_settings = OneDriveUserSettingsFactory()
-        result = user_settings.to_json()
-        assert_equal(result['has_auth'], user_settings.has_auth)
-
-
 class TestOneDriveNodeSettingsModel(OsfTestCase):
 
     def setUp(self):
@@ -108,46 +49,10 @@ def setUp(self):
             owner=self.project
         )
 
-    def test_complete_true(self):
-        self.node_settings.user_settings.access_token = 'seems legit'
-
-        assert_true(self.node_settings.has_auth)
-        assert_true(self.node_settings.complete)
-
-    def test_complete_false(self):
-        self.node_settings.user_settings.access_token = 'seems legit'
-        self.node_settings.folder = None
-
-        assert_true(self.node_settings.has_auth)
-        assert_false(self.node_settings.complete)
-
-    def test_complete_auth_false(self):
-        self.node_settings.user_settings = None
-
-        assert_false(self.node_settings.has_auth)
-        assert_false(self.node_settings.complete)
-
-    def test_fields(self):
-        node_settings = OneDriveNodeSettings(user_settings=self.user_settings)
-        node_settings.save()
-        assert_true(node_settings.user_settings)
-        assert_equal(node_settings.user_settings.owner, self.user)
-        assert_true(hasattr(node_settings, 'folder'))
-        assert_true(hasattr(node_settings, 'registration_data'))
-
     def test_folder_defaults_to_none(self):
         node_settings = OneDriveNodeSettings(user_settings=self.user_settings)
         node_settings.save()
-        assert_is_none(node_settings.folder)
-
-    def test_has_auth(self):
-        settings = OneDriveNodeSettings(user_settings=self.user_settings)
-        settings.save()
-        assert_false(settings.has_auth)
-
-        settings.user_settings.access_token = '123abc'
-        settings.user_settings.save()
-        assert_true(settings.has_auth)
+        assert_is_none(node_settings.folder_id)
 
     def test_to_json(self):
         settings = self.node_settings
@@ -157,22 +62,22 @@ def test_to_json(self):
 
     def test_delete(self):
         assert_true(self.node_settings.user_settings)
-        assert_true(self.node_settings.folder)
+        assert_true(self.node_settings.folder_id)
         old_logs = self.project.logs
         self.node_settings.delete()
         self.node_settings.save()
         assert_is(self.node_settings.user_settings, None)
-        assert_is(self.node_settings.folder, None)
+        assert_is(self.node_settings.folder_id, None)
         assert_true(self.node_settings.deleted)
-        assert_equal(self.project.logs, old_logs)
+
 
     def test_deauthorize(self):
         assert_true(self.node_settings.user_settings)
-        assert_true(self.node_settings.folder)
+        assert_true(self.node_settings.folder_id)
         self.node_settings.deauthorize(auth=Auth(self.user))
         self.node_settings.save()
         assert_is(self.node_settings.user_settings, None)
-        assert_is(self.node_settings.folder, None)
+        assert_is(self.node_settings.folder_id, None)
 
         last_log = self.project.logs[-1]
         assert_equal(last_log.action, 'onedrive_node_deauthorized')
@@ -181,16 +86,6 @@ def test_deauthorize(self):
         assert_in('project', params)
         assert_in('folder', params)
 
-    def test_set_folder(self):
-        folder_name = 'queen/freddie'
-        self.node_settings.set_folder(folder_name, auth=Auth(self.user))
-        self.node_settings.save()
-        # Folder was set
-        assert_equal(self.node_settings.folder, folder_name)
-        # Log was saved
-        last_log = self.project.logs[-1]
-        assert_equal(last_log.action, 'onedrive_folder_selected')
-
     def test_set_user_auth(self):
         node_settings = OneDriveNodeSettingsFactory()
         user_settings = OneDriveUserSettingsFactory()
@@ -198,22 +93,10 @@ def test_set_user_auth(self):
         node_settings.set_user_auth(user_settings)
         node_settings.save()
 
-        assert_true(node_settings.has_auth)
         assert_equal(node_settings.user_settings, user_settings)
         # A log was saved
         last_log = node_settings.owner.logs[-1]
         assert_equal(last_log.action, 'onedrive_node_authorized')
-        log_params = last_log.params
-        assert_equal(log_params['folder'], node_settings.folder)
-        assert_equal(log_params['node'], node_settings.owner._primary_key)
-        assert_equal(last_log.user, user_settings.owner)
-
-    def test_serialize_credentials(self):
-        self.user_settings.access_token = 'secret'
-        self.user_settings.save()
-        credentials = self.node_settings.serialize_waterbutler_credentials()
-        expected = {'token': self.node_settings.user_settings.access_token}
-        assert_equal(credentials, expected)
 
     def test_serialize_credentials_not_authorized(self):
         self.node_settings.user_settings = None
@@ -221,113 +104,3 @@ def test_serialize_credentials_not_authorized(self):
         with assert_raises(exceptions.AddonError):
             self.node_settings.serialize_waterbutler_credentials()
 
-    def test_serialize_settings(self):
-        settings = self.node_settings.serialize_waterbutler_settings()
-        expected = {'folder': self.node_settings.folder}
-        assert_equal(settings, expected)
-
-    def test_serialize_settings_not_configured(self):
-        self.node_settings.folder = None
-        self.node_settings.save()
-        with assert_raises(exceptions.AddonError):
-            self.node_settings.serialize_waterbutler_settings()
-
-    def test_create_log(self):
-        action = 'file_added'
-        path = 'pizza.nii'
-        self.node_settings.folder = '/SomeOddPath'
-        self.node_settings.save()
-        nlog = len(self.project.logs)
-        self.node_settings.create_waterbutler_log(
-            auth=Auth(user=self.user),
-            action=action,
-            metadata={'path': path},
-        )
-        self.project.reload()
-        assert_equal(len(self.project.logs), nlog + 1)
-        assert_equal(
-            self.project.logs[-1].action,
-            'onedrive_{0}'.format(action),
-        )
-        assert_equal(
-            self.project.logs[-1].params['path'],
-            path,
-        )
-
-    @mock.patch('website.archiver.tasks.archive')
-    def test_does_not_get_copied_to_registrations(self, mock_archive):
-        registration = self.project.register_node(
-            schema=None,
-            auth=Auth(user=self.project.creator),
-            template='Template1',
-            data='hodor'
-        )
-        assert_false(registration.has_addon('onedrive'))
-
-
-class TestNodeSettingsCallbacks(OsfTestCase):
-
-    def setUp(self):
-        super(TestNodeSettingsCallbacks, self).setUp()
-        # Create node settings with auth
-        self.user_settings = OneDriveUserSettingsFactory(access_token='123abc')
-        self.node_settings = OneDriveNodeSettingsFactory(
-            user_settings=self.user_settings,
-            folder='',
-        )
-
-        self.project = self.node_settings.owner
-        self.user = self.user_settings.owner
-
-    def test_after_fork_by_authorized_onedrive_user(self):
-        fork = ProjectFactory()
-        clone, message = self.node_settings.after_fork(
-            node=self.project, fork=fork, user=self.user_settings.owner
-        )
-        assert_equal(clone.user_settings, self.user_settings)
-
-    def test_after_fork_by_unauthorized_onedrive_user(self):
-        fork = ProjectFactory()
-        user = UserFactory()
-        clone, message = self.node_settings.after_fork(
-            node=self.project, fork=fork, user=user,
-            save=True
-        )
-        # need request context for url_for
-        assert_is(clone.user_settings, None)
-
-    def test_before_fork(self):
-        node = ProjectFactory()
-        message = self.node_settings.before_fork(node, self.user)
-        assert_true(message)
-
-    def test_before_remove_contributor_message(self):
-        message = self.node_settings.before_remove_contributor(
-            self.project, self.user)
-        assert_true(message)
-        assert_in(self.user.fullname, message)
-        assert_in(self.project.project_or_component, message)
-
-    def test_after_remove_authorized_onedrive_user_self(self):
-        auth = Auth(user=self.user_settings.owner)
-        message = self.node_settings.after_remove_contributor(
-            self.project, self.user_settings.owner, auth)
-        self.node_settings.save()
-        assert_is_none(self.node_settings.user_settings)
-        assert_true(message)
-        assert_not_in("You can re-authenticate", message)
-
-    def test_after_remove_authorized_onedrive_user_not_self(self):
-        message = self.node_settings.after_remove_contributor(
-            node=self.project, removed=self.user_settings.owner)
-        self.node_settings.save()
-        assert_is_none(self.node_settings.user_settings)
-        assert_true(message)
-        assert_in("You can re-authenticate", message)
-
-    def test_after_delete(self):
-        self.project.remove_node(Auth(user=self.project.creator))
-        # Ensure that changes to node settings have been saved
-        self.node_settings.reload()
-        assert_true(self.node_settings.user_settings is None)
-        assert_true(self.node_settings.folder is None)
diff --git a/website/addons/onedrive/tests/test_utils.py b/website/addons/onedrive/tests/test_utils.py
index 8a5f1cef786..311a7593a28 100644
--- a/website/addons/onedrive/tests/test_utils.py
+++ b/website/addons/onedrive/tests/test_utils.py
@@ -12,7 +12,7 @@
 
 from website.addons.onedrive.tests.utils import OneDriveAddonTestCase
 from website.addons.onedrive import utils
-from website.addons.onedrive.views.config import serialize_folder
+# from website.addons.onedrive.views.config import serialize_folder
 
 
 class TestNodeLogger(OneDriveAddonTestCase):
@@ -41,89 +41,4 @@ def test_log_deauthorized_when_node_settings_are_deleted(self):
         logger.log(action='node_deauthorized', save=True)
 
         last_log = project.logs[-1]
-        assert_equal(last_log.action, 'onedrive_node_deauthorized')
-
-
-def test_get_file_name():
-    assert_equal(utils.get_file_name('foo/bar/baz.txt'), 'baz.txt')
-    assert_equal(utils.get_file_name('/foo/bar/baz.txt'), 'baz.txt')
-    assert_equal(utils.get_file_name('/foo/bar/baz.txt/'), 'baz.txt')
-
-
-def test_is_subdir():
-    assert_true(utils.is_subdir('foo/bar', 'foo'))
-    assert_true(utils.is_subdir('foo', 'foo'))
-    assert_true(utils.is_subdir('foo/bar baz', 'foo'))
-    assert_true(utils.is_subdir('bar baz/foo', 'bar baz'))
-    assert_true(utils.is_subdir('foo', '/'))
-    assert_true(utils.is_subdir('/', '/'))
-
-    assert_false(utils.is_subdir('foo/bar', 'baz'))
-    assert_false(utils.is_subdir('foo/bar', 'bar'))
-    assert_false(utils.is_subdir('foo', 'foo/bar'))
-    assert_false(utils.is_subdir('', 'foo'))
-    assert_false(utils.is_subdir('foo', ''))
-    assert_false(utils.is_subdir('foo', None))
-    assert_false(utils.is_subdir(None, 'foo'))
-    assert_false(utils.is_subdir(None, None))
-    assert_false(utils.is_subdir('', ''))
-
-    assert_true(utils.is_subdir('foo/bar', 'Foo/bar'))
-    assert_true(utils.is_subdir('Foo/bar', 'foo/bar'))
-
-
-def test_clean_path():
-    assert_equal(utils.clean_path('/'), '/')
-    assert_equal(utils.clean_path('/foo/bar/baz/'), 'foo/bar/baz')
-    assert_equal(utils.clean_path(None), '')
-
-
-def test_get_share_folder_uri():
-    expected = 'https://onedrive.com/home/foo?shareoptions=1&share_subfolder=0&share=1'
-    assert_equal(utils.get_share_folder_uri('/foo/'), expected)
-    assert_equal(utils.get_share_folder_uri('foo'), expected)
-
-
-def test_serialize_folder():
-    metadata = {
-        u'bytes': 0,
-        u'icon': u'folder',
-        u'is_dir': True,
-        u'modified': u'Sat, 22 Mar 2014 05:40:29 +0000',
-        u'path': u'/datasets/New Folder',
-        u'rev': u'3fed51f002c12fc',
-        u'revision': 67032351,
-        u'root': u'onedrive',
-        u'size': u'0 bytes',
-        u'thumb_exists': False
-    }
-    result = serialize_folder(metadata)
-    assert_equal(result['path'], metadata['path'])
-    assert_equal(result['name'], 'OneDrive' + metadata['path'])
-
-
-class TestMetadataSerialization(OsfTestCase):
-
-    def test_metadata_to_hgrid(self):
-        metadata = {
-            u'bytes': 123,
-            u'icon': u'file',
-            u'is_dir': False,
-            u'modified': u'Sat, 22 Mar 2014 05:40:29 +0000',
-            u'path': u'/foo/bar/baz.mp3',
-            u'rev': u'3fed51f002c12fc',
-            u'revision': 67032351,
-            u'root': u'onedrive',
-            u'size': u'0 bytes',
-            u'thumb_exists': False,
-            u'mime_type': u'audio/mpeg',
-        }
-        node = ProjectFactory()
-        permissions = {'view': True, 'edit': False}
-        result = utils.metadata_to_hgrid(metadata, node, permissions)
-        assert_equal(result['addon'], 'onedrive')
-        assert_equal(result['permissions'], permissions)
-        filename = utils.get_file_name(metadata['path'])
-        assert_equal(result['name'], filename)
-        assert_equal(result['path'], metadata['path'])
-        assert_equal(result['ext'], os.path.splitext(filename)[1])
+        assert_equal(last_log.action, 'onedrive_node_deauthorized')
\ No newline at end of file
diff --git a/website/addons/onedrive/tests/test_views.py b/website/addons/onedrive/tests/test_views.py
index ef383e5c7f0..0d9abe4f3a1 100644
--- a/website/addons/onedrive/tests/test_views.py
+++ b/website/addons/onedrive/tests/test_views.py
@@ -8,8 +8,6 @@
 
 from framework.auth import Auth
 from website.util import api_url_for, web_url_for
-from onedrive.rest import ErrorResponse
-from onedrive.client import OneDriveOAuth2Flow
 
 from urllib3.exceptions import MaxRetryError
 
@@ -19,8 +17,7 @@
 from website.addons.onedrive.tests.utils import (
     OneDriveAddonTestCase, mock_responses, MockOneDrive, patch_client
 )
-from website.addons.onedrive.views.config import serialize_settings
-from website.addons.onedrive.views.hgrid import onedrive_addon_folder
+
 from website.addons.onedrive import utils
 
 mock_client = MockOneDrive()
@@ -34,381 +31,127 @@ def setUp(self):
         # Log user in
         self.app.authenticate(*self.user.auth)
 
-    def test_onedrive_oauth_start(self):
-        url = api_url_for('onedrive_oauth_start_user')
-        res = self.app.get(url)
-        assert_is_redirect(res)
-        assert_in('&force_reapprove=true', res.location)
-
-    @mock.patch('website.addons.onedrive.views.auth.OneDriveOAuth2Flow.finish')
-    @mock.patch('website.addons.onedrive.views.auth.get_client_from_user_settings')
-    def test_onedrive_oauth_finish(self, mock_get, mock_finish):
-        mock_client = mock.MagicMock()
-        mock_client.account_info.return_value = {'display_name': 'Mr. Drop Box'}
-        mock_get.return_value = mock_client
-        mock_finish.return_value = ('mytoken123', 'myonedriveid', 'done')
-        url = api_url_for('onedrive_oauth_finish')
-        res = self.app.get(url)
-        assert_is_redirect(res)
-
-    @mock.patch('website.addons.onedrive.views.auth.session')
-    @mock.patch('website.addons.onedrive.views.auth.OneDriveOAuth2Flow.finish')
-    def test_onedrive_oauth_finish_cancelled(self, mock_finish, mock_session):
-        node = ProjectFactory(creator=self.user)
-        mock_session.data = {'onedrive_auth_nid': node._id}
-        mock_response = mock.Mock()
-        mock_response.status = 404
-        mock_finish.side_effect = OneDriveOAuth2Flow.NotApprovedException
-        settings = self.user.get_addon('onedrive')
-        url = api_url_for('onedrive_oauth_finish')
-        res = self.app.get(url)
-
-        assert_is_redirect(res)
-        assert_in(node._id, res.headers["location"])
-        assert_false(settings)
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.disable_access_token')
-    def test_onedrive_oauth_delete_user(self, mock_disable_access_token):
-        self.user.add_addon('onedrive')
-        settings = self.user.get_addon('onedrive')
-        settings.access_token = '12345abc'
-        settings.save()
-        assert_true(settings.has_auth)
-        self.user.save()
-        url = api_url_for('onedrive_oauth_delete_user')
-        self.app.delete(url)
-        settings.reload()
-        assert_false(settings.has_auth)
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.disable_access_token')
-    def test_onedrive_oauth_delete_user_with_invalid_credentials(self, mock_disable_access_token):
-        self.user.add_addon('onedrive')
-        settings = self.user.get_addon('onedrive')
-        settings.access_token = '12345abc'
-        settings.save()
-        assert_true(settings.has_auth)
-
-        mock_response = mock.Mock()
-        mock_response.status = 401
-        mock_disable_access_token.side_effect = ErrorResponse(mock_response, "The given OAuth 2 access token doesn't exist or has expired.")
-
-        self.user.save()
-        url = api_url_for('onedrive_oauth_delete_user')
-        self.app.delete(url)
-        settings.reload()
-        assert_false(settings.has_auth)
-
-
-class TestConfigViews(OneDriveAddonTestCase):
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
-    def test_onedrive_user_config_get_has_auth_info(self, mock_account_info):
-        mock_account_info.return_value = {'display_name': 'Mr. Drop Box'}
-        url = api_url_for('onedrive_user_config_get')
-        res = self.app.get(url, auth=self.user.auth)
-        assert_equal(res.status_code, 200)
-        # The JSON result
-        result = res.json['result']
-        assert_true(result['userHasAuth'])
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
-    def test_onedrive_user_config_get_has_valid_credentials(self, mock_account_info):
-        mock_account_info.return_value = {'display_name': 'Mr. Drop Box'}
-        url = api_url_for('onedrive_user_config_get')
-        res = self.app.get(url, auth=self.user.auth)
-        assert_equal(res.status_code, 200)
-        # The JSON result
-        result = res.json['result']
-        assert_true(result['validCredentials'])
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
-    def test_onedrive_user_config_get_has_invalid_credentials(self, mock_account_info):
-        mock_response = mock.Mock()
-        mock_response.status = 401
-        mock_account_info.side_effect = ErrorResponse(mock_response, "The given OAuth 2 access token doesn't exist or has expired.")
-        url = api_url_for('onedrive_user_config_get')
-        res = self.app.get(url, auth=self.user.auth)
-        assert_equal(res.status_code, 200)
-        # The JSON result
-        result = res.json['result']
-        assert_false(result['validCredentials'])
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
-    def test_onedrive_user_config_get_returns_correct_urls(self, mock_account_info):
-        mock_account_info.return_value = {'display_name': 'Mr. Drop Box'}
-        url = api_url_for('onedrive_user_config_get')
-        res = self.app.get(url, auth=self.user.auth)
-        assert_equal(res.status_code, 200)
-        # The JSONified URLs result
-        urls = res.json['result']['urls']
-        assert_equal(urls['delete'], api_url_for('onedrive_oauth_delete_user'))
-        assert_equal(urls['create'], api_url_for('onedrive_oauth_start_user'))
-
-    def test_serialize_settings_helper_returns_correct_urls(self):
-        result = serialize_settings(self.node_settings, self.user, client=mock_client)
-        urls = result['urls']
-
-        assert_equal(urls['config'], self.project.api_url_for('onedrive_config_put'))
-        assert_equal(urls['deauthorize'], self.project.api_url_for('onedrive_deauthorize'))
-        assert_equal(urls['auth'], self.project.api_url_for('onedrive_oauth_start'))
-        assert_equal(urls['importAuth'], self.project.api_url_for('onedrive_import_user_auth'))
-        assert_equal(urls['files'], self.project.web_url_for('collect_file_trees'))
-        # Includes endpoint for fetching folders only
-        # NOTE: Querystring params are in camelCase
-        assert_equal(urls['folders'],
-            self.project.api_url_for('onedrive_hgrid_data_contents', root=1))
-        assert_equal(urls['settings'], web_url_for('user_addons'))
-
-    def test_serialize_settings_helper_returns_correct_auth_info(self):
-        result = serialize_settings(self.node_settings, self.user, client=mock_client)
-        assert_equal(result['nodeHasAuth'], self.node_settings.has_auth)
-        assert_true(result['userHasAuth'])
-        assert_true(result['userIsOwner'])
-
-    def test_serialize_settings_for_user_no_auth(self):
-        no_addon_user = AuthUserFactory()
-        result = serialize_settings(self.node_settings, no_addon_user, client=mock_client)
-        assert_false(result['userIsOwner'])
-        assert_false(result['userHasAuth'])
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
-    def test_serialize_settings_valid_credentials(self, mock_account_info):
-        mock_account_info.return_value = {'display_name': 'Mr. Drop Box'}
-        result = serialize_settings(self.node_settings, self.user, client=mock_client)
-        assert_true(result['validCredentials'])
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
-    def test_serialize_settings_invalid_credentials(self, mock_account_info):
-        mock_response = mock.Mock()
-        mock_response.status = 401
-        mock_account_info.side_effect = ErrorResponse(mock_response, "The given OAuth 2 access token doesn't exist or has expired.")
-        result = serialize_settings(self.node_settings, self.user)
-        assert_false(result['validCredentials'])
-
-    def test_serialize_settings_helper_returns_correct_folder_info(self):
-        result = serialize_settings(self.node_settings, self.user, client=mock_client)
-        folder = result['folder']
-        assert_equal(folder['name'], self.node_settings.folder)
-        assert_equal(folder['path'], self.node_settings.folder)
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
-    def test_onedrive_config_get(self, mock_account_info):
-        mock_account_info.return_value = {'display_name': 'Mr. Drop Box'}
-        self.user_settings.save()
-
-        url = self.project.api_url_for('onedrive_config_get')
-
-        res = self.app.get(url, auth=self.user.auth)
-        assert_equal(res.status_code, 200)
-        result = res.json['result']
-        assert_equal(result['ownerName'], self.user_settings.owner.fullname)
-
-        assert_equal(
-            result['urls']['config'],
-            self.project.api_url_for('onedrive_config_put'),
-        )
-
-    def test_onedrive_config_put(self):
-        url = self.project.api_url_for('onedrive_config_put')
-        # Can set folder through API call
-        res = self.app.put_json(url, {'selected': {'path': 'My test folder',
-            'name': 'OneDrive/My test folder'}},
-            auth=self.user.auth)
-        assert_equal(res.status_code, 200)
-        self.node_settings.reload()
-        self.project.reload()
-
-        # Folder was set
-        assert_equal(self.node_settings.folder, 'My test folder')
-        # A log event was created
-        last_log = self.project.logs[-1]
-        assert_equal(last_log.action, 'onedrive_folder_selected')
-        params = last_log.params
-        assert_equal(params['folder'], 'My test folder')
-
-    def test_onedrive_deauthorize(self):
-        url = self.project.api_url_for('onedrive_deauthorize')
-        saved_folder = self.node_settings.folder
-        self.app.delete(url, auth=self.user.auth)
-        self.project.reload()
-        self.node_settings.reload()
-
-        assert_false(self.node_settings.has_auth)
-        assert_is(self.node_settings.user_settings, None)
-        assert_is(self.node_settings.folder, None)
-
-        # A log event was saved
-        last_log = self.project.logs[-1]
-        assert_equal(last_log.action, 'onedrive_node_deauthorized')
-        log_params = last_log.params
-        assert_equal(log_params['node'], self.project._primary_key)
-        assert_equal(log_params['folder'], saved_folder)
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
-    def test_onedrive_import_user_auth_returns_serialized_settings(self, mock_account_info):
-        mock_account_info.return_value = {'display_name': 'Mr. Drop Box'}
-        # Node does not have user settings
-        self.node_settings.user_settings = None
-        self.node_settings.save()
-        url = self.project.api_url_for('onedrive_import_user_auth')
-        res = self.app.put(url, auth=self.user.auth)
-        self.project.reload()
-        self.node_settings.reload()
-
-        expected_result = serialize_settings(self.node_settings, self.user,
-                                             client=mock_client)
-        result = res.json['result']
-        assert_equal(result, expected_result)
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.account_info')
-    def test_onedrive_import_user_auth_adds_a_log(self, mock_account_info):
-        mock_account_info.return_value = {'display_name': 'Mr. Drop Box'}
-        # Node does not have user settings
-        self.node_settings.user_settings = None
-        self.node_settings.save()
-        url = self.project.api_url_for('onedrive_import_user_auth')
-        self.app.put(url, auth=self.user.auth)
-        self.project.reload()
-        self.node_settings.reload()
-        last_log = self.project.logs[-1]
-
-        assert_equal(last_log.action, 'onedrive_node_authorized')
-        log_params = last_log.params
-        assert_equal(log_params['node'], self.project._primary_key)
-        assert_equal(last_log.user, self.user)
-
-    def test_onedrive_get_share_emails(self):
-        # project has some contributors
-        contrib = AuthUserFactory()
-        self.project.add_contributor(contrib, auth=Auth(self.user))
-        self.project.save()
-        url = self.project.api_url_for('onedrive_get_share_emails')
-        res = self.app.get(url, auth=self.user.auth)
-        result = res.json['result']
-        assert_equal(result['emails'], [u.username for u in self.project.contributors
-                                        if u != self.user])
-        assert_equal(result['url'], utils.get_share_folder_uri(self.node_settings.folder))
-
-    def test_onedrive_get_share_emails_returns_error_if_not_authorizer(self):
-        contrib = AuthUserFactory()
-        contrib.add_addon('onedrive')
-        contrib.save()
-        self.project.add_contributor(contrib, auth=Auth(self.user))
-        self.project.save()
-        url = self.project.api_url_for('onedrive_get_share_emails')
-        # Non-authorizing contributor sends request
-        res = self.app.get(url, auth=contrib.auth, expect_errors=True)
-        assert_equal(res.status_code, httplib.FORBIDDEN)
-
-    def test_onedrive_get_share_emails_requires_user_addon(self):
-        # Node doesn't have auth
-        self.node_settings.user_settings = None
-        self.node_settings.save()
-        url = self.project.api_url_for('onedrive_get_share_emails')
-        # Non-authorizing contributor sends request
-        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
-        assert_equal(res.status_code, httplib.BAD_REQUEST)
-
-
-class TestFilebrowserViews(OneDriveAddonTestCase):
-
-    def test_onedrive_hgrid_data_contents(self):
-        with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
-            url = self.project.api_url_for(
-                'onedrive_hgrid_data_contents',
-                path=self.node_settings.folder,
-            )
-            res = self.app.get(url, auth=self.user.auth)
-            contents = [x for x in mock_client.metadata('', list=True)['contents'] if x['is_dir']]
-            assert_equal(len(res.json), len(contents))
-            first = res.json[0]
-            assert_in('kind', first)
-            assert_equal(first['path'], contents[0]['path'])
-
-    def test_onedrive_hgrid_data_contents_if_folder_is_none_and_folders_only(self):
-        with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
-            self.node_settings.folder = None
-            self.node_settings.save()
-            url = self.project.api_url_for('onedrive_hgrid_data_contents', foldersOnly=True)
-            res = self.app.get(url, auth=self.user.auth)
-            contents = mock_client.metadata('', list=True)['contents']
-            expected = [each for each in contents if each['is_dir']]
-            assert_equal(len(res.json), len(expected))
-
-    def test_onedrive_hgrid_data_contents_folders_only(self):
-        with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
-            url = self.project.api_url_for('onedrive_hgrid_data_contents', foldersOnly=True)
-            res = self.app.get(url, auth=self.user.auth)
-            contents = mock_client.metadata('', list=True)['contents']
-            expected = [each for each in contents if each['is_dir']]
-            assert_equal(len(res.json), len(expected))
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.metadata')
-    def test_onedrive_hgrid_data_contents_include_root(self, mock_metadata):
-        with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
-            url = self.project.api_url_for('onedrive_hgrid_data_contents', root=1)
-
-            res = self.app.get(url, auth=self.user.auth)
-            contents = mock_client.metadata('', list=True)['contents']
-            assert_equal(len(res.json), 1)
-            assert_not_equal(len(res.json), len(contents))
-            first_elem = res.json[0]
-            assert_equal(first_elem['path'], '/')
-
-    @unittest.skip('finish this')
-    def test_onedrive_addon_folder(self):
-        assert 0, 'finish me'
-
-    def test_onedrive_addon_folder_if_folder_is_none(self):
-        # Something is returned on normal circumstances
-        root = onedrive_addon_folder(
-            node_settings=self.node_settings, auth=self.user.auth)
-        assert_true(root)
-
-        # Nothing is returned when there is no folder linked
-        self.node_settings.folder = None
-        self.node_settings.save()
-        root = onedrive_addon_folder(
-            node_settings=self.node_settings, auth=self.user.auth)
-        assert_is_none(root)
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.metadata')
-    def test_onedrive_hgrid_data_contents_deleted(self, mock_metadata):
-        # Example metadata for a deleted folder
-        mock_metadata.return_value = {
-            u'bytes': 0,
-            u'contents': [],
-            u'hash': u'e3c62eb85bc50dfa1107b4ca8047812b',
-            u'icon': u'folder_gray',
-            u'is_deleted': True,
-            u'is_dir': True,
-            u'modified': u'Sat, 29 Mar 2014 20:11:49 +0000',
-            u'path': u'/tests',
-            u'rev': u'3fed844002c12fc',
-            u'revision': 67033156,
-            u'root': u'onedrive',
-            u'size': u'0 bytes',
-            u'thumb_exists': False
-        }
-        url = self.project.api_url_for('onedrive_hgrid_data_contents')
-        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
-        assert_equal(res.status_code, httplib.NOT_FOUND)
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.metadata')
-    def test_onedrive_hgrid_data_contents_returns_error_if_invalid_path(self, mock_metadata):
-        mock_response = mock.Mock()
-        mock_metadata.side_effect = ErrorResponse(mock_response, body='File not found')
-        url = self.project.api_url_for('onedrive_hgrid_data_contents')
-        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
-        assert_equal(res.status_code, httplib.NOT_FOUND)
-
-    @mock.patch('website.addons.onedrive.client.OneDriveClient.metadata')
-    def test_onedrive_hgrid_data_contents_handles_max_retry_error(self, mock_metadata):
-        mock_response = mock.Mock()
-        url = self.project.api_url_for('onedrive_hgrid_data_contents')
-        mock_metadata.side_effect = MaxRetryError(mock_response, url)
-        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
-        assert_equal(res.status_code, httplib.REQUEST_TIMEOUT)
+#      def test_onedrive_oauth_start(self):
+#          url = api_url_for('onedrive_oauth_start_user')
+#          res = self.app.get(url)
+#          assert_is_redirect(res)
+#          assert_in('&force_reapprove=true', res.location)
+
+#  class TestConfigViews(OneDriveAddonTestCase):
+
+#      def test_onedrive_config_put(self):
+#          url = self.project.api_url_for('onedrive_config_put')
+#          # Can set folder through API call
+#          res = self.app.put_json(url, {'selected': {'path': 'My test folder',
+#              'name': 'OneDrive/My test folder'}},
+#              auth=self.user.auth)
+#          assert_equal(res.status_code, 200)
+#          self.node_settings.reload()
+#          self.project.reload()
+#
+#          # Folder was set
+#          assert_equal(self.node_settings.folder, 'My test folder')
+#          # A log event was created
+#          last_log = self.project.logs[-1]
+#          assert_equal(last_log.action, 'onedrive_folder_selected')
+#          params = last_log.params
+#          assert_equal(params['folder'], 'My test folder')
+#
+#      def test_onedrive_deauthorize(self):
+#          url = self.project.api_url_for('onedrive_deauthorize')
+#          saved_folder = self.node_settings.folder
+#          self.app.delete(url, auth=self.user.auth)
+#          self.project.reload()
+#          self.node_settings.reload()
+#
+#          assert_false(self.node_settings.has_auth)
+#          assert_is(self.node_settings.user_settings, None)
+#          assert_is(self.node_settings.folder, None)
+#
+#          # A log event was saved
+#          last_log = self.project.logs[-1]
+#          assert_equal(last_log.action, 'onedrive_node_deauthorized')
+#          log_params = last_log.params
+#          assert_equal(log_params['node'], self.project._primary_key)
+#          assert_equal(log_params['folder'], saved_folder)
+#
+#      def test_onedrive_get_share_emails(self):
+#          # project has some contributors
+#          contrib = AuthUserFactory()
+#          self.project.add_contributor(contrib, auth=Auth(self.user))
+#          self.project.save()
+#          url = self.project.api_url_for('onedrive_get_share_emails')
+#          res = self.app.get(url, auth=self.user.auth)
+#          result = res.json['result']
+#          assert_equal(result['emails'], [u.username for u in self.project.contributors
+#                                          if u != self.user])
+#          assert_equal(result['url'], utils.get_share_folder_uri(self.node_settings.folder))
+
+#      def test_onedrive_get_share_emails_returns_error_if_not_authorizer(self):
+#          contrib = AuthUserFactory()
+#          contrib.add_addon('onedrive')
+#          contrib.save()
+#          self.project.add_contributor(contrib, auth=Auth(self.user))
+#          self.project.save()
+#          url = self.project.api_url_for('onedrive_get_share_emails')
+#          # Non-authorizing contributor sends request
+#          res = self.app.get(url, auth=contrib.auth, expect_errors=True)
+#          assert_equal(res.status_code, httplib.FORBIDDEN)
+
+#      def test_onedrive_get_share_emails_requires_user_addon(self):
+#          # Node doesn't have auth
+#          self.node_settings.user_settings = None
+#          self.node_settings.save()
+#          url = self.project.api_url_for('onedrive_get_share_emails')
+#          # Non-authorizing contributor sends request
+#          res = self.app.get(url, auth=self.user.auth, expect_errors=True)
+#          assert_equal(res.status_code, httplib.BAD_REQUEST)
+
+
+#  class TestFilebrowserViews(OneDriveAddonTestCase):
+
+#      def test_onedrive_hgrid_data_contents(self):
+#          with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
+#              url = self.project.api_url_for(
+#                  'onedrive_hgrid_data_contents',
+#                  path=self.node_settings.folder,
+#              )
+#              res = self.app.get(url, auth=self.user.auth)
+#              contents = [x for x in mock_client.metadata('', list=True)['contents'] if x['is_dir']]
+#              assert_equal(len(res.json), len(contents))
+#              first = res.json[0]
+#              assert_in('kind', first)
+#              assert_equal(first['path'], contents[0]['path'])
+#
+#      def test_onedrive_hgrid_data_contents_if_folder_is_none_and_folders_only(self):
+#          with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
+#              self.node_settings.folder = None
+#              self.node_settings.save()
+#              url = self.project.api_url_for('onedrive_hgrid_data_contents', foldersOnly=True)
+#              res = self.app.get(url, auth=self.user.auth)
+#              contents = mock_client.metadata('', list=True)['contents']
+#              expected = [each for each in contents if each['is_dir']]
+#              assert_equal(len(res.json), len(expected))
+#
+#      def test_onedrive_hgrid_data_contents_folders_only(self):
+#          with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
+#              url = self.project.api_url_for('onedrive_hgrid_data_contents', foldersOnly=True)
+#              res = self.app.get(url, auth=self.user.auth)
+#              contents = mock_client.metadata('', list=True)['contents']
+#              expected = [each for each in contents if each['is_dir']]
+#              assert_equal(len(res.json), len(expected))
+
+#      @mock.patch('website.addons.onedrive.client.OneDriveClient.metadata')
+#      def test_onedrive_hgrid_data_contents_include_root(self, mock_metadata):
+#          with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
+#              url = self.project.api_url_for('onedrive_hgrid_data_contents', root=1)
+#
+#              res = self.app.get(url, auth=self.user.auth)
+#              contents = mock_client.metadata('', list=True)['contents']
+#              assert_equal(len(res.json), 1)
+#              assert_not_equal(len(res.json), len(contents))
+#              first_elem = res.json[0]
+#              assert_equal(first_elem['path'], '/')
 
 
 class TestRestrictions(OneDriveAddonTestCase):
@@ -426,28 +169,18 @@ def setUp(self):
         self.node_settings.folder = 'foo bar/bar'
         self.node_settings.save()
 
-    @mock.patch('website.addons.onedrivesdk.client.OneDriveClient.metadata')
-    def test_restricted_hgrid_data_contents(self, mock_metadata):
-        mock_metadata.return_value = mock_responses['metadata_list']
-
-        # tries to access a parent folder
-        url = self.project.api_url_for('onedrive_hgrid_data_contents',
-            path='foo bar')
-        res = self.app.get(url, auth=self.contrib.auth, expect_errors=True)
-        assert_equal(res.status_code, httplib.FORBIDDEN)
-
-    def test_restricted_config_contrib_no_addon(self):
-        url = self.project.api_url_for('onedrive_config_put')
-        res = self.app.put_json(url, {'selected': {'path': 'foo'}},
-            auth=self.contrib.auth, expect_errors=True)
-        assert_equal(res.status_code, httplib.BAD_REQUEST)
-
-    def test_restricted_config_contrib_not_owner(self):
-        # Contributor has onedrive auth, but is not the node authorizer
-        self.contrib.add_addon('onedrive')
-        self.contrib.save()
-
-        url = self.project.api_url_for('onedrive_config_put')
-        res = self.app.put_json(url, {'selected': {'path': 'foo'}},
-            auth=self.contrib.auth, expect_errors=True)
-        assert_equal(res.status_code, httplib.FORBIDDEN)
+#      def test_restricted_config_contrib_no_addon(self):
+#          url = self.project.api_url_for('onedrive_config_put')
+#          res = self.app.put_json(url, {'selected': {'path': 'foo'}},
+#              auth=self.contrib.auth, expect_errors=True)
+#          assert_equal(res.status_code, httplib.BAD_REQUEST)
+
+#      def test_restricted_config_contrib_not_owner(self):
+#          # Contributor has onedrive auth, but is not the node authorizer
+#          self.contrib.add_addon('onedrive')
+#          self.contrib.save()
+#
+#          url = self.project.api_url_for('onedrive_config_put')
+#          res = self.app.put_json(url, {'selected': {'path': 'foo'}},
+#              auth=self.contrib.auth, expect_errors=True)
+#          assert_equal(res.status_code, httplib.FORBIDDEN)
diff --git a/website/addons/onedrive/tests/test_webtests.py b/website/addons/onedrive/tests/test_webtests.py
deleted file mode 100644
index 66ad4d2c383..00000000000
--- a/website/addons/onedrive/tests/test_webtests.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-from nose.tools import *  # noqa (PEP8 asserts)
-
-from website.util import api_url_for, web_url_for
-from tests.base import OsfTestCase
-from tests.factories import AuthUserFactory
-
-
-class TestOneDriveIntegration(OsfTestCase):
-
-    def setUp(self):
-        super(TestOneDriveIntegration, self).setUp()
-        self.user = AuthUserFactory()
-        # User is logged in
-        self.app.authenticate(*self.user.auth)
-
-    def test_cant_start_oauth_if_already_authorized(self):
-        # User already has onedrive authorized
-        self.user.add_addon('onedrive')
-        self.user.save()
-        settings = self.user.get_addon('onedrive')
-        settings.access_token = 'abc123foobarbaz'
-        settings.save()
-        assert_true(self.user.get_addon('onedrive').has_auth)
-        # Tries to start oauth again
-        url = api_url_for('onedrive_oauth_start_user')
-        res = self.app.get(url).follow()
-
-        # Is redirected back to settings page
-        assert_equal(
-            res.request.path,
-            web_url_for('user_addons')
-        )
diff --git a/website/addons/onedrive/utils.py b/website/addons/onedrive/utils.py
index 3b572e3ed61..ca58fad0188 100644
--- a/website/addons/onedrive/utils.py
+++ b/website/addons/onedrive/utils.py
@@ -1,22 +1,13 @@
 # -*- coding: utf-8 -*-
-import time
 import logging
-from datetime import datetime
-
-# from OneDriveSDK
-import onedrivesdk
-from onedrivesdk.helpers import GetAuthCodeServer
 
 from website.util import rubeus
 
-from website.addons.onedrive import settings
-
-
 logger = logging.getLogger(__name__)
 
 
-class OnedriveNodeLogger(object):
-    """Helper class for adding correctly-formatted Onedrive logs to nodes.
+class OneDriveNodeLogger(object):
+    """Helper class for adding correctly-formatted OneDrive logs to nodes.
 
     Usage: ::
 
@@ -24,7 +15,7 @@ class OnedriveNodeLogger(object):
 
         node = ...
         auth = ...
-        nodelogger = OnedriveNodeLogger(node, auth)
+        nodelogger = OneDriveNodeLogger(node, auth)
         nodelogger.log(NodeLog.FILE_REMOVED, save=True)
 
 
diff --git a/website/addons/onedrive/views.py b/website/addons/onedrive/views.py
index 742177019af..aeed98c5ecd 100644
--- a/website/addons/onedrive/views.py
+++ b/website/addons/onedrive/views.py
@@ -1,16 +1,11 @@
 """Views for the node settings page."""
 # -*- coding: utf-8 -*-
-import os
 import httplib as http
 
 import logging
 
 from flask import request
-# from OneDriveSDK
-# import onedrivesdk
-# from onedrivesdk.helpers import GetAuthCodeServer
 from website.addons.onedrive.client import OneDriveClient
-from urllib3.exceptions import MaxRetryError
 
 from framework.exceptions import HTTPError, PermissionsError
 from framework.auth.decorators import must_be_logged_in
@@ -23,7 +18,7 @@
     must_have_permission, must_not_be_registration,
 )
 
-from website.addons.onedrive.serializer import OnedriveSerializer
+from website.addons.onedrive.serializer import OneDriveSerializer
 
 logger = logging.getLogger(__name__)
 
@@ -31,8 +26,8 @@
 
 @must_be_logged_in
 def onedrive_get_user_settings(auth):
-    """ Returns the list of all of the current user's authorized Onedrive accounts """
-    serializer = OnedriveSerializer(user_settings=auth.user.get_addon('onedrive'))
+    """ Returns the list of all of the current user's authorized OneDrive accounts """
+    serializer = OneDriveSerializer(user_settings=auth.user.get_addon('onedrive'))
     return serializer.serialized_user_settings
 
 
@@ -41,7 +36,7 @@ def onedrive_get_user_settings(auth):
 def onedrive_get_config(node_addon, auth, **kwargs):
     """API that returns the serialized node settings."""
     return {
-        'result': OnedriveSerializer().serialize_settings(node_addon, auth.user),
+        'result': OneDriveSerializer().serialize_settings(node_addon, auth.user),
     }
 
 
@@ -53,18 +48,20 @@ def onedrive_get_config(node_addon, auth, **kwargs):
 def onedrive_set_config(node_addon, user_addon, auth, **kwargs):
     """View for changing a node's linked onedrive folder."""
     folder = request.json.get('selected')
-    serializer = OnedriveSerializer(node_settings=node_addon)
+    serializer = OneDriveSerializer(node_settings=node_addon)
 
-    uid = folder['id']
-    path = folder['path']
+    logger.debug('folder::' + repr(folder))
+    logger.debug('serializer::' + repr(serializer))
 
-    node_addon.set_folder(uid, auth=auth)
+    name = folder['name']
+
+    node_addon.set_folder(folder, auth=auth)
 
     return {
         'result': {
             'folder': {
-                'name': path.replace('All Files', '') if path != 'All Files' else '/ (Full Onedrive)',
-                'path': path,
+                'name': name,
+                'path': name,
             },
             'urls': serializer.addon_serialized_urls,
         },
@@ -94,7 +91,7 @@ def onedrive_add_user_auth(auth, node_addon, user_addon, **kwargs):
     node_addon.save()
 
     return {
-        'result': OnedriveSerializer().serialize_settings(node_addon, auth.user),
+        'result': OneDriveSerializer().serialize_settings(node_addon, auth.user),
         'message': 'Successfully imported access token from profile.',
     }
 
@@ -113,7 +110,7 @@ def onedrive_remove_user_auth(auth, node_addon, **kwargs):
 def onedrive_get_share_emails(auth, user_addon, node_addon, **kwargs):
     """Return a list of emails of the contributors on a project.
 
-    The current user MUST be the user who authenticated Onedrive for the node.
+    The current user MUST be the user who authenticated OneDrive for the node.
     """
     if not node_addon.user_settings:
         raise HTTPError(http.BAD_REQUEST)
@@ -135,21 +132,18 @@ def onedrive_get_share_emails(auth, user_addon, node_addon, **kwargs):
 @must_have_addon('onedrive', 'node')
 @must_be_addon_authorizer('onedrive')
 def onedrive_folder_list(node_addon, **kwargs):
-    """Returns a list of folders in Onedrive"""
+    """Returns a list of folders in OneDrive"""
     if not node_addon.has_auth:
         raise HTTPError(http.FORBIDDEN)
 
     node = node_addon.owner
     folder_id = request.args.get('folderId')
-    logger.debug('oauth_provider::' +  repr(node_addon.oauth_provider))
-    logger.debug('fetch_access_token::' +  repr(node_addon))
-    logger.debug('node_addon.external_account::' +  repr(node_addon.external_account))
-    logger.debug('node_addon.external_account::oauth_key' +  repr(node_addon.external_account.oauth_key))
-#     logger.debug('node_addon.external_account::access_token' +  repr(node_addon.external_account.access_token)) #exception - no access token
-    logger.debug('node_addon.external_account::expires_at' +  repr(node_addon.external_account.refresh_token)) 
-    logger.debug('node_addon.external_account::expires_at' +  repr(node_addon.external_account.expires_at)) #
-#     raise ValueError('node_addon.external_account::oauth_key' +  repr(node_addon.external_account.oauth_key))
-    
+    logger.debug('oauth_provider::' + repr(node_addon.oauth_provider))
+    logger.debug('fetch_access_token::' + repr(node_addon))
+    logger.debug('node_addon.external_account::' + repr(node_addon.external_account))
+    logger.debug('node_addon.external_account::oauth_key' + repr(node_addon.external_account.oauth_key))
+    logger.debug('node_addon.external_account::expires_at' + repr(node_addon.external_account.refresh_token))
+    logger.debug('node_addon.external_account::expires_at' + repr(node_addon.external_account.expires_at))
 
     if folder_id is None:
         return [{
@@ -157,48 +151,21 @@ def onedrive_folder_list(node_addon, **kwargs):
             'path': 'All Files',
             'addon': 'onedrive',
             'kind': 'folder',
-            'name': '/ (Full Onedrive)',
+            'name': '/ (Full OneDrive)',
             'urls': {
                 'folders': node.api_url_for('onedrive_folder_list', folderId=0),
             }
         }]
 
-#    TODO: must refresh token https://dev.onedrive.com/auth/msa_oauth.htm#step-3-get-a-new-access-token-or-refresh-token
-    
+    if folder_id == '0':
+        folder_id = 'root'
+
     access_token = node_addon.fetch_access_token()
-    logger.debug('access_token::' +  repr(access_token))
-    
-    oneDriveClient = OneDriveClient(access_token)#node_addon.external_account.refresh_token)
-    items = oneDriveClient.folders()
-    logger.debug('folders::' +  repr(items))
-    
-#     return folders
-    
-#     raise ValueError('made it past onedrive api call::' + repr(folders))
-    
-#    try:
-#        refresh_oauth_key(node_addon.external_account)
-#     client = OnedriveClient(node_addon.external_account.oauth_key)
-#    except OnedriveClientException:
-#        raise HTTPError(http.FORBIDDEN)
-
-#    try:
-#        metadata = client.get_folder(folder_id)
-#    except OnedriveClientException:
-#        raise HTTPError(http.NOT_FOUND)
-#    except MaxRetryError:
-#        raise HTTPError(http.BAD_REQUEST)
-
-    # Raise error if folder was deleted
-#     if metadata.get('is_deleted'):
-#         raise HTTPError(http.NOT_FOUND)
-
-#     folder_path = '/'.join(
-#         [
-#             x['name']
-#             for x in items['path_collection']['entries']
-#         ] + [items['name']]
-#     )
+    logger.debug('access_token::' + repr(access_token))
+
+    oneDriveClient = OneDriveClient(access_token)
+    items = oneDriveClient.folders(folder_id)
+    logger.debug('folders::' + repr(items))
 
     return [
         {
@@ -206,11 +173,11 @@ def onedrive_folder_list(node_addon, **kwargs):
             'kind': 'folder',
             'id': item['id'],
             'name': item['name'],
-            'path': item['name'], #os.path.join(folder_path, item['name']),
+            'path': item['name'],
             'urls': {
                 'folders': node.api_url_for('onedrive_folder_list', folderId=item['id']),
             }
         }
         for item in items
-        #if item['id'] == 'folder' #TODO ADD FOLDER FILTER
+
     ]
diff --git a/website/static/storageAddons.json b/website/static/storageAddons.json
index 795349c000b..cd6880f4c38 100644
--- a/website/static/storageAddons.json
+++ b/website/static/storageAddons.json
@@ -42,5 +42,9 @@
     "s3": {
         "fullName": "Amazon S3",
         "externalView": false
+    },
+    "onedrive": {
+        "fullName": "OneDrive",
+        "externalView": false
     }
 }

From 8234e94a22f03ed057b32d0f1f3b2d1c150b1e48 Mon Sep 17 00:00:00 2001
From: Alexandr Melnikov <alexandr.melnikov@dev-pro.net>
Date: Mon, 28 Nov 2016 11:18:50 +0200
Subject: [PATCH 121/192] Initial OneDrive commit   - 3/3

---
 website/addons/onedrive/model.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/website/addons/onedrive/model.py b/website/addons/onedrive/model.py
index 525a390a7a8..f8f462726cb 100644
--- a/website/addons/onedrive/model.py
+++ b/website/addons/onedrive/model.py
@@ -205,7 +205,6 @@ def serialize_waterbutler_credentials(self):
 
     def serialize_waterbutler_settings(self):
         logger.debug("in serialize_waterbutler_settings:: {}".format(repr(self)))
-        logger.debug('folder_id::{}'.format(self.folder_id))
         if self.folder_id is None:
             raise exceptions.AddonError('Folder is not configured')
         return {'folder': self.onedrive_id}

From 1acd9b8eb432508e54e0700657a270d12d7cbdae Mon Sep 17 00:00:00 2001
From: Fitz Elliott <fitz@cos.io>
Date: Fri, 3 Feb 2017 12:28:41 -0500
Subject: [PATCH 122/192] Update initial OneDrive

 * these are already handled in the base class (ExternalAccount)
---
 Dockerfile                                    |   2 +
 addons.json                                   |  12 +-
 api/base/settings/defaults.py                 |   2 +-
 api/base/views.py                             |   1 +
 framework/addons/data/addons.json             |  14 +-
 website/addons/onedrive/client.py             |  99 +-
 website/addons/onedrive/model.py              | 262 +++---
 website/addons/onedrive/routes.py             |  55 +-
 website/addons/onedrive/serializer.py         |  99 +-
 website/addons/onedrive/settings/defaults.py  |   6 +-
 website/addons/onedrive/static/files.js       |   1 +
 website/addons/onedrive/static/node-cfg.js    |   5 +-
 website/addons/onedrive/static/onedrive.css   |  39 -
 .../onedriveAnonymousLogActionList.json       |  10 +
 .../onedrive/static/onedriveFangornConfig.js  |  75 ++
 .../static/onedriveLogActionList.json         |  10 +
 .../onedrive/templates/log_templates.mako     |  53 --
 website/addons/onedrive/tests/test_client.py  |  10 -
 website/addons/onedrive/tests/test_models.py  | 183 ++--
 .../addons/onedrive/tests/test_serializer.py  |  23 +
 website/addons/onedrive/tests/test_views.py   | 210 +----
 website/addons/onedrive/tests/utils.py        | 880 +++++++++++++++---
 website/addons/onedrive/utils.py              |  14 +-
 website/addons/onedrive/views.py              | 192 +---
 website/notifications/constants.py            |   1 +
 website/static/js/addonSettings.js            |   2 +
 website/static/js/fangorn.js                  |  13 +-
 website/static/js/filepage/index.js           |   4 +-
 website/static/js/logTextParser.js            |  11 +
 website/static/js/osfLanguage.js              |   7 +
 website/static/storageAddons.json             |   2 +-
 31 files changed, 1316 insertions(+), 981 deletions(-)
 create mode 100644 website/addons/onedrive/static/files.js
 delete mode 100644 website/addons/onedrive/static/onedrive.css
 create mode 100644 website/addons/onedrive/static/onedriveAnonymousLogActionList.json
 create mode 100644 website/addons/onedrive/static/onedriveFangornConfig.js
 create mode 100644 website/addons/onedrive/static/onedriveLogActionList.json
 delete mode 100644 website/addons/onedrive/templates/log_templates.mako
 create mode 100644 website/addons/onedrive/tests/test_serializer.py

diff --git a/Dockerfile b/Dockerfile
index facd96d7d01..bee4604a6e3 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -106,6 +106,7 @@ COPY ./addons/github/requirements.txt ./addons/github/
 COPY ./addons/gitlab/requirements.txt ./addons/gitlab/
 #COPY ./addons/googledrive/requirements.txt ./addons/googledrive/
 COPY ./addons/mendeley/requirements.txt ./addons/mendeley/
+COPY ./addons/onedrive/requirements.txt /code/addons/onedrive/
 #COPY ./addons/osfstorage/requirements.txt ./addons/osfstorage/
 COPY ./addons/owncloud/requirements.txt ./addons/owncloud/
 COPY ./addons/s3/requirements.txt ./addons/s3/
@@ -156,6 +157,7 @@ COPY ./addons/github/static/ ./addons/github/static/
 COPY ./addons/gitlab/static/ ./addons/gitlab/static/
 COPY ./addons/googledrive/static/ ./addons/googledrive/static/
 COPY ./addons/mendeley/static/ ./addons/mendeley/static/
+COPY ./addons/onedrive/static/ /code/addons/onedrive/static/
 COPY ./addons/osfstorage/static/ ./addons/osfstorage/static/
 COPY ./addons/owncloud/static/ ./addons/owncloud/static/
 COPY ./addons/s3/static/ ./addons/s3/static/
diff --git a/addons.json b/addons.json
index 8617f88f69b..9ea6870884b 100644
--- a/addons.json
+++ b/addons.json
@@ -45,11 +45,13 @@
         "googledrive",
         "owncloud",
         "s3",
-        "bitbucket"
+        "bitbucket",
+        "onedrive"
     ],
     "addons_based_on_ids": [
         "osfstorage",
-        "box"
+        "box",
+        "onedrive"
     ],
     "addons_description": {
         "box": "Box is a file storage add-on. Connect your Box account to an OSF project to interact with files hosted on Box via the OSF.",
@@ -67,7 +69,8 @@
         "twofactor": "Two-factor authentication is a security add-on. By using two-factor authentication, you'll protect your OSF account with both your password and your mobile phone.",
         "wiki": "The wiki is a versatile communication tool. Wikis can be used to explain the main points of your project and can contain information like lab notes or contact information.",
         "googledrive": "Google Drive is a file storage add-on. Connect your Google Drive account to an OSF project to interact with files hosted on Google Drive via the OSF.",
-        "bitbucket": "Bitbucket is a web-based Git repository hosting service. Connect your Bitbucket repo to your OSF project to share your code alongside other materials in your OSF project."
+        "bitbucket": "Bitbucket is a web-based Git repository hosting service. Connect your Bitbucket repo to your OSF project to share your code alongside other materials in your OSF project.",
+        "onedrive": "Microsoft OneDrive is a file storage add-on. Connect your Microsoft OneDrive account to an OSF project to interact with files hosted on Microsoft OneDrive via the OSF."
     },
     "addons_url": {
         "box": "http://www.box.com",
@@ -81,6 +84,7 @@
         "zotero": "http://www.zotero.org",
         "s3": "https://aws.amazon.com/s3/",
         "googledrive": "https://drive.google.com",
-        "bitbucket": "https://bitbucket.org/"
+        "bitbucket": "https://bitbucket.org/",
+        "onedrive": "https://onedrive.live.com"
     }
 }
diff --git a/api/base/settings/defaults.py b/api/base/settings/defaults.py
index 7e2883e3ea5..9b6674abd8a 100644
--- a/api/base/settings/defaults.py
+++ b/api/base/settings/defaults.py
@@ -258,7 +258,7 @@
 VARNISH_SERVERS = osf_settings.VARNISH_SERVERS
 ESI_MEDIA_TYPES = osf_settings.ESI_MEDIA_TYPES
 
-ADDONS_FOLDER_CONFIGURABLE = ['box', 'dropbox', 's3', 'googledrive', 'figshare', 'owncloud']
+ADDONS_FOLDER_CONFIGURABLE = ['box', 'dropbox', 's3', 'googledrive', 'figshare', 'owncloud', 'onedrive']
 ADDONS_OAUTH = ADDONS_FOLDER_CONFIGURABLE + ['dataverse', 'github', 'bitbucket', 'gitlab', 'mendeley', 'zotero', 'forward']
 
 BYPASS_THROTTLE_TOKEN = 'test-token'
diff --git a/api/base/views.py b/api/base/views.py
index a9942052321..deb91356cdb 100644
--- a/api/base/views.py
+++ b/api/base/views.py
@@ -741,6 +741,7 @@ def root(request, format=None, **kwargs):
         github       GitHub
         gitlab       GitLab
         googledrive  Google Drive
+        onedrive     Microsoft OneDrive
         osfstorage   OSF Storage
         s3           Amazon S3
 
diff --git a/framework/addons/data/addons.json b/framework/addons/data/addons.json
index 5a4b02c3c42..fa781f1f12c 100644
--- a/framework/addons/data/addons.json
+++ b/framework/addons/data/addons.json
@@ -157,19 +157,19 @@
             },
             "View / download file versions": {
                 "status": "full",
-                "text": "OneDrive files and their versions can be viewed/downloaded via OSF."
+                "text": "OneDrive files and their versions can be viewed/downloaded via OSF.  OneNote files are unexportable and cannot be downloaded or viewed."
             },
             "Add / update files": {
-                "status": "full",
-                "text": "Adding/updating files in the project via OSF will be reflected in OneDrive."
+                "status": "none",
+                "text": "The OneDrive add-on is read-only."
             },
             "Delete files": {
-                "status": "full",
-                "text": "Files deleted via OSF will be deleted in OneDrive."
+                "status": "none",
+                "text": "The OneDrive add-on is read-only."
             },
             "Logs": {
                 "status": "partial",
-                "text": "OSF keeps track of changes you make to your OneDrive content through OSF, but not for changes made using OneDrive directly."
+                "text": "OSF keeps track of changes you make to your OneDrive add-on configuration.  It does not track changes to OneDrive content."
             },
             "Forking": {
                 "status": "partial",
@@ -177,7 +177,7 @@
             },
             "Registering": {
                 "status": "partial",
-                "text": "OneDrive content will be registered, but version history will not be copied to the registration."
+                "text": "OneDrive content will be registered, but version history will not be copied to the registration. OneNote files are unexportable and will not be archived."
             }
         },
         "Dataverse": {
diff --git a/website/addons/onedrive/client.py b/website/addons/onedrive/client.py
index 09ff586a355..74b9b43a89f 100644
--- a/website/addons/onedrive/client.py
+++ b/website/addons/onedrive/client.py
@@ -1,55 +1,9 @@
 # -*- coding: utf-8 -*-
-import logging
-
-#import requests #TODO: remove this after determining onedrive connection issues w/make_request
-
-from requests_oauthlib import OAuth2Session
-from oauthlib.oauth2 import InvalidGrantError
-
 from framework.exceptions import HTTPError
 
 from website.util.client import BaseClient
-from website.addons.base import exceptions
 from website.addons.onedrive import settings
-
-logger = logging.getLogger(__name__)
-
-
-class OneDriveAuthClient(BaseClient):
-
-    def refresh(self, access_token, refresh_token):
-        client = OAuth2Session(
-            settings.ONEDRIVE_KEY,
-            token={
-                'access_token': access_token,
-                'refresh_token': refresh_token,
-                'token_type': 'Bearer',
-                'expires_in': '-30',
-            }
-        )
-
-        extra = {
-            'client_id': settings.ONEDRIVE_KEY,
-            'client_secret': settings.ONEDRIVE_SECRET,
-        }
-
-        try:
-            return client.refresh_token(
-                self._build_url(settings.ONEDRIVE_OAUTH_TOKEN_ENDPOINT),
-                # ('love')
-                **extra
-            )
-        except InvalidGrantError:
-            raise exceptions.InvalidAuthError()
-
-    def user_info(self, access_token):
-        return self._make_request(
-            'GET',
-            self._build_url(settings.MSLIVE_API_URL, 'me'),
-            params={'access_token': access_token},
-            expects=(200, ),
-            throws=HTTPError(401)
-        ).json()
+from website.addons.onedrive.settings import DEFAULT_ROOT_ID
 
 
 class OneDriveClient(BaseClient):
@@ -63,30 +17,47 @@ def _default_headers(self):
             return {'Authorization': 'bearer {}'.format(self.access_token)}
         return {}
 
-    def about(self):
-        return self._make_request(
-            'GET',
-            self._build_url(settings.ONEDRIVE_API_URL, 'drive', 'v2', 'about', ),
-            expects=(200, ),
-            throws=HTTPError(401)
-        ).json()
+    def folders(self, folder_id=None):
+        """Get list of subfolders of the folder with id ``folder_id``
 
-    def folders(self, folder_id='root/'):
+        API Docs:  https://dev.onedrive.com/items/list.htm
 
-        query = 'folder ne null'
+        :param str folder_id: the id of the parent folder. defaults to ``None``
+        :rtype: list
+        :return: a list of metadata objects representing the child folders of ``folder_id``
+        """
 
-        if folder_id != 'root':
-            folder_id = "items/{}".format(folder_id)
+        if folder_id is None or folder_id == DEFAULT_ROOT_ID:
+            url = self._build_url(settings.ONEDRIVE_API_URL, 'drive', 'root', 'children')
+        else:
+            url = self._build_url(settings.ONEDRIVE_API_URL, 'drive', 'items',
+                                  folder_id, 'children')
 
-        logger.debug('folders::made it1')
-        logger.debug('URLs:' + self._build_url(settings.ONEDRIVE_API_URL, 'drive/', folder_id, '/children/'))
         res = self._make_request(
             'GET',
-            self._build_url(settings.ONEDRIVE_API_URL, 'drive/', folder_id, '/children/'),
-            params={'filter': query},
+            url,
+            params={'filter': 'folder ne null'},
             expects=(200, ),
             throws=HTTPError(401)
         )
-        logger.debug('folder_id::' + repr(folder_id))
-        logger.debug('res::' + repr(res))
         return res.json()['value']
+
+    def user_info_for_token(self, access_token):
+        """Given an access token, return information about the token's owner.
+
+        API Docs::
+
+        https://msdn.microsoft.com/en-us/library/hh826533.aspx#requesting_info_using_rest
+        https://msdn.microsoft.com/en-us/library/hh243648.aspx#user
+
+        :param str access_token: a valid Microsoft Live access token
+        :rtype: dict
+        :return: a dict containing metadata about the token's owner.
+        """
+        return self._make_request(
+            'GET',
+            self._build_url(settings.MSLIVE_API_URL, 'me'),
+            params={'access_token': access_token},
+            expects=(200, ),
+            throws=HTTPError(401)
+        ).json()
diff --git a/website/addons/onedrive/model.py b/website/addons/onedrive/model.py
index f8f462726cb..0244882313e 100644
--- a/website/addons/onedrive/model.py
+++ b/website/addons/onedrive/model.py
@@ -1,30 +1,28 @@
 # -*- coding: utf-8 -*-
+import os
+import urllib
 import logging
 
-from datetime import datetime
-
 from modularodm import fields
 
 from framework.auth import Auth
+from framework.exceptions import HTTPError
+from website.oauth.models import ExternalProvider
 
+from website.util import api_v2_url
 from website.addons.base import exceptions
-from website.addons.base import AddonOAuthUserSettingsBase, AddonOAuthNodeSettingsBase
 from website.addons.base import StorageAddonBase
+from website.addons.base import AddonOAuthUserSettingsBase, AddonOAuthNodeSettingsBase
 
 from website.addons.onedrive import settings
-from website.addons.onedrive.utils import OneDriveNodeLogger
-from website.addons.onedrive.serializer import OneDriveSerializer
-from website.addons.onedrive.client import OneDriveAuthClient
 from website.addons.onedrive.client import OneDriveClient
-
-from website.oauth.models import ExternalProvider
+from website.addons.onedrive.settings import DEFAULT_ROOT_ID
+from website.addons.onedrive.serializer import OneDriveSerializer
 
 logger = logging.getLogger(__name__)
 
-logging.getLogger('onedrive1').setLevel(logging.WARNING)
-
 
-class OneDrive(ExternalProvider):
+class OneDriveProvider(ExternalProvider):
     name = 'onedrive'
     short_name = 'onedrive'
 
@@ -36,98 +34,68 @@ class OneDrive(ExternalProvider):
     auto_refresh_url = settings.ONEDRIVE_OAUTH_TOKEN_ENDPOINT
     default_scopes = ['wl.basic wl.signin onedrive.readwrite wl.offline_access']
 
-    _auth_client = OneDriveAuthClient()
+    refresh_time = settings.REFRESH_TIME
+
     _drive_client = OneDriveClient()
 
     def handle_callback(self, response):
         """View called when the Oauth flow is completed. Adds a new OneDriveUserSettings
         record to the user and saves the user's access token and account info.
         """
-        userInfo = self._auth_client.user_info(response['access_token'])
-        #  userInfo = userInfoRequest.json()
-        logger.debug("userInfo:: %s", repr(userInfo))
+        user_info = self._drive_client.user_info_for_token(response['access_token'])
 
         return {
-            'provider_id': userInfo['id'],
-            'display_name': userInfo['name'],
-            'profile_url': userInfo['link']
+            'provider_id': user_info['id'],
+            'display_name': user_info['name'],
+            'profile_url': user_info['link']
         }
 
-    def _refresh_token(self, access_token, refresh_token):
-        """ Handles the actual request to refresh tokens
-
-        :param str access_token: Access token (oauth key) associated with this account
-        :param str refresh_token: Refresh token used to request a new access token
-        :return dict token: New set of tokens
-        """
-        client = self._auth_client
-        if refresh_token:
-            token = client.refresh(access_token, refresh_token)
-            return token
-        else:
-            return False
-
     def fetch_access_token(self, force_refresh=False):
-        self.refresh_access_token(force=force_refresh)
+        self.refresh_oauth_key(force=force_refresh)
         return self.account.oauth_key
 
-    def refresh_access_token(self, force=False):
-        """ If the token has expired or will soon, handles refreshing and the storage of new tokens
-
-        :param bool force: Indicates whether or not to force the refreshing process, for the purpose of ensuring that authorization has not been unexpectedly removed.
-        """
-        if self._needs_refresh() or force:
-            token = self._refresh_token(self.account.oauth_key, self.account.refresh_token)
-            self.account.oauth_key = token['access_token']
-            self.account.refresh_token = token['refresh_token']
-            self.account.expires_at = datetime.utcfromtimestamp(token['expires_at'])
-            self.account.save()
-
-    def _needs_refresh(self):
-        if self.account.expires_at is None:
-            return False
-        return (self.account.expires_at - datetime.utcnow()).total_seconds() < settings.REFRESH_TIME
 
 class OneDriveUserSettings(AddonOAuthUserSettingsBase):
     """Stores user-specific onedrive information
     """
-    oauth_provider = OneDrive
+    oauth_provider = OneDriveProvider
     serializer = OneDriveSerializer
 
 
 class OneDriveNodeSettings(StorageAddonBase, AddonOAuthNodeSettingsBase):
+    """Individual OneDrive settings for a particular node.
+
+    QUIRKS::
+
+    * OneDrive personal and OneDrive for Business users will have only one drive
+      available.  This addon is built around this assumption.  Users using this with
+      a SharePoint team site will have several other drives available, but this use
+      case is not supported or tested.  See:
+      https://dev.onedrive.com/drives/list-drives.htm#remarks
 
-    oauth_provider = OneDrive
+    * OneDrive is an ID-based provider like Box. The identifier for the root folder
+      is defined in the settings.
+
+    """
+
+    oauth_provider = OneDriveProvider
     serializer = OneDriveSerializer
 
     foreign_user_settings = fields.ForeignField(
         'onedriveusersettings', backref='authorized'
     )
     folder_id = fields.StringField(default=None)
-    onedrive_id = fields.StringField(default=None)
-    folder_name = fields.StringField()
     folder_path = fields.StringField()
 
-    _folder_data = None
-
     _api = None
 
     @property
     def api(self):
         """authenticated ExternalProvider instance"""
         if self._api is None:
-            self._api = OneDrive(self.external_account)
+            self._api = OneDriveProvider(self.external_account)
         return self._api
 
-    @property
-    def display_name(self):
-        return '{0}: {1}'.format(self.config.full_name, self.folder_name)
-
-    @property
-    def has_auth(self):
-        """Whether an access token is associated with this node."""
-        return bool(self.user_settings and self.user_settings.has_auth)
-
     @property
     def complete(self):
         return bool(self.has_auth and self.user_settings.verify_oauth_access(
@@ -135,29 +103,97 @@ def complete(self):
             external_account=self.external_account,
         ))
 
-    def fetch_folder_name(self):
-        self._update_folder_data()
-        return self.folder_name.replace('All Files', '/ (Full OneDrive)')
+    @property
+    def folder_name(self):
+        if not self.folder_id:
+            return None
 
-    def fetch_full_folder_path(self):
-        self._update_folder_data()
-        return self.folder_path
+        if self.folder_id != DEFAULT_ROOT_ID:
+            # `urllib` does not properly handle unicode.
+            # encode input to `str`, decode output back to `unicode`
+            return urllib.unquote(os.path.split(self.folder_path)[1].encode('utf-8')).decode('utf-8')
+        else:
+            return '/ (Full OneDrive)'
 
-    def _update_folder_data(self):
-        if self.folder_id is None:
-            return None
+    def fetch_folder_name(self):
+        """Required.  Called by base views"""
+        return self.folder_name
 
-        logger.debug('self::' + repr(self))
-        #request.json.get('selected')
+    def clear_settings(self):
+        self.folder_id = None
+        self.folder_path = None
+
+    def get_folders(self, folder_id=None, **kwargs):
+        """Get list of folders underneath the folder with id ``folder_id``.  If
+        ``folder_id`` is ``None``, return a single entry representing the root folder.
+        In OneDrive, the root folder has a unique id, so fetch that and return it.
+
+        This method returns a list of dicts with metadata about each folder under ``folder_id``.
+        These dicts have the following properties::
+
+            {
+                'addon': 'onedrive',          # short name of the addon
+                'id': folder_id,              # id of the folder.  root may need special casing
+                'path': '/',                  # human-readable path of the folder
+                'kind': 'folder',             # always 'folder'
+                'name': '/ (Full OneDrive)',  # human readable name of the folder. root may need special casing
+                'urls': {                     # urls to fetch information about the folder
+                    'folders': api_v2_url(    # url to get subfolders of this folder.
+                        'nodes/{}/addons/onedrive/folders/'.format(self.owner._id),
+                         params={'id': folder_id}
+                    ),
+                }
+            }
+
+        Some providers include additional information::
+
+        * figshare includes ``permissions``, ``hasChildren``
+
+        * googledrive includes ``urls.fetch``
+
+        :param str folder_id: the id of the folder to fetch subfolders of. Defaults to ``None``
+        :rtype: list
+        :return: a list of dicts with metadata about the subfolder of ``folder_id``.
+        """
 
-        if not self._folder_data:
-            self.path = self.folder_name
-            self.save()
+        if folder_id is None:
+            return [{
+                'id': DEFAULT_ROOT_ID,
+                'path': '/',
+                'addon': 'onedrive',
+                'kind': 'folder',
+                'name': '/ (Full OneDrive)',
+                'urls': {
+                    'folders': api_v2_url('nodes/{}/addons/onedrive/folders/'.format(self.owner._id),
+                                          params={'id': DEFAULT_ROOT_ID}),
+                }
+            }]
+
+        try:
+            access_token = self.fetch_access_token()
+        except exceptions.InvalidAuthError:
+            raise HTTPError(403)
+
+        oneDriveClient = OneDriveClient(access_token)
+        items = oneDriveClient.folders(folder_id)
+        return [
+            {
+                'addon': 'onedrive',
+                'kind': 'folder',
+                'id': item['id'],
+                'name': item['name'],
+                'path': item['name'],
+                'urls': {
+                    'folders': api_v2_url('nodes/{}/addons/onedrive/folders/'.format(self.owner._id),
+                                          params={'id': item['id']}),
+                }
+            }
+            for item in items
+        ]
 
     def set_folder(self, folder, auth):
-        self.folder_id = folder['name']
-        self.onedrive_id = folder['id']
-        self.folder_name = folder['name']
+        self.folder_id = folder['id']
+        self.folder_path = folder['path']
         self.save()
 
         if not self.complete:
@@ -168,59 +204,62 @@ def set_folder(self, folder, auth):
             )
             self.user_settings.save()
 
-        # Add log to node
-        nodelogger = OneDriveNodeLogger(node=self.owner, auth=auth)  # AddonOAuthNodeSettingsBase.nodelogger(self)
-        nodelogger.log(action="folder_selected", save=True)
-
-    def set_user_auth(self, user_settings):
-        """Import a user's OneDrive authentication and create a NodeLog.
+        self.nodelogger.log(action='folder_selected', save=True)
 
-        :param OneDriveUserSettings user_settings: The user settings to link.
-        """
-        self.user_settings = user_settings
-        nodelogger = OneDriveNodeLogger(node=self.owner, auth=Auth(user_settings.owner))
-        nodelogger.log(action="node_authorized", save=True)
+    @property
+    def selected_folder_name(self):
+        if self.folder_id is None:
+            return ''
+        elif self.folder_id == DEFAULT_ROOT_ID:
+            return '/ (Full OneDrive)'
+        else:
+            return self.folder_name
 
-    def deauthorize(self, auth=None, add_log=True):
+    def deauthorize(self, auth=None, add_log=True, save=False):
         """Remove user authorization from this node and log the event."""
-        node = self.owner
 
         if add_log:
             extra = {'folder_id': self.folder_id}
-            nodelogger = OneDriveNodeLogger(node=node, auth=auth)
-            nodelogger.log(action="node_deauthorized", extra=extra, save=True)
+            self.nodelogger.log(action='node_deauthorized', extra=extra, save=True)
 
-        self.folder_id = None
-        self._update_folder_data()
-        self.user_settings = None
+        self.clear_settings()
         self.clear_auth()
 
-        self.save()
+        if save:
+            self.save()
 
     def serialize_waterbutler_credentials(self):
-        logger.debug("in serialize_waterbutler_credentials:: %s", repr(self))
         if not self.has_auth:
             raise exceptions.AddonError('Addon is not authorized')
         return {'token': self.fetch_access_token()}
 
     def serialize_waterbutler_settings(self):
-        logger.debug("in serialize_waterbutler_settings:: {}".format(repr(self)))
         if self.folder_id is None:
             raise exceptions.AddonError('Folder is not configured')
-        return {'folder': self.onedrive_id}
+        return {'folder': self.folder_id}
 
     def create_waterbutler_log(self, auth, action, metadata):
         self.owner.add_log(
             'onedrive_{0}'.format(action),
             auth=auth,
             params={
-                'path': metadata['materialized'],
+                'path': metadata['path'],
                 'project': self.owner.parent_id,
                 'node': self.owner._id,
-                'folder': self.folder_id,
+                'folder': self.folder_path,
                 'urls': {
-                    'view': self.owner.web_url_for('addon_view_or_download_file', provider='onedrive', action='view', path=metadata['path']),
-                    'download': self.owner.web_url_for('addon_view_or_download_file', provider='onedrive', action='download', path=metadata['path']),
+                    'view': self.owner.web_url_for(
+                        'addon_view_or_download_file',
+                        provider='onedrive',
+                        action='view',
+                        path=metadata['path']
+                    ),
+                    'download': self.owner.web_url_for(
+                        'addon_view_or_download_file',
+                        provider='onedrive',
+                        action='download',
+                        path=metadata['path']
+                    ),
                 },
             },
         )
@@ -228,12 +267,9 @@ def create_waterbutler_log(self, auth, action, metadata):
     def fetch_access_token(self):
         return self.api.fetch_access_token()
 
-    ##### Callback overrides #####
-    def after_delete(self, node=None, user=None):
-        self.deauthorize(Auth(user=user), add_log=True)
-        self.save()
+    def after_delete(self, node, user):
+        self.deauthorize(Auth(user=user), add_log=True, save=True)
 
     def on_delete(self):
         self.deauthorize(add_log=False)
-        self.clear_auth()
         self.save()
diff --git a/website/addons/onedrive/routes.py b/website/addons/onedrive/routes.py
index 299e0504e50..85c76e2b814 100644
--- a/website/addons/onedrive/routes.py
+++ b/website/addons/onedrive/routes.py
@@ -7,66 +7,67 @@
 
 api_routes = {
     'rules': [
+
+        #### Profile settings ###
+
         Rule(
             [
                 '/settings/onedrive/accounts/',
             ],
             'get',
-            views.onedrive_get_user_settings,
+            views.onedrive_account_list,
             json_renderer,
         ),
+
+        ##### Node settings #####
+
         Rule(
             [
-                '/project/<pid>/onedrive/settings/',
-                '/project/<pid>/node/<nid>/onedrive/settings/'
+                '/project/<pid>/onedrive/folders/',
+                '/project/<pid>/node/<nid>/onedrive/folders/',
             ],
             'get',
-            views.onedrive_get_config,
+            views.onedrive_folder_list,
             json_renderer,
         ),
+
         Rule(
             [
-                '/project/<pid>/onedrive/settings/',
-                '/project/<pid>/node/<nid>/onedrive/settings/'
+                '/project/<pid>/onedrive/config/',
+                '/project/<pid>/node/<nid>/onedrive/config/'
             ],
-            'put',
-            views.onedrive_set_config,
+            'get',
+            views.onedrive_get_config,
             json_renderer,
         ),
+
         Rule(
             [
-                '/project/<pid>/onedrive/user_auth/',
-                '/project/<pid>/node/<nid>/onedrive/user_auth/'
+                '/project/<pid>/onedrive/config/',
+                '/project/<pid>/node/<nid>/onedrive/config/'
             ],
             'put',
-            views.onedrive_add_user_auth,
+            views.onedrive_set_config,
             json_renderer,
         ),
+
         Rule(
             [
-                '/project/<pid>/onedrive/user_auth/',
-                '/project/<pid>/node/<nid>/onedrive/user_auth/'
+                '/project/<pid>/onedrive/config/',
+                '/project/<pid>/node/<nid>/onedrive/config/'
             ],
             'delete',
-            views.onedrive_remove_user_auth,
-            json_renderer,
-        ),
-        Rule(
-            [
-                '/project/<pid>/onedrive/config/share/',
-                '/project/<pid>/node/<nid>/onedrive/config/share/'
-            ],
-            'get',
-            views.onedrive_get_share_emails,
+            views.onedrive_deauthorize_node,
             json_renderer,
         ),
+
         Rule(
             [
-                '/project/<pid>/onedrive/folders/',
-                '/project/<pid>/node/<nid>/onedrive/folders/',
+                '/project/<pid>/onedrive/import-auth/',
+                '/project/<pid>/node/<nid>/onedrive/import-auth/'
             ],
-            'get',
-            views.onedrive_folder_list,
+            'put',
+            views.onedrive_import_auth,
             json_renderer,
         ),
     ],
diff --git a/website/addons/onedrive/serializer.py b/website/addons/onedrive/serializer.py
index 2e092f572cc..8c44c3a61f3 100644
--- a/website/addons/onedrive/serializer.py
+++ b/website/addons/onedrive/serializer.py
@@ -1,97 +1,36 @@
-import logging
+from oauthlib.oauth2 import InvalidGrantError
 
-from website.addons.base.serializer import OAuthAddonSerializer
+from website.util import api_url_for
+from website.addons.base.serializer import StorageAddonSerializer
 
-from website.util import api_url_for, web_url_for
 
-logger = logging.getLogger(__name__)
+class OneDriveSerializer(StorageAddonSerializer):
 
+    addon_short_name = 'onedrive'
 
-class OneDriveSerializer(OAuthAddonSerializer):
-    def credentials_owner(self, user_settings=None):
-        return user_settings.owner or self.user_settings.owner
-
-    @property
-    def addon_short_name(self):
-        return 'onedrive'
-
-    @property
-    def user_is_owner(self):
-        if self.user_settings is None or self.node_settings is None:
+    def credentials_are_valid(self, user_settings, client):
+        try:
+            self.node_settings.fetch_access_token()
+        except (InvalidGrantError, AttributeError):
             return False
+        return True
 
-        user_accounts = self.user_settings.external_accounts
-        return bool(
-            (
-                self.node_settings.has_auth and
-                (self.node_settings.external_account in user_accounts)
-            ) or len(user_accounts)
-        )
-
-    @property
-    def serialized_urls(self):
-        ret = self.addon_serialized_urls
-        ret.update({'settings': web_url_for('user_addons')})
-        return ret
+    def serialized_folder(self, node_settings):
+        return {
+            'name': node_settings.folder_name,
+            'path': node_settings.folder_path,
+        }
 
     @property
     def addon_serialized_urls(self):
         node = self.node_settings.owner
 
         return {
-            'auth': api_url_for('oauth_connect',
-                               service_name='onedrive'),
-            'importAuth': node.api_url_for('onedrive_add_user_auth'),
+            'auth': api_url_for('oauth_connect', service_name='onedrive'),
+            'importAuth': node.api_url_for('onedrive_import_auth'),
             'files': node.web_url_for('collect_file_trees'),
             'folders': node.api_url_for('onedrive_folder_list'),
             'config': node.api_url_for('onedrive_set_config'),
-            'deauthorize': node.api_url_for('onedrive_remove_user_auth'),
-            'accounts': node.api_url_for('onedrive_get_user_settings'),
+            'deauthorize': node.api_url_for('onedrive_deauthorize_node'),
+            'accounts': node.api_url_for('onedrive_account_list'),
         }
-
-    def serialize_settings(self, node_settings, current_user, client=None):
-        """View helper that returns a dictionary representation of a
-        OneDriveNodeSettings record. Provides the return value for the
-        onedrive config endpoints.
-        """
-        valid_credentials = True
-        user_settings = node_settings.user_settings
-        self.node_settings = node_settings
-        current_user_settings = current_user.get_addon('onedrive')
-        user_is_owner = user_settings is not None and user_settings.owner == current_user
-
-#        if user_settings:
-#            try:
-#                client = client or OneDriveClient(user_settings.external_accounts[0].oauth_key)
-#                client.get_user_info()
-#            except (OneDriveClientException, IndexError):
-#                valid_credentials = False
-
-        result = {
-            'userIsOwner': user_is_owner,
-            'nodeHasAuth': node_settings.has_auth,
-            'urls': self.addon_serialized_urls,
-            'validCredentials': valid_credentials,
-            'userHasAuth': current_user_settings is not None and current_user_settings.has_auth,
-        }
-
-        if node_settings.has_auth:
-            # Add owner's profile URL
-            result['urls']['owner'] = web_url_for(
-                'profile_view_id',
-                uid=user_settings.owner._id
-            )
-            result['ownerName'] = user_settings.owner.fullname
-            # Show available folders
-            # path = node_settings.folder
-
-            if node_settings.folder_id is None:
-                result['folder'] = {'name': None, 'path': None}
-            elif valid_credentials:
-                #path = node_settings.fetch_full_folder_path()
-                path = node_settings.folder_id
-                result['folder'] = {
-                    'path': path,
-                    'name': path.replace('All Files', '', 1) if path != 'All Files' else '/ (Full OneDrive)'
-                }
-        return result
diff --git a/website/addons/onedrive/settings/defaults.py b/website/addons/onedrive/settings/defaults.py
index e98b5babb98..358514c0f71 100644
--- a/website/addons/onedrive/settings/defaults.py
+++ b/website/addons/onedrive/settings/defaults.py
@@ -2,9 +2,11 @@
 ONEDRIVE_KEY = None
 ONEDRIVE_SECRET = None
 
-REFRESH_TIME = 5 * 60  # 5 minutes
-
 ONEDRIVE_OAUTH_TOKEN_ENDPOINT = 'https://login.live.com/oauth20_token.srf?'
 ONEDRIVE_OAUTH_AUTH_ENDPOINT = 'https://login.live.com/oauth20_authorize.srf?'
 MSLIVE_API_URL = 'https://apis.live.net/v5.0/'
 ONEDRIVE_API_URL = 'https://api.onedrive.com/v1.0'
+
+REFRESH_TIME = 30 * 60  # 30 minutes
+
+DEFAULT_ROOT_ID = 'root'  # id string to identify the root folder
diff --git a/website/addons/onedrive/static/files.js b/website/addons/onedrive/static/files.js
new file mode 100644
index 00000000000..7221f1b32e5
--- /dev/null
+++ b/website/addons/onedrive/static/files.js
@@ -0,0 +1 @@
+require('./onedriveFangornConfig.js');
diff --git a/website/addons/onedrive/static/node-cfg.js b/website/addons/onedrive/static/node-cfg.js
index c634a4ccb75..676ea6514f4 100644
--- a/website/addons/onedrive/static/node-cfg.js
+++ b/website/addons/onedrive/static/node-cfg.js
@@ -1,7 +1,6 @@
 'use strict';
 
-require('./onedrive.css');
 var OauthAddonNodeConfig = require('js/oauthAddonNodeConfig').OauthAddonNodeConfig;
 
-var url = window.contextVars.node.urls.api + 'onedrive/settings/';
-new OauthAddonNodeConfig('Onedrive', '#onedriveScope', url, '#onedriveGrid');
+var url = window.contextVars.node.urls.api + 'onedrive/config/';
+new OauthAddonNodeConfig('Microsoft OneDrive', '#onedriveScope', url, '#onedriveGrid');
diff --git a/website/addons/onedrive/static/onedrive.css b/website/addons/onedrive/static/onedrive.css
deleted file mode 100644
index 6becf911ec4..00000000000
--- a/website/addons/onedrive/static/onedrive.css
+++ /dev/null
@@ -1,39 +0,0 @@
-.onedrive-confirm-selection {
-    padding-top: 10px;
-}
-.onedrive-folder-picker {
-    margin-top: 10px;
-}
-
-.selected-folder {
-    margin: 12px;
-    font-size: 1.3em;
-}
-
-.onedrive-loading-text {
-    padding-top: 20px;
-}
-
-
-.btn-onedrive {
-    color: #333;
-    background-color: #fff;
-    border-color: #ccc;
-}
-
-.btn-onedrive:hover,
-.btn-onedrive:focus,
-.btn-onedrive:active,
-.btn-onedrive.active,
-.open .dropdown-toggle.btn-onedrive {
-    color: #333;
-    background-color: #ebebeb;
-    border-color: #adadad;
-}
-
-.onedrive-folderpicker-odd {
-    background-color: #f5f5f5;
-}
-.onedrive-folderpicker-even {
-    background-color: #fff;
-}
\ No newline at end of file
diff --git a/website/addons/onedrive/static/onedriveAnonymousLogActionList.json b/website/addons/onedrive/static/onedriveAnonymousLogActionList.json
new file mode 100644
index 00000000000..c6695e5e847
--- /dev/null
+++ b/website/addons/onedrive/static/onedriveAnonymousLogActionList.json
@@ -0,0 +1,10 @@
+{
+    "onedrive_file_added" : "A user added a file to Microsoft OneDrive in a project",
+    "onedrive_file_removed" : "A user removed a file from Microsoft OneDrive in a project",
+    "onedrive_file_updated" : "A user updated a file in Microsoft OneDrive in a project",
+    "onedrive_folder_created" : "A user created a folder in Microsoft OneDrive in a project",
+    "onedrive_folder_selected" : "A user linked a Microsoft OneDrive folder to a project",
+    "onedrive_node_authorized" : "A user authorized the Microsoft OneDrive addon for a project",
+    "onedrive_node_deauthorized" : "A user deauthorized the Microsoft OneDrive addon for a project",
+    "onedrive_node_deauthorized_no_user" : "Microsoft OneDrive addon for a project deauthorized"
+}
diff --git a/website/addons/onedrive/static/onedriveFangornConfig.js b/website/addons/onedrive/static/onedriveFangornConfig.js
new file mode 100644
index 00000000000..f719bcea711
--- /dev/null
+++ b/website/addons/onedrive/static/onedriveFangornConfig.js
@@ -0,0 +1,75 @@
+'use strict';
+/**
+ * OneDrive FileBrowser configuration module.
+ */
+
+var m = require('mithril');
+var $ = require('jquery');
+var URI = require('URIjs');
+var Fangorn = require('js/fangorn').Fangorn;
+var waterbutler = require('js/waterbutler');
+var $osf = require('js/osfHelpers');
+
+// Cross browser key codes for the Command key
+var commandKeys = [224, 17, 91, 93];
+
+// Define Fangorn Button Actions
+var _onedriveItemButtons = {
+    view: function (ctrl, args, children) {
+        var tb = args.treebeard;
+        var item = args.item;
+        var buttons = [];
+        if (tb.options.placement !== 'fileview') {
+            if (item.kind === 'folder') {
+                // Download Zip File
+                buttons.push(
+                    m.component(Fangorn.Components.button, {
+                        onclick: function (event) {
+                            Fangorn.ButtonEvents._downloadZipEvent.call(tb, event, item);
+                        },
+                        icon: 'fa fa-download',
+                        className: 'text-primary'
+                    }, 'Download as zip')
+                );
+            }
+            else if (item.kind === 'file') {
+                buttons.push(
+                    m.component(Fangorn.Components.button, {
+                        onclick: function (event) {
+                            Fangorn.ButtonEvents._downloadEvent.call(tb, event, item);
+                        },
+                        icon: 'fa fa-download',
+                        className: 'text-primary'
+                    }, 'Download')
+                );
+                if (item.data.permissions && item.data.permissions.view) {
+                    buttons.push(
+                        m.component(Fangorn.Components.button, {
+                            onclick: function(event) {
+                                Fangorn.ButtonEvents._gotoFileEvent.call(tb, item);
+                            },
+                            icon: 'fa fa-file-o',
+                            className : 'text-info'
+                        }, 'View')
+                    );
+                    if (!item.data.permissions.private) {
+                        buttons.push(
+                            m('a.text-info.fangorn-toolbar-icon', {href: item.data.extra.webView}, [
+                                m('i.fa.fa-external-link'),
+                                m('span', 'View on OneDrive')
+                            ])
+                        );
+                    }
+                }
+            }
+        }
+
+        return m('span', buttons); // Tell fangorn this function is used.
+    }
+};
+
+
+// Register configuration
+Fangorn.config.onedrive = {
+    itemButtons: _onedriveItemButtons,
+};
diff --git a/website/addons/onedrive/static/onedriveLogActionList.json b/website/addons/onedrive/static/onedriveLogActionList.json
new file mode 100644
index 00000000000..850cbaed323
--- /dev/null
+++ b/website/addons/onedrive/static/onedriveLogActionList.json
@@ -0,0 +1,10 @@
+{
+  "onedrive_file_added" : "${user} added file ${path} to Microsoft OneDrive in ${node}",
+  "onedrive_file_removed" : "${user} removed ${path_type} ${path} from Microsoft OneDrive in ${node}",
+  "onedrive_file_updated" : "${user} updated file ${path} in Microsoft OneDrive in ${node}",
+  "onedrive_folder_created" : "${user} created folder ${path} in Microsoft OneDrive in ${node}",
+  "onedrive_folder_selected" : "${user} linked Microsoft OneDrive folder ${onedrive_folder} to ${node}",
+  "onedrive_node_authorized" : "${user} authorized the Microsoft OneDrive addon for ${node}",
+  "onedrive_node_deauthorized" : "${user} deauthorized the Microsoft OneDrive addon for ${node}",
+  "onedrive_node_deauthorized_no_user" : "Microsoft OneDrive addon for ${node} deauthorized"
+}
diff --git a/website/addons/onedrive/templates/log_templates.mako b/website/addons/onedrive/templates/log_templates.mako
deleted file mode 100644
index b4611237bb0..00000000000
--- a/website/addons/onedrive/templates/log_templates.mako
+++ /dev/null
@@ -1,53 +0,0 @@
-<script type="text/html" id="onedrive_file_added">
-added file
-<a class="overflow log-file-link" data-bind="click: NodeActions.addonFileRedirect">
-    {{ stripSlash(params.path) }}</a> to
-Onedrive in
-<a class="log-node-title-link overflow" data-bind="attr: {href: nodeUrl}">{{ nodeTitle }}</a>
-</script>
-
-<script type="text/html" id="onedrive_folder_created">
-created folder
-<span class="overflow log-folder">{{ stripSlash(params.path) }}</span> in
-Onedrive in
-<a class="log-node-title-link overflow" data-bind="attr: {href: nodeUrl}">{{ nodeTitle }}</a>
-</script>
-
-<script type="text/html" id="onedrive_file_updated">
-updated file
-<a class="overflow log-file-link" data-bind="click: NodeActions.addonFileRedirect">
-    {{ stripSlash(params.path) }}</a> to
-Onedrive in
-<a class="log-node-title-link overflow" data-bind="attr: {href: nodeUrl}">{{ nodeTitle }}</a>
-</script>
-
-
-<script type="text/html" id="onedrive_file_removed">
-removed {{ pathType(params.path) }} <span class="overflow">
-    {{ stripSlash(params.path) }}</span> from
-Onedrive in
-<a class="log-node-title-link overflow" data-bind="attr: {href: nodeUrl}">{{ nodeTitle }}</a>
-</script>
-
-
-<script type="text/html" id="onedrive_folder_selected">
-linked Onedrive folder
-<span class="overflow">
-    {{ params.folder === 'All Files' ? '/ (Full Onedrive)' : (params.folder || '').replace('All Files','')}}
-</span> to
-<a class="log-node-title-link overflow" data-bind="attr: {href: nodeUrl}">{{ nodeTitle }}</a>
-</script>
-
-
-<script type="text/html" id="onedrive_node_deauthorized">
-deauthorized the Onedrive addon for
-<a class="log-node-title-link overflow"
-    data-bind="attr: {href: nodeUrl}">{{ nodeTitle }}</a>
-</script>
-
-
-<script type="text/html" id="onedrive_node_authorized">
-authorized the Onedrive addon for
-<a class="log-node-title-link overflow"
-    data-bind="attr: {href: nodeUrl}">{{ nodeTitle }}</a>
-</script>
diff --git a/website/addons/onedrive/tests/test_client.py b/website/addons/onedrive/tests/test_client.py
index 0df2b37078e..7268165816e 100644
--- a/website/addons/onedrive/tests/test_client.py
+++ b/website/addons/onedrive/tests/test_client.py
@@ -1,21 +1,11 @@
 # -*- coding: utf-8 -*-
 
 from nose.tools import *  # noqa (PEP8 asserts)
-#from onedrive.client import OneDriveClient
 
 from tests.base import OsfTestCase
 from tests.factories import UserFactory
 
-from website.addons.base.exceptions import AddonError
 from website.addons.onedrive.model import OneDriveUserSettings
-from website.addons.onedrive.tests.factories import (
-    OneDriveNodeSettingsFactory,
-    OneDriveUserSettingsFactory
-)
-#  from website.addons.onedrive.client import (
-#      get_client, get_node_addon_client, get_node_client,
-#      get_client_from_user_settings
-#  )
 
 
 class TestCore(OsfTestCase):
diff --git a/website/addons/onedrive/tests/test_models.py b/website/addons/onedrive/tests/test_models.py
index 6118ea3ed39..009bc5c3aca 100644
--- a/website/addons/onedrive/tests/test_models.py
+++ b/website/addons/onedrive/tests/test_models.py
@@ -4,103 +4,112 @@
 from nose.tools import *  # noqa (PEP8 asserts)
 
 from framework.auth import Auth
-from website.addons.onedrive.model import (
-    OneDriveUserSettings, OneDriveNodeSettings
-)
 from tests.base import OsfTestCase
-from tests.factories import UserFactory, ProjectFactory
+from website.addons.base.testing import models
+
+from website.addons.onedrive import model
+from website.addons.onedrive.client import OneDriveClient
 from website.addons.onedrive.tests.factories import (
-    OneDriveUserSettingsFactory, OneDriveNodeSettingsFactory,
+    OneDriveAccountFactory,
+    OneDriveNodeSettingsFactory,
+    OneDriveUserSettingsFactory,
 )
-from website.addons.base import exceptions
 
+class TestOneDriveProvider(OsfTestCase):
+    def setUp(self):
+        super(TestOneDriveProvider, self).setUp()
+        self.provider = model.OneDriveProvider()
 
-class TestUserSettingsModel(OsfTestCase):
+    @mock.patch.object(OneDriveClient, 'user_info_for_token')
+    def test_handle_callback(self, mock_client):
+        fake_response = {'access_token': 'abc123'}
+        fake_info = {'id': '12345', 'name': 'fakename', 'link': 'fakeUrl'}
+        mock_client.return_value = fake_info
+        res = self.provider.handle_callback(fake_response)
+        assert_equal(res['provider_id'], '12345')
+        assert_equal(res['display_name'], 'fakename')
+        assert_equal(res['profile_url'], 'fakeUrl')
 
-    def setUp(self):
-        super(TestUserSettingsModel, self).setUp()
-        self.user = UserFactory()
-
-#      def test_has_auth(self):
-#          user_settings = OneDriveUserSettingsFactory(access_token=None)
-#          assert_false(user_settings.has_auth)
-#          user_settings.access_token = '12345'
-#          user_settings.save()
-#          assert_true(user_settings.has_auth)
-#
-    def test_delete(self):
-        user_settings = OneDriveUserSettingsFactory()
-        user_settings.access_token = "122"
-        user_settings.delete()
-        user_settings.save()
-        assert_true(user_settings.deleted)
-
-class TestOneDriveNodeSettingsModel(OsfTestCase):
+
+class TestUserSettings(models.OAuthAddonUserSettingTestSuiteMixin, OsfTestCase):
+
+    short_name = 'onedrive'
+    full_name = 'Microsoft OneDrive'
+    ExternalAccountFactory = OneDriveAccountFactory
+
+
+class TestNodeSettings(models.OAuthAddonNodeSettingsTestSuiteMixin, OsfTestCase):
+
+    short_name = 'onedrive'
+    full_name = 'Microsoft OneDrive'
+    ExternalAccountFactory = OneDriveAccountFactory
+
+    NodeSettingsFactory = OneDriveNodeSettingsFactory
+    NodeSettingsClass = model.OneDriveNodeSettings
+    UserSettingsFactory = OneDriveUserSettingsFactory
 
     def setUp(self):
-        super(TestOneDriveNodeSettingsModel, self).setUp()
-        self.user = UserFactory()
-        self.user.add_addon('onedrive')
-        self.user.save()
-        self.user_settings = self.user.get_addon('onedrive')
-        self.project = ProjectFactory()
-        self.node_settings = OneDriveNodeSettingsFactory(
-            user_settings=self.user_settings,
-            owner=self.project
+        self.mock_refresh = mock.patch.object(
+            model.OneDriveProvider,
+            'refresh_oauth_key'
+        )
+        self.mock_refresh.return_value = True
+        self.mock_refresh.start()
+        super(TestNodeSettings, self).setUp()
+
+    def tearDown(self):
+        self.mock_refresh.stop()
+        super(TestNodeSettings, self).tearDown()
+
+
+    @mock.patch('website.addons.onedrive.model.OneDriveProvider')
+    def test_api_not_cached(self, mock_odp):
+        # The first call to .api returns a new object
+        api = self.node_settings.api
+        mock_odp.assert_called_once()
+        assert_equal(api, mock_odp())
+
+    @mock.patch('website.addons.onedrive.model.OneDriveProvider')
+    def test_api_cached(self, mock_odp):
+        # Repeated calls to .api returns the same object
+        self.node_settings._api = 'testapi'
+        api = self.node_settings.api
+        assert_false(mock_odp.called)
+        assert_equal(api, 'testapi')
+
+    def test_selected_folder_name_root(self):
+        self.node_settings.folder_id = 'root'
+
+        assert_equal(
+            self.node_settings.selected_folder_name,
+            "/ (Full OneDrive)"
         )
 
-    def test_folder_defaults_to_none(self):
-        node_settings = OneDriveNodeSettings(user_settings=self.user_settings)
-        node_settings.save()
-        assert_is_none(node_settings.folder_id)
-
-    def test_to_json(self):
-        settings = self.node_settings
-        user = UserFactory()
-        result = settings.to_json(user)
-        assert_equal(result['addon_short_name'], 'onedrive')
-
-    def test_delete(self):
-        assert_true(self.node_settings.user_settings)
-        assert_true(self.node_settings.folder_id)
-        old_logs = self.project.logs
-        self.node_settings.delete()
-        self.node_settings.save()
-        assert_is(self.node_settings.user_settings, None)
-        assert_is(self.node_settings.folder_id, None)
-        assert_true(self.node_settings.deleted)
+    def test_selected_folder_name_empty(self):
+        self.node_settings.folder_id = None
 
+        assert_equal(
+            self.node_settings.selected_folder_name,
+            ''
+        )
 
-    def test_deauthorize(self):
-        assert_true(self.node_settings.user_settings)
-        assert_true(self.node_settings.folder_id)
-        self.node_settings.deauthorize(auth=Auth(self.user))
-        self.node_settings.save()
-        assert_is(self.node_settings.user_settings, None)
-        assert_is(self.node_settings.folder_id, None)
-
-        last_log = self.project.logs[-1]
-        assert_equal(last_log.action, 'onedrive_node_deauthorized')
-        params = last_log.params
-        assert_in('node', params)
-        assert_in('project', params)
-        assert_in('folder', params)
-
-    def test_set_user_auth(self):
-        node_settings = OneDriveNodeSettingsFactory()
-        user_settings = OneDriveUserSettingsFactory()
-
-        node_settings.set_user_auth(user_settings)
-        node_settings.save()
-
-        assert_equal(node_settings.user_settings, user_settings)
-        # A log was saved
-        last_log = node_settings.owner.logs[-1]
-        assert_equal(last_log.action, 'onedrive_node_authorized')
-
-    def test_serialize_credentials_not_authorized(self):
-        self.node_settings.user_settings = None
-        self.node_settings.save()
-        with assert_raises(exceptions.AddonError):
-            self.node_settings.serialize_waterbutler_credentials()
+    ## Overrides ##
 
+    def test_set_folder(self):
+        folder = {
+            'id': 'fake-folder-id',
+            'name': 'fake-folder-name',
+            'path': 'fake_path'
+        }
+        self.node_settings.set_folder(folder, auth=Auth(self.user))
+        self.node_settings.save()
+        # Folder was set
+        assert_equal(self.node_settings.folder_id, folder['id'])
+        # Log was saved
+        last_log = self.node.logs[-1]
+        assert_equal(last_log.action, '{0}_folder_selected'.format(self.short_name))
+
+    def test_serialize_settings(self):
+        settings = self.node_settings.serialize_waterbutler_settings()
+        expected = {'folder': self.node_settings.folder_id}
+        assert_equal(settings, expected)
diff --git a/website/addons/onedrive/tests/test_serializer.py b/website/addons/onedrive/tests/test_serializer.py
new file mode 100644
index 00000000000..bda59a9f78c
--- /dev/null
+++ b/website/addons/onedrive/tests/test_serializer.py
@@ -0,0 +1,23 @@
+# -*- coding: utf-8 -*-
+"""Serializer tests for the OneDrive addon."""
+
+import mock
+from nose.tools import *  # noqa (PEP8 asserts)
+
+from tests.base import OsfTestCase
+from website.addons.onedrive.model import OneDriveProvider
+from website.addons.onedrive.serializer import OneDriveSerializer
+from website.addons.onedrive.tests.factories import OneDriveAccountFactory
+from website.addons.base.testing.serializers import StorageAddonSerializerTestSuiteMixin
+
+
+class TestOneDriveSerializer(StorageAddonSerializerTestSuiteMixin, OsfTestCase):
+
+    addon_short_name = 'onedrive'
+
+    Serializer = OneDriveSerializer
+    ExternalAccountFactory = OneDriveAccountFactory
+    client = OneDriveProvider
+
+    def set_provider_id(self, pid):
+        self.node_settings.folder_id = pid
diff --git a/website/addons/onedrive/tests/test_views.py b/website/addons/onedrive/tests/test_views.py
index 0d9abe4f3a1..742885c19d0 100644
--- a/website/addons/onedrive/tests/test_views.py
+++ b/website/addons/onedrive/tests/test_views.py
@@ -1,186 +1,56 @@
 # -*- coding: utf-8 -*-
-"""Views tests for the OneDrive addon."""
-import os
-import unittest
-from nose.tools import *  # noqa (PEP8 asserts)
-import mock
-import httplib
-
-from framework.auth import Auth
-from website.util import api_url_for, web_url_for
 
-from urllib3.exceptions import MaxRetryError
-
-from tests.base import OsfTestCase, assert_is_redirect
-from tests.factories import AuthUserFactory, ProjectFactory
+import mock
+from nose.tools import *  # noqa
 
-from website.addons.onedrive.tests.utils import (
-    OneDriveAddonTestCase, mock_responses, MockOneDrive, patch_client
-)
+from website.addons.base.testing import views
+from website.addons.onedrive.client import OneDriveClient
+from website.addons.onedrive.serializer import OneDriveSerializer
+from website.addons.onedrive.tests.utils import OneDriveAddonTestCase
+from website.addons.onedrive.tests.utils import raw_subfolder_response
+from website.addons.onedrive.tests.utils import raw_root_folder_response
 
-from website.addons.onedrive import utils
 
-mock_client = MockOneDrive()
+class TestAuthViews(OneDriveAddonTestCase, views.OAuthAddonAuthViewsTestCaseMixin):
+    pass
 
 
-class TestAuthViews(OsfTestCase):
+class TestConfigViews(OneDriveAddonTestCase, views.OAuthAddonConfigViewsTestCaseMixin):
+    folder = {
+        'path': 'Drive/Camera Uploads',
+        'id': '1234567890'
+    }
+    Serializer = OneDriveSerializer
+    client = OneDriveClient
 
     def setUp(self):
-        super(TestAuthViews, self).setUp()
-        self.user = AuthUserFactory()
-        # Log user in
-        self.app.authenticate(*self.user.auth)
-
-#      def test_onedrive_oauth_start(self):
-#          url = api_url_for('onedrive_oauth_start_user')
-#          res = self.app.get(url)
-#          assert_is_redirect(res)
-#          assert_in('&force_reapprove=true', res.location)
-
-#  class TestConfigViews(OneDriveAddonTestCase):
-
-#      def test_onedrive_config_put(self):
-#          url = self.project.api_url_for('onedrive_config_put')
-#          # Can set folder through API call
-#          res = self.app.put_json(url, {'selected': {'path': 'My test folder',
-#              'name': 'OneDrive/My test folder'}},
-#              auth=self.user.auth)
-#          assert_equal(res.status_code, 200)
-#          self.node_settings.reload()
-#          self.project.reload()
-#
-#          # Folder was set
-#          assert_equal(self.node_settings.folder, 'My test folder')
-#          # A log event was created
-#          last_log = self.project.logs[-1]
-#          assert_equal(last_log.action, 'onedrive_folder_selected')
-#          params = last_log.params
-#          assert_equal(params['folder'], 'My test folder')
-#
-#      def test_onedrive_deauthorize(self):
-#          url = self.project.api_url_for('onedrive_deauthorize')
-#          saved_folder = self.node_settings.folder
-#          self.app.delete(url, auth=self.user.auth)
-#          self.project.reload()
-#          self.node_settings.reload()
-#
-#          assert_false(self.node_settings.has_auth)
-#          assert_is(self.node_settings.user_settings, None)
-#          assert_is(self.node_settings.folder, None)
-#
-#          # A log event was saved
-#          last_log = self.project.logs[-1]
-#          assert_equal(last_log.action, 'onedrive_node_deauthorized')
-#          log_params = last_log.params
-#          assert_equal(log_params['node'], self.project._primary_key)
-#          assert_equal(log_params['folder'], saved_folder)
-#
-#      def test_onedrive_get_share_emails(self):
-#          # project has some contributors
-#          contrib = AuthUserFactory()
-#          self.project.add_contributor(contrib, auth=Auth(self.user))
-#          self.project.save()
-#          url = self.project.api_url_for('onedrive_get_share_emails')
-#          res = self.app.get(url, auth=self.user.auth)
-#          result = res.json['result']
-#          assert_equal(result['emails'], [u.username for u in self.project.contributors
-#                                          if u != self.user])
-#          assert_equal(result['url'], utils.get_share_folder_uri(self.node_settings.folder))
-
-#      def test_onedrive_get_share_emails_returns_error_if_not_authorizer(self):
-#          contrib = AuthUserFactory()
-#          contrib.add_addon('onedrive')
-#          contrib.save()
-#          self.project.add_contributor(contrib, auth=Auth(self.user))
-#          self.project.save()
-#          url = self.project.api_url_for('onedrive_get_share_emails')
-#          # Non-authorizing contributor sends request
-#          res = self.app.get(url, auth=contrib.auth, expect_errors=True)
-#          assert_equal(res.status_code, httplib.FORBIDDEN)
+        super(TestConfigViews, self).setUp()
 
-#      def test_onedrive_get_share_emails_requires_user_addon(self):
-#          # Node doesn't have auth
-#          self.node_settings.user_settings = None
-#          self.node_settings.save()
-#          url = self.project.api_url_for('onedrive_get_share_emails')
-#          # Non-authorizing contributor sends request
-#          res = self.app.get(url, auth=self.user.auth, expect_errors=True)
-#          assert_equal(res.status_code, httplib.BAD_REQUEST)
+        self.mock_folders = mock.patch.object(OneDriveClient, 'folders')
+        self.mock_folders.return_value = raw_root_folder_response
+        self.mock_folders.start()
 
+        self.mock_fetch = mock.patch.object(
+            self.node_settings.__class__,
+            'fetch_access_token'
+        )
+        self.mock_fetch.return_value = self.external_account.oauth_key
+        self.mock_fetch.start()
 
-#  class TestFilebrowserViews(OneDriveAddonTestCase):
+    def tearDown(self):
+        self.mock_folders.stop()
+        self.mock_fetch.stop()
+        super(TestConfigViews, self).tearDown()
 
-#      def test_onedrive_hgrid_data_contents(self):
-#          with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
-#              url = self.project.api_url_for(
-#                  'onedrive_hgrid_data_contents',
-#                  path=self.node_settings.folder,
-#              )
-#              res = self.app.get(url, auth=self.user.auth)
-#              contents = [x for x in mock_client.metadata('', list=True)['contents'] if x['is_dir']]
-#              assert_equal(len(res.json), len(contents))
-#              first = res.json[0]
-#              assert_in('kind', first)
-#              assert_equal(first['path'], contents[0]['path'])
-#
-#      def test_onedrive_hgrid_data_contents_if_folder_is_none_and_folders_only(self):
-#          with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
-#              self.node_settings.folder = None
-#              self.node_settings.save()
-#              url = self.project.api_url_for('onedrive_hgrid_data_contents', foldersOnly=True)
-#              res = self.app.get(url, auth=self.user.auth)
-#              contents = mock_client.metadata('', list=True)['contents']
-#              expected = [each for each in contents if each['is_dir']]
-#              assert_equal(len(res.json), len(expected))
-#
-#      def test_onedrive_hgrid_data_contents_folders_only(self):
-#          with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
-#              url = self.project.api_url_for('onedrive_hgrid_data_contents', foldersOnly=True)
-#              res = self.app.get(url, auth=self.user.auth)
-#              contents = mock_client.metadata('', list=True)['contents']
-#              expected = [each for each in contents if each['is_dir']]
-#              assert_equal(len(res.json), len(expected))
+    @mock.patch.object(OneDriveClient, 'folders')
+    def test_folder_list_not_root(self, mock_drive_client_folders):
+        mock_drive_client_folders.return_value = raw_subfolder_response
 
-#      @mock.patch('website.addons.onedrive.client.OneDriveClient.metadata')
-#      def test_onedrive_hgrid_data_contents_include_root(self, mock_metadata):
-#          with patch_client('website.addons.onedrive.views.hgrid.get_node_client'):
-#              url = self.project.api_url_for('onedrive_hgrid_data_contents', root=1)
-#
-#              res = self.app.get(url, auth=self.user.auth)
-#              contents = mock_client.metadata('', list=True)['contents']
-#              assert_equal(len(res.json), 1)
-#              assert_not_equal(len(res.json), len(contents))
-#              first_elem = res.json[0]
-#              assert_equal(first_elem['path'], '/')
-
-
-class TestRestrictions(OneDriveAddonTestCase):
-
-    def setUp(self):
-        super(OneDriveAddonTestCase, self).setUp()
-
-        # Nasty contributor who will try to access folders that he shouldn't have
-        # access to
-        self.contrib = AuthUserFactory()
-        self.project.add_contributor(self.contrib, auth=Auth(self.user))
-        self.project.save()
-
-        # Set shared folder
-        self.node_settings.folder = 'foo bar/bar'
+        self.node_settings.set_auth(external_account=self.external_account, user=self.user)
         self.node_settings.save()
 
-#      def test_restricted_config_contrib_no_addon(self):
-#          url = self.project.api_url_for('onedrive_config_put')
-#          res = self.app.put_json(url, {'selected': {'path': 'foo'}},
-#              auth=self.contrib.auth, expect_errors=True)
-#          assert_equal(res.status_code, httplib.BAD_REQUEST)
-
-#      def test_restricted_config_contrib_not_owner(self):
-#          # Contributor has onedrive auth, but is not the node authorizer
-#          self.contrib.add_addon('onedrive')
-#          self.contrib.save()
-#
-#          url = self.project.api_url_for('onedrive_config_put')
-#          res = self.app.put_json(url, {'selected': {'path': 'foo'}},
-#              auth=self.contrib.auth, expect_errors=True)
-#          assert_equal(res.status_code, httplib.FORBIDDEN)
+        folderId = '12345'
+        url = self.project.api_url_for('onedrive_folder_list', folder_id=folderId)
+        res = self.app.get(url, auth=self.user.auth)
+        assert_equal(res.status_code, 200)
+        assert_equal(len(res.json), len(raw_subfolder_response))
diff --git a/website/addons/onedrive/tests/utils.py b/website/addons/onedrive/tests/utils.py
index 775afc8b179..a1f906574ee 100644
--- a/website/addons/onedrive/tests/utils.py
+++ b/website/addons/onedrive/tests/utils.py
@@ -1,168 +1,744 @@
 # -*- coding: utf-8 -*-
-import mock
-from contextlib import contextmanager
 
-from modularodm import storage
+from website.addons.onedrive.model import OneDriveProvider
+from website.addons.onedrive.tests.factories import OneDriveAccountFactory
+from website.addons.base.testing import OAuthAddonTestCaseMixin, AddonTestCase
 
-from framework.mongo import set_up_storage
 
-from website.addons.base.testing import AddonTestCase
-from website.addons.onedrive import MODELS
-
-
-def init_storage():
-    set_up_storage(MODELS, storage_class=storage.MongoStorage)
-
-
-class OneDriveAddonTestCase(AddonTestCase):
+class OneDriveAddonTestCase(OAuthAddonTestCaseMixin, AddonTestCase):
 
     ADDON_SHORT_NAME = 'onedrive'
-
-    def set_user_settings(self, settings):
-        settings.access_token = '12345abc'
-        settings.onedrive_id = 'myonedriveid'
+    ExternalAccountFactory = OneDriveAccountFactory
+    Provider = OneDriveProvider
 
     def set_node_settings(self, settings):
-        settings.folder = 'foo'
+        super(OneDriveAddonTestCase, self).set_node_settings(settings)
+        settings.folder_id = '1234567890'
+        settings.folder_path = 'Drive/Camera Uploads'
+        settings.external_account = self.external_account
 
 
-mock_responses = {
-    'put_file': {
-        'bytes': 77,
-        'icon': 'page_white_text',
-        'is_dir': False,
-        'mime_type': 'text/plain',
-        'modified': 'Wed, 20 Jul 2011 22:04:50 +0000',
-        'path': '/magnum-opus.txt',
-        'rev': '362e2029684fe',
-        'revision': 221922,
-        'root': 'onedrive',
-        'size': '77 bytes',
-        'thumb_exists': False
+raw_root_folder_response = [
+    {
+      "createdBy": {
+        "application": {
+          "displayName": "local-cosdev",
+          "id": "44174239"
+        },
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2017-02-23T02:25:24.687Z",
+      "cTag": "adDpGNEQ1MEU0MDBERkU3RDRFITEzMi42MzYyMzQxMzUyNDg3NzAwMDA",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMzIuMA",
+      "id": "F4D50E400DFE7D4E!132",
+      "lastModifiedBy": {
+        "application": {
+          "displayName": "local-cosdev",
+          "id": "44174239"
+        },
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2017-02-23T02:25:24.877Z",
+      "name": "Apps",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 0,
+      "webUrl": "https://1drv.ms/f/s!AE59_g1ADtX0gQQ",
+      "fileSystemInfo": {
+        "createdDateTime": "2017-02-23T02:25:24.687Z",
+        "lastModifiedDateTime": "2017-02-23T02:25:24.687Z"
+      },
+      "folder": {
+        "childCount": 1
+      },
+      "specialFolder": {
+        "name": "apps"
+      }
     },
-    'metadata_list': {
-        "size": "0 bytes",
-        "hash": "37eb1ba1849d4b0fb0b28caf7ef3af52",
-        "bytes": 0,
-        "thumb_exists": False,
-        "rev": "714f029684fe",
-        "modified": "Wed, 27 Apr 2011 22:18:51 +0000",
-        "path": "/Public",
-        "is_dir": True,
-        "icon": "folder_public",
-        "root": "onedrive",
-        "contents": [
-            {
-                "size": "0 bytes",
-                "rev": "35c1f029684fe",
-                "thumb_exists": False,
-                "bytes": 0,
-                "modified": "Mon, 18 Jul 2011 20:13:43 +0000",
-                "client_mtime": "Wed, 20 Apr 2011 16:20:19 +0000",
-                "path": "/Public/latest.txt",
-                "is_dir": False,
-                "icon": "page_white_text",
-                "root": "onedrive",
-                "mime_type": "text/plain",
-                "revision": 220191
-            },
-            {
-                u'bytes': 0,
-                u'icon': u'folder',
-                u'is_dir': True,
-                u'modified': u'Sat, 22 Mar 2014 05:40:29 +0000',
-                u'path': u'/datasets/New Folder',
-                u'rev': u'3fed51f002c12fc',
-                u'revision': 67032351,
-                u'root': u'onedrive',
-                u'size': u'0 bytes',
-                u'thumb_exists': False
-            }
-        ],
-        "revision": 29007
+    {
+      "createdBy": {
+        "application": {
+          "displayName": "OneDrive website",
+          "id": "44048800"
+        },
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2015-12-17T19:56:12.63Z",
+      "cTag": "adDpGNEQ1MEU0MDBERkU3RDRFITEwNi42MzYyMjA5NjY3MzQ3MDAwMDA",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMDYuMA",
+      "id": "F4D50E400DFE7D4E!106",
+      "lastModifiedBy": {
+        "application": {
+          "displayName": "OneDrive website",
+          "id": "44048800"
+        },
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2017-02-07T20:37:53.47Z",
+      "name": "Documents",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 1056811,
+      "webUrl": "https://1drv.ms/f/s!AE59_g1ADtX0ag",
+      "fileSystemInfo": {
+        "createdDateTime": "2015-12-17T19:56:12.63Z",
+        "lastModifiedDateTime": "2015-12-17T19:56:12.63Z"
+      },
+      "folder": {
+        "childCount": 1
+      },
+      "specialFolder": {
+        "name": "documents"
+      }
     },
-    'metadata_single': {
-        u'bytes': 74,
-        u'client_mtime': u'Mon, 13 Jan 2014 20:24:15 +0000',
-        u'icon': u'page_white',
-        u'is_dir': False,
-        u'mime_type': u'text/csv',
-        u'modified': u'Fri, 21 Mar 2014 05:46:36 +0000',
-        u'path': '/datasets/foo.txt',
-        u'rev': u'a2149fb64',
-        u'revision': 10,
-        u'root': u'app_folder',
-        u'size': u'74 bytes',
-        u'thumb_exists': False
+    {
+      "createdBy": {
+        "application": {
+          "displayName": "local-cosdev",
+          "id": "44174239"
+        },
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2017-02-23T02:25:42.93Z",
+      "cTag": "adDpGNEQ1MEU0MDBERkU3RDRFITEzNC42MzYyMzQxMzU0MjkzMDAwMDA",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMzQuMA",
+      "id": "F4D50E400DFE7D4E!134",
+      "lastModifiedBy": {
+        "application": {
+          "displayName": "local-cosdev",
+          "id": "44174239"
+        },
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2017-02-23T02:25:42.93Z",
+      "name": "Music",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 0,
+      "webUrl": "https://1drv.ms/f/s!AE59_g1ADtX0gQY",
+      "fileSystemInfo": {
+        "createdDateTime": "2017-02-23T02:25:42.93Z",
+        "lastModifiedDateTime": "2017-02-23T02:25:42.93Z"
+      },
+      "folder": {
+        "childCount": 0
+      },
+      "specialFolder": {
+        "name": "music"
+      }
     },
-    'revisions': [{u'bytes': 0,
-        u'client_mtime': u'Wed, 31 Dec 1969 23:59:59 +0000',
-        u'icon': u'page_white_picture',
-        u'is_deleted': True,
-        u'is_dir': False,
-        u'mime_type': u'image/png',
-        u'modified': u'Tue, 25 Mar 2014 03:39:13 +0000',
-        u'path': u'/svs-v-barks.png',
-        u'rev': u'3fed741002c12fc',
-        u'revision': 67032897,
-        u'root': u'onedrive',
-        u'size': u'0 bytes',
-        u'thumb_exists': True},
-        {u'bytes': 151164,
-        u'client_mtime': u'Sat, 13 Apr 2013 21:56:36 +0000',
-        u'icon': u'page_white_picture',
-        u'is_dir': False,
-        u'mime_type': u'image/png',
-        u'modified': u'Tue, 25 Mar 2014 01:45:51 +0000',
-        u'path': u'/svs-v-barks.png',
-        u'rev': u'3fed61a002c12fc',
-        u'revision': 67032602,
-        u'root': u'onedrive',
-        u'size': u'147.6 KB',
-        u'thumb_exists': True}]
-}
-
-
-class MockOneDrive(object):
-
-    def put_file(self, full_path, file_obj, overwrite=False, parent_rev=None):
-        return mock_responses['put_file']
-
-    def metadata(self, path, list=True, file_limit=25000, hash=None, rev=None,
-            include_deleted=False):
-        if list:
-            ret = mock_responses['metadata_list']
-        else:
-            ret = mock_responses['metadata_single']
-            ret['path'] = path
-        return ret
-
-    def get_file_and_metadata(*args, **kwargs):
-        pass
-
-    def file_delete(self, path):
-        return mock_responses['metadata_single']
-
-    def revisions(self, path):
-        ret = mock_responses['revisions']
-        for each in ret:
-            each['path'] = path
-        return ret
-
-    def account_info(self):
-        return {'display_name': 'Mr. One Drive'}
+    {
+      "createdBy": {
+        "application": {
+          "displayName": "OneDrive website",
+          "id": "44048800"
+        },
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2015-12-17T19:56:12.24Z",
+      "cTag": "adDpGNEQ1MEU0MDBERkU3RDRFITEwNS42MzYyMjA5Njk5MTgzMDAwMDA",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMDUuMA",
+      "id": "F4D50E400DFE7D4E!105",
+      "lastModifiedBy": {
+        "application": {
+          "displayName": "OneDrive website",
+          "id": "44048800"
+        },
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2017-02-07T20:43:11.83Z",
+      "name": "Pictures",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 13,
+      "webUrl": "https://1drv.ms/f/s!AE59_g1ADtX0aQ",
+      "fileSystemInfo": {
+        "createdDateTime": "2015-12-17T19:56:12.24Z",
+        "lastModifiedDateTime": "2015-12-17T19:56:12.24Z"
+      },
+      "folder": {
+        "childCount": 1
+      },
+      "specialFolder": {
+        "name": "photos"
+      }
+    },
+    {
+      "createdBy": {
+        "application": {
+          "displayName": "OneDrive website",
+          "id": "44048800"
+        },
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2015-12-17T19:56:30.89Z",
+      "cTag": "adDpGNEQ1MEU0MDBERkU3RDRFITEwNy42MzYwOTMxMzUyMDc4MDAwMDA",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMDcuMA",
+      "id": "F4D50E400DFE7D4E!107",
+      "lastModifiedBy": {
+        "application": {
+          "displayName": "OneDrive website",
+          "id": "44048800"
+        },
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2016-09-12T21:45:20.78Z",
+      "name": "Tenkum",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 1588779,
+      "webUrl": "https://1drv.ms/f/s!AE59_g1ADtX0aw",
+      "fileSystemInfo": {
+        "createdDateTime": "2015-12-17T19:56:30.89Z",
+        "lastModifiedDateTime": "2015-12-17T19:56:30.89Z"
+      },
+      "folder": {
+        "childCount": 5
+      }
+    },
+    {
+      "createdBy": {
+        "application": {
+          "displayName": "OneDrive website",
+          "id": "44048800"
+        },
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2017-02-23T07:27:44.253Z",
+      "cTag": "adDpGNEQ1MEU0MDBERkU3RDRFITE1NC42MzYyMzQzMTcxMDY2MzAwMDA",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExNTQuMA",
+      "id": "F4D50E400DFE7D4E!154",
+      "lastModifiedBy": {
+        "application": {
+          "displayName": "OneDrive website",
+          "id": "44048800"
+        },
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2017-02-23T07:28:30.663Z",
+      "name": "foo",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 28359,
+      "webUrl": "https://1drv.ms/o/s!AE59_g1ADtX0gRo",
+      "fileSystemInfo": {
+        "createdDateTime": "2017-02-23T07:27:44.253Z",
+        "lastModifiedDateTime": "2017-02-23T07:27:44.253Z"
+      },
+      "package": {
+        "type": "oneNote"
+      }
+    },
+    {
+      "@content.downloadUrl": "https://public.bn1303.livefilestore.com/y3meR_7rVWrrLE-4_8eWU09UhEHrtVojgGVrPDBh3M8Qq0Iut6Y5-x68vBGXmra-p9X6d5PcWocISnjJQMa_nQ1QMw5HUTrT0AhFq6_hurW6lwJ0qBwlzsUYWzUoLfMu9KqdUnaBghT1NiMHSyPSlUO0UgAant5d85tXtn3xqy94i9yLzq8_6spoZ_ffgYX7l-FwQBRxaDz8q6LN7SFT1JQV9S_1Fr_BDCbtitKip_UgO0",
+      "createdBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2017-02-23T03:11:31.37Z",
+      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITEzNi4yNTg",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMzYuMg",
+      "id": "F4D50E400DFE7D4E!136",
+      "lastModifiedBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2017-02-23T03:11:41.32Z",
+      "name": "foo 1.txt",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 14,
+      "webUrl": "https://1drv.ms/t/s!AE59_g1ADtX0gQg",
+      "file": {
+        "hashes": {
+          "crc32Hash": "82872CD6",
+          "sha1Hash": "12779E2CF3B4108A897FC5C6A986D4F2A4BB9026"
+        },
+        "mimeType": "text/plain"
+      },
+      "fileSystemInfo": {
+        "createdDateTime": "2017-02-23T03:11:31.37Z",
+        "lastModifiedDateTime": "2017-02-23T03:11:41.307Z"
+      }
+    },
+    {
+      "@content.downloadUrl": "https://kdqyaa.bn1303.livefilestore.com/y3mLQF-L6CLmfw-0FIxfJo6dYEkn0E_rtkcPWNXiQ6SWdt68K9EzqVb08tgPAo3S-1gTFv0xhfRndRPGcz3Ed7fm6sTP4-A9tJ5NpMjMaVVRO9Ds60TdvDrv-C6N4xgG96dB73_pAXgu7pBwDszrCixFvU75WDNW4o2C8G2cSj9hs8",
+      "createdBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2017-02-23T03:12:07.257Z",
+      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITEzNy4yNjk",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMzcuMTI",
+      "id": "F4D50E400DFE7D4E!137",
+      "lastModifiedBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2017-02-25T14:21:56.633Z",
+      "name": "foo.docx",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 15122,
+      "webUrl": "https://1drv.ms/w/s!AE59_g1ADtX0gQk",
+      "file": {
+        "hashes": {
+          "crc32Hash": "D8FEF070",
+          "sha1Hash": "DF4BA34A942459421A122AF0E9F8F2E3369174B7"
+        },
+        "mimeType": "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
+      },
+      "fileSystemInfo": {
+        "createdDateTime": "2017-02-23T03:12:07.257Z",
+        "lastModifiedDateTime": "2017-02-23T03:12:52.63Z"
+      }
+    },
+    {
+      "@content.downloadUrl": "https://kdqyaa.bn1303.livefilestore.com/y3mi1bPzwA871FD5vV5ylbGhndSxFcuzaP2W7SUmv6ythXicF6LoairKEJC1geR6jImpd4Zjeyrae__LKt0jdcM7wwOiWMqjbZ4g2ooLjmIyp0l8z3O-ic42SE2_UfLnW2jjMYeBQ3dFA-Jm_1qrml9Z759E0gRMKWMSsC3MjnfwSo",
+      "createdBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2017-02-23T06:19:04.02Z",
+      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITE0OS4yNjk",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExNDkuMTM",
+      "id": "F4D50E400DFE7D4E!149",
+      "lastModifiedBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2017-02-25T14:22:00.88Z",
+      "name": "foo.pptx",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 30523,
+      "webUrl": "https://1drv.ms/p/s!AE59_g1ADtX0gRU",
+      "file": {
+        "hashes": {
+          "crc32Hash": "2CB42AEC",
+          "sha1Hash": "B75AE7590C5953B0157CBAB9DCBD1C2672F033FE"
+        },
+        "mimeType": "application/vnd.openxmlformats-officedocument.presentationml.presentation"
+      },
+      "fileSystemInfo": {
+        "createdDateTime": "2017-02-23T06:19:04.02Z",
+        "lastModifiedDateTime": "2017-02-23T06:34:14.997Z"
+      }
+    },
+    {
+      "@content.downloadUrl": "https://public.bn1303.livefilestore.com/y3mo0SZyPfHP8KaGX-1Sd2EyxdzpetQ56CC-Wnk4wAPEVUaAcbYMvqJG3JsdA5J65xQQMbL7u7GBKf-Av2aXngTjYyKV4efKHdKRCcMx0BdpuAZrexpCJmzU7AcdU5iHnsk5ItApBUlotO8hl1lZGFNRJfDclTOJujr45aEAeHI6CT16tAmxIH6DfiAC2l4iK_vJsilRFc-m32XBQU8HpiwXjigJiLxffP-KyEGsMIgooo",
+      "createdBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2017-02-23T03:10:44.713Z",
+      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITEzNS4yNTg",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMzUuMg",
+      "id": "F4D50E400DFE7D4E!135",
+      "lastModifiedBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2017-02-23T03:10:56.25Z",
+      "name": "foo.txt",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 14,
+      "webUrl": "https://1drv.ms/t/s!AE59_g1ADtX0gQc",
+      "file": {
+        "hashes": {
+          "crc32Hash": "9E0BA90F",
+          "sha1Hash": "F8B9668ECA3938C835AF1E9DCACFA52603511FF3"
+        },
+        "mimeType": "text/plain"
+      },
+      "fileSystemInfo": {
+        "createdDateTime": "2017-02-23T03:10:44.713Z",
+        "lastModifiedDateTime": "2017-02-23T03:10:56.237Z"
+      }
+    },
+    {
+      "@content.downloadUrl": "https://kdqyaa.bn1303.livefilestore.com/y3mDcrFEI4yJFJe_Nb3oq2lZ_DXKDXaXWq4ZnUvsNQPCX4NlEQ3B1ypO4uUJ7XIzkh1q5bBUbUeRjEoNJberX70FAtY0L55GpYAPD4rlwwU83c6zTBmRB6b00Yd-I6xhXQSJ7hEVeklwoSURh0FZ-nMr3obVqsUnIzks46OQEPs7aQ",
+      "createdBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2017-02-23T03:13:37.727Z",
+      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITE0NC4yNjU",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExNDQuMTU",
+      "id": "F4D50E400DFE7D4E!144",
+      "lastModifiedBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2017-02-25T14:21:58.38Z",
+      "name": "foo.xlsx",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 8036,
+      "webUrl": "https://1drv.ms/x/s!AE59_g1ADtX0gRA",
+      "file": {
+        "hashes": {
+          "crc32Hash": "2DCEE45F",
+          "sha1Hash": "98927311DD9AE3966C9A7D4DAF4579A87C870EFB"
+        },
+        "mimeType": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+      },
+      "fileSystemInfo": {
+        "createdDateTime": "2017-02-23T03:13:37.727Z",
+        "lastModifiedDateTime": "2017-02-23T03:24:53.483Z"
+      }
+    },
+    {
+      "@content.downloadUrl": "https://kdqyaa.bn1303.livefilestore.com/y3mUbHeGrn5Qh1ZwPHCKp4czfgGAGz_-ePntZpq_47wbGU6VccDDTq2149EnUS9hoQ40V07lPVuSMv-2qBCwFqe40t5f0EBcrCJbFzNktZ0f_UrLNnMPBl1TemukaqqOXGY0iyqHvz-ole1jC_DsWo_t-2qGd2Oa8V_Veh8KK8UHsc",
+      "createdBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2017-02-23T03:13:05.643Z",
+      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITE0Mi4yNzA",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExNDIuMTI",
+      "id": "F4D50E400DFE7D4E!142",
+      "lastModifiedBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2017-02-25T14:21:57.36Z",
+      "name": "foo1.docx",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 14912,
+      "webUrl": "https://1drv.ms/w/s!AE59_g1ADtX0gQ4",
+      "file": {
+        "hashes": {
+          "crc32Hash": "551418A8",
+          "sha1Hash": "FDA866479C801C92860ADA0AFD4C850F21078EE7"
+        },
+        "mimeType": "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
+      },
+      "fileSystemInfo": {
+        "createdDateTime": "2017-02-23T03:13:05.643Z",
+        "lastModifiedDateTime": "2017-02-23T03:13:29.087Z"
+      }
+    },
+    {
+      "@content.downloadUrl": "https://kdqyaa.bn1303.livefilestore.com/y3m8qKRRwHNq1a3YJo5b3HDCisfHEoIQfX-BrS62q2sNhZja3dPlT6qW0_CHhTA61M5_XnxdKknGE3Rg9Vv8NZN5-Xi72TQJGS16VhfgO53iyJxRml99FSXXrhkH-0y7iXrI4ibBuch7u7-m1sErEbgERviZ3RmD84HttNZg-Hn4kM",
+      "createdBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2017-02-23T06:34:18.873Z",
+      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITE1Mi4yNzE",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExNTIuMTc",
+      "id": "F4D50E400DFE7D4E!152",
+      "lastModifiedBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2017-02-25T14:22:01.797Z",
+      "name": "foo1.pptx",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 30701,
+      "webUrl": "https://1drv.ms/p/s!AE59_g1ADtX0gRg",
+      "file": {
+        "hashes": {
+          "crc32Hash": "ADD1D585",
+          "sha1Hash": "0346CB868CD2C03B09341D4232AD2D38B459A699"
+        },
+        "mimeType": "application/vnd.openxmlformats-officedocument.presentationml.presentation"
+      },
+      "fileSystemInfo": {
+        "createdDateTime": "2017-02-23T06:34:18.873Z",
+        "lastModifiedDateTime": "2017-02-23T07:27:07.12Z"
+      }
+    },
+    {
+      "@content.downloadUrl": "https://kdqyaa.bn1303.livefilestore.com/y3mx4zP2_eOo43jA6xRHtVi7jfozdtka4XygTf4YsMrZJytqg9I36Fd43K6EpCxEH15163NKVkvQjiROuOn9m3xPtZzu-g3Pzt5hE8CHDsoS1iH36PgBkOd3P49-5GIW_Y_OJybBA3YkG64DHCPjSFftBrfdX5w-zxBTKXYBA3CGG0",
+      "createdBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2017-02-23T03:45:08.03Z",
+      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITE0Ny4yNjU",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExNDcuMjQ",
+      "id": "F4D50E400DFE7D4E!147",
+      "lastModifiedBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2017-02-25T14:22:00.06Z",
+      "name": "foo1.xlsx",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 8043,
+      "webUrl": "https://1drv.ms/x/s!AE59_g1ADtX0gRM",
+      "file": {
+        "hashes": {
+          "crc32Hash": "7441963D",
+          "sha1Hash": "0078FE7CF1088EECADEBD374905D0560FDF3FD97"
+        },
+        "mimeType": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+      },
+      "fileSystemInfo": {
+        "createdDateTime": "2017-02-23T03:45:08.03Z",
+        "lastModifiedDateTime": "2017-02-23T06:18:59.52Z"
+      }
+    },
+    {
+      "@content.downloadUrl": "https://kdqyaa.bn1303.livefilestore.com/y3m38TiJnMWVpml53HkELL0YRerqKsy8nK1lU3lZUYo48-EXez--3_TZ7VtE_L1sSnxx4VZ0q2fva_ICwHBkjzl8S2xgRzSNqLYfuklja6-770qju2Wrw8gQGeT58XBI6aaFuxa-pgPiYFiF6yAE4Ngj7LVeEx4dVW5BO51Gn4cY5o",
+      "createdBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2017-02-23T07:29:04.897Z",
+      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITE1OS4yNjY",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExNTkuMTc",
+      "id": "F4D50E400DFE7D4E!159",
+      "lastModifiedBy": {
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2017-02-25T14:22:02.903Z",
+      "name": "foo2.xlsx",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 10541,
+      "webUrl": "https://1drv.ms/x/s!AE59_g1ADtX0gR8",
+      "file": {
+        "hashes": {
+          "crc32Hash": "B4AD5B8D",
+          "sha1Hash": "AAF14BB6C3E373A7C044A208A9D3A30DD100E293"
+        },
+        "mimeType": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+      },
+      "fileSystemInfo": {
+        "createdDateTime": "2017-02-23T07:29:04.897Z",
+        "lastModifiedDateTime": "2017-02-23T07:30:04.57Z"
+      }
+    },
+    {
+      "@content.downloadUrl": "https://public.bn1303.livefilestore.com/y3mZjrqNTRpDIy54W750IhRdbVbfh7RdFdtJ6Vmx6EIUuUVyGZTyy9CWwUFrWlnbmGtQ7OVKRnU9kkx_zN1hv-7HGSxBRRl3hjEcWgRcRoss4qCnNvmabwxW0J1rSc3oss1a8jj7J-hUmUDTa5EasvlsJPs9t8XmyuoF1PVgnTjOCyDjPpXDAjaziaojxWlQh0-t35XiXymBi4lfebfgf1a37RT1raPJ79pj1_KLJ5tgtE",
+      "createdBy": {
+        "application": {
+          "displayName": "OneDrive website",
+          "id": "44048800"
+        },
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "createdDateTime": "2015-12-17T19:56:11.88Z",
+      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITEwNC4yNTc",
+      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMDQuMw",
+      "id": "F4D50E400DFE7D4E!104",
+      "lastModifiedBy": {
+        "application": {
+          "displayName": "OneDrive website",
+          "id": "44048800"
+        },
+        "user": {
+          "displayName": "Fitz Elliott",
+          "id": "f4d50e400dfe7d4e"
+        }
+      },
+      "lastModifiedDateTime": "2015-12-17T19:56:29.963Z",
+      "name": "Getting started with OneDrive.pdf",
+      "parentReference": {
+        "driveId": "f4d50e400dfe7d4e",
+        "id": "F4D50E400DFE7D4E!103",
+        "path": "/drive/root:"
+      },
+      "size": 1311269,
+      "webUrl": "https://1drv.ms/b/s!AE59_g1ADtX0aA",
+      "file": {
+        "hashes": {
+          "crc32Hash": "F8DDF9BE",
+          "sha1Hash": "A9C4ACF2DA75FC49056976433AC32142D2C71AB1"
+        },
+        "mimeType": "application/pdf"
+      },
+      "fileSystemInfo": {
+        "createdDateTime": "2015-12-17T19:56:11.88Z",
+        "lastModifiedDateTime": "2015-12-17T19:56:11.88Z"
+      }
+    }
+]
 
-@contextmanager
-def patch_client(target, mock_client=None):
-    """Patches a function that returns a OneDriveClient, returning an instance
-    of MockOneDrive instead.
+raw_subfolder_response = [
+    {
+        "@content.downloadUrl": "https://public.bn1303.livefilestore.com/173450918374509173450",
+        "createdBy": {
+            "application": {
+                "displayName": "local-thingapp",
+                "id": "994562945"
+            },
+            "user": {
+                "displayName": "Fitz Elliott",
+                "id": "992349"
+            }
+        },
+        "createdDateTime": "2017-02-07T20:37:50.73Z",
+        "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITEzMC4yNTc",
+        "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMzAuMw",
+        "id": "FE830D1CB134A0!130",
+        "lastModifiedBy": {
+            "application": {
+                "displayName": "local-THINGAPP",
+                "id": "994562945"
+            },
+            "user": {
+                "displayName": "Fitz Elliott",
+                "id": "992349"
+            }
+        },
+        "lastModifiedDateTime": "2017-02-07T20:37:53.47Z",
+        "name": "Periodic Table of the Operators A4 300dpi.jpg",
+        "parentReference": {
+            "driveId": "fe830d1cb134a0",
+            "id": "FE830D1CB134A0!130",
+            "name": "Documents",
+            "path": "/drive/root:/Documents"
+        },
+        "size": 1056811,
+        "webUrl": "https://1drv.ms/i/s!LE93_m9sd3WJ82",
+        "file": {
+            "hashes": {
+                "crc32Hash": "B0D38EF0",
+                "sha1Hash": "DE751E0D3D8292A349A4698C59BDE514CD633589"
+            },
+            "mimeType": "image/jpeg"
+        },
+        "fileSystemInfo": {
+            "createdDateTime": "2017-02-07T20:37:50.73Z",
+            "lastModifiedDateTime": "2017-02-07T20:37:50.73Z"
+        },
+        "image": {
+            "height": 2456,
+            "width": 3477
+        },
+        "photo": {
+            "takenDateTime": "2017-02-07T20:37:50.73Z"
+        }
+    }
+]
 
-    Usage: ::
 
-        with patch_client('website.addons.onedrive.view.config.get_client') as client:
-            # test view that uses the onedrive client.
-    """
-    with mock.patch(target) as client_getter:
-        client = mock_client or MockOneDrive()
-        client_getter.return_value = client
-        yield client
diff --git a/website/addons/onedrive/utils.py b/website/addons/onedrive/utils.py
index ca58fad0188..e5864f63f4b 100644
--- a/website/addons/onedrive/utils.py
+++ b/website/addons/onedrive/utils.py
@@ -45,27 +45,35 @@ def log(self, action, extra=None, save=False):
             'folder_name': self.node.get_addon('onedrive', deleted=True).folder_name,
             'folder': self.node.get_addon('onedrive', deleted=True).folder_path
         }
+
         # If logging a file-related action, add the file's view and download URLs
         if self.path:
             params.update({
                 'urls': {
-                    'view': self.node.web_url_for('addon_view_or_download_file', path=self.path, provider='onedrive'),
+                    'view': self.node.web_url_for(
+                        'addon_view_or_download_file',
+                        path=self.path,
+                        provider='onedrive',
+                    ),
                     'download': self.node.web_url_for(
                         'addon_view_or_download_file',
                         path=self.path,
-                        provider='onedrive'
+                        provider='onedrive',
                     )
                 },
                 'path': self.path,
             })
+
         if extra:
             params.update(extra)
+
         # Prefix the action with onedrive_
         self.node.add_log(
-            action="onedrive_{0}".format(action),
+            action='onedrive_{0}'.format(action),
             params=params,
             auth=self.auth
         )
+
         if save:
             self.node.save()
 
diff --git a/website/addons/onedrive/views.py b/website/addons/onedrive/views.py
index aeed98c5ecd..f5e8fd23b53 100644
--- a/website/addons/onedrive/views.py
+++ b/website/addons/onedrive/views.py
@@ -1,183 +1,53 @@
 """Views for the node settings page."""
 # -*- coding: utf-8 -*-
-import httplib as http
-
-import logging
 
 from flask import request
-from website.addons.onedrive.client import OneDriveClient
-
-from framework.exceptions import HTTPError, PermissionsError
-from framework.auth.decorators import must_be_logged_in
-
-from website.oauth.models import ExternalAccount
 
-from website.util import permissions
 from website.project.decorators import (
     must_have_addon, must_be_addon_authorizer,
-    must_have_permission, must_not_be_registration,
 )
 
+from website.addons.base import generic_views
 from website.addons.onedrive.serializer import OneDriveSerializer
 
-logger = logging.getLogger(__name__)
-
-logging.getLogger('onedrive1').setLevel(logging.WARNING)
-
-@must_be_logged_in
-def onedrive_get_user_settings(auth):
-    """ Returns the list of all of the current user's authorized OneDrive accounts """
-    serializer = OneDriveSerializer(user_settings=auth.user.get_addon('onedrive'))
-    return serializer.serialized_user_settings
 
+SHORT_NAME = 'onedrive'
+FULL_NAME = 'OneDrive'
 
-@must_have_addon('onedrive', 'node')
-@must_have_permission(permissions.WRITE)
-def onedrive_get_config(node_addon, auth, **kwargs):
-    """API that returns the serialized node settings."""
-    return {
-        'result': OneDriveSerializer().serialize_settings(node_addon, auth.user),
-    }
-
-
-@must_not_be_registration
-@must_have_addon('onedrive', 'user')
-@must_have_addon('onedrive', 'node')
-@must_be_addon_authorizer('onedrive')
-@must_have_permission(permissions.WRITE)
-def onedrive_set_config(node_addon, user_addon, auth, **kwargs):
-    """View for changing a node's linked onedrive folder."""
-    folder = request.json.get('selected')
-    serializer = OneDriveSerializer(node_settings=node_addon)
-
-    logger.debug('folder::' + repr(folder))
-    logger.debug('serializer::' + repr(serializer))
+onedrive_account_list = generic_views.account_list(
+    SHORT_NAME,
+    OneDriveSerializer
+)
 
-    name = folder['name']
+onedrive_get_config = generic_views.get_config(
+    SHORT_NAME,
+    OneDriveSerializer
+)
 
+def _set_folder(node_addon, folder, auth):
     node_addon.set_folder(folder, auth=auth)
-
-    return {
-        'result': {
-            'folder': {
-                'name': name,
-                'path': name,
-            },
-            'urls': serializer.addon_serialized_urls,
-        },
-        'message': 'Successfully updated settings.',
-    }
-
-
-@must_have_addon('onedrive', 'user')
-@must_have_addon('onedrive', 'node')
-@must_have_permission(permissions.WRITE)
-def onedrive_add_user_auth(auth, node_addon, user_addon, **kwargs):
-    """Import onedrive credentials from the currently logged-in user to a node.
-    """
-    external_account = ExternalAccount.load(
-        request.json['external_account_id']
-    )
-
-    if external_account not in user_addon.external_accounts:
-        raise HTTPError(http.FORBIDDEN)
-
-    try:
-        node_addon.set_auth(external_account, user_addon.owner)
-    except PermissionsError:
-        raise HTTPError(http.FORBIDDEN)
-
-    node_addon.set_user_auth(user_addon)
     node_addon.save()
 
-    return {
-        'result': OneDriveSerializer().serialize_settings(node_addon, auth.user),
-        'message': 'Successfully imported access token from profile.',
-    }
-
-
-@must_not_be_registration
-@must_have_addon('onedrive', 'node')
-@must_have_permission(permissions.WRITE)
-def onedrive_remove_user_auth(auth, node_addon, **kwargs):
-    node_addon.deauthorize(auth=auth)
-    node_addon.save()
-
-
-@must_have_addon('onedrive', 'user')
-@must_have_addon('onedrive', 'node')
-@must_have_permission(permissions.WRITE)
-def onedrive_get_share_emails(auth, user_addon, node_addon, **kwargs):
-    """Return a list of emails of the contributors on a project.
-
-    The current user MUST be the user who authenticated OneDrive for the node.
-    """
-    if not node_addon.user_settings:
-        raise HTTPError(http.BAD_REQUEST)
-    # Current user must be the user who authorized the addon
-    if node_addon.user_settings.owner != auth.user:
-        raise HTTPError(http.FORBIDDEN)
+onedrive_set_config = generic_views.set_config(
+    SHORT_NAME,
+    FULL_NAME,
+    OneDriveSerializer,
+    _set_folder
+)
 
-    return {
-        'result': {
-            'emails': [
-                contrib.username
-                for contrib in node_addon.owner.contributors
-                if contrib != auth.user
-            ],
-        }
-    }
+onedrive_import_auth = generic_views.import_auth(
+    SHORT_NAME,
+    OneDriveSerializer
+)
 
+onedrive_deauthorize_node = generic_views.deauthorize_node(
+    SHORT_NAME
+)
 
-@must_have_addon('onedrive', 'node')
-@must_be_addon_authorizer('onedrive')
+@must_have_addon(SHORT_NAME, 'node')
+@must_be_addon_authorizer(SHORT_NAME)
 def onedrive_folder_list(node_addon, **kwargs):
-    """Returns a list of folders in OneDrive"""
-    if not node_addon.has_auth:
-        raise HTTPError(http.FORBIDDEN)
-
-    node = node_addon.owner
-    folder_id = request.args.get('folderId')
-    logger.debug('oauth_provider::' + repr(node_addon.oauth_provider))
-    logger.debug('fetch_access_token::' + repr(node_addon))
-    logger.debug('node_addon.external_account::' + repr(node_addon.external_account))
-    logger.debug('node_addon.external_account::oauth_key' + repr(node_addon.external_account.oauth_key))
-    logger.debug('node_addon.external_account::expires_at' + repr(node_addon.external_account.refresh_token))
-    logger.debug('node_addon.external_account::expires_at' + repr(node_addon.external_account.expires_at))
-
-    if folder_id is None:
-        return [{
-            'id': '0',
-            'path': 'All Files',
-            'addon': 'onedrive',
-            'kind': 'folder',
-            'name': '/ (Full OneDrive)',
-            'urls': {
-                'folders': node.api_url_for('onedrive_folder_list', folderId=0),
-            }
-        }]
-
-    if folder_id == '0':
-        folder_id = 'root'
-
-    access_token = node_addon.fetch_access_token()
-    logger.debug('access_token::' + repr(access_token))
-
-    oneDriveClient = OneDriveClient(access_token)
-    items = oneDriveClient.folders(folder_id)
-    logger.debug('folders::' + repr(items))
-
-    return [
-        {
-            'addon': 'onedrive',
-            'kind': 'folder',
-            'id': item['id'],
-            'name': item['name'],
-            'path': item['name'],
-            'urls': {
-                'folders': node.api_url_for('onedrive_folder_list', folderId=item['id']),
-            }
-        }
-        for item in items
-
-    ]
+    """ Returns all the subsequent folders under the folder id passed.
+    """
+    folder_id = request.args.get('folder_id')
+    return node_addon.get_folders(folder_id=folder_id)
diff --git a/website/notifications/constants.py b/website/notifications/constants.py
index 9fa6a53098b..51a12ee343e 100644
--- a/website/notifications/constants.py
+++ b/website/notifications/constants.py
@@ -32,5 +32,6 @@
     'bitbucket': 'Bitbucket',
     'googledrive': 'Google Drive',
     'owncloud': 'ownCloud',
+    'onedrive': 'Microsoft OneDrive',
     's3': 'Amazon S3'
 }
diff --git a/website/static/js/addonSettings.js b/website/static/js/addonSettings.js
index 1eae8f57a53..53e67a050f2 100644
--- a/website/static/js/addonSettings.js
+++ b/website/static/js/addonSettings.js
@@ -95,6 +95,8 @@ var OAuthAddonSettingsViewModel = oop.defclass({
                         self.setMessage('Add-on successfully authorized. If you wish to link a different account, log out of dropbox.com before attempting to connect to a second Dropbox account on the OSF. This will clear the credentials stored in your browser.', 'text-success');
                     } else if (self.name === 'bitbucket') {
                         self.setMessage('Add-on successfully authorized. If you wish to link a different account, log out of bitbucket.org before attempting to connect to a second Bitbucket account on the OSF. This will clear the credentials stored in your browser.', 'text-success');
+                    } else if (self.name === 'onedrive') {
+                        self.setMessage('Add-on successfully authorized. If you wish to link a different account, log out of onedrive.live.com before attempting to connect to a second OneDrive account on the OSF. This will clear the credentials stored in your browser.', 'text-success');
                     } else {
                         self.setMessage('Add-on successfully authorized. To link this add-on to an OSF project, go to the settings page of the project, enable ' + self.properName + ', and choose content to connect.', 'text-success');
                     }
diff --git a/website/static/js/fangorn.js b/website/static/js/fangorn.js
index df7d9ae4742..e16ab6c9266 100644
--- a/website/static/js/fangorn.js
+++ b/website/static/js/fangorn.js
@@ -2100,7 +2100,16 @@ var FGToolbar = {
         }
         // multiple selection icons
         // Special cased to not show 'delete multiple' for github or published dataverses
-        if(items.length > 1 && ctrl.tb.multiselected()[0].data.provider !== 'github' && ctrl.tb.options.placement !== 'fileview' && !(ctrl.tb.multiselected()[0].data.provider === 'dataverse' && ctrl.tb.multiselected()[0].parent().data.version === 'latest-published') ) {
+        if(
+            (items.length > 1) &&
+            (ctrl.tb.multiselected()[0].data.provider !== 'github') &&
+            (ctrl.tb.multiselected()[0].data.provider !== 'onedrive') &&
+            (ctrl.tb.options.placement !== 'fileview') &&
+            !(
+                (ctrl.tb.multiselected()[0].data.provider === 'dataverse') &&
+                (ctrl.tb.multiselected()[0].parent().data.version === 'latest-published')
+            )
+        ) {
             if (showDeleteMultiple(items)) {
                 var preprintPath = getPreprintPath(window.contextVars.node.preprintFileId);
                 if (preprintPath && multiselectContainsPreprint(items, preprintPath)) {
@@ -2547,7 +2556,7 @@ function allowedToMove(folder, item, mustBeIntra) {
         item.data.permissions.edit &&
         (!mustBeIntra || (item.data.provider === folder.data.provider && item.data.nodeId === folder.data.nodeId)) &&
         !(item.data.provider === 'figshare' && item.data.extra && item.data.extra.status === 'public') &&
-        (item.data.provider !== 'bitbucket') && (item.data.provider !== 'gitlab')
+        (item.data.provider !== 'bitbucket') && (item.data.provider !== 'gitlab') && (item.data.provider !== 'onedrive')
     );
 }
 
diff --git a/website/static/js/filepage/index.js b/website/static/js/filepage/index.js
index 2c198368cb6..ae2d419fde4 100644
--- a/website/static/js/filepage/index.js
+++ b/website/static/js/filepage/index.js
@@ -136,8 +136,8 @@ var FileViewPage = {
                     '. It needs to be checked in before any changes can be made.'
                 ])));
             }
-        } else if (self.file.provider === 'bitbucket' || self.file.provider === 'gitlab') {
-            self.canEdit = function() { return false; };  // Bitbucket and GitLab are read-only
+        } else if (self.file.provider === 'bitbucket' || self.file.provider === 'gitlab' || self.file.provider === 'onedrive') {
+            self.canEdit = function() { return false; };  // Bitbucket, OneDrive, and GitLab are read-only
         } else {
             self.canEdit = function() {
                 return self.context.currentUser.canEdit;
diff --git a/website/static/js/logTextParser.js b/website/static/js/logTextParser.js
index 2c5762c5521..328ae030fc5 100644
--- a/website/static/js/logTextParser.js
+++ b/website/static/js/logTextParser.js
@@ -574,6 +574,17 @@ var LogPieces = {
         }
     },
 
+    onedrive_folder: {
+        view: function(ctrl, logObject) {
+            var folder = logObject.attributes.params.folder;
+
+            if(paramIsReturned(folder, logObject)){
+                return m('span', folder === '/' ? '/ (Full OneDrive)' : folder);
+            }
+            return m('span', '');
+        }
+    },
+
     citation: {
         view: function(ctrl, logObject) {
             return returnTextParams('citation_name', '', logObject);
diff --git a/website/static/js/osfLanguage.js b/website/static/js/osfLanguage.js
index 6a3a9ea7321..4e4b213a813 100644
--- a/website/static/js/osfLanguage.js
+++ b/website/static/js/osfLanguage.js
@@ -94,6 +94,13 @@ module.exports = {
                 'associated with this account.',
             deauthError: 'Could not disconnect the Google Drive account at this time',
         },
+        onedrive: {
+            // Shown on clicking "Delete Access Token" for onedrive
+            confirmDeauth: 'Are you sure you want to disconnect the Microsoft OneDrive account? ' +
+                'This will revoke access to Microsoft OneDrive for all projects you have ' +
+                'associated with this account.',
+            deauthError: 'Could not disconnect the Microsoft OneDrive account at this time',
+        },
         owncloud: {
             authError: 'Invalid ownCloud server',
             authInvalid: 'Invalid credentials. Please enter a valid username and password.',
diff --git a/website/static/storageAddons.json b/website/static/storageAddons.json
index cd6880f4c38..5bffbec206c 100644
--- a/website/static/storageAddons.json
+++ b/website/static/storageAddons.json
@@ -45,6 +45,6 @@
     },
     "onedrive": {
         "fullName": "OneDrive",
-        "externalView": false
+        "externalView": true
     }
 }

From 8585ee1304010cfed26340496d1c92d08ca3d574 Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Tue, 5 Sep 2017 13:43:29 -0400
Subject: [PATCH 123/192] Rewrite OneDrive addon as Django app

[#OSF-8573]
---
 {website/addons => addons}/onedrive/README.md |   4 +-
 addons/onedrive/__init__.py                   |   1 +
 addons/onedrive/apps.py                       |  43 +
 {website/addons => addons}/onedrive/client.py |   6 +-
 addons/onedrive/migrations/0001_initial.py    |  55 ++
 .../onedrive/migrations}/__init__.py          |   0
 .../model.py => addons/onedrive/models.py     |  50 +-
 .../onedrive/requirements.txt                 |   0
 {website/addons => addons}/onedrive/routes.py |  10 +-
 .../addons => addons}/onedrive/serializer.py  |   2 +-
 .../onedrive/settings/__init__.py             |   0
 .../onedrive/settings/defaults.py             |   0
 .../onedrive/settings/local-dist.py           |   0
 .../onedrive/static/comicon.png               | Bin
 .../onedrive/static/files.js                  |   0
 .../onedrive/static/node-cfg.js               |   0
 .../onedriveAnonymousLogActionList.json       |   0
 .../onedrive/static/onedriveFangornConfig.js  |   0
 .../static/onedriveLogActionList.json         |   0
 addons/onedrive/tests/__init__.py             |   0
 addons/onedrive/tests/conftest.py             |   1 +
 .../onedrive/tests/factories.py               |  25 +-
 .../onedrive/tests/test_client.py             |  15 +-
 .../onedrive/tests/test_models.py             |  66 +-
 .../onedrive/tests/test_serializer.py         |  13 +-
 .../onedrive/tests/test_views.py              |  23 +-
 addons/onedrive/tests/utils.py                | 744 ++++++++++++++++++
 {website/addons => addons}/onedrive/views.py  |   4 +-
 api/base/settings/defaults.py                 |   1 +
 osf/migrations/0065_auto_20171024_1334.py     |  50 ++
 website/addons/onedrive/__init__.py           |  48 --
 website/addons/onedrive/tests/test_utils.py   |  44 --
 website/addons/onedrive/tests/utils.py        | 744 ------------------
 website/addons/onedrive/utils.py              |  96 ---
 34 files changed, 995 insertions(+), 1050 deletions(-)
 rename {website/addons => addons}/onedrive/README.md (50%)
 create mode 100644 addons/onedrive/__init__.py
 create mode 100644 addons/onedrive/apps.py
 rename {website/addons => addons}/onedrive/client.py (93%)
 create mode 100644 addons/onedrive/migrations/0001_initial.py
 rename {website/addons/onedrive/tests => addons/onedrive/migrations}/__init__.py (100%)
 rename website/addons/onedrive/model.py => addons/onedrive/models.py (89%)
 rename {website/addons => addons}/onedrive/requirements.txt (100%)
 rename {website/addons => addons}/onedrive/routes.py (97%)
 rename {website/addons => addons}/onedrive/serializer.py (94%)
 rename {website/addons => addons}/onedrive/settings/__init__.py (100%)
 rename {website/addons => addons}/onedrive/settings/defaults.py (100%)
 rename {website/addons => addons}/onedrive/settings/local-dist.py (100%)
 rename {website/addons => addons}/onedrive/static/comicon.png (100%)
 rename {website/addons => addons}/onedrive/static/files.js (100%)
 rename {website/addons => addons}/onedrive/static/node-cfg.js (100%)
 rename {website/addons => addons}/onedrive/static/onedriveAnonymousLogActionList.json (100%)
 rename {website/addons => addons}/onedrive/static/onedriveFangornConfig.js (100%)
 rename {website/addons => addons}/onedrive/static/onedriveLogActionList.json (100%)
 create mode 100644 addons/onedrive/tests/__init__.py
 create mode 100644 addons/onedrive/tests/conftest.py
 rename {website/addons => addons}/onedrive/tests/factories.py (58%)
 rename {website/addons => addons}/onedrive/tests/test_client.py (60%)
 rename {website/addons => addons}/onedrive/tests/test_models.py (61%)
 rename {website/addons => addons}/onedrive/tests/test_serializer.py (54%)
 rename {website/addons => addons}/onedrive/tests/test_views.py (71%)
 create mode 100644 addons/onedrive/tests/utils.py
 rename {website/addons => addons}/onedrive/views.py (90%)
 create mode 100644 osf/migrations/0065_auto_20171024_1334.py
 delete mode 100644 website/addons/onedrive/__init__.py
 delete mode 100644 website/addons/onedrive/tests/test_utils.py
 delete mode 100644 website/addons/onedrive/tests/utils.py
 delete mode 100644 website/addons/onedrive/utils.py

diff --git a/website/addons/onedrive/README.md b/addons/onedrive/README.md
similarity index 50%
rename from website/addons/onedrive/README.md
rename to addons/onedrive/README.md
index f6a79db405a..212f33e1ce8 100644
--- a/website/addons/onedrive/README.md
+++ b/addons/onedrive/README.md
@@ -2,6 +2,6 @@
 
 Enabling the addon for development
 
-1. If `website/addons/onedrive/settings/local.py` does not yet exist, create a local onedrive settings file with `cp website/addons/onedrive/settings/local-dist.py website/addons/onedrive/settings/local.py`
+1. If `addons/onedrive/settings/local.py` does not yet exist, create a local onedrive settings file with `cp addons/onedrive/settings/local-dist.py addons/onedrive/settings/local.py`
 2. Register the addon with Microsoft (https://account.live.com/developers/applications/index) and enter http://localhost:5000/oauth/callback/onedrive/ as the Redirect URL.
-3. Enter your OneDrive `client_id` and `client_secret` as `ONEDRIVE_KEY` and `ONEDRIVE_SECRET` in `website/addons/onedrive/settings/local.py`.
\ No newline at end of file
+3. Enter your OneDrive `client_id` and `client_secret` as `ONEDRIVE_KEY` and `ONEDRIVE_SECRET` in `addons/onedrive/settings/local.py`.
diff --git a/addons/onedrive/__init__.py b/addons/onedrive/__init__.py
new file mode 100644
index 00000000000..7bbd1d8d8a3
--- /dev/null
+++ b/addons/onedrive/__init__.py
@@ -0,0 +1 @@
+default_app_config = 'addons.onedrive.apps.OneDriveAddonAppConfig'
diff --git a/addons/onedrive/apps.py b/addons/onedrive/apps.py
new file mode 100644
index 00000000000..99b7a15b53e
--- /dev/null
+++ b/addons/onedrive/apps.py
@@ -0,0 +1,43 @@
+from addons.base.apps import BaseAddonAppConfig, generic_root_folder
+
+onedrive_root_folder = generic_root_folder('onedrive')
+
+class OneDriveAddonAppConfig(BaseAddonAppConfig):
+
+    name = 'addons.onedrive'
+    label = 'addons_onedrive'
+    full_name = 'OneDrive'
+    short_name = 'onedrive'
+    owners = ['user', 'node']
+    configs = ['accounts', 'node']
+    categories = ['storage']
+    has_hgrid_files = True
+    max_file_size = 250  # MB
+
+    @property
+    def get_hgrid_data(self):
+        return onedrive_root_folder
+
+    FILE_ADDED = 'onedrive_file_added'
+    FILE_REMOVED = 'onedrive_file_removed'
+    FILE_UPDATED = 'onedrive_file_updated'
+    FOLDER_CREATED = 'onedrive_folder_created'
+    FOLDER_SELECTED = 'onedrive_folder_selected'
+    NODE_AUTHORIZED = 'onedrive_node_authorized'
+    NODE_DEAUTHORIZED = 'onedrive_node_deauthorized'
+    NODE_DEAUTHORIZED_NO_USER = 'onedrive_node_deauthorized_no_user'
+
+    actions = (FILE_ADDED, FILE_REMOVED, FILE_UPDATED, FOLDER_CREATED, FOLDER_SELECTED, NODE_AUTHORIZED, NODE_DEAUTHORIZED, NODE_DEAUTHORIZED_NO_USER)
+
+    @property
+    def routes(self):
+        from . import routes
+        return [routes.api_routes]
+
+    @property
+    def user_settings(self):
+        return self.get_model('UserSettings')
+
+    @property
+    def node_settings(self):
+        return self.get_model('NodeSettings')
diff --git a/website/addons/onedrive/client.py b/addons/onedrive/client.py
similarity index 93%
rename from website/addons/onedrive/client.py
rename to addons/onedrive/client.py
index 74b9b43a89f..8bfa06a9ab9 100644
--- a/website/addons/onedrive/client.py
+++ b/addons/onedrive/client.py
@@ -2,8 +2,8 @@
 from framework.exceptions import HTTPError
 
 from website.util.client import BaseClient
-from website.addons.onedrive import settings
-from website.addons.onedrive.settings import DEFAULT_ROOT_ID
+from addons.onedrive import settings
+from addons.onedrive.settings import DEFAULT_ROOT_ID
 
 
 class OneDriveClient(BaseClient):
@@ -28,7 +28,7 @@ def folders(self, folder_id=None):
         """
 
         if folder_id is None or folder_id == DEFAULT_ROOT_ID:
-            url = self._build_url(settings.ONEDRIVE_API_URL, 'drive', 'root', 'children')
+            url = self._build_url(settings.ONEDRIVE_API_URL, 'drive', DEFAULT_ROOT_ID, 'children')
         else:
             url = self._build_url(settings.ONEDRIVE_API_URL, 'drive', 'items',
                                   folder_id, 'children')
diff --git a/addons/onedrive/migrations/0001_initial.py b/addons/onedrive/migrations/0001_initial.py
new file mode 100644
index 00000000000..75fa1be4b9a
--- /dev/null
+++ b/addons/onedrive/migrations/0001_initial.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.4 on 2017-10-24 18:34
+from __future__ import unicode_literals
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+import osf.models.base
+import osf.utils.datetime_aware_jsonfield
+
+
+class Migration(migrations.Migration):
+
+    initial = True
+
+    dependencies = [
+        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+        ('osf', '0065_auto_20171024_1334'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='NodeSettings',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('_id', models.CharField(db_index=True, default=osf.models.base.generate_object_id, max_length=24, unique=True)),
+                ('deleted', models.BooleanField(default=False)),
+                ('folder_id', models.TextField(blank=True, null=True)),
+                ('folder_path', models.TextField(blank=True, null=True)),
+                ('external_account', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='addons_onedrive_node_settings', to='osf.ExternalAccount')),
+                ('owner', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='addons_onedrive_node_settings', to='osf.AbstractNode')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='UserSettings',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('_id', models.CharField(db_index=True, default=osf.models.base.generate_object_id, max_length=24, unique=True)),
+                ('deleted', models.BooleanField(default=False)),
+                ('oauth_grants', osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONField(blank=True, default=dict, encoder=osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONEncoder)),
+                ('owner', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='addons_onedrive_user_settings', to=settings.AUTH_USER_MODEL)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='user_settings',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='addons_onedrive.UserSettings'),
+        ),
+    ]
diff --git a/website/addons/onedrive/tests/__init__.py b/addons/onedrive/migrations/__init__.py
similarity index 100%
rename from website/addons/onedrive/tests/__init__.py
rename to addons/onedrive/migrations/__init__.py
diff --git a/website/addons/onedrive/model.py b/addons/onedrive/models.py
similarity index 89%
rename from website/addons/onedrive/model.py
rename to addons/onedrive/models.py
index 0244882313e..4f021c2aa2c 100644
--- a/website/addons/onedrive/model.py
+++ b/addons/onedrive/models.py
@@ -3,27 +3,38 @@
 import urllib
 import logging
 
-from modularodm import fields
-
+from django.db import models
+
+from addons.base import exceptions
+from addons.base.models import (BaseOAuthNodeSettings, BaseOAuthUserSettings,
+                                BaseStorageAddon)
+from addons.onedrive import settings
+from addons.onedrive.client import OneDriveClient
+from addons.onedrive.settings import DEFAULT_ROOT_ID
+from addons.onedrive.serializer import OneDriveSerializer
 from framework.auth import Auth
 from framework.exceptions import HTTPError
-from website.oauth.models import ExternalProvider
-
+from osf.models.external import ExternalProvider
+from osf.models.files import File, Folder, BaseFileNode
 from website.util import api_v2_url
-from website.addons.base import exceptions
-from website.addons.base import StorageAddonBase
-from website.addons.base import AddonOAuthUserSettingsBase, AddonOAuthNodeSettingsBase
-
-from website.addons.onedrive import settings
-from website.addons.onedrive.client import OneDriveClient
-from website.addons.onedrive.settings import DEFAULT_ROOT_ID
-from website.addons.onedrive.serializer import OneDriveSerializer
 
 logger = logging.getLogger(__name__)
 
 
+class OneDriveFileNode(BaseFileNode):
+    _provider = 'onedrive'
+
+
+class OneDriveFolder(OneDriveFileNode, Folder):
+    pass
+
+
+class OneDriveFile(OneDriveFileNode, File):
+    pass
+
+
 class OneDriveProvider(ExternalProvider):
-    name = 'onedrive'
+    name = 'Microsoft OneDrive'
     short_name = 'onedrive'
 
     client_id = settings.ONEDRIVE_KEY
@@ -55,14 +66,14 @@ def fetch_access_token(self, force_refresh=False):
         return self.account.oauth_key
 
 
-class OneDriveUserSettings(AddonOAuthUserSettingsBase):
+class UserSettings(BaseOAuthUserSettings):
     """Stores user-specific onedrive information
     """
     oauth_provider = OneDriveProvider
     serializer = OneDriveSerializer
 
 
-class OneDriveNodeSettings(StorageAddonBase, AddonOAuthNodeSettingsBase):
+class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
     """Individual OneDrive settings for a particular node.
 
     QUIRKS::
@@ -77,15 +88,12 @@ class OneDriveNodeSettings(StorageAddonBase, AddonOAuthNodeSettingsBase):
       is defined in the settings.
 
     """
-
     oauth_provider = OneDriveProvider
     serializer = OneDriveSerializer
 
-    foreign_user_settings = fields.ForeignField(
-        'onedriveusersettings', backref='authorized'
-    )
-    folder_id = fields.StringField(default=None)
-    folder_path = fields.StringField()
+    folder_id = models.TextField(null=True, blank=True)
+    folder_path = models.TextField(null=True, blank=True)
+    user_settings = models.ForeignKey(UserSettings, null=True, blank=True)
 
     _api = None
 
diff --git a/website/addons/onedrive/requirements.txt b/addons/onedrive/requirements.txt
similarity index 100%
rename from website/addons/onedrive/requirements.txt
rename to addons/onedrive/requirements.txt
diff --git a/website/addons/onedrive/routes.py b/addons/onedrive/routes.py
similarity index 97%
rename from website/addons/onedrive/routes.py
rename to addons/onedrive/routes.py
index 85c76e2b814..ebdbcab1b97 100644
--- a/website/addons/onedrive/routes.py
+++ b/addons/onedrive/routes.py
@@ -2,14 +2,12 @@
 """OneDrive addon routes."""
 from framework.routing import Rule, json_renderer
 
-from website.addons.onedrive import views
+from addons.onedrive import views
 
 
 api_routes = {
     'rules': [
-
         #### Profile settings ###
-
         Rule(
             [
                 '/settings/onedrive/accounts/',
@@ -18,9 +16,7 @@
             views.onedrive_account_list,
             json_renderer,
         ),
-
         ##### Node settings #####
-
         Rule(
             [
                 '/project/<pid>/onedrive/folders/',
@@ -30,7 +26,6 @@
             views.onedrive_folder_list,
             json_renderer,
         ),
-
         Rule(
             [
                 '/project/<pid>/onedrive/config/',
@@ -40,7 +35,6 @@
             views.onedrive_get_config,
             json_renderer,
         ),
-
         Rule(
             [
                 '/project/<pid>/onedrive/config/',
@@ -50,7 +44,6 @@
             views.onedrive_set_config,
             json_renderer,
         ),
-
         Rule(
             [
                 '/project/<pid>/onedrive/config/',
@@ -60,7 +53,6 @@
             views.onedrive_deauthorize_node,
             json_renderer,
         ),
-
         Rule(
             [
                 '/project/<pid>/onedrive/import-auth/',
diff --git a/website/addons/onedrive/serializer.py b/addons/onedrive/serializer.py
similarity index 94%
rename from website/addons/onedrive/serializer.py
rename to addons/onedrive/serializer.py
index 8c44c3a61f3..20e8a3c592d 100644
--- a/website/addons/onedrive/serializer.py
+++ b/addons/onedrive/serializer.py
@@ -1,7 +1,7 @@
 from oauthlib.oauth2 import InvalidGrantError
 
 from website.util import api_url_for
-from website.addons.base.serializer import StorageAddonSerializer
+from addons.base.serializer import StorageAddonSerializer
 
 
 class OneDriveSerializer(StorageAddonSerializer):
diff --git a/website/addons/onedrive/settings/__init__.py b/addons/onedrive/settings/__init__.py
similarity index 100%
rename from website/addons/onedrive/settings/__init__.py
rename to addons/onedrive/settings/__init__.py
diff --git a/website/addons/onedrive/settings/defaults.py b/addons/onedrive/settings/defaults.py
similarity index 100%
rename from website/addons/onedrive/settings/defaults.py
rename to addons/onedrive/settings/defaults.py
diff --git a/website/addons/onedrive/settings/local-dist.py b/addons/onedrive/settings/local-dist.py
similarity index 100%
rename from website/addons/onedrive/settings/local-dist.py
rename to addons/onedrive/settings/local-dist.py
diff --git a/website/addons/onedrive/static/comicon.png b/addons/onedrive/static/comicon.png
similarity index 100%
rename from website/addons/onedrive/static/comicon.png
rename to addons/onedrive/static/comicon.png
diff --git a/website/addons/onedrive/static/files.js b/addons/onedrive/static/files.js
similarity index 100%
rename from website/addons/onedrive/static/files.js
rename to addons/onedrive/static/files.js
diff --git a/website/addons/onedrive/static/node-cfg.js b/addons/onedrive/static/node-cfg.js
similarity index 100%
rename from website/addons/onedrive/static/node-cfg.js
rename to addons/onedrive/static/node-cfg.js
diff --git a/website/addons/onedrive/static/onedriveAnonymousLogActionList.json b/addons/onedrive/static/onedriveAnonymousLogActionList.json
similarity index 100%
rename from website/addons/onedrive/static/onedriveAnonymousLogActionList.json
rename to addons/onedrive/static/onedriveAnonymousLogActionList.json
diff --git a/website/addons/onedrive/static/onedriveFangornConfig.js b/addons/onedrive/static/onedriveFangornConfig.js
similarity index 100%
rename from website/addons/onedrive/static/onedriveFangornConfig.js
rename to addons/onedrive/static/onedriveFangornConfig.js
diff --git a/website/addons/onedrive/static/onedriveLogActionList.json b/addons/onedrive/static/onedriveLogActionList.json
similarity index 100%
rename from website/addons/onedrive/static/onedriveLogActionList.json
rename to addons/onedrive/static/onedriveLogActionList.json
diff --git a/addons/onedrive/tests/__init__.py b/addons/onedrive/tests/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/addons/onedrive/tests/conftest.py b/addons/onedrive/tests/conftest.py
new file mode 100644
index 00000000000..da9f243685b
--- /dev/null
+++ b/addons/onedrive/tests/conftest.py
@@ -0,0 +1 @@
+from osf_tests.conftest import *  # noqa
diff --git a/website/addons/onedrive/tests/factories.py b/addons/onedrive/tests/factories.py
similarity index 58%
rename from website/addons/onedrive/tests/factories.py
rename to addons/onedrive/tests/factories.py
index 445d8be8aae..7ac6d0a54b6 100644
--- a/website/addons/onedrive/tests/factories.py
+++ b/addons/onedrive/tests/factories.py
@@ -1,20 +1,13 @@
 # -*- coding: utf-8 -*-
 """Factory boy factories for the OneDrive addon."""
-import datetime
-
 from dateutil.relativedelta import relativedelta
+from django.utils import timezone
 
 from factory import SubFactory, Sequence
-from tests.factories import (
-    ModularOdmFactory,
-    UserFactory,
-    ProjectFactory,
-    ExternalAccountFactory)
+from factory.django import DjangoModelFactory
+from osf_tests.factories import UserFactory, ProjectFactory, ExternalAccountFactory
 
-from website.addons.onedrive.model import (
-    OneDriveUserSettings,
-    OneDriveNodeSettings,
-)
+from addons.onedrive.models import UserSettings, NodeSettings
 
 
 class OneDriveAccountFactory(ExternalAccountFactory):
@@ -22,18 +15,18 @@ class OneDriveAccountFactory(ExternalAccountFactory):
     provider_id = Sequence(lambda n: 'id-{0}'.format(n))
     oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
     oauth_secret = Sequence(lambda n: 'secret-{0}'.format(n))
-    expires_at = datetime.datetime.now() + relativedelta(days=1)
+    expires_at = timezone.now() + relativedelta(days=1)
 
-class OneDriveUserSettingsFactory(ModularOdmFactory):
+class OneDriveUserSettingsFactory(DjangoModelFactory):
     class Meta:
-        model = OneDriveUserSettings
+        model = UserSettings
 
     owner = SubFactory(UserFactory)
 
 
-class OneDriveNodeSettingsFactory(ModularOdmFactory):
+class OneDriveNodeSettingsFactory(DjangoModelFactory):
     class Meta:
-        model = OneDriveNodeSettings
+        model = NodeSettings
 
     owner = SubFactory(ProjectFactory)
     user_settings = SubFactory(OneDriveUserSettingsFactory)
diff --git a/website/addons/onedrive/tests/test_client.py b/addons/onedrive/tests/test_client.py
similarity index 60%
rename from website/addons/onedrive/tests/test_client.py
rename to addons/onedrive/tests/test_client.py
index 7268165816e..7480f13693a 100644
--- a/website/addons/onedrive/tests/test_client.py
+++ b/addons/onedrive/tests/test_client.py
@@ -1,17 +1,16 @@
 # -*- coding: utf-8 -*-
+import pytest
+import unittest
 
-from nose.tools import *  # noqa (PEP8 asserts)
+from osf_tests.factories import UserFactory
 
-from tests.base import OsfTestCase
-from tests.factories import UserFactory
+from addons.onedrive.models import UserSettings
 
-from website.addons.onedrive.model import OneDriveUserSettings
+pytestmark = pytest.mark.django_db
 
-
-class TestCore(OsfTestCase):
+class TestCore(unittest.TestCase):
 
     def setUp(self):
-
         super(TestCore, self).setUp()
 
         self.user = UserFactory()
@@ -24,6 +23,6 @@ def setUp(self):
 
     def test_get_addon_returns_onedrive_user_settings(self):
         result = self.user.get_addon('onedrive')
-        assert_true(isinstance(result, OneDriveUserSettings))
+        assert isinstance(result, UserSettings)
 
 
diff --git a/website/addons/onedrive/tests/test_models.py b/addons/onedrive/tests/test_models.py
similarity index 61%
rename from website/addons/onedrive/tests/test_models.py
rename to addons/onedrive/tests/test_models.py
index 009bc5c3aca..1bd1728c598 100644
--- a/website/addons/onedrive/tests/test_models.py
+++ b/addons/onedrive/tests/test_models.py
@@ -1,24 +1,26 @@
 # -*- coding: utf-8 -*-
 import mock
-
-from nose.tools import *  # noqa (PEP8 asserts)
+import pytest
+import unittest
 
 from framework.auth import Auth
-from tests.base import OsfTestCase
-from website.addons.base.testing import models
+from addons.base.tests.models import OAuthAddonNodeSettingsTestSuiteMixin
+from addons.base.tests.models import OAuthAddonUserSettingTestSuiteMixin
 
-from website.addons.onedrive import model
-from website.addons.onedrive.client import OneDriveClient
-from website.addons.onedrive.tests.factories import (
+from addons.onedrive.models import NodeSettings, OneDriveProvider
+from addons.onedrive.client import OneDriveClient
+from addons.onedrive.tests.factories import (
     OneDriveAccountFactory,
     OneDriveNodeSettingsFactory,
     OneDriveUserSettingsFactory,
 )
 
-class TestOneDriveProvider(OsfTestCase):
+pytestmark = pytest.mark.django_db
+
+class TestOneDriveProvider(unittest.TestCase):
     def setUp(self):
         super(TestOneDriveProvider, self).setUp()
-        self.provider = model.OneDriveProvider()
+        self.provider = OneDriveProvider()
 
     @mock.patch.object(OneDriveClient, 'user_info_for_token')
     def test_handle_callback(self, mock_client):
@@ -26,31 +28,31 @@ def test_handle_callback(self, mock_client):
         fake_info = {'id': '12345', 'name': 'fakename', 'link': 'fakeUrl'}
         mock_client.return_value = fake_info
         res = self.provider.handle_callback(fake_response)
-        assert_equal(res['provider_id'], '12345')
-        assert_equal(res['display_name'], 'fakename')
-        assert_equal(res['profile_url'], 'fakeUrl')
+        assert res['provider_id'] == '12345'
+        assert res['display_name'] == 'fakename'
+        assert res['profile_url'] == 'fakeUrl'
 
 
-class TestUserSettings(models.OAuthAddonUserSettingTestSuiteMixin, OsfTestCase):
+class TestUserSettings(OAuthAddonUserSettingTestSuiteMixin, unittest.TestCase):
 
     short_name = 'onedrive'
     full_name = 'Microsoft OneDrive'
     ExternalAccountFactory = OneDriveAccountFactory
 
 
-class TestNodeSettings(models.OAuthAddonNodeSettingsTestSuiteMixin, OsfTestCase):
+class TestNodeSettings(OAuthAddonNodeSettingsTestSuiteMixin, unittest.TestCase):
 
     short_name = 'onedrive'
     full_name = 'Microsoft OneDrive'
     ExternalAccountFactory = OneDriveAccountFactory
 
     NodeSettingsFactory = OneDriveNodeSettingsFactory
-    NodeSettingsClass = model.OneDriveNodeSettings
+    NodeSettingsClass = NodeSettings
     UserSettingsFactory = OneDriveUserSettingsFactory
 
     def setUp(self):
         self.mock_refresh = mock.patch.object(
-            model.OneDriveProvider,
+            OneDriveProvider,
             'refresh_oauth_key'
         )
         self.mock_refresh.return_value = True
@@ -61,40 +63,30 @@ def tearDown(self):
         self.mock_refresh.stop()
         super(TestNodeSettings, self).tearDown()
 
-
-    @mock.patch('website.addons.onedrive.model.OneDriveProvider')
+    @mock.patch('addons.onedrive.models.OneDriveProvider')
     def test_api_not_cached(self, mock_odp):
         # The first call to .api returns a new object
         api = self.node_settings.api
         mock_odp.assert_called_once()
-        assert_equal(api, mock_odp())
+        assert api == mock_odp()
 
-    @mock.patch('website.addons.onedrive.model.OneDriveProvider')
+    @mock.patch('addons.onedrive.models.OneDriveProvider')
     def test_api_cached(self, mock_odp):
         # Repeated calls to .api returns the same object
         self.node_settings._api = 'testapi'
         api = self.node_settings.api
-        assert_false(mock_odp.called)
-        assert_equal(api, 'testapi')
+        assert mock_odp.called is False
+        assert api == 'testapi'
 
     def test_selected_folder_name_root(self):
         self.node_settings.folder_id = 'root'
-
-        assert_equal(
-            self.node_settings.selected_folder_name,
-            "/ (Full OneDrive)"
-        )
+        assert self.node_settings.selected_folder_name == '/ (Full OneDrive)'
 
     def test_selected_folder_name_empty(self):
         self.node_settings.folder_id = None
-
-        assert_equal(
-            self.node_settings.selected_folder_name,
-            ''
-        )
+        assert self.node_settings.selected_folder_name ==  ''
 
     ## Overrides ##
-
     def test_set_folder(self):
         folder = {
             'id': 'fake-folder-id',
@@ -104,12 +96,12 @@ def test_set_folder(self):
         self.node_settings.set_folder(folder, auth=Auth(self.user))
         self.node_settings.save()
         # Folder was set
-        assert_equal(self.node_settings.folder_id, folder['id'])
+        assert self.node_settings.folder_id == folder['id']
         # Log was saved
-        last_log = self.node.logs[-1]
-        assert_equal(last_log.action, '{0}_folder_selected'.format(self.short_name))
+        last_log = self.node.logs.latest()
+        assert last_log.action == '{0}_folder_selected'.format(self.short_name)
 
     def test_serialize_settings(self):
         settings = self.node_settings.serialize_waterbutler_settings()
         expected = {'folder': self.node_settings.folder_id}
-        assert_equal(settings, expected)
+        assert settings == expected
diff --git a/website/addons/onedrive/tests/test_serializer.py b/addons/onedrive/tests/test_serializer.py
similarity index 54%
rename from website/addons/onedrive/tests/test_serializer.py
rename to addons/onedrive/tests/test_serializer.py
index bda59a9f78c..2e3bfaf5765 100644
--- a/website/addons/onedrive/tests/test_serializer.py
+++ b/addons/onedrive/tests/test_serializer.py
@@ -1,15 +1,14 @@
 # -*- coding: utf-8 -*-
 """Serializer tests for the OneDrive addon."""
+import pytest
 
-import mock
-from nose.tools import *  # noqa (PEP8 asserts)
-
+from addons.onedrive.models import OneDriveProvider
+from addons.onedrive.serializer import OneDriveSerializer
+from addons.onedrive.tests.factories import OneDriveAccountFactory
+from addons.base.tests.serializers import StorageAddonSerializerTestSuiteMixin
 from tests.base import OsfTestCase
-from website.addons.onedrive.model import OneDriveProvider
-from website.addons.onedrive.serializer import OneDriveSerializer
-from website.addons.onedrive.tests.factories import OneDriveAccountFactory
-from website.addons.base.testing.serializers import StorageAddonSerializerTestSuiteMixin
 
+pytestmark = pytest.mark.django_db
 
 class TestOneDriveSerializer(StorageAddonSerializerTestSuiteMixin, OsfTestCase):
 
diff --git a/website/addons/onedrive/tests/test_views.py b/addons/onedrive/tests/test_views.py
similarity index 71%
rename from website/addons/onedrive/tests/test_views.py
rename to addons/onedrive/tests/test_views.py
index 742885c19d0..38bb63d64ac 100644
--- a/website/addons/onedrive/tests/test_views.py
+++ b/addons/onedrive/tests/test_views.py
@@ -1,21 +1,20 @@
 # -*- coding: utf-8 -*-
-
 import mock
-from nose.tools import *  # noqa
+import pytest
 
-from website.addons.base.testing import views
-from website.addons.onedrive.client import OneDriveClient
-from website.addons.onedrive.serializer import OneDriveSerializer
-from website.addons.onedrive.tests.utils import OneDriveAddonTestCase
-from website.addons.onedrive.tests.utils import raw_subfolder_response
-from website.addons.onedrive.tests.utils import raw_root_folder_response
+from addons.base.tests import views
+from addons.onedrive.client import OneDriveClient
+from addons.onedrive.serializer import OneDriveSerializer
+from addons.onedrive.tests.utils import OneDriveAddonTestCase, raw_subfolder_response, raw_root_folder_response
+from tests.base import OsfTestCase
 
+pytestmark = pytest.mark.django_db
 
-class TestAuthViews(OneDriveAddonTestCase, views.OAuthAddonAuthViewsTestCaseMixin):
+class TestAuthViews(OneDriveAddonTestCase, views.OAuthAddonAuthViewsTestCaseMixin, OsfTestCase):
     pass
 
 
-class TestConfigViews(OneDriveAddonTestCase, views.OAuthAddonConfigViewsTestCaseMixin):
+class TestConfigViews(OneDriveAddonTestCase, views.OAuthAddonConfigViewsTestCaseMixin, OsfTestCase):
     folder = {
         'path': 'Drive/Camera Uploads',
         'id': '1234567890'
@@ -52,5 +51,5 @@ def test_folder_list_not_root(self, mock_drive_client_folders):
         folderId = '12345'
         url = self.project.api_url_for('onedrive_folder_list', folder_id=folderId)
         res = self.app.get(url, auth=self.user.auth)
-        assert_equal(res.status_code, 200)
-        assert_equal(len(res.json), len(raw_subfolder_response))
+        assert res.status_code == 200
+        assert len(res.json) == len(raw_subfolder_response)
diff --git a/addons/onedrive/tests/utils.py b/addons/onedrive/tests/utils.py
new file mode 100644
index 00000000000..99afd4e50a0
--- /dev/null
+++ b/addons/onedrive/tests/utils.py
@@ -0,0 +1,744 @@
+# -*- coding: utf-8 -*-
+
+from addons.onedrive.models import OneDriveProvider
+from addons.onedrive.tests.factories import OneDriveAccountFactory
+from addons.base.tests.base import OAuthAddonTestCaseMixin, AddonTestCase
+
+
+class OneDriveAddonTestCase(OAuthAddonTestCaseMixin, AddonTestCase):
+
+    ADDON_SHORT_NAME = 'onedrive'
+    ExternalAccountFactory = OneDriveAccountFactory
+    Provider = OneDriveProvider
+
+    def set_node_settings(self, settings):
+        super(OneDriveAddonTestCase, self).set_node_settings(settings)
+        settings.folder_id = '1234567890'
+        settings.folder_path = 'Drive/Camera Uploads'
+        settings.external_account = self.external_account
+
+
+raw_root_folder_response = [
+    {
+      'createdBy': {
+        'application': {
+          'displayName': 'local-cosdev',
+          'id': '44174239'
+        },
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2017-02-23T02:25:24.687Z',
+      'cTag': 'adDpGNEQ1MEU0MDBERkU3RDRFITEzMi42MzYyMzQxMzUyNDg3NzAwMDA',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExMzIuMA',
+      'id': 'F4D50E400DFE7D4E!132',
+      'lastModifiedBy': {
+        'application': {
+          'displayName': 'local-cosdev',
+          'id': '44174239'
+        },
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2017-02-23T02:25:24.877Z',
+      'name': 'Apps',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 0,
+      'webUrl': 'https://1drv.ms/f/s!AE59_g1ADtX0gQQ',
+      'fileSystemInfo': {
+        'createdDateTime': '2017-02-23T02:25:24.687Z',
+        'lastModifiedDateTime': '2017-02-23T02:25:24.687Z'
+      },
+      'folder': {
+        'childCount': 1
+      },
+      'specialFolder': {
+        'name': 'apps'
+      }
+    },
+    {
+      'createdBy': {
+        'application': {
+          'displayName': 'OneDrive website',
+          'id': '44048800'
+        },
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2015-12-17T19:56:12.63Z',
+      'cTag': 'adDpGNEQ1MEU0MDBERkU3RDRFITEwNi42MzYyMjA5NjY3MzQ3MDAwMDA',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExMDYuMA',
+      'id': 'F4D50E400DFE7D4E!106',
+      'lastModifiedBy': {
+        'application': {
+          'displayName': 'OneDrive website',
+          'id': '44048800'
+        },
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2017-02-07T20:37:53.47Z',
+      'name': 'Documents',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 1056811,
+      'webUrl': 'https://1drv.ms/f/s!AE59_g1ADtX0ag',
+      'fileSystemInfo': {
+        'createdDateTime': '2015-12-17T19:56:12.63Z',
+        'lastModifiedDateTime': '2015-12-17T19:56:12.63Z'
+      },
+      'folder': {
+        'childCount': 1
+      },
+      'specialFolder': {
+        'name': 'documents'
+      }
+    },
+    {
+      'createdBy': {
+        'application': {
+          'displayName': 'local-cosdev',
+          'id': '44174239'
+        },
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2017-02-23T02:25:42.93Z',
+      'cTag': 'adDpGNEQ1MEU0MDBERkU3RDRFITEzNC42MzYyMzQxMzU0MjkzMDAwMDA',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExMzQuMA',
+      'id': 'F4D50E400DFE7D4E!134',
+      'lastModifiedBy': {
+        'application': {
+          'displayName': 'local-cosdev',
+          'id': '44174239'
+        },
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2017-02-23T02:25:42.93Z',
+      'name': 'Music',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 0,
+      'webUrl': 'https://1drv.ms/f/s!AE59_g1ADtX0gQY',
+      'fileSystemInfo': {
+        'createdDateTime': '2017-02-23T02:25:42.93Z',
+        'lastModifiedDateTime': '2017-02-23T02:25:42.93Z'
+      },
+      'folder': {
+        'childCount': 0
+      },
+      'specialFolder': {
+        'name': 'music'
+      }
+    },
+    {
+      'createdBy': {
+        'application': {
+          'displayName': 'OneDrive website',
+          'id': '44048800'
+        },
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2015-12-17T19:56:12.24Z',
+      'cTag': 'adDpGNEQ1MEU0MDBERkU3RDRFITEwNS42MzYyMjA5Njk5MTgzMDAwMDA',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExMDUuMA',
+      'id': 'F4D50E400DFE7D4E!105',
+      'lastModifiedBy': {
+        'application': {
+          'displayName': 'OneDrive website',
+          'id': '44048800'
+        },
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2017-02-07T20:43:11.83Z',
+      'name': 'Pictures',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 13,
+      'webUrl': 'https://1drv.ms/f/s!AE59_g1ADtX0aQ',
+      'fileSystemInfo': {
+        'createdDateTime': '2015-12-17T19:56:12.24Z',
+        'lastModifiedDateTime': '2015-12-17T19:56:12.24Z'
+      },
+      'folder': {
+        'childCount': 1
+      },
+      'specialFolder': {
+        'name': 'photos'
+      }
+    },
+    {
+      'createdBy': {
+        'application': {
+          'displayName': 'OneDrive website',
+          'id': '44048800'
+        },
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2015-12-17T19:56:30.89Z',
+      'cTag': 'adDpGNEQ1MEU0MDBERkU3RDRFITEwNy42MzYwOTMxMzUyMDc4MDAwMDA',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExMDcuMA',
+      'id': 'F4D50E400DFE7D4E!107',
+      'lastModifiedBy': {
+        'application': {
+          'displayName': 'OneDrive website',
+          'id': '44048800'
+        },
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2016-09-12T21:45:20.78Z',
+      'name': 'Tenkum',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 1588779,
+      'webUrl': 'https://1drv.ms/f/s!AE59_g1ADtX0aw',
+      'fileSystemInfo': {
+        'createdDateTime': '2015-12-17T19:56:30.89Z',
+        'lastModifiedDateTime': '2015-12-17T19:56:30.89Z'
+      },
+      'folder': {
+        'childCount': 5
+      }
+    },
+    {
+      'createdBy': {
+        'application': {
+          'displayName': 'OneDrive website',
+          'id': '44048800'
+        },
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2017-02-23T07:27:44.253Z',
+      'cTag': 'adDpGNEQ1MEU0MDBERkU3RDRFITE1NC42MzYyMzQzMTcxMDY2MzAwMDA',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExNTQuMA',
+      'id': 'F4D50E400DFE7D4E!154',
+      'lastModifiedBy': {
+        'application': {
+          'displayName': 'OneDrive website',
+          'id': '44048800'
+        },
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2017-02-23T07:28:30.663Z',
+      'name': 'foo',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 28359,
+      'webUrl': 'https://1drv.ms/o/s!AE59_g1ADtX0gRo',
+      'fileSystemInfo': {
+        'createdDateTime': '2017-02-23T07:27:44.253Z',
+        'lastModifiedDateTime': '2017-02-23T07:27:44.253Z'
+      },
+      'package': {
+        'type': 'oneNote'
+      }
+    },
+    {
+      '@content.downloadUrl': 'https://public.bn1303.livefilestore.com/y3meR_7rVWrrLE-4_8eWU09UhEHrtVojgGVrPDBh3M8Qq0Iut6Y5-x68vBGXmra-p9X6d5PcWocISnjJQMa_nQ1QMw5HUTrT0AhFq6_hurW6lwJ0qBwlzsUYWzUoLfMu9KqdUnaBghT1NiMHSyPSlUO0UgAant5d85tXtn3xqy94i9yLzq8_6spoZ_ffgYX7l-FwQBRxaDz8q6LN7SFT1JQV9S_1Fr_BDCbtitKip_UgO0',
+      'createdBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2017-02-23T03:11:31.37Z',
+      'cTag': 'aYzpGNEQ1MEU0MDBERkU3RDRFITEzNi4yNTg',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExMzYuMg',
+      'id': 'F4D50E400DFE7D4E!136',
+      'lastModifiedBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2017-02-23T03:11:41.32Z',
+      'name': 'foo 1.txt',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 14,
+      'webUrl': 'https://1drv.ms/t/s!AE59_g1ADtX0gQg',
+      'file': {
+        'hashes': {
+          'crc32Hash': '82872CD6',
+          'sha1Hash': '12779E2CF3B4108A897FC5C6A986D4F2A4BB9026'
+        },
+        'mimeType': 'text/plain'
+      },
+      'fileSystemInfo': {
+        'createdDateTime': '2017-02-23T03:11:31.37Z',
+        'lastModifiedDateTime': '2017-02-23T03:11:41.307Z'
+      }
+    },
+    {
+      '@content.downloadUrl': 'https://kdqyaa.bn1303.livefilestore.com/y3mLQF-L6CLmfw-0FIxfJo6dYEkn0E_rtkcPWNXiQ6SWdt68K9EzqVb08tgPAo3S-1gTFv0xhfRndRPGcz3Ed7fm6sTP4-A9tJ5NpMjMaVVRO9Ds60TdvDrv-C6N4xgG96dB73_pAXgu7pBwDszrCixFvU75WDNW4o2C8G2cSj9hs8',
+      'createdBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2017-02-23T03:12:07.257Z',
+      'cTag': 'aYzpGNEQ1MEU0MDBERkU3RDRFITEzNy4yNjk',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExMzcuMTI',
+      'id': 'F4D50E400DFE7D4E!137',
+      'lastModifiedBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2017-02-25T14:21:56.633Z',
+      'name': 'foo.docx',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 15122,
+      'webUrl': 'https://1drv.ms/w/s!AE59_g1ADtX0gQk',
+      'file': {
+        'hashes': {
+          'crc32Hash': 'D8FEF070',
+          'sha1Hash': 'DF4BA34A942459421A122AF0E9F8F2E3369174B7'
+        },
+        'mimeType': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
+      },
+      'fileSystemInfo': {
+        'createdDateTime': '2017-02-23T03:12:07.257Z',
+        'lastModifiedDateTime': '2017-02-23T03:12:52.63Z'
+      }
+    },
+    {
+      '@content.downloadUrl': 'https://kdqyaa.bn1303.livefilestore.com/y3mi1bPzwA871FD5vV5ylbGhndSxFcuzaP2W7SUmv6ythXicF6LoairKEJC1geR6jImpd4Zjeyrae__LKt0jdcM7wwOiWMqjbZ4g2ooLjmIyp0l8z3O-ic42SE2_UfLnW2jjMYeBQ3dFA-Jm_1qrml9Z759E0gRMKWMSsC3MjnfwSo',
+      'createdBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2017-02-23T06:19:04.02Z',
+      'cTag': 'aYzpGNEQ1MEU0MDBERkU3RDRFITE0OS4yNjk',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExNDkuMTM',
+      'id': 'F4D50E400DFE7D4E!149',
+      'lastModifiedBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2017-02-25T14:22:00.88Z',
+      'name': 'foo.pptx',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 30523,
+      'webUrl': 'https://1drv.ms/p/s!AE59_g1ADtX0gRU',
+      'file': {
+        'hashes': {
+          'crc32Hash': '2CB42AEC',
+          'sha1Hash': 'B75AE7590C5953B0157CBAB9DCBD1C2672F033FE'
+        },
+        'mimeType': 'application/vnd.openxmlformats-officedocument.presentationml.presentation'
+      },
+      'fileSystemInfo': {
+        'createdDateTime': '2017-02-23T06:19:04.02Z',
+        'lastModifiedDateTime': '2017-02-23T06:34:14.997Z'
+      }
+    },
+    {
+      '@content.downloadUrl': 'https://public.bn1303.livefilestore.com/y3mo0SZyPfHP8KaGX-1Sd2EyxdzpetQ56CC-Wnk4wAPEVUaAcbYMvqJG3JsdA5J65xQQMbL7u7GBKf-Av2aXngTjYyKV4efKHdKRCcMx0BdpuAZrexpCJmzU7AcdU5iHnsk5ItApBUlotO8hl1lZGFNRJfDclTOJujr45aEAeHI6CT16tAmxIH6DfiAC2l4iK_vJsilRFc-m32XBQU8HpiwXjigJiLxffP-KyEGsMIgooo',
+      'createdBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2017-02-23T03:10:44.713Z',
+      'cTag': 'aYzpGNEQ1MEU0MDBERkU3RDRFITEzNS4yNTg',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExMzUuMg',
+      'id': 'F4D50E400DFE7D4E!135',
+      'lastModifiedBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2017-02-23T03:10:56.25Z',
+      'name': 'foo.txt',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 14,
+      'webUrl': 'https://1drv.ms/t/s!AE59_g1ADtX0gQc',
+      'file': {
+        'hashes': {
+          'crc32Hash': '9E0BA90F',
+          'sha1Hash': 'F8B9668ECA3938C835AF1E9DCACFA52603511FF3'
+        },
+        'mimeType': 'text/plain'
+      },
+      'fileSystemInfo': {
+        'createdDateTime': '2017-02-23T03:10:44.713Z',
+        'lastModifiedDateTime': '2017-02-23T03:10:56.237Z'
+      }
+    },
+    {
+      '@content.downloadUrl': 'https://kdqyaa.bn1303.livefilestore.com/y3mDcrFEI4yJFJe_Nb3oq2lZ_DXKDXaXWq4ZnUvsNQPCX4NlEQ3B1ypO4uUJ7XIzkh1q5bBUbUeRjEoNJberX70FAtY0L55GpYAPD4rlwwU83c6zTBmRB6b00Yd-I6xhXQSJ7hEVeklwoSURh0FZ-nMr3obVqsUnIzks46OQEPs7aQ',
+      'createdBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2017-02-23T03:13:37.727Z',
+      'cTag': 'aYzpGNEQ1MEU0MDBERkU3RDRFITE0NC4yNjU',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExNDQuMTU',
+      'id': 'F4D50E400DFE7D4E!144',
+      'lastModifiedBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2017-02-25T14:21:58.38Z',
+      'name': 'foo.xlsx',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 8036,
+      'webUrl': 'https://1drv.ms/x/s!AE59_g1ADtX0gRA',
+      'file': {
+        'hashes': {
+          'crc32Hash': '2DCEE45F',
+          'sha1Hash': '98927311DD9AE3966C9A7D4DAF4579A87C870EFB'
+        },
+        'mimeType': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
+      },
+      'fileSystemInfo': {
+        'createdDateTime': '2017-02-23T03:13:37.727Z',
+        'lastModifiedDateTime': '2017-02-23T03:24:53.483Z'
+      }
+    },
+    {
+      '@content.downloadUrl': 'https://kdqyaa.bn1303.livefilestore.com/y3mUbHeGrn5Qh1ZwPHCKp4czfgGAGz_-ePntZpq_47wbGU6VccDDTq2149EnUS9hoQ40V07lPVuSMv-2qBCwFqe40t5f0EBcrCJbFzNktZ0f_UrLNnMPBl1TemukaqqOXGY0iyqHvz-ole1jC_DsWo_t-2qGd2Oa8V_Veh8KK8UHsc',
+      'createdBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2017-02-23T03:13:05.643Z',
+      'cTag': 'aYzpGNEQ1MEU0MDBERkU3RDRFITE0Mi4yNzA',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExNDIuMTI',
+      'id': 'F4D50E400DFE7D4E!142',
+      'lastModifiedBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2017-02-25T14:21:57.36Z',
+      'name': 'foo1.docx',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 14912,
+      'webUrl': 'https://1drv.ms/w/s!AE59_g1ADtX0gQ4',
+      'file': {
+        'hashes': {
+          'crc32Hash': '551418A8',
+          'sha1Hash': 'FDA866479C801C92860ADA0AFD4C850F21078EE7'
+        },
+        'mimeType': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
+      },
+      'fileSystemInfo': {
+        'createdDateTime': '2017-02-23T03:13:05.643Z',
+        'lastModifiedDateTime': '2017-02-23T03:13:29.087Z'
+      }
+    },
+    {
+      '@content.downloadUrl': 'https://kdqyaa.bn1303.livefilestore.com/y3m8qKRRwHNq1a3YJo5b3HDCisfHEoIQfX-BrS62q2sNhZja3dPlT6qW0_CHhTA61M5_XnxdKknGE3Rg9Vv8NZN5-Xi72TQJGS16VhfgO53iyJxRml99FSXXrhkH-0y7iXrI4ibBuch7u7-m1sErEbgERviZ3RmD84HttNZg-Hn4kM',
+      'createdBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2017-02-23T06:34:18.873Z',
+      'cTag': 'aYzpGNEQ1MEU0MDBERkU3RDRFITE1Mi4yNzE',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExNTIuMTc',
+      'id': 'F4D50E400DFE7D4E!152',
+      'lastModifiedBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2017-02-25T14:22:01.797Z',
+      'name': 'foo1.pptx',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 30701,
+      'webUrl': 'https://1drv.ms/p/s!AE59_g1ADtX0gRg',
+      'file': {
+        'hashes': {
+          'crc32Hash': 'ADD1D585',
+          'sha1Hash': '0346CB868CD2C03B09341D4232AD2D38B459A699'
+        },
+        'mimeType': 'application/vnd.openxmlformats-officedocument.presentationml.presentation'
+      },
+      'fileSystemInfo': {
+        'createdDateTime': '2017-02-23T06:34:18.873Z',
+        'lastModifiedDateTime': '2017-02-23T07:27:07.12Z'
+      }
+    },
+    {
+      '@content.downloadUrl': 'https://kdqyaa.bn1303.livefilestore.com/y3mx4zP2_eOo43jA6xRHtVi7jfozdtka4XygTf4YsMrZJytqg9I36Fd43K6EpCxEH15163NKVkvQjiROuOn9m3xPtZzu-g3Pzt5hE8CHDsoS1iH36PgBkOd3P49-5GIW_Y_OJybBA3YkG64DHCPjSFftBrfdX5w-zxBTKXYBA3CGG0',
+      'createdBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2017-02-23T03:45:08.03Z',
+      'cTag': 'aYzpGNEQ1MEU0MDBERkU3RDRFITE0Ny4yNjU',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExNDcuMjQ',
+      'id': 'F4D50E400DFE7D4E!147',
+      'lastModifiedBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2017-02-25T14:22:00.06Z',
+      'name': 'foo1.xlsx',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 8043,
+      'webUrl': 'https://1drv.ms/x/s!AE59_g1ADtX0gRM',
+      'file': {
+        'hashes': {
+          'crc32Hash': '7441963D',
+          'sha1Hash': '0078FE7CF1088EECADEBD374905D0560FDF3FD97'
+        },
+        'mimeType': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
+      },
+      'fileSystemInfo': {
+        'createdDateTime': '2017-02-23T03:45:08.03Z',
+        'lastModifiedDateTime': '2017-02-23T06:18:59.52Z'
+      }
+    },
+    {
+      '@content.downloadUrl': 'https://kdqyaa.bn1303.livefilestore.com/y3m38TiJnMWVpml53HkELL0YRerqKsy8nK1lU3lZUYo48-EXez--3_TZ7VtE_L1sSnxx4VZ0q2fva_ICwHBkjzl8S2xgRzSNqLYfuklja6-770qju2Wrw8gQGeT58XBI6aaFuxa-pgPiYFiF6yAE4Ngj7LVeEx4dVW5BO51Gn4cY5o',
+      'createdBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2017-02-23T07:29:04.897Z',
+      'cTag': 'aYzpGNEQ1MEU0MDBERkU3RDRFITE1OS4yNjY',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExNTkuMTc',
+      'id': 'F4D50E400DFE7D4E!159',
+      'lastModifiedBy': {
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2017-02-25T14:22:02.903Z',
+      'name': 'foo2.xlsx',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 10541,
+      'webUrl': 'https://1drv.ms/x/s!AE59_g1ADtX0gR8',
+      'file': {
+        'hashes': {
+          'crc32Hash': 'B4AD5B8D',
+          'sha1Hash': 'AAF14BB6C3E373A7C044A208A9D3A30DD100E293'
+        },
+        'mimeType': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
+      },
+      'fileSystemInfo': {
+        'createdDateTime': '2017-02-23T07:29:04.897Z',
+        'lastModifiedDateTime': '2017-02-23T07:30:04.57Z'
+      }
+    },
+    {
+      '@content.downloadUrl': 'https://public.bn1303.livefilestore.com/y3mZjrqNTRpDIy54W750IhRdbVbfh7RdFdtJ6Vmx6EIUuUVyGZTyy9CWwUFrWlnbmGtQ7OVKRnU9kkx_zN1hv-7HGSxBRRl3hjEcWgRcRoss4qCnNvmabwxW0J1rSc3oss1a8jj7J-hUmUDTa5EasvlsJPs9t8XmyuoF1PVgnTjOCyDjPpXDAjaziaojxWlQh0-t35XiXymBi4lfebfgf1a37RT1raPJ79pj1_KLJ5tgtE',
+      'createdBy': {
+        'application': {
+          'displayName': 'OneDrive website',
+          'id': '44048800'
+        },
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'createdDateTime': '2015-12-17T19:56:11.88Z',
+      'cTag': 'aYzpGNEQ1MEU0MDBERkU3RDRFITEwNC4yNTc',
+      'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExMDQuMw',
+      'id': 'F4D50E400DFE7D4E!104',
+      'lastModifiedBy': {
+        'application': {
+          'displayName': 'OneDrive website',
+          'id': '44048800'
+        },
+        'user': {
+          'displayName': 'Fitz Elliott',
+          'id': 'f4d50e400dfe7d4e'
+        }
+      },
+      'lastModifiedDateTime': '2015-12-17T19:56:29.963Z',
+      'name': 'Getting started with OneDrive.pdf',
+      'parentReference': {
+        'driveId': 'f4d50e400dfe7d4e',
+        'id': 'F4D50E400DFE7D4E!103',
+        'path': '/drive/root:'
+      },
+      'size': 1311269,
+      'webUrl': 'https://1drv.ms/b/s!AE59_g1ADtX0aA',
+      'file': {
+        'hashes': {
+          'crc32Hash': 'F8DDF9BE',
+          'sha1Hash': 'A9C4ACF2DA75FC49056976433AC32142D2C71AB1'
+        },
+        'mimeType': 'application/pdf'
+      },
+      'fileSystemInfo': {
+        'createdDateTime': '2015-12-17T19:56:11.88Z',
+        'lastModifiedDateTime': '2015-12-17T19:56:11.88Z'
+      }
+    }
+]
+
+raw_subfolder_response = [
+    {
+        '@content.downloadUrl': 'https://public.bn1303.livefilestore.com/173450918374509173450',
+        'createdBy': {
+            'application': {
+                'displayName': 'local-thingapp',
+                'id': '994562945'
+            },
+            'user': {
+                'displayName': 'Fitz Elliott',
+                'id': '992349'
+            }
+        },
+        'createdDateTime': '2017-02-07T20:37:50.73Z',
+        'cTag': 'aYzpGNEQ1MEU0MDBERkU3RDRFITEzMC4yNTc',
+        'eTag': 'aRjRENTBFNDAwREZFN0Q0RSExMzAuMw',
+        'id': 'FE830D1CB134A0!130',
+        'lastModifiedBy': {
+            'application': {
+                'displayName': 'local-THINGAPP',
+                'id': '994562945'
+            },
+            'user': {
+                'displayName': 'Fitz Elliott',
+                'id': '992349'
+            }
+        },
+        'lastModifiedDateTime': '2017-02-07T20:37:53.47Z',
+        'name': 'Periodic Table of the Operators A4 300dpi.jpg',
+        'parentReference': {
+            'driveId': 'fe830d1cb134a0',
+            'id': 'FE830D1CB134A0!130',
+            'name': 'Documents',
+            'path': '/drive/root:/Documents'
+        },
+        'size': 1056811,
+        'webUrl': 'https://1drv.ms/i/s!LE93_m9sd3WJ82',
+        'file': {
+            'hashes': {
+                'crc32Hash': 'B0D38EF0',
+                'sha1Hash': 'DE751E0D3D8292A349A4698C59BDE514CD633589'
+            },
+            'mimeType': 'image/jpeg'
+        },
+        'fileSystemInfo': {
+            'createdDateTime': '2017-02-07T20:37:50.73Z',
+            'lastModifiedDateTime': '2017-02-07T20:37:50.73Z'
+        },
+        'image': {
+            'height': 2456,
+            'width': 3477
+        },
+        'photo': {
+            'takenDateTime': '2017-02-07T20:37:50.73Z'
+        }
+    }
+]
+
+
diff --git a/website/addons/onedrive/views.py b/addons/onedrive/views.py
similarity index 90%
rename from website/addons/onedrive/views.py
rename to addons/onedrive/views.py
index f5e8fd23b53..feafc5b3203 100644
--- a/website/addons/onedrive/views.py
+++ b/addons/onedrive/views.py
@@ -7,8 +7,8 @@
     must_have_addon, must_be_addon_authorizer,
 )
 
-from website.addons.base import generic_views
-from website.addons.onedrive.serializer import OneDriveSerializer
+from addons.base import generic_views
+from addons.onedrive.serializer import OneDriveSerializer
 
 
 SHORT_NAME = 'onedrive'
diff --git a/api/base/settings/defaults.py b/api/base/settings/defaults.py
index 9b6674abd8a..11ded0de0ef 100644
--- a/api/base/settings/defaults.py
+++ b/api/base/settings/defaults.py
@@ -107,6 +107,7 @@
     'addons.gitlab',
     'addons.googledrive',
     'addons.mendeley',
+    'addons.onedrive',
     'addons.owncloud',
     'addons.s3',
     'addons.twofactor',
diff --git a/osf/migrations/0065_auto_20171024_1334.py b/osf/migrations/0065_auto_20171024_1334.py
new file mode 100644
index 00000000000..3bab4bac032
--- /dev/null
+++ b/osf/migrations/0065_auto_20171024_1334.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.4 on 2017-10-24 18:34
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('osf', '0064_auto_20171019_0918'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='OneDriveFileNode',
+            fields=[
+            ],
+            options={
+                'proxy': True,
+                'indexes': [],
+            },
+            bases=('osf.basefilenode',),
+        ),
+        migrations.AlterField(
+            model_name='basefilenode',
+            name='type',
+            field=models.CharField(choices=[('osf.trashedfilenode', 'trashed file node'), ('osf.trashedfile', 'trashed file'), ('osf.trashedfolder', 'trashed folder'), ('osf.osfstoragefilenode', 'osf storage file node'), ('osf.osfstoragefile', 'osf storage file'), ('osf.osfstoragefolder', 'osf storage folder'), ('osf.bitbucketfilenode', 'bitbucket file node'), ('osf.bitbucketfolder', 'bitbucket folder'), ('osf.bitbucketfile', 'bitbucket file'), ('osf.boxfilenode', 'box file node'), ('osf.boxfolder', 'box folder'), ('osf.boxfile', 'box file'), ('osf.dataversefilenode', 'dataverse file node'), ('osf.dataversefolder', 'dataverse folder'), ('osf.dataversefile', 'dataverse file'), ('osf.dropboxfilenode', 'dropbox file node'), ('osf.dropboxfolder', 'dropbox folder'), ('osf.dropboxfile', 'dropbox file'), ('osf.figsharefilenode', 'figshare file node'), ('osf.figsharefolder', 'figshare folder'), ('osf.figsharefile', 'figshare file'), ('osf.githubfilenode', 'github file node'), ('osf.githubfolder', 'github folder'), ('osf.githubfile', 'github file'), ('osf.googledrivefilenode', 'google drive file node'), ('osf.googledrivefolder', 'google drive folder'), ('osf.googledrivefile', 'google drive file'), ('osf.onedrivefilenode', 'one drive file node'), ('osf.onedrivefolder', 'one drive folder'), ('osf.onedrivefile', 'one drive file'), ('osf.owncloudfilenode', 'owncloud file node'), ('osf.owncloudfolder', 'owncloud folder'), ('osf.owncloudfile', 'owncloud file'), ('osf.s3filenode', 's3 file node'), ('osf.s3folder', 's3 folder'), ('osf.s3file', 's3 file')], db_index=True, max_length=255),
+        ),
+        migrations.CreateModel(
+            name='OneDriveFile',
+            fields=[
+            ],
+            options={
+                'proxy': True,
+                'indexes': [],
+            },
+            bases=('osf.onedrivefilenode', models.Model),
+        ),
+        migrations.CreateModel(
+            name='OneDriveFolder',
+            fields=[
+            ],
+            options={
+                'proxy': True,
+                'indexes': [],
+            },
+            bases=('osf.onedrivefilenode', models.Model),
+        ),
+    ]
diff --git a/website/addons/onedrive/__init__.py b/website/addons/onedrive/__init__.py
deleted file mode 100644
index b94ef5e1f98..00000000000
--- a/website/addons/onedrive/__init__.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import os
-
-from website.addons.onedrive import model, routes, utils
-
-
-MODELS = [
-    model.OneDriveUserSettings,
-    model.OneDriveNodeSettings,
-]
-
-USER_SETTINGS_MODEL = model.OneDriveUserSettings
-NODE_SETTINGS_MODEL = model.OneDriveNodeSettings
-
-ROUTES = [routes.api_routes]
-
-SHORT_NAME = 'onedrive'
-FULL_NAME = 'OneDrive'
-
-OWNERS = ['user', 'node']
-
-ADDED_DEFAULT = []
-ADDED_MANDATORY = []
-
-VIEWS = []
-CONFIGS = ['accounts', 'node']
-
-CATEGORIES = ['storage']
-
-# TODO: Deprecate in favor of webpack/CommonJS bundles
-INCLUDE_JS = {
-    'widget': [],
-    'page': [],
-    'files': []
-}
-
-INCLUDE_CSS = {
-    'widget': [],
-    'page': [],
-}
-
-HAS_HGRID_FILES = True
-GET_HGRID_DATA = utils.onedrive_addon_folder
-
-MAX_FILE_SIZE = 250  # MB
-
-HERE = os.path.dirname(os.path.abspath(__file__))
-NODE_SETTINGS_TEMPLATE = None  # use default node settings template
-USER_SETTINGS_TEMPLATE = None  # use default user settings template
diff --git a/website/addons/onedrive/tests/test_utils.py b/website/addons/onedrive/tests/test_utils.py
deleted file mode 100644
index 311a7593a28..00000000000
--- a/website/addons/onedrive/tests/test_utils.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Tests for website.addons.onedrive.utils."""
-import os
-
-from nose.tools import *  # noqa (PEP8 asserts)
-
-from framework.auth import Auth
-from website.project.model import NodeLog
-
-from tests.base import OsfTestCase
-from tests.factories import ProjectFactory
-
-from website.addons.onedrive.tests.utils import OneDriveAddonTestCase
-from website.addons.onedrive import utils
-# from website.addons.onedrive.views.config import serialize_folder
-
-
-class TestNodeLogger(OneDriveAddonTestCase):
-
-    def test_log_file_added(self):
-        logger = utils.OneDriveNodeLogger(
-            node=self.project,
-            auth=Auth(self.user),
-        )
-        logger.log(NodeLog.FILE_ADDED, save=True)
-
-        last_log = self.project.logs[-1]
-
-        assert_equal(last_log.action, "onedrive_{0}".format(NodeLog.FILE_ADDED))
-
-    # Regression test for https://github.com/CenterForOpenScience/osf.io/issues/1557
-    def test_log_deauthorized_when_node_settings_are_deleted(self):
-        project = ProjectFactory()
-        project.add_addon('onedrive', auth=Auth(project.creator))
-        dbox_settings = project.get_addon('onedrive')
-        dbox_settings.delete(save=True)
-        # sanity check
-        assert_true(dbox_settings.deleted)
-
-        logger = utils.OneDriveNodeLogger(node=project, auth=Auth(self.user))
-        logger.log(action='node_deauthorized', save=True)
-
-        last_log = project.logs[-1]
-        assert_equal(last_log.action, 'onedrive_node_deauthorized')
\ No newline at end of file
diff --git a/website/addons/onedrive/tests/utils.py b/website/addons/onedrive/tests/utils.py
deleted file mode 100644
index a1f906574ee..00000000000
--- a/website/addons/onedrive/tests/utils.py
+++ /dev/null
@@ -1,744 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from website.addons.onedrive.model import OneDriveProvider
-from website.addons.onedrive.tests.factories import OneDriveAccountFactory
-from website.addons.base.testing import OAuthAddonTestCaseMixin, AddonTestCase
-
-
-class OneDriveAddonTestCase(OAuthAddonTestCaseMixin, AddonTestCase):
-
-    ADDON_SHORT_NAME = 'onedrive'
-    ExternalAccountFactory = OneDriveAccountFactory
-    Provider = OneDriveProvider
-
-    def set_node_settings(self, settings):
-        super(OneDriveAddonTestCase, self).set_node_settings(settings)
-        settings.folder_id = '1234567890'
-        settings.folder_path = 'Drive/Camera Uploads'
-        settings.external_account = self.external_account
-
-
-raw_root_folder_response = [
-    {
-      "createdBy": {
-        "application": {
-          "displayName": "local-cosdev",
-          "id": "44174239"
-        },
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2017-02-23T02:25:24.687Z",
-      "cTag": "adDpGNEQ1MEU0MDBERkU3RDRFITEzMi42MzYyMzQxMzUyNDg3NzAwMDA",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMzIuMA",
-      "id": "F4D50E400DFE7D4E!132",
-      "lastModifiedBy": {
-        "application": {
-          "displayName": "local-cosdev",
-          "id": "44174239"
-        },
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2017-02-23T02:25:24.877Z",
-      "name": "Apps",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 0,
-      "webUrl": "https://1drv.ms/f/s!AE59_g1ADtX0gQQ",
-      "fileSystemInfo": {
-        "createdDateTime": "2017-02-23T02:25:24.687Z",
-        "lastModifiedDateTime": "2017-02-23T02:25:24.687Z"
-      },
-      "folder": {
-        "childCount": 1
-      },
-      "specialFolder": {
-        "name": "apps"
-      }
-    },
-    {
-      "createdBy": {
-        "application": {
-          "displayName": "OneDrive website",
-          "id": "44048800"
-        },
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2015-12-17T19:56:12.63Z",
-      "cTag": "adDpGNEQ1MEU0MDBERkU3RDRFITEwNi42MzYyMjA5NjY3MzQ3MDAwMDA",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMDYuMA",
-      "id": "F4D50E400DFE7D4E!106",
-      "lastModifiedBy": {
-        "application": {
-          "displayName": "OneDrive website",
-          "id": "44048800"
-        },
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2017-02-07T20:37:53.47Z",
-      "name": "Documents",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 1056811,
-      "webUrl": "https://1drv.ms/f/s!AE59_g1ADtX0ag",
-      "fileSystemInfo": {
-        "createdDateTime": "2015-12-17T19:56:12.63Z",
-        "lastModifiedDateTime": "2015-12-17T19:56:12.63Z"
-      },
-      "folder": {
-        "childCount": 1
-      },
-      "specialFolder": {
-        "name": "documents"
-      }
-    },
-    {
-      "createdBy": {
-        "application": {
-          "displayName": "local-cosdev",
-          "id": "44174239"
-        },
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2017-02-23T02:25:42.93Z",
-      "cTag": "adDpGNEQ1MEU0MDBERkU3RDRFITEzNC42MzYyMzQxMzU0MjkzMDAwMDA",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMzQuMA",
-      "id": "F4D50E400DFE7D4E!134",
-      "lastModifiedBy": {
-        "application": {
-          "displayName": "local-cosdev",
-          "id": "44174239"
-        },
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2017-02-23T02:25:42.93Z",
-      "name": "Music",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 0,
-      "webUrl": "https://1drv.ms/f/s!AE59_g1ADtX0gQY",
-      "fileSystemInfo": {
-        "createdDateTime": "2017-02-23T02:25:42.93Z",
-        "lastModifiedDateTime": "2017-02-23T02:25:42.93Z"
-      },
-      "folder": {
-        "childCount": 0
-      },
-      "specialFolder": {
-        "name": "music"
-      }
-    },
-    {
-      "createdBy": {
-        "application": {
-          "displayName": "OneDrive website",
-          "id": "44048800"
-        },
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2015-12-17T19:56:12.24Z",
-      "cTag": "adDpGNEQ1MEU0MDBERkU3RDRFITEwNS42MzYyMjA5Njk5MTgzMDAwMDA",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMDUuMA",
-      "id": "F4D50E400DFE7D4E!105",
-      "lastModifiedBy": {
-        "application": {
-          "displayName": "OneDrive website",
-          "id": "44048800"
-        },
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2017-02-07T20:43:11.83Z",
-      "name": "Pictures",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 13,
-      "webUrl": "https://1drv.ms/f/s!AE59_g1ADtX0aQ",
-      "fileSystemInfo": {
-        "createdDateTime": "2015-12-17T19:56:12.24Z",
-        "lastModifiedDateTime": "2015-12-17T19:56:12.24Z"
-      },
-      "folder": {
-        "childCount": 1
-      },
-      "specialFolder": {
-        "name": "photos"
-      }
-    },
-    {
-      "createdBy": {
-        "application": {
-          "displayName": "OneDrive website",
-          "id": "44048800"
-        },
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2015-12-17T19:56:30.89Z",
-      "cTag": "adDpGNEQ1MEU0MDBERkU3RDRFITEwNy42MzYwOTMxMzUyMDc4MDAwMDA",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMDcuMA",
-      "id": "F4D50E400DFE7D4E!107",
-      "lastModifiedBy": {
-        "application": {
-          "displayName": "OneDrive website",
-          "id": "44048800"
-        },
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2016-09-12T21:45:20.78Z",
-      "name": "Tenkum",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 1588779,
-      "webUrl": "https://1drv.ms/f/s!AE59_g1ADtX0aw",
-      "fileSystemInfo": {
-        "createdDateTime": "2015-12-17T19:56:30.89Z",
-        "lastModifiedDateTime": "2015-12-17T19:56:30.89Z"
-      },
-      "folder": {
-        "childCount": 5
-      }
-    },
-    {
-      "createdBy": {
-        "application": {
-          "displayName": "OneDrive website",
-          "id": "44048800"
-        },
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2017-02-23T07:27:44.253Z",
-      "cTag": "adDpGNEQ1MEU0MDBERkU3RDRFITE1NC42MzYyMzQzMTcxMDY2MzAwMDA",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExNTQuMA",
-      "id": "F4D50E400DFE7D4E!154",
-      "lastModifiedBy": {
-        "application": {
-          "displayName": "OneDrive website",
-          "id": "44048800"
-        },
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2017-02-23T07:28:30.663Z",
-      "name": "foo",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 28359,
-      "webUrl": "https://1drv.ms/o/s!AE59_g1ADtX0gRo",
-      "fileSystemInfo": {
-        "createdDateTime": "2017-02-23T07:27:44.253Z",
-        "lastModifiedDateTime": "2017-02-23T07:27:44.253Z"
-      },
-      "package": {
-        "type": "oneNote"
-      }
-    },
-    {
-      "@content.downloadUrl": "https://public.bn1303.livefilestore.com/y3meR_7rVWrrLE-4_8eWU09UhEHrtVojgGVrPDBh3M8Qq0Iut6Y5-x68vBGXmra-p9X6d5PcWocISnjJQMa_nQ1QMw5HUTrT0AhFq6_hurW6lwJ0qBwlzsUYWzUoLfMu9KqdUnaBghT1NiMHSyPSlUO0UgAant5d85tXtn3xqy94i9yLzq8_6spoZ_ffgYX7l-FwQBRxaDz8q6LN7SFT1JQV9S_1Fr_BDCbtitKip_UgO0",
-      "createdBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2017-02-23T03:11:31.37Z",
-      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITEzNi4yNTg",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMzYuMg",
-      "id": "F4D50E400DFE7D4E!136",
-      "lastModifiedBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2017-02-23T03:11:41.32Z",
-      "name": "foo 1.txt",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 14,
-      "webUrl": "https://1drv.ms/t/s!AE59_g1ADtX0gQg",
-      "file": {
-        "hashes": {
-          "crc32Hash": "82872CD6",
-          "sha1Hash": "12779E2CF3B4108A897FC5C6A986D4F2A4BB9026"
-        },
-        "mimeType": "text/plain"
-      },
-      "fileSystemInfo": {
-        "createdDateTime": "2017-02-23T03:11:31.37Z",
-        "lastModifiedDateTime": "2017-02-23T03:11:41.307Z"
-      }
-    },
-    {
-      "@content.downloadUrl": "https://kdqyaa.bn1303.livefilestore.com/y3mLQF-L6CLmfw-0FIxfJo6dYEkn0E_rtkcPWNXiQ6SWdt68K9EzqVb08tgPAo3S-1gTFv0xhfRndRPGcz3Ed7fm6sTP4-A9tJ5NpMjMaVVRO9Ds60TdvDrv-C6N4xgG96dB73_pAXgu7pBwDszrCixFvU75WDNW4o2C8G2cSj9hs8",
-      "createdBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2017-02-23T03:12:07.257Z",
-      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITEzNy4yNjk",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMzcuMTI",
-      "id": "F4D50E400DFE7D4E!137",
-      "lastModifiedBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2017-02-25T14:21:56.633Z",
-      "name": "foo.docx",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 15122,
-      "webUrl": "https://1drv.ms/w/s!AE59_g1ADtX0gQk",
-      "file": {
-        "hashes": {
-          "crc32Hash": "D8FEF070",
-          "sha1Hash": "DF4BA34A942459421A122AF0E9F8F2E3369174B7"
-        },
-        "mimeType": "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
-      },
-      "fileSystemInfo": {
-        "createdDateTime": "2017-02-23T03:12:07.257Z",
-        "lastModifiedDateTime": "2017-02-23T03:12:52.63Z"
-      }
-    },
-    {
-      "@content.downloadUrl": "https://kdqyaa.bn1303.livefilestore.com/y3mi1bPzwA871FD5vV5ylbGhndSxFcuzaP2W7SUmv6ythXicF6LoairKEJC1geR6jImpd4Zjeyrae__LKt0jdcM7wwOiWMqjbZ4g2ooLjmIyp0l8z3O-ic42SE2_UfLnW2jjMYeBQ3dFA-Jm_1qrml9Z759E0gRMKWMSsC3MjnfwSo",
-      "createdBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2017-02-23T06:19:04.02Z",
-      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITE0OS4yNjk",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExNDkuMTM",
-      "id": "F4D50E400DFE7D4E!149",
-      "lastModifiedBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2017-02-25T14:22:00.88Z",
-      "name": "foo.pptx",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 30523,
-      "webUrl": "https://1drv.ms/p/s!AE59_g1ADtX0gRU",
-      "file": {
-        "hashes": {
-          "crc32Hash": "2CB42AEC",
-          "sha1Hash": "B75AE7590C5953B0157CBAB9DCBD1C2672F033FE"
-        },
-        "mimeType": "application/vnd.openxmlformats-officedocument.presentationml.presentation"
-      },
-      "fileSystemInfo": {
-        "createdDateTime": "2017-02-23T06:19:04.02Z",
-        "lastModifiedDateTime": "2017-02-23T06:34:14.997Z"
-      }
-    },
-    {
-      "@content.downloadUrl": "https://public.bn1303.livefilestore.com/y3mo0SZyPfHP8KaGX-1Sd2EyxdzpetQ56CC-Wnk4wAPEVUaAcbYMvqJG3JsdA5J65xQQMbL7u7GBKf-Av2aXngTjYyKV4efKHdKRCcMx0BdpuAZrexpCJmzU7AcdU5iHnsk5ItApBUlotO8hl1lZGFNRJfDclTOJujr45aEAeHI6CT16tAmxIH6DfiAC2l4iK_vJsilRFc-m32XBQU8HpiwXjigJiLxffP-KyEGsMIgooo",
-      "createdBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2017-02-23T03:10:44.713Z",
-      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITEzNS4yNTg",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMzUuMg",
-      "id": "F4D50E400DFE7D4E!135",
-      "lastModifiedBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2017-02-23T03:10:56.25Z",
-      "name": "foo.txt",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 14,
-      "webUrl": "https://1drv.ms/t/s!AE59_g1ADtX0gQc",
-      "file": {
-        "hashes": {
-          "crc32Hash": "9E0BA90F",
-          "sha1Hash": "F8B9668ECA3938C835AF1E9DCACFA52603511FF3"
-        },
-        "mimeType": "text/plain"
-      },
-      "fileSystemInfo": {
-        "createdDateTime": "2017-02-23T03:10:44.713Z",
-        "lastModifiedDateTime": "2017-02-23T03:10:56.237Z"
-      }
-    },
-    {
-      "@content.downloadUrl": "https://kdqyaa.bn1303.livefilestore.com/y3mDcrFEI4yJFJe_Nb3oq2lZ_DXKDXaXWq4ZnUvsNQPCX4NlEQ3B1ypO4uUJ7XIzkh1q5bBUbUeRjEoNJberX70FAtY0L55GpYAPD4rlwwU83c6zTBmRB6b00Yd-I6xhXQSJ7hEVeklwoSURh0FZ-nMr3obVqsUnIzks46OQEPs7aQ",
-      "createdBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2017-02-23T03:13:37.727Z",
-      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITE0NC4yNjU",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExNDQuMTU",
-      "id": "F4D50E400DFE7D4E!144",
-      "lastModifiedBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2017-02-25T14:21:58.38Z",
-      "name": "foo.xlsx",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 8036,
-      "webUrl": "https://1drv.ms/x/s!AE59_g1ADtX0gRA",
-      "file": {
-        "hashes": {
-          "crc32Hash": "2DCEE45F",
-          "sha1Hash": "98927311DD9AE3966C9A7D4DAF4579A87C870EFB"
-        },
-        "mimeType": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-      },
-      "fileSystemInfo": {
-        "createdDateTime": "2017-02-23T03:13:37.727Z",
-        "lastModifiedDateTime": "2017-02-23T03:24:53.483Z"
-      }
-    },
-    {
-      "@content.downloadUrl": "https://kdqyaa.bn1303.livefilestore.com/y3mUbHeGrn5Qh1ZwPHCKp4czfgGAGz_-ePntZpq_47wbGU6VccDDTq2149EnUS9hoQ40V07lPVuSMv-2qBCwFqe40t5f0EBcrCJbFzNktZ0f_UrLNnMPBl1TemukaqqOXGY0iyqHvz-ole1jC_DsWo_t-2qGd2Oa8V_Veh8KK8UHsc",
-      "createdBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2017-02-23T03:13:05.643Z",
-      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITE0Mi4yNzA",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExNDIuMTI",
-      "id": "F4D50E400DFE7D4E!142",
-      "lastModifiedBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2017-02-25T14:21:57.36Z",
-      "name": "foo1.docx",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 14912,
-      "webUrl": "https://1drv.ms/w/s!AE59_g1ADtX0gQ4",
-      "file": {
-        "hashes": {
-          "crc32Hash": "551418A8",
-          "sha1Hash": "FDA866479C801C92860ADA0AFD4C850F21078EE7"
-        },
-        "mimeType": "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
-      },
-      "fileSystemInfo": {
-        "createdDateTime": "2017-02-23T03:13:05.643Z",
-        "lastModifiedDateTime": "2017-02-23T03:13:29.087Z"
-      }
-    },
-    {
-      "@content.downloadUrl": "https://kdqyaa.bn1303.livefilestore.com/y3m8qKRRwHNq1a3YJo5b3HDCisfHEoIQfX-BrS62q2sNhZja3dPlT6qW0_CHhTA61M5_XnxdKknGE3Rg9Vv8NZN5-Xi72TQJGS16VhfgO53iyJxRml99FSXXrhkH-0y7iXrI4ibBuch7u7-m1sErEbgERviZ3RmD84HttNZg-Hn4kM",
-      "createdBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2017-02-23T06:34:18.873Z",
-      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITE1Mi4yNzE",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExNTIuMTc",
-      "id": "F4D50E400DFE7D4E!152",
-      "lastModifiedBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2017-02-25T14:22:01.797Z",
-      "name": "foo1.pptx",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 30701,
-      "webUrl": "https://1drv.ms/p/s!AE59_g1ADtX0gRg",
-      "file": {
-        "hashes": {
-          "crc32Hash": "ADD1D585",
-          "sha1Hash": "0346CB868CD2C03B09341D4232AD2D38B459A699"
-        },
-        "mimeType": "application/vnd.openxmlformats-officedocument.presentationml.presentation"
-      },
-      "fileSystemInfo": {
-        "createdDateTime": "2017-02-23T06:34:18.873Z",
-        "lastModifiedDateTime": "2017-02-23T07:27:07.12Z"
-      }
-    },
-    {
-      "@content.downloadUrl": "https://kdqyaa.bn1303.livefilestore.com/y3mx4zP2_eOo43jA6xRHtVi7jfozdtka4XygTf4YsMrZJytqg9I36Fd43K6EpCxEH15163NKVkvQjiROuOn9m3xPtZzu-g3Pzt5hE8CHDsoS1iH36PgBkOd3P49-5GIW_Y_OJybBA3YkG64DHCPjSFftBrfdX5w-zxBTKXYBA3CGG0",
-      "createdBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2017-02-23T03:45:08.03Z",
-      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITE0Ny4yNjU",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExNDcuMjQ",
-      "id": "F4D50E400DFE7D4E!147",
-      "lastModifiedBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2017-02-25T14:22:00.06Z",
-      "name": "foo1.xlsx",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 8043,
-      "webUrl": "https://1drv.ms/x/s!AE59_g1ADtX0gRM",
-      "file": {
-        "hashes": {
-          "crc32Hash": "7441963D",
-          "sha1Hash": "0078FE7CF1088EECADEBD374905D0560FDF3FD97"
-        },
-        "mimeType": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-      },
-      "fileSystemInfo": {
-        "createdDateTime": "2017-02-23T03:45:08.03Z",
-        "lastModifiedDateTime": "2017-02-23T06:18:59.52Z"
-      }
-    },
-    {
-      "@content.downloadUrl": "https://kdqyaa.bn1303.livefilestore.com/y3m38TiJnMWVpml53HkELL0YRerqKsy8nK1lU3lZUYo48-EXez--3_TZ7VtE_L1sSnxx4VZ0q2fva_ICwHBkjzl8S2xgRzSNqLYfuklja6-770qju2Wrw8gQGeT58XBI6aaFuxa-pgPiYFiF6yAE4Ngj7LVeEx4dVW5BO51Gn4cY5o",
-      "createdBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2017-02-23T07:29:04.897Z",
-      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITE1OS4yNjY",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExNTkuMTc",
-      "id": "F4D50E400DFE7D4E!159",
-      "lastModifiedBy": {
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2017-02-25T14:22:02.903Z",
-      "name": "foo2.xlsx",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 10541,
-      "webUrl": "https://1drv.ms/x/s!AE59_g1ADtX0gR8",
-      "file": {
-        "hashes": {
-          "crc32Hash": "B4AD5B8D",
-          "sha1Hash": "AAF14BB6C3E373A7C044A208A9D3A30DD100E293"
-        },
-        "mimeType": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
-      },
-      "fileSystemInfo": {
-        "createdDateTime": "2017-02-23T07:29:04.897Z",
-        "lastModifiedDateTime": "2017-02-23T07:30:04.57Z"
-      }
-    },
-    {
-      "@content.downloadUrl": "https://public.bn1303.livefilestore.com/y3mZjrqNTRpDIy54W750IhRdbVbfh7RdFdtJ6Vmx6EIUuUVyGZTyy9CWwUFrWlnbmGtQ7OVKRnU9kkx_zN1hv-7HGSxBRRl3hjEcWgRcRoss4qCnNvmabwxW0J1rSc3oss1a8jj7J-hUmUDTa5EasvlsJPs9t8XmyuoF1PVgnTjOCyDjPpXDAjaziaojxWlQh0-t35XiXymBi4lfebfgf1a37RT1raPJ79pj1_KLJ5tgtE",
-      "createdBy": {
-        "application": {
-          "displayName": "OneDrive website",
-          "id": "44048800"
-        },
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "createdDateTime": "2015-12-17T19:56:11.88Z",
-      "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITEwNC4yNTc",
-      "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMDQuMw",
-      "id": "F4D50E400DFE7D4E!104",
-      "lastModifiedBy": {
-        "application": {
-          "displayName": "OneDrive website",
-          "id": "44048800"
-        },
-        "user": {
-          "displayName": "Fitz Elliott",
-          "id": "f4d50e400dfe7d4e"
-        }
-      },
-      "lastModifiedDateTime": "2015-12-17T19:56:29.963Z",
-      "name": "Getting started with OneDrive.pdf",
-      "parentReference": {
-        "driveId": "f4d50e400dfe7d4e",
-        "id": "F4D50E400DFE7D4E!103",
-        "path": "/drive/root:"
-      },
-      "size": 1311269,
-      "webUrl": "https://1drv.ms/b/s!AE59_g1ADtX0aA",
-      "file": {
-        "hashes": {
-          "crc32Hash": "F8DDF9BE",
-          "sha1Hash": "A9C4ACF2DA75FC49056976433AC32142D2C71AB1"
-        },
-        "mimeType": "application/pdf"
-      },
-      "fileSystemInfo": {
-        "createdDateTime": "2015-12-17T19:56:11.88Z",
-        "lastModifiedDateTime": "2015-12-17T19:56:11.88Z"
-      }
-    }
-]
-
-raw_subfolder_response = [
-    {
-        "@content.downloadUrl": "https://public.bn1303.livefilestore.com/173450918374509173450",
-        "createdBy": {
-            "application": {
-                "displayName": "local-thingapp",
-                "id": "994562945"
-            },
-            "user": {
-                "displayName": "Fitz Elliott",
-                "id": "992349"
-            }
-        },
-        "createdDateTime": "2017-02-07T20:37:50.73Z",
-        "cTag": "aYzpGNEQ1MEU0MDBERkU3RDRFITEzMC4yNTc",
-        "eTag": "aRjRENTBFNDAwREZFN0Q0RSExMzAuMw",
-        "id": "FE830D1CB134A0!130",
-        "lastModifiedBy": {
-            "application": {
-                "displayName": "local-THINGAPP",
-                "id": "994562945"
-            },
-            "user": {
-                "displayName": "Fitz Elliott",
-                "id": "992349"
-            }
-        },
-        "lastModifiedDateTime": "2017-02-07T20:37:53.47Z",
-        "name": "Periodic Table of the Operators A4 300dpi.jpg",
-        "parentReference": {
-            "driveId": "fe830d1cb134a0",
-            "id": "FE830D1CB134A0!130",
-            "name": "Documents",
-            "path": "/drive/root:/Documents"
-        },
-        "size": 1056811,
-        "webUrl": "https://1drv.ms/i/s!LE93_m9sd3WJ82",
-        "file": {
-            "hashes": {
-                "crc32Hash": "B0D38EF0",
-                "sha1Hash": "DE751E0D3D8292A349A4698C59BDE514CD633589"
-            },
-            "mimeType": "image/jpeg"
-        },
-        "fileSystemInfo": {
-            "createdDateTime": "2017-02-07T20:37:50.73Z",
-            "lastModifiedDateTime": "2017-02-07T20:37:50.73Z"
-        },
-        "image": {
-            "height": 2456,
-            "width": 3477
-        },
-        "photo": {
-            "takenDateTime": "2017-02-07T20:37:50.73Z"
-        }
-    }
-]
-
-
diff --git a/website/addons/onedrive/utils.py b/website/addons/onedrive/utils.py
deleted file mode 100644
index e5864f63f4b..00000000000
--- a/website/addons/onedrive/utils.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# -*- coding: utf-8 -*-
-import logging
-
-from website.util import rubeus
-
-logger = logging.getLogger(__name__)
-
-
-class OneDriveNodeLogger(object):
-    """Helper class for adding correctly-formatted OneDrive logs to nodes.
-
-    Usage: ::
-
-        from website.project.model import NodeLog
-
-        node = ...
-        auth = ...
-        nodelogger = OneDriveNodeLogger(node, auth)
-        nodelogger.log(NodeLog.FILE_REMOVED, save=True)
-
-
-    :param Node node: The node to add logs to
-    :param Auth auth: Authorization of the person who did the action.
-    """
-    def __init__(self, node, auth, path=None):
-        self.node = node
-        self.auth = auth
-        self.path = path
-
-    def log(self, action, extra=None, save=False):
-        """Log an event. Wraps the Node#add_log method, automatically adding
-        relevant parameters and prefixing log events with `"onedrive_"`.
-
-        :param str action: Log action. Should be a class constant from NodeLog.
-        :param dict extra: Extra parameters to add to the ``params`` dict of the
-            new NodeLog.
-        """
-        params = {
-            'project': self.node.parent_id,
-            'node': self.node._primary_key,
-            'folder_id': self.node.get_addon('onedrive', deleted=True).folder_id,
-            # it used to be "folder": self.node.get_addon('onedrive', deleted=True).folder_name
-            # changed to folder_path to make log show the complete folder path "/folder/subfolder"
-            # instead of just showing the subfolder's name "/subfolder"
-            'folder_name': self.node.get_addon('onedrive', deleted=True).folder_name,
-            'folder': self.node.get_addon('onedrive', deleted=True).folder_path
-        }
-
-        # If logging a file-related action, add the file's view and download URLs
-        if self.path:
-            params.update({
-                'urls': {
-                    'view': self.node.web_url_for(
-                        'addon_view_or_download_file',
-                        path=self.path,
-                        provider='onedrive',
-                    ),
-                    'download': self.node.web_url_for(
-                        'addon_view_or_download_file',
-                        path=self.path,
-                        provider='onedrive',
-                    )
-                },
-                'path': self.path,
-            })
-
-        if extra:
-            params.update(extra)
-
-        # Prefix the action with onedrive_
-        self.node.add_log(
-            action='onedrive_{0}'.format(action),
-            params=params,
-            auth=self.auth
-        )
-
-        if save:
-            self.node.save()
-
-def onedrive_addon_folder(node_settings, auth, **kwargs):
-    """Return the Rubeus/HGrid-formatted response for the root folder only."""
-    # Quit if node settings does not have authentication
-    if not node_settings.has_auth or not node_settings.folder_id:
-        return None
-
-    node = node_settings.owner
-
-    root = rubeus.build_addon_root(
-        node_settings=node_settings,
-        name=node_settings.fetch_folder_name(),
-        permissions=auth,
-        nodeUrl=node.url,
-        nodeApiUrl=node.api_url,
-    )
-
-    return [root]

From 855bd10a963fb2d7043296228bdf2ab0103d181d Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Tue, 21 Nov 2017 11:51:10 -0500
Subject: [PATCH 124/192] Update migrations

---
 addons/onedrive/migrations/0001_initial.py       |  4 ++--
 osf/migrations/0066_merge_20171121_1050.py       | 16 ++++++++++++++++
 ...171024_1334.py => 0067_auto_20171121_1050.py} |  6 +++---
 3 files changed, 21 insertions(+), 5 deletions(-)
 create mode 100644 osf/migrations/0066_merge_20171121_1050.py
 rename osf/migrations/{0065_auto_20171024_1334.py => 0067_auto_20171121_1050.py} (72%)

diff --git a/addons/onedrive/migrations/0001_initial.py b/addons/onedrive/migrations/0001_initial.py
index 75fa1be4b9a..3f2d62da419 100644
--- a/addons/onedrive/migrations/0001_initial.py
+++ b/addons/onedrive/migrations/0001_initial.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Generated by Django 1.11.4 on 2017-10-24 18:34
+# Generated by Django 1.11.7 on 2017-11-21 16:50
 from __future__ import unicode_literals
 
 from django.conf import settings
@@ -15,7 +15,7 @@ class Migration(migrations.Migration):
 
     dependencies = [
         migrations.swappable_dependency(settings.AUTH_USER_MODEL),
-        ('osf', '0065_auto_20171024_1334'),
+        ('osf', '0067_auto_20171121_1050'),
     ]
 
     operations = [
diff --git a/osf/migrations/0066_merge_20171121_1050.py b/osf/migrations/0066_merge_20171121_1050.py
new file mode 100644
index 00000000000..8c3658528ff
--- /dev/null
+++ b/osf/migrations/0066_merge_20171121_1050.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.7 on 2017-11-21 16:50
+from __future__ import unicode_literals
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('osf', '0065_preprintservice_original_publication_date'),
+        ('osf', '0065_auto_20171024_1330'),
+    ]
+
+    operations = [
+    ]
diff --git a/osf/migrations/0065_auto_20171024_1334.py b/osf/migrations/0067_auto_20171121_1050.py
similarity index 72%
rename from osf/migrations/0065_auto_20171024_1334.py
rename to osf/migrations/0067_auto_20171121_1050.py
index 3bab4bac032..5a6e43da58c 100644
--- a/osf/migrations/0065_auto_20171024_1334.py
+++ b/osf/migrations/0067_auto_20171121_1050.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Generated by Django 1.11.4 on 2017-10-24 18:34
+# Generated by Django 1.11.7 on 2017-11-21 16:50
 from __future__ import unicode_literals
 
 from django.db import migrations, models
@@ -8,7 +8,7 @@
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('osf', '0064_auto_20171019_0918'),
+        ('osf', '0066_merge_20171121_1050'),
     ]
 
     operations = [
@@ -25,7 +25,7 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='basefilenode',
             name='type',
-            field=models.CharField(choices=[('osf.trashedfilenode', 'trashed file node'), ('osf.trashedfile', 'trashed file'), ('osf.trashedfolder', 'trashed folder'), ('osf.osfstoragefilenode', 'osf storage file node'), ('osf.osfstoragefile', 'osf storage file'), ('osf.osfstoragefolder', 'osf storage folder'), ('osf.bitbucketfilenode', 'bitbucket file node'), ('osf.bitbucketfolder', 'bitbucket folder'), ('osf.bitbucketfile', 'bitbucket file'), ('osf.boxfilenode', 'box file node'), ('osf.boxfolder', 'box folder'), ('osf.boxfile', 'box file'), ('osf.dataversefilenode', 'dataverse file node'), ('osf.dataversefolder', 'dataverse folder'), ('osf.dataversefile', 'dataverse file'), ('osf.dropboxfilenode', 'dropbox file node'), ('osf.dropboxfolder', 'dropbox folder'), ('osf.dropboxfile', 'dropbox file'), ('osf.figsharefilenode', 'figshare file node'), ('osf.figsharefolder', 'figshare folder'), ('osf.figsharefile', 'figshare file'), ('osf.githubfilenode', 'github file node'), ('osf.githubfolder', 'github folder'), ('osf.githubfile', 'github file'), ('osf.googledrivefilenode', 'google drive file node'), ('osf.googledrivefolder', 'google drive folder'), ('osf.googledrivefile', 'google drive file'), ('osf.onedrivefilenode', 'one drive file node'), ('osf.onedrivefolder', 'one drive folder'), ('osf.onedrivefile', 'one drive file'), ('osf.owncloudfilenode', 'owncloud file node'), ('osf.owncloudfolder', 'owncloud folder'), ('osf.owncloudfile', 'owncloud file'), ('osf.s3filenode', 's3 file node'), ('osf.s3folder', 's3 folder'), ('osf.s3file', 's3 file')], db_index=True, max_length=255),
+            field=models.CharField(choices=[('osf.trashedfilenode', 'trashed file node'), ('osf.trashedfile', 'trashed file'), ('osf.trashedfolder', 'trashed folder'), ('osf.osfstoragefilenode', 'osf storage file node'), ('osf.osfstoragefile', 'osf storage file'), ('osf.osfstoragefolder', 'osf storage folder'), ('osf.bitbucketfilenode', 'bitbucket file node'), ('osf.bitbucketfolder', 'bitbucket folder'), ('osf.bitbucketfile', 'bitbucket file'), ('osf.boxfilenode', 'box file node'), ('osf.boxfolder', 'box folder'), ('osf.boxfile', 'box file'), ('osf.dataversefilenode', 'dataverse file node'), ('osf.dataversefolder', 'dataverse folder'), ('osf.dataversefile', 'dataverse file'), ('osf.dropboxfilenode', 'dropbox file node'), ('osf.dropboxfolder', 'dropbox folder'), ('osf.dropboxfile', 'dropbox file'), ('osf.figsharefilenode', 'figshare file node'), ('osf.figsharefolder', 'figshare folder'), ('osf.figsharefile', 'figshare file'), ('osf.githubfilenode', 'github file node'), ('osf.githubfolder', 'github folder'), ('osf.githubfile', 'github file'), ('osf.gitlabfilenode', 'git lab file node'), ('osf.gitlabfolder', 'git lab folder'), ('osf.gitlabfile', 'git lab file'), ('osf.googledrivefilenode', 'google drive file node'), ('osf.googledrivefolder', 'google drive folder'), ('osf.googledrivefile', 'google drive file'), ('osf.onedrivefilenode', 'one drive file node'), ('osf.onedrivefolder', 'one drive folder'), ('osf.onedrivefile', 'one drive file'), ('osf.owncloudfilenode', 'owncloud file node'), ('osf.owncloudfolder', 'owncloud folder'), ('osf.owncloudfile', 'owncloud file'), ('osf.s3filenode', 's3 file node'), ('osf.s3folder', 's3 folder'), ('osf.s3file', 's3 file')], db_index=True, max_length=255),
         ),
         migrations.CreateModel(
             name='OneDriveFile',

From 69cdb101a661653e62d7a6af2b062721fc0984f8 Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Tue, 21 Nov 2017 13:11:26 -0500
Subject: [PATCH 125/192] Fix minor style errors and remove unused imports

---
 addons/onedrive/models.py                     |  4 +--
 .../onedrive/static/onedriveFangornConfig.js  |  7 -----
 addons/onedrive/tests/test_client.py          | 30 +++++++++----------
 addons/onedrive/tests/test_models.py          |  2 +-
 4 files changed, 18 insertions(+), 25 deletions(-)

diff --git a/addons/onedrive/models.py b/addons/onedrive/models.py
index 4f021c2aa2c..80b27f4c046 100644
--- a/addons/onedrive/models.py
+++ b/addons/onedrive/models.py
@@ -182,8 +182,8 @@ def get_folders(self, folder_id=None, **kwargs):
         except exceptions.InvalidAuthError:
             raise HTTPError(403)
 
-        oneDriveClient = OneDriveClient(access_token)
-        items = oneDriveClient.folders(folder_id)
+        client = OneDriveClient(access_token)
+        items = client.folders(folder_id)
         return [
             {
                 'addon': 'onedrive',
diff --git a/addons/onedrive/static/onedriveFangornConfig.js b/addons/onedrive/static/onedriveFangornConfig.js
index f719bcea711..ba53a94caf5 100644
--- a/addons/onedrive/static/onedriveFangornConfig.js
+++ b/addons/onedrive/static/onedriveFangornConfig.js
@@ -4,14 +4,7 @@
  */
 
 var m = require('mithril');
-var $ = require('jquery');
-var URI = require('URIjs');
 var Fangorn = require('js/fangorn').Fangorn;
-var waterbutler = require('js/waterbutler');
-var $osf = require('js/osfHelpers');
-
-// Cross browser key codes for the Command key
-var commandKeys = [224, 17, 91, 93];
 
 // Define Fangorn Button Actions
 var _onedriveItemButtons = {
diff --git a/addons/onedrive/tests/test_client.py b/addons/onedrive/tests/test_client.py
index 7480f13693a..a721ac3af3b 100644
--- a/addons/onedrive/tests/test_client.py
+++ b/addons/onedrive/tests/test_client.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 import pytest
-import unittest
 
 from osf_tests.factories import UserFactory
 
@@ -8,21 +7,22 @@
 
 pytestmark = pytest.mark.django_db
 
-class TestCore(unittest.TestCase):
+class TestCore:
 
-    def setUp(self):
-        super(TestCore, self).setUp()
+    @pytest.fixture()
+    def user(self):
+        ret = UserFactory()
+        ret.add_addon('onedrive')
+        ret.save()
+        return ret
 
-        self.user = UserFactory()
-        self.user.add_addon('onedrive')
-        self.user.save()
+    @pytest.fixture()
+    def user_settings(self, user):
+        settings = user.get_addon('onedrive')
+        settings.access_token = '12345'
+        settings.save()
+        return settings
 
-        self.settings = self.user.get_addon('onedrive')
-        self.settings.access_token = '12345'
-        self.settings.save()
-
-    def test_get_addon_returns_onedrive_user_settings(self):
-        result = self.user.get_addon('onedrive')
+    def test_get_addon_returns_onedrive_user_settings(self, user_settings, user):
+        result = user.get_addon('onedrive')
         assert isinstance(result, UserSettings)
-
-
diff --git a/addons/onedrive/tests/test_models.py b/addons/onedrive/tests/test_models.py
index 1bd1728c598..8c02e5bd317 100644
--- a/addons/onedrive/tests/test_models.py
+++ b/addons/onedrive/tests/test_models.py
@@ -84,7 +84,7 @@ def test_selected_folder_name_root(self):
 
     def test_selected_folder_name_empty(self):
         self.node_settings.folder_id = None
-        assert self.node_settings.selected_folder_name ==  ''
+        assert self.node_settings.selected_folder_name == ''
 
     ## Overrides ##
     def test_set_folder(self):

From a6980f38df66a99c9599df1416a5751687e4abf9 Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Tue, 21 Nov 2017 14:28:34 -0500
Subject: [PATCH 126/192] Revert "Generalize Machines, mixins
 [OSF-8576][OSF-8876]"

---
 api/actions/permissions.py                    |  57 ----
 api/actions/serializers.py                    | 121 +++-----
 api/actions/urls.py                           |   2 +-
 api/actions/views.py                          | 109 ++++---
 api/base/filters.py                           |   4 +-
 api/base/settings/defaults.py                 |   1 +
 api/preprint_providers/serializers.py         |   3 +-
 api/preprint_providers/views.py               |  20 +-
 api/preprint_providers/workflows.py           |  29 --
 api/preprints/permissions.py                  |   4 +-
 api/preprints/serializers.py                  |  11 +-
 api/preprints/views.py                        |  39 +--
 api/users/serializers.py                      |   5 +
 api/users/urls.py                             |   1 +
 api/users/views.py                            |  73 ++++-
 .../views/test_preprint_provider_detail.py    |   2 +-
 .../test_preprint_provider_preprints_list.py  |  14 +-
 api_tests/preprints/filters/test_filters.py   |   4 +-
 .../preprints/views/test_preprint_actions.py  |  11 +-
 .../preprints/views/test_preprint_detail.py   |  23 +-
 .../preprints/views/test_preprint_list.py     |  20 +-
 api_tests/reviews/mixins/comment_settings.py  |  11 +-
 api_tests/reviews/mixins/filter_mixins.py     |  12 +-
 api_tests/users/views/test_user_actions.py    |  33 ++-
 framework/auth/oauth_scopes.py                |   4 +-
 osf/exceptions.py                             |  11 -
 .../commands/create_fake_preprint_actions.py  |  10 +-
 osf/management/commands/update_auth_groups.py |   4 +-
 osf/migrations/0060_reviews.py                |   2 +-
 osf/migrations/0062_accept_preprints.py       |   6 +-
 osf/migrations/0066_auto_20171031_1409.py     |  24 --
 osf/models/__init__.py                        |   2 +-
 osf/models/action.py                          |  22 +-
 osf/models/mixins.py                          | 122 +-------
 osf/models/preprint_provider.py               |  10 +-
 osf/models/preprint_service.py                |   7 +-
 osf/utils/machines.py                         | 145 ---------
 osf/utils/workflows.py                        |  64 ----
 osf_tests/factories.py                        |  15 +-
 osf_tests/test_reviewable.py                  |  26 +-
 reviews/__init__.py                           |   0
 reviews/apps.py                               |   8 +
 reviews/exceptions.py                         |   6 +
 reviews/models/__init__.py                    |   2 +
 reviews/models/mixins.py                      | 277 ++++++++++++++++++
 .../permissions.py                            |  64 +++-
 reviews/test/.gitkeep                         |   0
 reviews/workflow.py                           |  90 ++++++
 tests/test_notifications.py                   |   8 +-
 website/reviews/listeners.py                  |  39 ---
 50 files changed, 807 insertions(+), 770 deletions(-)
 delete mode 100644 api/actions/permissions.py
 delete mode 100644 api/preprint_providers/workflows.py
 delete mode 100644 osf/migrations/0066_auto_20171031_1409.py
 delete mode 100644 osf/utils/machines.py
 delete mode 100644 osf/utils/workflows.py
 create mode 100644 reviews/__init__.py
 create mode 100644 reviews/apps.py
 create mode 100644 reviews/exceptions.py
 create mode 100644 reviews/models/__init__.py
 create mode 100644 reviews/models/mixins.py
 rename {api/preprint_providers => reviews}/permissions.py (60%)
 create mode 100644 reviews/test/.gitkeep
 create mode 100644 reviews/workflow.py
 delete mode 100644 website/reviews/listeners.py

diff --git a/api/actions/permissions.py b/api/actions/permissions.py
deleted file mode 100644
index 302ef66fb53..00000000000
--- a/api/actions/permissions.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from rest_framework import permissions as drf_permissions
-
-from api.base.utils import get_user_auth
-from osf.models.action import ReviewAction
-from osf.models.mixins import ReviewableMixin, ReviewProviderMixin
-from osf.utils.workflows import DefaultTriggers
-from website.util import permissions as osf_permissions
-
-# Required permission to perform each action. `None` means no permissions required.
-TRIGGER_PERMISSIONS = {
-    DefaultTriggers.SUBMIT.value: None,
-    DefaultTriggers.ACCEPT.value: 'accept_submissions',
-    DefaultTriggers.REJECT.value: 'reject_submissions',
-    DefaultTriggers.EDIT_COMMENT.value: 'edit_review_comments',
-}
-
-
-class ReviewActionPermission(drf_permissions.BasePermission):
-    def has_object_permission(self, request, view, obj):
-        auth = get_user_auth(request)
-        if auth.user is None:
-            return False
-
-        target = None
-        provider = None
-        if isinstance(obj, ReviewAction):
-            target = obj.target
-            provider = target.provider
-        elif isinstance(obj, ReviewableMixin):
-            target = obj
-            provider = target.provider
-        elif isinstance(obj, ReviewProviderMixin):
-            provider = obj
-        else:
-            raise ValueError('Not a reviews-related model: {}'.format(obj))
-
-        serializer = view.get_serializer()
-
-        if request.method in drf_permissions.SAFE_METHODS:
-            # Moderators and node contributors can view actions
-            is_node_contributor = target is not None and target.node.has_permission(auth.user, osf_permissions.READ)
-            return is_node_contributor or auth.user.has_perm('view_actions', provider)
-        else:
-            # Moderators and node admins can trigger state changes.
-            is_node_admin = target is not None and target.node.has_permission(auth.user, osf_permissions.ADMIN)
-            if not (is_node_admin or auth.user.has_perm('view_submissions', provider)):
-                return False
-
-            # User can trigger state changes on this reviewable, but can they use this trigger in particular?
-            serializer = view.get_serializer(data=request.data)
-            serializer.is_valid(raise_exception=True)
-            trigger = serializer.validated_data.get('trigger')
-            permission = TRIGGER_PERMISSIONS[trigger]
-            return permission is None or request.user.has_perm(permission, target.provider)
diff --git a/api/actions/serializers.py b/api/actions/serializers.py
index 1ec72408ee2..c3a5fc5267b 100644
--- a/api/actions/serializers.py
+++ b/api/actions/serializers.py
@@ -12,9 +12,12 @@
 from api.base.serializers import RelationshipField
 from api.base.serializers import HideIfProviderCommentsAnonymous
 from api.base.serializers import HideIfProviderCommentsPrivate
-from osf.exceptions import InvalidTriggerError
+
 from osf.models import PreprintService
-from osf.utils.workflows import DefaultStates, DefaultTriggers
+
+from reviews.exceptions import InvalidTriggerError
+from reviews.workflow import Triggers
+from reviews.workflow import States
 
 
 class ReviewableCountsRelationshipField(RelationshipField):
@@ -48,27 +51,15 @@ def get_meta_information(self, metadata, provider):
 
 
 class TargetRelationshipField(RelationshipField):
-    _target_class = None
-
-    def __init__(self, *args, **kwargs):
-        self._target_class = kwargs.pop('target_class', None)
-        super(TargetRelationshipField, self).__init__(*args, **kwargs)
-
-    @property
-    def TargetClass(self):
-        if self._target_class:
-            return self._target_class
-        raise NotImplementedError()
-
-    def get_object(self, object_id):
-        return self.TargetClass.load(object_id)
+    def get_object(self, preprint_id):
+        return PreprintService.objects.get(guids___id=preprint_id)
 
     def to_internal_value(self, data):
-        target = self.get_object(data)
-        return {'target': target}
+        preprint = self.get_object(data)
+        return {'target': preprint}
 
 
-class BaseActionSerializer(JSONAPISerializer):
+class ActionSerializer(JSONAPISerializer):
     filterable_fields = frozenset([
         'id',
         'trigger',
@@ -76,28 +67,44 @@ class BaseActionSerializer(JSONAPISerializer):
         'to_state',
         'date_created',
         'date_modified',
+        'provider',
         'target',
     ])
 
     id = ser.CharField(source='_id', read_only=True)
 
-    trigger = ser.ChoiceField(choices=DefaultTriggers.choices())
+    trigger = ser.ChoiceField(choices=Triggers.choices())
 
     comment = HideIfProviderCommentsPrivate(ser.CharField(max_length=65535, required=False))
 
-    from_state = ser.ChoiceField(choices=DefaultStates.choices(), read_only=True)
-    to_state = ser.ChoiceField(choices=DefaultStates.choices(), read_only=True)
+    from_state = ser.ChoiceField(choices=States.choices(), read_only=True)
+    to_state = ser.ChoiceField(choices=States.choices(), read_only=True)
 
     date_created = ser.DateTimeField(read_only=True)
     date_modified = ser.DateTimeField(read_only=True)
 
-    creator = RelationshipField(
+    provider = RelationshipField(
+        read_only=True,
+        related_view='preprint_providers:preprint_provider-detail',
+        related_view_kwargs={'provider_id': '<target.provider._id>'},
+        filter_key='target__provider___id',
+    )
+
+    target = TargetRelationshipField(
+        read_only=False,
+        required=True,
+        related_view='preprints:preprint-detail',
+        related_view_kwargs={'preprint_id': '<target._id>'},
+        filter_key='target__guids___id',
+    )
+
+    creator = HideIfProviderCommentsAnonymous(RelationshipField(
         read_only=True,
         related_view='users:user-detail',
         related_view_kwargs={'user_id': '<creator._id>'},
         filter_key='creator__guids___id',
         always_embed=True,
-    )
+    ))
 
     links = LinksField(
         {
@@ -105,27 +112,26 @@ class BaseActionSerializer(JSONAPISerializer):
         }
     )
 
-    @property
-    def get_action_url(self):
-        raise NotImplementedError()
-
     def get_absolute_url(self, obj):
         return self.get_action_url(obj)
 
+    def get_action_url(self, obj):
+        return utils.absolute_reverse('actions:action-detail', kwargs={'action_id': obj._id, 'version': self.context['request'].parser_context['kwargs']['version']})
+
     def create(self, validated_data):
         trigger = validated_data.pop('trigger')
         user = validated_data.pop('user')
         target = validated_data.pop('target')
         comment = validated_data.pop('comment', '')
         try:
-            if trigger == DefaultTriggers.ACCEPT.value:
-                return target.run_accept(user, comment)
-            if trigger == DefaultTriggers.REJECT.value:
-                return target.run_reject(user, comment)
-            if trigger == DefaultTriggers.EDIT_COMMENT.value:
-                return target.run_edit_comment(user, comment)
-            if trigger == DefaultTriggers.SUBMIT.value:
-                return target.run_submit(user)
+            if trigger == Triggers.ACCEPT.value:
+                return target.reviews_accept(user, comment)
+            if trigger == Triggers.REJECT.value:
+                return target.reviews_reject(user, comment)
+            if trigger == Triggers.EDIT_COMMENT.value:
+                return target.reviews_edit_comment(user, comment)
+            if trigger == Triggers.SUBMIT.value:
+                return target.reviews_submit(user)
         except InvalidTriggerError as e:
             # Invalid transition from the current state
             raise Conflict(e.message)
@@ -134,46 +140,3 @@ def create(self, validated_data):
 
     class Meta:
         type_ = 'actions'
-        abstract = True
-
-class ReviewActionSerializer(BaseActionSerializer):
-    class Meta:
-        type_ = 'review-actions'
-
-    filterable_fields = frozenset([
-        'id',
-        'trigger',
-        'from_state',
-        'to_state',
-        'date_created',
-        'date_modified',
-        'provider',
-        'target',
-    ])
-
-    provider = RelationshipField(
-        read_only=True,
-        related_view='preprint_providers:preprint_provider-detail',
-        related_view_kwargs={'provider_id': '<target.provider._id>'},
-        filter_key='target__provider___id',
-    )
-
-    creator = HideIfProviderCommentsAnonymous(RelationshipField(
-        read_only=True,
-        related_view='users:user-detail',
-        related_view_kwargs={'user_id': '<creator._id>'},
-        filter_key='creator__guids___id',
-        always_embed=True,
-    ))
-
-    target = TargetRelationshipField(
-        target_class=PreprintService,
-        read_only=False,
-        required=True,
-        related_view='preprints:preprint-detail',
-        related_view_kwargs={'preprint_id': '<target._id>'},
-        filter_key='target__guids___id',
-    )
-
-    def get_action_url(self, obj):
-        return utils.absolute_reverse('actions:action-detail', kwargs={'action_id': obj._id, 'version': self.context['request'].parser_context['kwargs']['version']})
diff --git a/api/actions/urls.py b/api/actions/urls.py
index 7c1fe88d2f2..adbd3513a5f 100644
--- a/api/actions/urls.py
+++ b/api/actions/urls.py
@@ -5,6 +5,6 @@
 app_name = 'osf'
 
 urlpatterns = [
-    url(r'^reviews/$', views.ReviewActionList.as_view(), name=views.ReviewActionList.view_name),
+    url(r'^$', views.CreateAction.as_view(), name=views.CreateAction.view_name),
     url(r'^(?P<action_id>\w+)/$', views.ActionDetail.as_view(), name=views.ActionDetail.view_name),
 ]
diff --git a/api/actions/views.py b/api/actions/views.py
index 55487dda430..6645e45802f 100644
--- a/api/actions/views.py
+++ b/api/actions/views.py
@@ -2,22 +2,26 @@
 from __future__ import unicode_literals
 
 from django.shortcuts import get_object_or_404
-from guardian.shortcuts import get_objects_for_user
 from rest_framework import generics
 from rest_framework import permissions
-from rest_framework.exceptions import NotFound
 
-from api.actions.permissions import ReviewActionPermission
-from api.actions.serializers import ReviewActionSerializer
-from api.base.filters import ListFilterMixin
+from framework.auth.oauth_scopes import CoreScopes
+from osf.models import Action
+from reviews import permissions as reviews_permissions
+
+from api.actions.serializers import ActionSerializer
+from api.base.exceptions import Conflict
+from api.base.parsers import (
+    JSONAPIMultipleRelationshipsParser,
+    JSONAPIMultipleRelationshipsParserForRegularJSON,
+)
+from api.base.utils import absolute_reverse
 from api.base.views import JSONAPIBaseView
 from api.base import permissions as base_permissions
-from framework.auth.oauth_scopes import CoreScopes
-from osf.models import PreprintProvider, ReviewAction
 
 
-def get_review_actions_queryset():
-    return ReviewAction.objects.include(
+def get_actions_queryset():
+    return Action.objects.include(
         'creator',
         'creator__guids',
         'target',
@@ -29,7 +33,7 @@ def get_review_actions_queryset():
 class ActionDetail(JSONAPIBaseView, generics.RetrieveAPIView):
     """Action Detail
 
-    Actions represent state changes and/or comments on any actionable object (e.g. preprints, noderequests)
+    Actions represent state changes and/or comments on a reviewable object (e.g. a preprint)
 
     ##Action Attributes
 
@@ -59,30 +63,29 @@ class ActionDetail(JSONAPIBaseView, generics.RetrieveAPIView):
     permission_classes = (
         permissions.IsAuthenticatedOrReadOnly,
         base_permissions.TokenHasScope,
-        ReviewActionPermission,
+        reviews_permissions.ActionPermission,
     )
 
     required_read_scopes = [CoreScopes.ACTIONS_READ]
     required_write_scopes = [CoreScopes.ACTIONS_WRITE]
 
-    serializer_class = ReviewActionSerializer
+    serializer_class = ActionSerializer
     view_category = 'actions'
     view_name = 'action-detail'
 
     def get_object(self):
-        action = None
-        if ReviewAction.objects.filter(_id=self.kwargs['action_id']):
-            action = get_object_or_404(get_review_actions_queryset(), _id=self.kwargs['action_id'])
-        if not action:
-            raise NotFound('Unable to find specified Action')
+        action = get_object_or_404(get_actions_queryset(), _id=self.kwargs['action_id'])
         self.check_object_permissions(self.request, action)
         return action
 
 
-class ReviewActionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
-    """List of review actions viewable by this user *Read-only*
+class CreateAction(JSONAPIBaseView, generics.ListCreateAPIView):
+    """Create Actions *Write-only*
 
-    Actions represent state changes and/or comments on a reviewable object (e.g. a preprint)
+    Use this endpoint to create a new Action and thereby trigger a state change on a preprint.
+
+    GETting from this endpoint will always return an empty list.
+    Use `/user/me/actions/` or `/preprints/<guid>/actions/` to read lists of actions.
 
     ##Action Attributes
 
@@ -116,28 +119,66 @@ class ReviewActionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
     + `filter[<fieldname>]=<Str>` -- fields and values to filter the search results on.
 
     Actions may be filtered by their `id`, `from_state`, `to_state`, `date_created`, `date_modified`, `creator`, `provider`, `target`
+
+    ###Creating New Actions
+
+    Create a new Action by POSTing to `/actions/`, including the target preprint and the action trigger.
+
+    Valid triggers are: `submit`, `accept`, `reject`, and `edit_comment`
+
+        Method:        POST
+        URL:           /actions/
+        Query Params:  <none>
+        Body (JSON):   {
+                        "data": {
+                            "attributes": {
+                                "trigger": {trigger},           # required
+                                "comment": {comment},
+                            },
+                            "relationships": {
+                                "target": {                     # required
+                                    "data": {
+                                        "type": "preprints",
+                                        "id": {preprint_id}
+                                    }
+                                },
+                            }
+                        }
+                    }
+        Success:       201 CREATED + action representation
     """
-    # Permissions handled in get_default_django_query
     permission_classes = (
-        permissions.IsAuthenticated,
+        permissions.IsAuthenticatedOrReadOnly,
         base_permissions.TokenHasScope,
+        reviews_permissions.ActionPermission,
     )
 
-    required_read_scopes = [CoreScopes.ACTIONS_READ]
-    required_write_scopes = [CoreScopes.NULL]
+    required_read_scopes = [CoreScopes.NULL]
+    required_write_scopes = [CoreScopes.ACTIONS_WRITE]
 
-    serializer_class = ReviewActionSerializer
-    model_class = ReviewAction
+    parser_classes = (JSONAPIMultipleRelationshipsParser, JSONAPIMultipleRelationshipsParserForRegularJSON,)
+
+    serializer_class = ActionSerializer
 
-    ordering = ('-date_created',)
     view_category = 'actions'
-    view_name = 'review-action-list'
+    view_name = 'create-action'
+
+    # overrides ListCreateAPIView
+    def perform_create(self, serializer):
+        target = serializer.validated_data['target']
+        self.check_object_permissions(self.request, target)
+
+        if not target.provider.is_reviewed:
+            raise Conflict('{} is an unmoderated provider. If you are an admin, set up moderation by setting `reviews_workflow` at {}'.format(
+                target.provider.name,
+                absolute_reverse('preprint_providers:preprint_provider-detail', kwargs={
+                    'provider_id': target.provider._id,
+                    'version': self.request.parser_context['kwargs']['version']
+                })
+            ))
 
-    # overrides ListFilterMixin
-    def get_default_queryset(self):
-        provider_queryset = get_objects_for_user(self.request.user, 'view_actions', PreprintProvider)
-        return get_review_actions_queryset().filter(target__node__is_public=True, target__provider__in=provider_queryset)
+        serializer.save(user=self.request.user)
 
-    # overrides ListAPIView
+    # overrides ListCreateAPIView
     def get_queryset(self):
-        return self.get_queryset_from_request()
+        return Action.objects.none()
diff --git a/api/base/filters.py b/api/base/filters.py
index b8b8c5b1764..0c448d87ced 100644
--- a/api/base/filters.py
+++ b/api/base/filters.py
@@ -19,7 +19,7 @@
 from rest_framework.filters import OrderingFilter
 from osf.models import Subject, PreprintProvider, Node
 from osf.models.base import GuidMixin
-from osf.utils.workflows import DefaultStates
+from reviews.workflow import States
 
 
 def lowercase(lower):
@@ -504,7 +504,7 @@ def preprints_queryset(self, base_queryset, auth_user, allow_contribs=True):
             admin_user_query = Q(node__contributor__user_id=auth_user.id, node__contributor__admin=True)
             reviews_user_query = Q(node__is_public=True, provider__in=get_objects_for_user(auth_user, 'view_submissions', PreprintProvider))
             if allow_contribs:
-                contrib_user_query = ~Q(machine_state=DefaultStates.INITIAL.value) & Q(node__contributor__user_id=auth_user.id, node__contributor__read=True)
+                contrib_user_query = ~Q(reviews_state=States.INITIAL.value) & Q(node__contributor__user_id=auth_user.id, node__contributor__read=True)
                 query = (no_user_query | contrib_user_query | admin_user_query | reviews_user_query)
             else:
                 query = (no_user_query | admin_user_query | reviews_user_query)
diff --git a/api/base/settings/defaults.py b/api/base/settings/defaults.py
index 1512d6f1822..11ded0de0ef 100644
--- a/api/base/settings/defaults.py
+++ b/api/base/settings/defaults.py
@@ -93,6 +93,7 @@
 
     # OSF
     'osf',
+    'reviews',
 
     # Addons
     'addons.osfstorage',
diff --git a/api/preprint_providers/serializers.py b/api/preprint_providers/serializers.py
index 7e576316d07..8bddfd91478 100644
--- a/api/preprint_providers/serializers.py
+++ b/api/preprint_providers/serializers.py
@@ -2,10 +2,11 @@
 from rest_framework import serializers as ser
 from rest_framework.exceptions import ValidationError
 
+from reviews.workflow import Workflows
+
 from api.actions.serializers import ReviewableCountsRelationshipField
 from api.base.utils import absolute_reverse, get_user_auth
 from api.base.serializers import JSONAPISerializer, LinksField, RelationshipField, ShowIfVersion
-from api.preprint_providers.workflows import Workflows
 
 
 class PreprintProviderSerializer(JSONAPISerializer):
diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py
index a55c29f0c72..2fb9c67c1d2 100644
--- a/api/preprint_providers/views.py
+++ b/api/preprint_providers/views.py
@@ -1,10 +1,16 @@
-
-from django.db.models import Q
 from guardian.shortcuts import get_objects_for_user
+from django.db.models import Q
+
 from rest_framework import generics
 from rest_framework import permissions as drf_permissions
 from rest_framework.exceptions import NotAuthenticated
 
+from framework.auth.oauth_scopes import CoreScopes
+
+from osf.models import AbstractNode, Subject, PreprintProvider
+
+from reviews import permissions as reviews_permissions
+
 from api.base import permissions as base_permissions
 from api.base.exceptions import InvalidFilterValue, InvalidFilterOperator, Conflict
 from api.base.filters import PreprintFilterMixin, ListFilterMixin
@@ -14,11 +20,9 @@
 from api.licenses.views import LicenseList
 from api.taxonomies.serializers import TaxonomySerializer
 from api.preprint_providers.serializers import PreprintProviderSerializer
-from api.preprint_providers.permissions import CanSetUpProvider, PERMISSIONS
 from api.preprints.serializers import PreprintSerializer
+
 from api.preprints.permissions import PreprintPublishedOrAdmin
-from framework.auth.oauth_scopes import CoreScopes
-from osf.models import AbstractNode, Subject, PreprintProvider
 
 class PreprintProviderList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
     """
@@ -95,8 +99,8 @@ def build_query_from_field(self, field_name, operation):
                 raise NotAuthenticated()
             value = operation['value'].lstrip('[').rstrip(']')
             permissions = [v.strip() for v in value.split(',')]
-            if any(p not in PERMISSIONS for p in permissions):
-                valid_permissions = ', '.join(PERMISSIONS.keys())
+            if any(p not in reviews_permissions.PERMISSIONS for p in permissions):
+                valid_permissions = ', '.join(reviews_permissions.PERMISSIONS.keys())
                 raise InvalidFilterValue('Invalid permission! Valid values are: {}'.format(valid_permissions))
             return Q(id__in=get_objects_for_user(auth_user, permissions, PreprintProvider, any_perm=True))
 
@@ -175,7 +179,7 @@ class PreprintProviderDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView):
     permission_classes = (
         drf_permissions.IsAuthenticatedOrReadOnly,
         base_permissions.TokenHasScope,
-        CanSetUpProvider,
+        reviews_permissions.CanSetUpProvider,
     )
 
     required_read_scopes = [CoreScopes.ALWAYS_PUBLIC]
diff --git a/api/preprint_providers/workflows.py b/api/preprint_providers/workflows.py
deleted file mode 100644
index a643b0d57e6..00000000000
--- a/api/preprint_providers/workflows.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from enum import unique
-
-from osf.utils.workflows import ChoiceEnum, DefaultStates
-
-
-@unique
-class Workflows(ChoiceEnum):
-    NONE = None
-    PRE_MODERATION = 'pre-moderation'
-    POST_MODERATION = 'post-moderation'
-
-PUBLIC_STATES = {
-    Workflows.NONE.value: (
-        DefaultStates.INITIAL.value,
-        DefaultStates.PENDING.value,
-        DefaultStates.ACCEPTED.value,
-        DefaultStates.REJECTED.value,
-    ),
-    Workflows.PRE_MODERATION.value: (
-        DefaultStates.ACCEPTED.value,
-    ),
-    Workflows.POST_MODERATION.value: (
-        DefaultStates.PENDING.value,
-        DefaultStates.ACCEPTED.value,
-    )
-}
diff --git a/api/preprints/permissions.py b/api/preprints/permissions.py
index eb4bf812510..2a6f4fdde76 100644
--- a/api/preprints/permissions.py
+++ b/api/preprints/permissions.py
@@ -4,8 +4,8 @@
 
 from api.base.utils import get_user_auth
 from osf.models import PreprintService
-from osf.utils.workflows import DefaultStates
 from website.util import permissions as osf_permissions
+from reviews.workflow import States
 
 
 class PreprintPublishedOrAdmin(permissions.BasePermission):
@@ -21,7 +21,7 @@ def has_object_permission(self, request, view, obj):
                 user_has_permissions = (obj.verified_publishable or
                     (node.is_public and auth.user.has_perm('view_submissions', obj.provider)) or
                     node.has_permission(auth.user, osf_permissions.ADMIN) or
-                    (node.is_contributor(auth.user) and obj.machine_state != DefaultStates.INITIAL.value)
+                    (node.is_contributor(auth.user) and obj.reviews_state != States.INITIAL.value)
                 )
                 return user_has_permissions
         else:
diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py
index 203e89f464e..dfc387f235e 100644
--- a/api/preprints/serializers.py
+++ b/api/preprints/serializers.py
@@ -89,7 +89,7 @@ class PreprintSerializer(JSONAPISerializer):
         related_view_kwargs={'node_id': '<node._id>'},
     )
 
-    reviews_state = ser.CharField(source='machine_state', read_only=True, max_length=15)
+    reviews_state = ser.CharField(read_only=True, max_length=15)
     date_last_transitioned = DateByVersion(read_only=True)
 
     citation = RelationshipField(
@@ -132,8 +132,8 @@ class PreprintSerializer(JSONAPISerializer):
         read_only=False
     )
 
-    review_actions = RelationshipField(
-        related_view='preprints:preprint-review-action-list',
+    actions = RelationshipField(
+        related_view='preprints:preprint-action-list',
         related_view_kwargs={'preprint_id': '<_id>'}
     )
 
@@ -189,9 +189,8 @@ def update(self, preprint, validated_data):
         if published and preprint.provider.is_reviewed:
             raise Conflict('{} uses a moderation workflow, so preprints must be submitted for review instead of published directly. Submit a preprint by creating a `submit` Action at {}'.format(
                 preprint.provider.name,
-                absolute_reverse('preprints:preprint-review-action-list', kwargs={
-                    'version': self.context['request'].parser_context['kwargs']['version'],
-                    'preprint_id': preprint._id
+                absolute_reverse('actions:create-action', kwargs={
+                    'version': self.context['request'].parser_context['kwargs']['version']
                 })
             ))
 
diff --git a/api/preprints/views.py b/api/preprints/views.py
index 68d58f49416..0ebda342966 100644
--- a/api/preprints/views.py
+++ b/api/preprints/views.py
@@ -5,12 +5,12 @@
 from rest_framework import permissions as drf_permissions
 
 from framework.auth.oauth_scopes import CoreScopes
-from osf.models import ReviewAction, PreprintService
+from osf.models import Action, PreprintService
 from osf.utils.requests import check_select_for_update
+from reviews import permissions as reviews_permissions
 
-from api.actions.permissions import ReviewActionPermission
-from api.actions.serializers import ReviewActionSerializer
-from api.actions.views import get_review_actions_queryset
+from api.actions.serializers import ActionSerializer
+from api.actions.views import get_actions_queryset
 from api.base.exceptions import Conflict
 from api.base.views import JSONAPIBaseView, WaterButlerMixin
 from api.base.filters import ListFilterMixin, PreprintFilterMixin
@@ -18,7 +18,7 @@
     JSONAPIMultipleRelationshipsParser,
     JSONAPIMultipleRelationshipsParserForRegularJSON,
 )
-from api.base.utils import absolute_reverse, get_user_auth
+from api.base.utils import get_user_auth
 from api.base import permissions as base_permissions
 from api.citations.utils import render_citation, preprint_csl
 from api.preprints.serializers import (
@@ -407,7 +407,7 @@ def create(self, request, *args, **kwargs):
         return super(PreprintContributorsList, self).create(request, *args, **kwargs)
 
 
-class PreprintActionList(JSONAPIBaseView, generics.ListCreateAPIView, ListFilterMixin, PreprintMixin):
+class PreprintActionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin, PreprintMixin):
     """Action List *Read-only*
 
     Actions represent state changes and/or comments on a reviewable object (e.g. a preprint)
@@ -448,39 +448,22 @@ class PreprintActionList(JSONAPIBaseView, generics.ListCreateAPIView, ListFilter
     permission_classes = (
         drf_permissions.IsAuthenticatedOrReadOnly,
         base_permissions.TokenHasScope,
-        ReviewActionPermission,
+        reviews_permissions.ActionPermission,
     )
 
     required_read_scopes = [CoreScopes.ACTIONS_READ]
     required_write_scopes = [CoreScopes.ACTIONS_WRITE]
 
-    parser_classes = (JSONAPIMultipleRelationshipsParser, JSONAPIMultipleRelationshipsParserForRegularJSON,)
-    serializer_class = ReviewActionSerializer
-    model_class = ReviewAction
+    serializer_class = ActionSerializer
+    model_class = Action
 
     ordering = ('-date_created',)
     view_category = 'preprints'
-    view_name = 'preprint-review-action-list'
-
-    # overrides ListCreateAPIView
-    def perform_create(self, serializer):
-        target = serializer.validated_data['target']
-        self.check_object_permissions(self.request, target)
-
-        if not target.provider.is_reviewed:
-            raise Conflict('{} is an unmoderated provider. If you are an admin, set up moderation by setting `reviews_workflow` at {}'.format(
-                target.provider.name,
-                absolute_reverse('preprint_providers:preprint_provider-detail', kwargs={
-                    'provider_id': target.provider._id,
-                    'version': self.request.parser_context['kwargs']['version']
-                })
-            ))
-
-        serializer.save(user=self.request.user)
+    view_name = 'preprint-action-list'
 
     # overrides ListFilterMixin
     def get_default_queryset(self):
-        return get_review_actions_queryset().filter(target_id=self.get_preprint().id)
+        return get_actions_queryset().filter(target_id=self.get_preprint().id)
 
     # overrides ListAPIView
     def get_queryset(self):
diff --git a/api/users/serializers.py b/api/users/serializers.py
index 2e2a2127e94..9613573c0a4 100644
--- a/api/users/serializers.py
+++ b/api/users/serializers.py
@@ -86,6 +86,11 @@ class UserSerializer(JSONAPISerializer):
         self_view_kwargs={'user_id': '<_id>'},
     ))
 
+    actions = ShowIfCurrentUser(RelationshipField(
+        related_view='users:user-action-list',
+        related_view_kwargs={'user_id': '<_id>'},
+    ))
+
     class Meta:
         type_ = 'users'
 
diff --git a/api/users/urls.py b/api/users/urls.py
index ab72931e6ff..68a81943a2d 100644
--- a/api/users/urls.py
+++ b/api/users/urls.py
@@ -6,6 +6,7 @@
 urlpatterns = [
     url(r'^$', views.UserList.as_view(), name=views.UserList.view_name),
     url(r'^(?P<user_id>\w+)/$', views.UserDetail.as_view(), name=views.UserDetail.view_name),
+    url(r'^(?P<user_id>\w+)/actions/$', views.UserActionList.as_view(), name=views.UserActionList.view_name),
     url(r'^(?P<user_id>\w+)/addons/$', views.UserAddonList.as_view(), name=views.UserAddonList.view_name),
     url(r'^(?P<user_id>\w+)/addons/(?P<provider>\w+)/$', views.UserAddonDetail.as_view(), name=views.UserAddonDetail.view_name),
     url(r'^(?P<user_id>\w+)/addons/(?P<provider>\w+)/accounts/$', views.UserAddonAccountList.as_view(), name=views.UserAddonAccountList.view_name),
diff --git a/api/users/views.py b/api/users/views.py
index 0c7a7cd0010..162ad81f358 100644
--- a/api/users/views.py
+++ b/api/users/views.py
@@ -1,6 +1,10 @@
 from django.apps import apps
 
+from guardian.shortcuts import get_objects_for_user
+
 from api.addons.views import AddonSettingsMixin
+from api.actions.views import get_actions_queryset
+from api.actions.serializers import ActionSerializer
 from api.base import permissions as base_permissions
 from api.base.exceptions import Conflict, UserGone
 from api.base.filters import ListFilterMixin, PreprintFilterMixin
@@ -37,7 +41,9 @@
                         PreprintService,
                         Node,
                         Registration,
-                        OSFUser)
+                        OSFUser,
+                        PreprintProvider,
+                        Action,)
 
 
 class UserMixin(object):
@@ -780,3 +786,68 @@ def perform_destroy(self, instance):
             if val['id'] in current_institutions:
                 user.remove_institution(val['id'])
         user.save()
+
+
+class UserActionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin, UserMixin):
+    """List of actions viewable by this user *Read-only*
+
+    Actions represent state changes and/or comments on a reviewable object (e.g. a preprint)
+
+    ##Action Attributes
+
+        name                            type                                description
+        ====================================================================================
+        date_created                    iso8601 timestamp                   timestamp that the action was created
+        date_modified                   iso8601 timestamp                   timestamp that the action was last modified
+        from_state                      string                              state of the reviewable before this action was created
+        to_state                        string                              state of the reviewable after this action was created
+        comment                         string                              comment explaining the state change
+        trigger                         string                              name of the trigger for this action
+
+    ##Relationships
+
+    ###Target
+    Link to the object (e.g. preprint) this action acts on
+
+    ###Provider
+    Link to detail for the target object's provider
+
+    ###Creator
+    Link to the user that created this action
+
+    ##Links
+    - `self` -- Detail page for the current action
+
+    ##Query Params
+
+    + `page=<Int>` -- page number of results to view, default 1
+
+    + `filter[<fieldname>]=<Str>` -- fields and values to filter the search results on.
+
+    Actions may be filtered by their `id`, `from_state`, `to_state`, `date_created`, `date_modified`, `creator`, `provider`, `target`
+    """
+    # Permissions handled in get_default_queryset
+    permission_classes = (
+        drf_permissions.IsAuthenticated,
+        base_permissions.TokenHasScope,
+        CurrentUser,
+    )
+
+    required_read_scopes = [CoreScopes.ACTIONS_READ]
+    required_write_scopes = [CoreScopes.NULL]
+
+    serializer_class = ActionSerializer
+    model_class = Action
+
+    ordering = ('-date_created',)
+    view_category = 'users'
+    view_name = 'user-action-list'
+
+    # overrides ListFilterMixin
+    def get_default_queryset(self):
+        provider_queryset = get_objects_for_user(self.get_user(), 'view_actions', PreprintProvider)
+        return get_actions_queryset().filter(target__node__is_public=True, target__provider__in=provider_queryset)
+
+    # overrides ListAPIView
+    def get_queryset(self):
+        return self.get_queryset_from_request()
diff --git a/api_tests/preprint_providers/views/test_preprint_provider_detail.py b/api_tests/preprint_providers/views/test_preprint_provider_detail.py
index 975bc94202c..18da85ae498 100644
--- a/api_tests/preprint_providers/views/test_preprint_provider_detail.py
+++ b/api_tests/preprint_providers/views/test_preprint_provider_detail.py
@@ -1,11 +1,11 @@
 import pytest
 
 from api.base.settings.defaults import API_BASE
-from api.preprint_providers.permissions import GroupHelper
 from osf_tests.factories import (
     PreprintProviderFactory,
     AuthUserFactory,
 )
+from reviews.permissions import GroupHelper
 
 @pytest.mark.django_db
 class TestPreprintProviderExists:
diff --git a/api_tests/preprint_providers/views/test_preprint_provider_preprints_list.py b/api_tests/preprint_providers/views/test_preprint_provider_preprints_list.py
index 38b217fef9a..05f370a7b39 100644
--- a/api_tests/preprint_providers/views/test_preprint_provider_preprints_list.py
+++ b/api_tests/preprint_providers/views/test_preprint_provider_preprints_list.py
@@ -56,11 +56,11 @@ def test_provider_filter_equals_returns_multiple(self, app, user, provider_one,
 
     def test_reviews_state_counts(self, app, user, provider_one, preprint_one, preprint_two, preprint_three, url):
         url = '{}meta[reviews_state_counts]=true'.format(url)
-        preprint_one.machine_state = 'pending'
+        preprint_one.reviews_state = 'pending'
         preprint_one.save()
-        preprint_two.machine_state = 'pending'
+        preprint_two.reviews_state = 'pending'
         preprint_two.save()
-        preprint_three.machine_state = 'accepted'
+        preprint_three.reviews_state = 'accepted'
         preprint_three.save()
 
         expected = {
@@ -114,10 +114,10 @@ def expected_reviewables(self, provider, user):
             PreprintFactory(is_published=False, provider=provider, project=ProjectFactory(is_public=True)),
             PreprintFactory(is_published=False, provider=provider, project=ProjectFactory(is_public=True)),
         ]
-        preprints[0].run_submit(user)
-        preprints[0].run_accept(user, 'comment')
-        preprints[1].run_submit(user)
-        preprints[2].run_submit(user)
+        preprints[0].reviews_submit(user)
+        preprints[0].reviews_accept(user, 'comment')
+        preprints[1].reviews_submit(user)
+        preprints[2].reviews_submit(user)
         return preprints
 
     @pytest.fixture
diff --git a/api_tests/preprints/filters/test_filters.py b/api_tests/preprints/filters/test_filters.py
index 26a87e19c33..85457847a44 100644
--- a/api_tests/preprints/filters/test_filters.py
+++ b/api_tests/preprints/filters/test_filters.py
@@ -1,11 +1,13 @@
 import pytest
 
-from api.preprint_providers.permissions import GroupHelper
+from framework.auth.core import Auth
 from osf_tests.factories import (
     PreprintFactory,
     AuthUserFactory,
     SubjectFactory,
+    PreprintProviderFactory
 )
+from reviews.permissions import GroupHelper
 
 
 @pytest.mark.django_db
diff --git a/api_tests/preprints/views/test_preprint_actions.py b/api_tests/preprints/views/test_preprint_actions.py
index 4d465a69494..c2ee5d5f4bf 100644
--- a/api_tests/preprints/views/test_preprint_actions.py
+++ b/api_tests/preprints/views/test_preprint_actions.py
@@ -1,17 +1,18 @@
 import pytest
 
 from api.base.settings.defaults import API_BASE
-from api.preprint_providers.permissions import GroupHelper
 from osf_tests.factories import (
+    PreprintFactory,
     AuthUserFactory,
 )
+from reviews.permissions import GroupHelper
 from website.util import permissions as osf_permissions
 
-from api_tests.reviews.mixins.filter_mixins import ReviewActionFilterMixin
-from api_tests.reviews.mixins.comment_settings import ReviewActionCommentSettingsMixin
+from api_tests.reviews.mixins.filter_mixins import ActionFilterMixin
+from api_tests.reviews.mixins.comment_settings import ActionCommentSettingsMixin
 
 
-class TestPreprintActionFilters(ReviewActionFilterMixin):
+class TestPreprintActionFilters(ActionFilterMixin):
 
     @pytest.fixture()
     def preprint(self, all_actions):
@@ -43,7 +44,7 @@ def test_unauthorized_user(self, app, url):
         assert res.status_code == 403
 
 
-class TestReviewActionSettings(ReviewActionCommentSettingsMixin):
+class TestActionSettings(ActionCommentSettingsMixin):
     @pytest.fixture()
     def url(self, preprint):
         return '/{}preprints/{}/actions/'.format(API_BASE, preprint._id)
diff --git a/api_tests/preprints/views/test_preprint_detail.py b/api_tests/preprints/views/test_preprint_detail.py
index 0ea047810df..2369476e073 100644
--- a/api_tests/preprints/views/test_preprint_detail.py
+++ b/api_tests/preprints/views/test_preprint_detail.py
@@ -1,11 +1,12 @@
+import functools
 import mock
+from django.db.models import Q
 import pytest
 
 from api.base.settings.defaults import API_BASE
 from api_tests import utils as test_utils
 from framework.auth.core import Auth
 from osf.models import PreprintService, NodeLicense
-from osf.utils.workflows import DefaultStates
 from osf_tests.factories import (
     PreprintFactory,
     AuthUserFactory,
@@ -14,6 +15,10 @@
     PreprintProviderFactory,
 )
 from rest_framework import exceptions
+from tests.base import fake, capture_signals
+from website.project.signals import contributor_added
+from website.identifiers.utils import build_ezid_metadata
+from reviews.workflow import States
 
 
 def build_preprint_update_payload(node_id, attributes=None, relationships=None):
@@ -64,7 +69,7 @@ def test_preprint_detail(self, app, user, preprint, url, res, data):
 
     #   test contributors in preprint data
         assert data['relationships'].get('contributors', None)
-        assert data['relationships']['contributors'].get('data', None) is None
+        assert data['relationships']['contributors'].get('data', None) is None
 
     #   test node type and id in preprint data
         assert data['relationships']['node']['data'].get('id', None) == preprint.node._id
@@ -863,19 +868,19 @@ def file_one_public_project(self, admin, public_project):
 
     @pytest.fixture()
     def unpublished_preprint(self, admin, provider, subject, public_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=public_project, is_published=False, machine_state='initial')
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=public_project, is_published=False, reviews_state='initial')
 
     @pytest.fixture()
     def private_preprint(self, admin, provider, subject, private_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=private_project, is_published=False, machine_state='accepted')
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=private_project, is_published=False, reviews_state='accepted')
 
     @pytest.fixture()
     def abandoned_private_preprint(self, admin, provider, subject, private_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=private_project, is_published=False, machine_state='initial')
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=private_project, is_published=False, reviews_state='initial')
 
     @pytest.fixture()
     def abandoned_public_preprint(self, admin, provider, subject, public_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=public_project, is_published=False, machine_state='initial')
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=provider, subjects=[[subject._id]], project=public_project, is_published=False, reviews_state='initial')
 
     @pytest.fixture()
     def abandoned_private_url(self, abandoned_private_preprint):
@@ -1009,15 +1014,15 @@ def file_one_private_project(self, admin, private_project):
 
     @pytest.fixture()
     def unpublished_reviews_preprint(self, admin, reviews_provider, subject, public_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=reviews_provider, subjects=[[subject._id]], project=public_project, is_published=False, machine_state=DefaultStates.PENDING.value)
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=reviews_provider, subjects=[[subject._id]], project=public_project, is_published=False, reviews_state=States.PENDING.value)
 
     @pytest.fixture()
     def unpublished_reviews_initial_preprint(self, admin, reviews_provider, subject, public_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=reviews_provider, subjects=[[subject._id]], project=public_project, is_published=False, machine_state=DefaultStates.INITIAL.value)
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunrises.pdf', provider=reviews_provider, subjects=[[subject._id]], project=public_project, is_published=False, reviews_state=States.INITIAL.value)
 
     @pytest.fixture()
     def private_reviews_preprint(self, admin, reviews_provider, subject, private_project):
-        return PreprintFactory(creator=admin, filename='toe_socks_and_sunsets.pdf', provider=reviews_provider, subjects=[[subject._id]], project=private_project, is_published=False, machine_state=DefaultStates.PENDING.value)
+        return PreprintFactory(creator=admin, filename='toe_socks_and_sunsets.pdf', provider=reviews_provider, subjects=[[subject._id]], project=private_project, is_published=False, reviews_state=States.PENDING.value)
 
     @pytest.fixture()
     def unpublished_url(self, unpublished_reviews_preprint):
diff --git a/api_tests/preprints/views/test_preprint_list.py b/api_tests/preprints/views/test_preprint_list.py
index 7a40cffc192..e38b3140192 100644
--- a/api_tests/preprints/views/test_preprint_list.py
+++ b/api_tests/preprints/views/test_preprint_list.py
@@ -14,7 +14,6 @@
 from api_tests.reviews.mixins.filter_mixins import ReviewableFilterMixin
 from framework.auth.core import Auth
 from osf.models import PreprintService, Node
-from osf.utils.workflows import DefaultStates
 from osf_tests.factories import (
     ProjectFactory,
     PreprintFactory,
@@ -25,6 +24,7 @@
 from tests.base import ApiTestCase, capture_signals
 from website.project import signals as project_signals
 from website.util import permissions
+from reviews.workflow import States
 
 def build_preprint_create_payload(node_id=None, provider_id=None, file_id=None, attrs={}):
     payload = {
@@ -197,11 +197,11 @@ def expected_reviewables(self, user):
             PreprintFactory(is_published=False, project=ProjectFactory(is_public=True)),
             PreprintFactory(is_published=False, project=ProjectFactory(is_public=True)),
         ]
-        preprints[0].run_submit(user)
-        preprints[0].run_accept(user, 'comment')
-        preprints[1].run_submit(user)
-        preprints[1].run_reject(user, 'comment')
-        preprints[2].run_submit(user)
+        preprints[0].reviews_submit(user)
+        preprints[0].reviews_accept(user, 'comment')
+        preprints[1].reviews_submit(user)
+        preprints[1].reviews_reject(user, 'comment')
+        preprints[2].reviews_submit(user)
         return preprints
 
     @pytest.fixture
@@ -505,7 +505,7 @@ def url(self):
 
     @pytest.fixture()
     def preprint_unpublished(self, user_admin_contrib, provider_one, project_public, subject):
-        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, machine_state=DefaultStates.PENDING.value)
+        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=States.PENDING.value)
 
     def test_unpublished_visible_to_admins(self, app, user_admin_contrib, preprint_unpublished, preprint_published, url):
         res = app.get(url, auth=user_admin_contrib.auth)
@@ -552,7 +552,7 @@ def url(self):
 
     @pytest.fixture()
     def preprint_unpublished(self, user_admin_contrib, provider_one, project_public, subject):
-        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, machine_state=DefaultStates.INITIAL.value)
+        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=States.INITIAL.value)
 
     def test_unpublished_visible_to_admins(self, app, user_admin_contrib, preprint_unpublished, preprint_published, url):
         res = app.get(url, auth=user_admin_contrib.auth)
@@ -648,7 +648,7 @@ def project_public(self, user_admin_contrib, user_write_contrib):
 
     @pytest.fixture()
     def preprint_unpublished(self, user_admin_contrib, provider_one, project_public, subject):
-        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, machine_state=DefaultStates.INITIAL.value)
+        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=States.INITIAL.value)
 
     @pytest.fixture()
     def list_url(self):
@@ -701,7 +701,7 @@ def project_public(self, user_admin_contrib, user_write_contrib):
 
     @pytest.fixture()
     def preprint_unpublished(self, user_admin_contrib, provider_one, project_public, subject):
-        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, machine_state=DefaultStates.PENDING.value)
+        return PreprintFactory(creator=user_admin_contrib, filename='mgla.pdf', provider=provider_one, subjects=[[subject._id]], project=project_public, is_published=False, reviews_state=States.PENDING.value)
 
     @pytest.fixture()
     def list_url(self):
diff --git a/api_tests/reviews/mixins/comment_settings.py b/api_tests/reviews/mixins/comment_settings.py
index baa280959e2..8826da5b8be 100644
--- a/api_tests/reviews/mixins/comment_settings.py
+++ b/api_tests/reviews/mixins/comment_settings.py
@@ -1,17 +1,20 @@
+from datetime import timedelta
+
 import pytest
+from furl import furl
 
-from api.preprint_providers.permissions import GroupHelper
 from osf_tests.factories import (
-    ReviewActionFactory,
+    ActionFactory,
     AuthUserFactory,
     PreprintFactory,
     PreprintProviderFactory,
 )
+from reviews.permissions import GroupHelper
 from website.util import permissions as osf_permissions
 
 
 @pytest.mark.django_db
-class ReviewActionCommentSettingsMixin(object):
+class ActionCommentSettingsMixin(object):
 
     @pytest.fixture()
     def url(self):
@@ -27,7 +30,7 @@ def preprint(self, provider):
 
     @pytest.fixture()
     def actions(self, preprint):
-        return [ReviewActionFactory(target=preprint) for _ in range(5)]
+        return [ActionFactory(target=preprint) for _ in range(5)]
 
     @pytest.fixture()
     def provider_admin(self, provider):
diff --git a/api_tests/reviews/mixins/filter_mixins.py b/api_tests/reviews/mixins/filter_mixins.py
index ae0d1569695..e1cca87a4eb 100644
--- a/api_tests/reviews/mixins/filter_mixins.py
+++ b/api_tests/reviews/mixins/filter_mixins.py
@@ -3,14 +3,14 @@
 import pytest
 from furl import furl
 
-from api.preprint_providers.permissions import GroupHelper
 from osf_tests.factories import (
-    ReviewActionFactory,
+    ActionFactory,
     AuthUserFactory,
     PreprintFactory,
     PreprintProviderFactory,
     ProjectFactory,
 )
+from reviews.permissions import GroupHelper
 
 
 def get_actual(app, url, user=None, sort=None, expect_errors=False, **filters):
@@ -42,7 +42,7 @@ def get_actual(app, url, user=None, sort=None, expect_errors=False, **filters):
 
 
 @pytest.mark.django_db
-class ReviewActionFilterMixin(object):
+class ActionFilterMixin(object):
 
     @pytest.fixture()
     def url(self):
@@ -58,7 +58,7 @@ def all_actions(self, providers):
         for provider in providers:
             preprint = PreprintFactory(provider=provider, project=ProjectFactory(is_public=True))
             for _ in range(5):
-                actions.append(ReviewActionFactory(target=preprint))
+                actions.append(ActionFactory(target=preprint))
         return actions
 
     @pytest.fixture()
@@ -164,8 +164,8 @@ def test_reviewable_filters(self, app, url, user, expected_reviewables):
         reviewable = expected_reviewables[0]
 
         # filter by reviews_state
-        expected = set([r._id for r in expected_reviewables if r.machine_state == reviewable.machine_state])
-        actual = get_actual(app, url, user, reviews_state=reviewable.machine_state)
+        expected = set([r._id for r in expected_reviewables if r.reviews_state == reviewable.reviews_state])
+        actual = get_actual(app, url, user, reviews_state=reviewable.reviews_state)
         assert expected == actual
 
         # order by date_last_transitioned
diff --git a/api_tests/users/views/test_user_actions.py b/api_tests/users/views/test_user_actions.py
index 60b83c847f2..1286e666279 100644
--- a/api_tests/users/views/test_user_actions.py
+++ b/api_tests/users/views/test_user_actions.py
@@ -2,25 +2,28 @@
 import mock
 
 from api.base.settings.defaults import API_BASE
-from api.preprint_providers.permissions import GroupHelper
+
 from osf_tests.factories import (
     PreprintFactory,
     AuthUserFactory,
     PreprintProviderFactory,
 )
+
 from website.util import permissions as osf_permissions
 
-from api_tests.reviews.mixins.filter_mixins import ReviewActionFilterMixin
+from reviews.permissions import GroupHelper
 
+from api_tests.reviews.mixins.filter_mixins import ActionFilterMixin
 
-class TestReviewActionFilters(ReviewActionFilterMixin):
+
+class TestActionFilters(ActionFilterMixin):
     @pytest.fixture()
     def url(self):
-        return '/{}actions/reviews/'.format(API_BASE)
+        return '/{}users/me/actions/'.format(API_BASE)
 
     @pytest.fixture()
     def expected_actions(self, all_actions, allowed_providers):
-        actions = super(TestReviewActionFilters, self).expected_actions(all_actions, allowed_providers)
+        actions = super(TestActionFilters, self).expected_actions(all_actions, allowed_providers)
         node = actions[0].target.node
         node.is_public = False
         node.save()
@@ -36,7 +39,7 @@ def test_no_permission(self, app, url, expected_actions):
 
 
 @pytest.mark.django_db
-class TestReviewActionCreate(object):
+class TestActionCreate(object):
     def create_payload(self, reviewable_id=None, **attrs):
         payload = {
             'data': {
@@ -55,8 +58,8 @@ def create_payload(self, reviewable_id=None, **attrs):
         return payload
 
     @pytest.fixture()
-    def url(self, preprint):
-        return '/{}preprints/{}/actions/'.format(API_BASE, preprint._id)
+    def url(self):
+        return '/{}actions/'.format(API_BASE)
 
     @pytest.fixture()
     def provider(self):
@@ -80,7 +83,7 @@ def moderator(self, provider):
 
     @mock.patch('website.preprints.tasks.get_and_set_preprint_identifiers.si')
     def test_create_permissions(self, mock_ezid, app, url, preprint, node_admin, moderator):
-        assert preprint.machine_state == 'initial'
+        assert preprint.reviews_state == 'initial'
 
         submit_payload = self.create_payload(preprint._id, trigger='submit')
 
@@ -97,7 +100,7 @@ def test_create_permissions(self, mock_ezid, app, url, preprint, node_admin, mod
         res = app.post_json_api(url, submit_payload, auth=node_admin.auth)
         assert res.status_code == 201
         preprint.refresh_from_db()
-        assert preprint.machine_state == 'pending'
+        assert preprint.reviews_state == 'pending'
         assert not preprint.is_published
 
         accept_payload = self.create_payload(preprint._id, trigger='accept', comment='This is good.')
@@ -122,14 +125,14 @@ def test_create_permissions(self, mock_ezid, app, url, preprint, node_admin, mod
 
         # Still unchanged after all those tries
         preprint.refresh_from_db()
-        assert preprint.machine_state == 'pending'
+        assert preprint.reviews_state == 'pending'
         assert not preprint.is_published
 
         # Moderator can accept
         res = app.post_json_api(url, accept_payload, auth=moderator.auth)
         assert res.status_code == 201
         preprint.refresh_from_db()
-        assert preprint.machine_state == 'accepted'
+        assert preprint.reviews_state == 'accepted'
         assert preprint.is_published
 
         # Check if "get_and_set_preprint_identifiers" is called once.
@@ -167,7 +170,7 @@ def test_bad_requests(self, app, url, preprint, provider, moderator):
             provider.reviews_workflow = workflow
             provider.save()
             for state, trigger in transitions:
-                preprint.machine_state = state
+                preprint.reviews_state = state
                 preprint.save()
                 bad_payload = self.create_payload(preprint._id, trigger=trigger)
                 res = app.post_json_api(url, bad_payload, auth=moderator.auth, expect_errors=True)
@@ -213,7 +216,7 @@ def test_valid_transitions(self, mock_ezid, app, url, preprint, provider, modera
             provider.reviews_workflow = workflow
             provider.save()
             for from_state, trigger, to_state in transitions:
-                preprint.machine_state = from_state
+                preprint.reviews_state = from_state
                 preprint.is_published = False
                 preprint.date_published = None
                 preprint.date_last_transitioned = None
@@ -226,7 +229,7 @@ def test_valid_transitions(self, mock_ezid, app, url, preprint, provider, modera
                 assert action.trigger == trigger
 
                 preprint.refresh_from_db()
-                assert preprint.machine_state == to_state
+                assert preprint.reviews_state == to_state
                 if preprint.in_public_reviews_state:
                     assert preprint.is_published
                     assert preprint.date_published == action.date_created
diff --git a/framework/auth/oauth_scopes.py b/framework/auth/oauth_scopes.py
index d3bed583f4f..8490554f128 100644
--- a/framework/auth/oauth_scopes.py
+++ b/framework/auth/oauth_scopes.py
@@ -95,8 +95,8 @@ class CoreScopes(object):
 
     SEARCH = 'search_read'
 
-    ACTIONS_READ = 'actions_read'
-    ACTIONS_WRITE = 'actions_write'
+    ACTIONS_READ = 'review_logs_read'
+    ACTIONS_WRITE = 'review_logs_write'
 
     PROVIDERS_WRITE = 'providers_write'
 
diff --git a/osf/exceptions.py b/osf/exceptions.py
index c1c87489ebf..3533fdf89ac 100644
--- a/osf/exceptions.py
+++ b/osf/exceptions.py
@@ -114,14 +114,3 @@ class ValidationTypeError(ValidationError, MODMValidationTypeError):
 
 class NaiveDatetimeException(Exception):
     pass
-
-class InvalidTriggerError(Exception):
-    def __init__(self, trigger, state, valid_triggers):
-        self.trigger = trigger
-        self.state = state
-        self.valid_triggers = valid_triggers
-        self.message = 'Cannot trigger "{}" from state "{}". Valid triggers: {}'.format(trigger, state, valid_triggers)
-
-class InvalidTransitionError(Exception):
-    def __init__(self, machine, transition):
-        self.message = 'Machine "{}" received invalid transitions: "{}" expected but not defined'.format(machine, transition)
diff --git a/osf/management/commands/create_fake_preprint_actions.py b/osf/management/commands/create_fake_preprint_actions.py
index 64f2630f606..a517097e382 100644
--- a/osf/management/commands/create_fake_preprint_actions.py
+++ b/osf/management/commands/create_fake_preprint_actions.py
@@ -8,8 +8,8 @@
 
 from django.core.management.base import BaseCommand
 
-from osf.models import ReviewAction, PreprintService, OSFUser
-from osf.utils.workflows import DefaultStates, DefaultTriggers
+from reviews import workflow
+from osf.models import Action, PreprintService, OSFUser
 
 logger = logging.getLogger(__name__)
 
@@ -44,11 +44,11 @@ def handle(self, *args, **options):
             user = OSFUser.objects.get(guids___id=user_guid)
 
         fake = Faker()
-        triggers = [a.value for a in DefaultTriggers]
-        states = [s.value for s in DefaultStates]
+        triggers = [a.value for a in workflow.Triggers]
+        states = [s.value for s in workflow.States]
         for preprint in PreprintService.objects.filter(actions__isnull=True):
             for i in range(num_actions):
-                action = ReviewAction(
+                action = Action(
                     target=preprint,
                     creator=user,
                     trigger=random.choice(triggers),
diff --git a/osf/management/commands/update_auth_groups.py b/osf/management/commands/update_auth_groups.py
index 0670396a3ee..30ea165915b 100644
--- a/osf/management/commands/update_auth_groups.py
+++ b/osf/management/commands/update_auth_groups.py
@@ -6,8 +6,8 @@
 from django.core.management.base import BaseCommand
 from django.db import transaction
 
-from api.preprint_providers.permissions import GroupHelper
-from osf.models.mixins import ReviewProviderMixin
+from reviews.models import ReviewProviderMixin
+from reviews.permissions import GroupHelper
 
 logger = logging.getLogger(__name__)
 
diff --git a/osf/migrations/0060_reviews.py b/osf/migrations/0060_reviews.py
index 845a568eab1..b95512c3218 100644
--- a/osf/migrations/0060_reviews.py
+++ b/osf/migrations/0060_reviews.py
@@ -6,9 +6,9 @@
 from django.core.management.sql import emit_post_migrate_signal
 from django.db import migrations, models
 import django.db.models.deletion
-from api.preprint_providers.permissions import GroupHelper
 import osf.models.base
 import osf.utils.fields
+from reviews.permissions import GroupHelper
 
 
 def create_provider_auth_groups(apps, schema_editor):
diff --git a/osf/migrations/0062_accept_preprints.py b/osf/migrations/0062_accept_preprints.py
index 7eeec8c1cb9..2e93be4e382 100644
--- a/osf/migrations/0062_accept_preprints.py
+++ b/osf/migrations/0062_accept_preprints.py
@@ -5,15 +5,15 @@
 from django.db import migrations
 from django.db.models import F
 
-from osf.utils.workflows import DefaultStates
+from reviews.workflow import States
 
 
 # When a preprint provider is set up with a reviews/moderation workflow,
 # make sure all existing preprints will be in a public state.
 def accept_all_published_preprints(apps, schema_editor):
     Preprint = apps.get_model('osf', 'PreprintService')
-    published_preprints = Preprint.objects.filter(is_published=True, reviews_state=DefaultStates.INITIAL.value)
-    published_preprints.update(reviews_state=DefaultStates.ACCEPTED.value, date_last_transitioned=F('date_published'))
+    published_preprints = Preprint.objects.filter(is_published=True, reviews_state=States.INITIAL.value)
+    published_preprints.update(reviews_state=States.ACCEPTED.value, date_last_transitioned=F('date_published'))
 
 
 class Migration(migrations.Migration):
diff --git a/osf/migrations/0066_auto_20171031_1409.py b/osf/migrations/0066_auto_20171031_1409.py
deleted file mode 100644
index 5cc7b04e48d..00000000000
--- a/osf/migrations/0066_auto_20171031_1409.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by Django 1.11.4 on 2017-10-31 19:09
-from __future__ import unicode_literals
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('osf', '0065_preprintservice_original_publication_date'),
-    ]
-
-    operations = [
-        migrations.RenameModel(
-            old_name='Action',
-            new_name='ReviewAction',
-        ),
-        migrations.RenameField(
-            model_name='preprintservice',
-            old_name='reviews_state',
-            new_name='machine_state',
-        ),
-    ]
diff --git a/osf/models/__init__.py b/osf/models/__init__.py
index 0a83b8e3e17..828aee2ff9f 100644
--- a/osf/models/__init__.py
+++ b/osf/models/__init__.py
@@ -35,4 +35,4 @@
 from osf.models.admin_log_entry import AdminLogEntry  # noqa
 from osf.models.maintenance_state import MaintenanceState  # noqa
 from osf.models.quickfiles import QuickFilesNode  # noqa
-from osf.models.action import ReviewAction  # noqa
+from osf.models.action import Action  # noqa
diff --git a/osf/models/action.py b/osf/models/action.py
index 0e12eaf5fa7..f3c712bf93d 100644
--- a/osf/models/action.py
+++ b/osf/models/action.py
@@ -5,32 +5,26 @@
 
 from include import IncludeManager
 
+from reviews.workflow import Triggers
+from reviews.workflow import States
+
 from osf.models.base import BaseModel, ObjectIDMixin
 from osf.utils.fields import NonNaiveDateTimeField
-from osf.utils.workflows import DefaultStates, DefaultTriggers
 
 
-class BaseAction(ObjectIDMixin, BaseModel):
-    class Meta:
-        abstract = True
+class Action(ObjectIDMixin, BaseModel):
 
     objects = IncludeManager()
 
+    target = models.ForeignKey('PreprintService', related_name='actions', on_delete=models.CASCADE)
     creator = models.ForeignKey('OSFUser', related_name='+', on_delete=models.CASCADE)
 
-    trigger = models.CharField(max_length=31, choices=DefaultTriggers.choices())
-    from_state = models.CharField(max_length=31, choices=DefaultStates.choices())
-    to_state = models.CharField(max_length=31, choices=DefaultStates.choices())
+    trigger = models.CharField(max_length=31, choices=Triggers.choices())
+    from_state = models.CharField(max_length=31, choices=States.choices())
+    to_state = models.CharField(max_length=31, choices=States.choices())
 
     comment = models.TextField(blank=True)
 
     is_deleted = models.BooleanField(default=False)
     date_created = NonNaiveDateTimeField(auto_now_add=True)
     date_modified = NonNaiveDateTimeField(auto_now=True)
-
-    @property
-    def target(self):
-        raise NotImplementedError()
-
-class ReviewAction(BaseAction):
-    target = models.ForeignKey('PreprintService', related_name='actions', on_delete=models.CASCADE)
diff --git a/osf/models/mixins.py b/osf/models/mixins.py
index 75efda5046c..b2f31d5ca94 100644
--- a/osf/models/mixins.py
+++ b/osf/models/mixins.py
@@ -1,18 +1,11 @@
 import pytz
-
 from django.apps import apps
+from django.db import models
 from django.core.exceptions import ObjectDoesNotExist
-from django.db import models, transaction
-from include import IncludeQuerySet
-
-from api.preprint_providers.workflows import Workflows, PUBLIC_STATES
 from framework.analytics import increment_user_activity_counters
-from osf.exceptions import InvalidTriggerError
 from osf.models.node_relation import NodeRelation
 from osf.models.nodelog import NodeLog
 from osf.models.tag import Tag
-from osf.utils.machines import ReviewsMachine
-from osf.utils.workflows import DefaultStates, DefaultTriggers
 from website.exceptions import NodeStateError
 from website import settings
 
@@ -463,116 +456,3 @@ def get_extra_log_params(self, comment):
         """Return extra data to pass as `params` to `Node.add_log` when a new comment is
         created, edited, deleted or restored."""
         return {}
-
-
-class MachineableMixin(models.Model):
-    class Meta:
-        abstract = True
-
-    # NOTE: machine_state should rarely/never be modified directly -- use the state transition methods below
-    machine_state = models.CharField(max_length=15, db_index=True, choices=DefaultStates.choices(), default=DefaultStates.INITIAL.value)
-
-    date_last_transitioned = models.DateTimeField(null=True, blank=True, db_index=True)
-
-    @property
-    def MachineClass(self):
-        raise NotImplementedError()
-
-    def run_submit(self, user):
-        """Run the 'submit' state transition and create a corresponding Action.
-
-        Params:
-            user: The user triggering this transition.
-        """
-        return self.__run_transition(DefaultTriggers.SUBMIT.value, user=user)
-
-    def run_accept(self, user, comment):
-        """Run the 'accept' state transition and create a corresponding Action.
-
-        Params:
-            user: The user triggering this transition.
-            comment: Text describing why.
-        """
-        return self.__run_transition(DefaultTriggers.ACCEPT.value, user=user, comment=comment)
-
-    def run_reject(self, user, comment):
-        """Run the 'reject' state transition and create a corresponding Action.
-
-        Params:
-            user: The user triggering this transition.
-            comment: Text describing why.
-        """
-        return self.__run_transition(DefaultTriggers.REJECT.value, user=user, comment=comment)
-
-    def run_edit_comment(self, user, comment):
-        """Run the 'edit_comment' state transition and create a corresponding Action.
-
-        Params:
-            user: The user triggering this transition.
-            comment: New comment text.
-        """
-        return self.__run_transition(DefaultTriggers.EDIT_COMMENT.value, user=user, comment=comment)
-
-    def __run_transition(self, trigger, **kwargs):
-        machine = self.MachineClass(self, 'machine_state')
-        trigger_fn = getattr(machine, trigger)
-        with transaction.atomic():
-            result = trigger_fn(**kwargs)
-            action = machine.action
-            if not result or action is None:
-                valid_triggers = machine.get_triggers(self.machine_state)
-                raise InvalidTriggerError(trigger, self.machine_state, valid_triggers)
-            return action
-
-
-class ReviewableMixin(MachineableMixin):
-    """Something that may be included in a reviewed collection and is subject to a reviews workflow.
-    """
-
-    class Meta:
-        abstract = True
-
-    MachineClass = ReviewsMachine
-
-    @property
-    def in_public_reviews_state(self):
-        public_states = PUBLIC_STATES.get(self.provider.reviews_workflow)
-        if not public_states:
-            return False
-        return self.machine_state in public_states
-
-
-class ReviewProviderMixin(models.Model):
-    """A reviewed/moderated collection of objects.
-    """
-
-    REVIEWABLE_RELATION_NAME = None
-
-    class Meta:
-        abstract = True
-
-    reviews_workflow = models.CharField(null=True, blank=True, max_length=15, choices=Workflows.choices())
-    reviews_comments_private = models.NullBooleanField()
-    reviews_comments_anonymous = models.NullBooleanField()
-
-    @property
-    def is_reviewed(self):
-        return self.reviews_workflow is not None
-
-    def get_reviewable_state_counts(self):
-        assert self.REVIEWABLE_RELATION_NAME, 'REVIEWABLE_RELATION_NAME must be set to compute state counts'
-        qs = getattr(self, self.REVIEWABLE_RELATION_NAME)
-        if isinstance(qs, IncludeQuerySet):
-            qs = qs.include(None)
-        qs = qs.filter(node__isnull=False, node__is_deleted=False, node__is_public=True).values('machine_state').annotate(count=models.Count('*'))
-        counts = {state.value: 0 for state in DefaultStates}
-        counts.update({row['machine_state']: row['count'] for row in qs if row['machine_state'] in counts})
-        return counts
-
-    def add_admin(self, user):
-        from api.preprint_providers.permissions import GroupHelper
-        return GroupHelper(self).get_group('admin').user_set.add(user)
-
-    def add_moderator(self, user):
-        from api.preprint_providers.permissions import GroupHelper
-        return GroupHelper(self).get_group('moderator').user_set.add(user)
diff --git a/osf/models/preprint_provider.py b/osf/models/preprint_provider.py
index 665ac813cfb..e6ae473ea7f 100644
--- a/osf/models/preprint_provider.py
+++ b/osf/models/preprint_provider.py
@@ -4,13 +4,15 @@
 from django.db.models.signals import post_save
 from django.dispatch import receiver
 
-from api.preprint_providers.permissions import GroupHelper, PERMISSIONS
 from osf.models.base import BaseModel, ObjectIDMixin
 from osf.models.licenses import NodeLicense
-from osf.models.mixins import ReviewProviderMixin
 from osf.models.subject import Subject
 from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
 from osf.utils.fields import EncryptedTextField
+
+from reviews import permissions as reviews_permissions
+from reviews.models import ReviewProviderMixin
+
 from website import settings
 from website.util import api_v2_url
 
@@ -60,7 +62,7 @@ class PreprintProvider(ObjectIDMixin, ReviewProviderMixin, BaseModel):
                                         null=True, blank=True, on_delete=models.CASCADE)
 
     class Meta:
-        permissions = tuple(PERMISSIONS.items()) + (
+        permissions = tuple(reviews_permissions.PERMISSIONS.items()) + (
             # custom permissions for use in the OSF Admin App
             ('view_preprintprovider', 'Can view preprint provider details'),
         )
@@ -127,4 +129,4 @@ def rules_to_subjects(rules):
 @receiver(post_save, sender=PreprintProvider)
 def create_provider_auth_groups(sender, instance, created, **kwargs):
     if created:
-        GroupHelper(instance).update_provider_auth_groups()
+        reviews_permissions.GroupHelper(instance).update_provider_auth_groups()
diff --git a/osf/models/preprint_service.py b/osf/models/preprint_service.py
index 394c73cac57..b1c982a24c6 100644
--- a/osf/models/preprint_service.py
+++ b/osf/models/preprint_service.py
@@ -10,16 +10,17 @@
 from framework.postcommit_tasks.handlers import enqueue_postcommit_task
 from framework.exceptions import PermissionsError
 from osf.models import NodeLog, Subject
-from osf.models.mixins import ReviewableMixin
 from osf.models.validators import validate_subject_hierarchy
 from osf.utils.fields import NonNaiveDateTimeField
-from osf.utils.workflows import DefaultStates
 from website.preprints.tasks import on_preprint_updated, get_and_set_preprint_identifiers
 from website.project.licenses import set_license
 from website.util import api_v2_url
 from website.util.permissions import ADMIN
 from website import settings, mails
 
+from reviews.models.mixins import ReviewableMixin
+from reviews.workflow import States
+
 from osf.models.base import BaseModel, GuidMixin
 from osf.models.identifiers import IdentifierMixin, Identifier
 
@@ -184,7 +185,7 @@ def set_published(self, published, auth, save=False):
             self.node._has_abandoned_preprint = False
 
             # In case this provider is ever set up to use a reviews workflow, put this preprint in a sensible state
-            self.machine_state = DefaultStates.ACCEPTED.value
+            self.reviews_state = States.ACCEPTED.value
             self.date_last_transitioned = self.date_published
 
             self.node.add_log(
diff --git a/osf/utils/machines.py b/osf/utils/machines.py
deleted file mode 100644
index 6ec7fdc62ac..00000000000
--- a/osf/utils/machines.py
+++ /dev/null
@@ -1,145 +0,0 @@
-
-from django.utils import timezone
-from transitions import Machine
-
-from api.preprint_providers.workflows import Workflows
-from framework.auth import Auth
-from framework.postcommit_tasks.handlers import enqueue_postcommit_task
-from osf.exceptions import InvalidTransitionError
-from osf.models.action import ReviewAction
-from osf.models.nodelog import NodeLog
-from osf.utils.workflows import DefaultStates, DEFAULT_TRANSITIONS
-from website.preprints.tasks import get_and_set_preprint_identifiers
-from website.reviews import signals as reviews_signals
-from website.settings import DOMAIN
-
-
-class BaseMachine(Machine):
-
-    action = None
-    from_state = None
-
-    def __init__(self, machineable, state_attr, **kwargs):
-        self.machineable = machineable
-        self.__state_attr = state_attr
-        states = kwargs.get('states', [s.value for s in DefaultStates])
-        transitions = kwargs.get('transitions', DEFAULT_TRANSITIONS)
-        self._validate_transitions(transitions)
-
-        super(BaseMachine, self).__init__(
-            states=states,
-            transitions=transitions,
-            initial=self.state,
-            send_event=True,
-            prepare_event=['initialize_machine'],
-            ignore_invalid_triggers=True,
-        )
-
-    @property
-    def state(self):
-        return getattr(self.machineable, self.__state_attr)
-
-    @state.setter
-    def state(self, value):
-        setattr(self.machineable, self.__state_attr, value)
-
-    @property
-    def ActionClass(self):
-        raise NotImplementedError()
-
-    def _validate_transitions(self, transitions):
-        for transition in set(sum([t['after'] for t in transitions], [])):
-            if not hasattr(self, transition):
-                raise InvalidTransitionError(self, transition)
-
-    def initialize_machine(self, ev):
-        self.action = None
-        self.from_state = ev.state
-
-    def save_action(self, ev):
-        user = ev.kwargs.get('user')
-        self.action = self.ActionClass.objects.create(
-            target=self.machineable,
-            creator=user,
-            trigger=ev.event.name,
-            from_state=self.from_state.name,
-            to_state=ev.state.name,
-            comment=ev.kwargs.get('comment', ''),
-        )
-
-    def update_last_transitioned(self, ev):
-        now = self.action.date_created if self.action is not None else timezone.now()
-        self.machineable.date_last_transitioned = now
-
-class ReviewsMachine(BaseMachine):
-    ActionClass = ReviewAction
-
-    def save_changes(self, ev):
-        node = self.machineable.node
-        node._has_abandoned_preprint = False
-        now = self.action.date_created if self.action is not None else timezone.now()
-        should_publish = self.machineable.in_public_reviews_state
-        if should_publish and not self.machineable.is_published:
-            if not (self.machineable.node.preprint_file and self.machineable.node.preprint_file.node == self.machineable.node):
-                raise ValueError('Preprint node is not a valid preprint; cannot publish.')
-            if not self.machineable.provider:
-                raise ValueError('Preprint provider not specified; cannot publish.')
-            if not self.machineable.subjects.exists():
-                raise ValueError('Preprint must have at least one subject to be published.')
-            self.machineable.date_published = now
-            self.machineable.is_published = True
-            enqueue_postcommit_task(get_and_set_preprint_identifiers, (), {'preprint_id': self.machineable._id}, celery=True)
-        elif not should_publish and self.machineable.is_published:
-            self.machineable.is_published = False
-        self.machineable.save()
-        node.save()
-
-    def resubmission_allowed(self, ev):
-        return self.machineable.provider.reviews_workflow == Workflows.PRE_MODERATION.value
-
-    def notify_submit(self, ev):
-        context = self.get_context()
-        context['referrer'] = ev.kwargs.get('user')
-        user = ev.kwargs.get('user')
-        auth = Auth(user)
-        self.machineable.node.add_log(
-            action=NodeLog.PREPRINT_INITIATED,
-            params={
-                'preprint': self.machineable._id
-            },
-            auth=auth,
-            save=False,
-        )
-        recipients = list(self.machineable.node.contributors)
-        reviews_signals.reviews_email_submit.send(context=context, recipients=recipients)
-
-    def notify_resubmit(self, ev):
-        context = self.get_context()
-        reviews_signals.reviews_email.send(creator=ev.kwargs.get('user'), context=context,
-                                           template='reviews_resubmission_confirmation',
-                                           action=self.action)
-
-    def notify_accept_reject(self, ev):
-        context = self.get_context()
-        context['notify_comment'] = not self.machineable.provider.reviews_comments_private and self.action.comment
-        context['is_rejected'] = self.action.to_state == DefaultStates.REJECTED.value
-        context['was_pending'] = self.action.from_state == DefaultStates.PENDING.value
-        reviews_signals.reviews_email.send(creator=ev.kwargs.get('user'), context=context,
-                                           template='reviews_submission_status',
-                                           action=self.action)
-    def notify_edit_comment(self, ev):
-        context = self.get_context()
-        if not self.machineable.provider.reviews_comments_private and self.action.comment:
-            reviews_signals.reviews_email.send(creator=ev.kwargs.get('user'), context=context,
-                                               template='reviews_update_comment',
-                                               action=self.action)
-
-    def get_context(self):
-        return {
-            'domain': DOMAIN,
-            'reviewable': self.machineable,
-            'workflow': self.machineable.provider.reviews_workflow,
-            'provider_url': self.machineable.provider.domain or '{domain}preprints/{provider_id}'.format(domain=DOMAIN, provider_id=self.machineable.provider._id),
-            'provider_contact_email': self.machineable.provider.email_contact or 'contact@osf.io',
-            'provider_support_email': self.machineable.provider.email_support or 'support@osf.io',
-        }
diff --git a/osf/utils/workflows.py b/osf/utils/workflows.py
deleted file mode 100644
index 595c3a20847..00000000000
--- a/osf/utils/workflows.py
+++ /dev/null
@@ -1,64 +0,0 @@
-
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from enum import Enum
-from enum import unique
-
-class ChoiceEnum(Enum):
-    @classmethod
-    def choices(cls):
-        return tuple((v, unicode(v).title()) for v in cls.values())
-
-    @classmethod
-    def values(cls):
-        return tuple(c.value for c in cls)
-
-@unique
-class DefaultStates(ChoiceEnum):
-    INITIAL = 'initial'
-    PENDING = 'pending'
-    ACCEPTED = 'accepted'
-    REJECTED = 'rejected'
-
-
-@unique
-class DefaultTriggers(ChoiceEnum):
-    SUBMIT = 'submit'
-    ACCEPT = 'accept'
-    REJECT = 'reject'
-    EDIT_COMMENT = 'edit_comment'
-
-DEFAULT_TRANSITIONS = [
-    {
-        'trigger': DefaultTriggers.SUBMIT.value,
-        'source': [DefaultStates.INITIAL.value],
-        'dest': DefaultStates.PENDING.value,
-        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_submit'],
-    },
-    {
-        'trigger': DefaultTriggers.SUBMIT.value,
-        'source': [DefaultStates.PENDING.value, DefaultStates.REJECTED.value],
-        'conditions': 'resubmission_allowed',
-        'dest': DefaultStates.PENDING.value,
-        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_resubmit'],
-    },
-    {
-        'trigger': DefaultTriggers.ACCEPT.value,
-        'source': [DefaultStates.PENDING.value, DefaultStates.REJECTED.value],
-        'dest': DefaultStates.ACCEPTED.value,
-        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_accept_reject'],
-    },
-    {
-        'trigger': DefaultTriggers.REJECT.value,
-        'source': [DefaultStates.PENDING.value, DefaultStates.ACCEPTED.value],
-        'dest': DefaultStates.REJECTED.value,
-        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_accept_reject'],
-    },
-    {
-        'trigger': DefaultTriggers.EDIT_COMMENT.value,
-        'source': [DefaultStates.PENDING.value, DefaultStates.REJECTED.value, DefaultStates.ACCEPTED.value],
-        'dest': '=',
-        'after': ['save_action', 'save_changes', 'notify_edit_comment'],
-    },
-]
diff --git a/osf_tests/factories.py b/osf_tests/factories.py
index b55b95fbabd..be74dfc0a81 100644
--- a/osf_tests/factories.py
+++ b/osf_tests/factories.py
@@ -1,4 +1,5 @@
 # -*- coding: utf-8 -*-
+import functools
 import time
 
 import datetime
@@ -14,6 +15,7 @@
 from django.db.utils import IntegrityError
 from faker import Factory
 
+from reviews import workflow
 from website import settings
 from website.notifications.constants import NOTIFICATION_TYPES
 from website.util import permissions
@@ -25,7 +27,6 @@
 from osf import models
 from osf.models.sanctions import Sanction
 from osf.utils.names import impute_names_model
-from osf.utils.workflows import DefaultStates, DefaultTriggers
 from addons.osfstorage.models import OsfStorageFile
 
 fake = Factory.create()
@@ -582,7 +583,7 @@ def _create(cls, target_class, *args, **kwargs):
         subjects = kwargs.pop('subjects', None) or [[SubjectFactory()._id]]
         instance.node.preprint_article_doi = doi
 
-        instance.machine_state = kwargs.pop('machine_state', 'initial')
+        instance.reviews_state = kwargs.pop('reviews_state', 'initial')
 
         user = kwargs.pop('creator', None) or instance.node.creator
         if not instance.node.is_contributor(user):
@@ -802,14 +803,14 @@ class Meta:
         model = models.ArchiveJob
 
 
-class ReviewActionFactory(DjangoModelFactory):
+class ActionFactory(DjangoModelFactory):
     class Meta:
-        model = models.ReviewAction
+        model = models.Action
 
-    trigger = FuzzyChoice(choices=DefaultTriggers.values())
+    trigger = FuzzyChoice(choices=workflow.Triggers.values())
     comment = factory.Faker('text')
-    from_state = FuzzyChoice(choices=DefaultStates.values())
-    to_state = FuzzyChoice(choices=DefaultStates.values())
+    from_state = FuzzyChoice(choices=workflow.States.values())
+    to_state = FuzzyChoice(choices=workflow.States.values())
 
     target = factory.SubFactory(PreprintFactory)
     creator = factory.SubFactory(AuthUserFactory)
diff --git a/osf_tests/test_reviewable.py b/osf_tests/test_reviewable.py
index 7c8754f4a1b..f493dbcf20c 100644
--- a/osf_tests/test_reviewable.py
+++ b/osf_tests/test_reviewable.py
@@ -1,8 +1,8 @@
 import pytest
 
 from osf.models import PreprintService
-from osf.utils.workflows import DefaultStates
 from osf_tests.factories import PreprintFactory, AuthUserFactory
+from reviews.workflow import States
 
 @pytest.mark.django_db
 class TestReviewable:
@@ -10,22 +10,22 @@ class TestReviewable:
     def test_state_changes(self):
         user = AuthUserFactory()
         preprint = PreprintFactory(provider__reviews_workflow='pre-moderation', is_published=False)
-        assert preprint.machine_state == DefaultStates.INITIAL.value
+        assert preprint.reviews_state == States.INITIAL.value
 
-        preprint.run_submit(user)
-        assert preprint.machine_state == DefaultStates.PENDING.value
+        preprint.reviews_submit(user)
+        assert preprint.reviews_state == States.PENDING.value
 
-        preprint.run_accept(user, 'comment')
-        assert preprint.machine_state == DefaultStates.ACCEPTED.value
+        preprint.reviews_accept(user, 'comment')
+        assert preprint.reviews_state == States.ACCEPTED.value
         from_db = PreprintService.objects.get(id=preprint.id)
-        assert from_db.machine_state == DefaultStates.ACCEPTED.value
+        assert from_db.reviews_state == States.ACCEPTED.value
 
-        preprint.run_reject(user, 'comment')
-        assert preprint.machine_state == DefaultStates.REJECTED.value
+        preprint.reviews_reject(user, 'comment')
+        assert preprint.reviews_state == States.REJECTED.value
         from_db.refresh_from_db()
-        assert from_db.machine_state == DefaultStates.REJECTED.value
+        assert from_db.reviews_state == States.REJECTED.value
 
-        preprint.run_accept(user, 'comment')
-        assert preprint.machine_state == DefaultStates.ACCEPTED.value
+        preprint.reviews_accept(user, 'comment')
+        assert preprint.reviews_state == States.ACCEPTED.value
         from_db.refresh_from_db()
-        assert from_db.machine_state == DefaultStates.ACCEPTED.value
+        assert from_db.reviews_state == States.ACCEPTED.value
diff --git a/reviews/__init__.py b/reviews/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/reviews/apps.py b/reviews/apps.py
new file mode 100644
index 00000000000..09ab21d5a6e
--- /dev/null
+++ b/reviews/apps.py
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.apps import AppConfig
+
+
+class ReviewsConfig(AppConfig):
+    name = 'reviews'
diff --git a/reviews/exceptions.py b/reviews/exceptions.py
new file mode 100644
index 00000000000..d155f961743
--- /dev/null
+++ b/reviews/exceptions.py
@@ -0,0 +1,6 @@
+class InvalidTriggerError(Exception):
+    def __init__(self, trigger, state, valid_triggers):
+        self.trigger = trigger
+        self.state = state
+        self.valid_triggers = valid_triggers
+        self.message = 'Cannot trigger "{}" from state "{}". Valid triggers: {}'.format(trigger, state, valid_triggers)
diff --git a/reviews/models/__init__.py b/reviews/models/__init__.py
new file mode 100644
index 00000000000..8fd9a1ce1c0
--- /dev/null
+++ b/reviews/models/__init__.py
@@ -0,0 +1,2 @@
+from .mixins import ReviewProviderMixin  # noqa
+from .mixins import ReviewableMixin  # noqa
diff --git a/reviews/models/mixins.py b/reviews/models/mixins.py
new file mode 100644
index 00000000000..47abbb10a18
--- /dev/null
+++ b/reviews/models/mixins.py
@@ -0,0 +1,277 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from include import IncludeQuerySet
+from transitions import Machine
+from framework.auth import Auth
+from framework.postcommit_tasks.handlers import enqueue_postcommit_task
+
+from django.db import models
+from django.db import transaction
+from django.utils import timezone
+
+from osf.models.action import Action
+from osf.models import NodeLog
+from reviews import workflow
+from reviews.exceptions import InvalidTriggerError
+from website.preprints.tasks import get_and_set_preprint_identifiers
+
+from website import settings
+
+from website.mails import mails
+from website.notifications.emails import get_user_subscriptions
+from website.notifications import utils
+from website.notifications import emails
+from website.reviews import signals as reviews_signals
+
+
+class ReviewProviderMixin(models.Model):
+    """A reviewed/moderated collection of objects.
+    """
+
+    REVIEWABLE_RELATION_NAME = None
+
+    class Meta:
+        abstract = True
+
+    reviews_workflow = models.CharField(null=True, blank=True, max_length=15, choices=workflow.Workflows.choices())
+    reviews_comments_private = models.NullBooleanField()
+    reviews_comments_anonymous = models.NullBooleanField()
+
+    @property
+    def is_reviewed(self):
+        return self.reviews_workflow is not None
+
+    def get_reviewable_state_counts(self):
+        assert self.REVIEWABLE_RELATION_NAME, 'REVIEWABLE_RELATION_NAME must be set to compute state counts'
+        qs = getattr(self, self.REVIEWABLE_RELATION_NAME)
+        if isinstance(qs, IncludeQuerySet):
+            qs = qs.include(None)
+        qs = qs.filter(node__isnull=False, node__is_deleted=False, node__is_public=True).values('reviews_state').annotate(count=models.Count('*'))
+        counts = {state.value: 0 for state in workflow.States}
+        counts.update({row['reviews_state']: row['count'] for row in qs if row['reviews_state'] in counts})
+        return counts
+
+    def add_admin(self, user):
+        from reviews.permissions import GroupHelper
+        return GroupHelper(self).get_group('admin').user_set.add(user)
+
+    def add_moderator(self, user):
+        from reviews.permissions import GroupHelper
+        return GroupHelper(self).get_group('moderator').user_set.add(user)
+
+
+class ReviewableMixin(models.Model):
+    """Something that may be included in a reviewed collection and is subject to a reviews workflow.
+    """
+
+    class Meta:
+        abstract = True
+
+    # NOTE: reviews_state should rarely/never be modified directly -- use the state transition methods below
+    reviews_state = models.CharField(max_length=15, db_index=True, choices=workflow.States.choices(), default=workflow.States.INITIAL.value)
+
+    date_last_transitioned = models.DateTimeField(null=True, blank=True, db_index=True)
+
+    @property
+    def in_public_reviews_state(self):
+        public_states = workflow.PUBLIC_STATES.get(self.provider.reviews_workflow)
+        if not public_states:
+            return False
+        return self.reviews_state in public_states
+
+    def reviews_submit(self, user):
+        """Run the 'submit' state transition and create a corresponding Action.
+
+        Params:
+            user: The user triggering this transition.
+        """
+        return self.__run_transition(workflow.Triggers.SUBMIT.value, user=user)
+
+    def reviews_accept(self, user, comment):
+        """Run the 'accept' state transition and create a corresponding Action.
+
+        Params:
+            user: The user triggering this transition.
+            comment: Text describing why.
+        """
+        return self.__run_transition(workflow.Triggers.ACCEPT.value, user=user, comment=comment)
+
+    def reviews_reject(self, user, comment):
+        """Run the 'reject' state transition and create a corresponding Action.
+
+        Params:
+            user: The user triggering this transition.
+            comment: Text describing why.
+        """
+        return self.__run_transition(workflow.Triggers.REJECT.value, user=user, comment=comment)
+
+    def reviews_edit_comment(self, user, comment):
+        """Run the 'edit_comment' state transition and create a corresponding Action.
+
+        Params:
+            user: The user triggering this transition.
+            comment: New comment text.
+        """
+        return self.__run_transition(workflow.Triggers.EDIT_COMMENT.value, user=user, comment=comment)
+
+    def __run_transition(self, trigger, **kwargs):
+        reviews_machine = ReviewsMachine(self, 'reviews_state')
+        trigger_fn = getattr(reviews_machine, trigger)
+        with transaction.atomic():
+            result = trigger_fn(**kwargs)
+            action = reviews_machine.action
+            if not result or action is None:
+                valid_triggers = reviews_machine.get_triggers(self.reviews_state)
+                raise InvalidTriggerError(trigger, self.reviews_state, valid_triggers)
+            return action
+
+
+class ReviewsMachine(Machine):
+
+    action = None
+    from_state = None
+
+    def __init__(self, reviewable, state_attr):
+        self.reviewable = reviewable
+        self.__state_attr = state_attr
+
+        super(ReviewsMachine, self).__init__(
+            states=[s.value for s in workflow.States],
+            transitions=workflow.TRANSITIONS,
+            initial=self.state,
+            send_event=True,
+            prepare_event=['initialize_machine'],
+            ignore_invalid_triggers=True,
+        )
+
+    @property
+    def state(self):
+        return getattr(self.reviewable, self.__state_attr)
+
+    @state.setter
+    def state(self, value):
+        setattr(self.reviewable, self.__state_attr, value)
+
+    def initialize_machine(self, ev):
+        self.action = None
+        self.from_state = ev.state
+
+    def save_action(self, ev):
+        user = ev.kwargs.get('user')
+        self.action = Action.objects.create(
+            target=self.reviewable,
+            creator=user,
+            trigger=ev.event.name,
+            from_state=self.from_state.name,
+            to_state=ev.state.name,
+            comment=ev.kwargs.get('comment', ''),
+        )
+
+    def update_last_transitioned(self, ev):
+        now = self.action.date_created if self.action is not None else timezone.now()
+        self.reviewable.date_last_transitioned = now
+
+    def save_changes(self, ev):
+        node = self.reviewable.node
+        node._has_abandoned_preprint = False
+        now = self.action.date_created if self.action is not None else timezone.now()
+        should_publish = self.reviewable.in_public_reviews_state
+        if should_publish and not self.reviewable.is_published:
+            if not (self.reviewable.node.preprint_file and self.reviewable.node.preprint_file.node == self.reviewable.node):
+                raise ValueError('Preprint node is not a valid preprint; cannot publish.')
+            if not self.reviewable.provider:
+                raise ValueError('Preprint provider not specified; cannot publish.')
+            if not self.reviewable.subjects.exists():
+                raise ValueError('Preprint must have at least one subject to be published.')
+            self.reviewable.date_published = now
+            self.reviewable.is_published = True
+            enqueue_postcommit_task(get_and_set_preprint_identifiers, (), {'preprint_id': self.reviewable._id}, celery=True)
+        elif not should_publish and self.reviewable.is_published:
+            self.reviewable.is_published = False
+        self.reviewable.save()
+        node.save()
+
+    def resubmission_allowed(self, ev):
+        return self.reviewable.provider.reviews_workflow == workflow.Workflows.PRE_MODERATION.value
+
+    def notify_submit(self, ev):
+        context = self.get_context()
+        context['referrer'] = ev.kwargs.get('user')
+        user = ev.kwargs.get('user')
+        auth = Auth(user)
+        self.reviewable.node.add_log(
+            action=NodeLog.PREPRINT_INITIATED,
+            params={
+                'preprint': self.reviewable._id
+            },
+            auth=auth,
+            save=False,
+        )
+        recipients = list(self.reviewable.node.contributors)
+        reviews_signals.reviews_email_submit.send(context=context, recipients=recipients)
+
+    def notify_resubmit(self, ev):
+        context = self.get_context()
+        reviews_signals.reviews_email.send(creator=ev.kwargs.get('user'), context=context,
+                                           template='reviews_resubmission_confirmation',
+                                           action=self.action)
+
+    def notify_accept_reject(self, ev):
+        context = self.get_context()
+        context['notify_comment'] = not self.reviewable.provider.reviews_comments_private and self.action.comment
+        context['is_rejected'] = self.action.to_state == workflow.States.REJECTED.value
+        context['was_pending'] = self.action.from_state == workflow.States.PENDING.value
+        reviews_signals.reviews_email.send(creator=ev.kwargs.get('user'), context=context,
+                                           template='reviews_submission_status',
+                                           action=self.action)
+
+    def notify_edit_comment(self, ev):
+        context = self.get_context()
+        if not self.reviewable.provider.reviews_comments_private and self.action.comment:
+            reviews_signals.reviews_email.send(creator=ev.kwargs.get('user'), context=context,
+                                               template='reviews_update_comment',
+                                               action=self.action)
+
+    def get_context(self):
+        return {
+            'domain': settings.DOMAIN,
+            'reviewable': self.reviewable,
+            'workflow': self.reviewable.provider.reviews_workflow,
+            'provider_url': self.reviewable.provider.domain or '{domain}preprints/{provider_id}'.format(domain=settings.DOMAIN, provider_id=self.reviewable.provider._id),
+            'provider_contact_email': self.reviewable.provider.email_contact or 'contact@osf.io',
+            'provider_support_email': self.reviewable.provider.email_support or 'support@osf.io',
+        }
+
+# Handle email notifications including: update comment, accept, and reject of submission.
+@reviews_signals.reviews_email.connect
+def reviews_notification(self, creator, template, context, action):
+    recipients = list(action.target.node.contributors)
+    time_now = action.date_created if action is not None else timezone.now()
+    node = action.target.node
+    emails.notify_global_event(
+        event='global_reviews',
+        sender_user=creator,
+        node=node,
+        timestamp=time_now,
+        recipients=recipients,
+        template=template,
+        context=context
+    )
+
+# Handle email notifications for a new submission.
+@reviews_signals.reviews_email_submit.connect
+def reviews_submit_notification(self, recipients, context):
+    event_type = utils.find_subscription_type('global_reviews')
+    for recipient in recipients:
+        user_subscriptions = get_user_subscriptions(recipient, event_type)
+        context['no_future_emails'] = user_subscriptions['none']
+        context['is_creator'] = recipient == context['reviewable'].node.creator
+        context['provider_name'] = context['reviewable'].provider.name
+        mails.send_mail(
+            recipient.username,
+            mails.REVIEWS_SUBMISSION_CONFIRMATION,
+            mimetype='html',
+            user=recipient,
+            **context
+        )
diff --git a/api/preprint_providers/permissions.py b/reviews/permissions.py
similarity index 60%
rename from api/preprint_providers/permissions.py
rename to reviews/permissions.py
index 8bd87d1fe6a..f9c5fd61a2d 100644
--- a/api/preprint_providers/permissions.py
+++ b/reviews/permissions.py
@@ -1,13 +1,25 @@
 # -*- coding: utf-8 -*-
 from __future__ import unicode_literals
 
-from django.contrib.auth.models import Group
+import logging
+
 from guardian.shortcuts import assign_perm
 from guardian.shortcuts import get_perms
 from guardian.shortcuts import remove_perm
 from rest_framework import permissions as drf_permissions
 
+from django.contrib.auth.models import Group
+
 from api.base.utils import get_user_auth
+from osf.models.action import Action
+from website.util import permissions as osf_permissions
+
+from reviews.models import ReviewableMixin, ReviewProviderMixin
+from reviews.workflow import Triggers
+
+
+logger = logging.getLogger(__name__)
+
 
 # Object-level permissions for providers.
 # Prefer assigning object permissions to groups and adding users to groups, over assigning permissions to users.
@@ -41,6 +53,16 @@
     # 'reviewer': (),  # TODO Implement reviewers
 }
 
+
+# Required permission to perform each action. `None` means no permissions required.
+TRIGGER_PERMISSIONS = {
+    Triggers.SUBMIT.value: None,
+    Triggers.ACCEPT.value: 'accept_submissions',
+    Triggers.REJECT.value: 'reject_submissions',
+    Triggers.EDIT_COMMENT.value: 'edit_review_comments',
+}
+
+
 class GroupHelper(object):
     """Helper for managing permission groups for a given provider.
     """
@@ -68,6 +90,46 @@ def update_provider_auth_groups(self):
     def get_permissions(self, user):
         return [p for p in get_perms(user, self.provider) if p in PERMISSIONS]
 
+
+class ActionPermission(drf_permissions.BasePermission):
+    def has_object_permission(self, request, view, obj):
+        auth = get_user_auth(request)
+        if auth.user is None:
+            return False
+
+        target = None
+        provider = None
+        if isinstance(obj, Action):
+            target = obj.target
+            provider = target.provider
+        elif isinstance(obj, ReviewableMixin):
+            target = obj
+            provider = target.provider
+        elif isinstance(obj, ReviewProviderMixin):
+            provider = obj
+        else:
+            raise ValueError('Not a reviews-related model: {}'.format(obj))
+
+        serializer = view.get_serializer()
+
+        if request.method in drf_permissions.SAFE_METHODS:
+            # Moderators and node contributors can view actions
+            is_node_contributor = target is not None and target.node.has_permission(auth.user, osf_permissions.READ)
+            return is_node_contributor or auth.user.has_perm('view_actions', provider)
+        else:
+            # Moderators and node admins can trigger state changes.
+            is_node_admin = target is not None and target.node.has_permission(auth.user, osf_permissions.ADMIN)
+            if not (is_node_admin or auth.user.has_perm('view_submissions', provider)):
+                return False
+
+            # User can trigger state changes on this reviewable, but can they use this trigger in particular?
+            serializer = view.get_serializer(data=request.data)
+            serializer.is_valid(raise_exception=True)
+            trigger = serializer.validated_data.get('trigger')
+            permission = TRIGGER_PERMISSIONS[trigger]
+            return permission is None or request.user.has_perm(permission, target.provider)
+
+
 class CanSetUpProvider(drf_permissions.BasePermission):
     def has_object_permission(self, request, view, obj):
         if request.method in drf_permissions.SAFE_METHODS:
diff --git a/reviews/test/.gitkeep b/reviews/test/.gitkeep
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/reviews/workflow.py b/reviews/workflow.py
new file mode 100644
index 00000000000..6c6eeff0170
--- /dev/null
+++ b/reviews/workflow.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from enum import Enum
+from enum import unique
+
+
+class ChoiceEnum(Enum):
+    @classmethod
+    def choices(cls):
+        return tuple((v, unicode(v).title()) for v in cls.values())
+
+    @classmethod
+    def values(cls):
+        return tuple(c.value for c in cls)
+
+
+@unique
+class Workflows(ChoiceEnum):
+    NONE = None
+    PRE_MODERATION = 'pre-moderation'
+    POST_MODERATION = 'post-moderation'
+
+
+@unique
+class States(ChoiceEnum):
+    INITIAL = 'initial'
+    PENDING = 'pending'
+    ACCEPTED = 'accepted'
+    REJECTED = 'rejected'
+
+
+@unique
+class Triggers(ChoiceEnum):
+    SUBMIT = 'submit'
+    ACCEPT = 'accept'
+    REJECT = 'reject'
+    EDIT_COMMENT = 'edit_comment'
+
+
+PUBLIC_STATES = {
+    Workflows.NONE.value: (
+        States.INITIAL.value,
+        States.PENDING.value,
+        States.ACCEPTED.value,
+        States.REJECTED.value,
+    ),
+    Workflows.PRE_MODERATION.value: (
+        States.ACCEPTED.value,
+    ),
+    Workflows.POST_MODERATION.value: (
+        States.PENDING.value,
+        States.ACCEPTED.value,
+    )
+}
+
+
+TRANSITIONS = [
+    {
+        'trigger': Triggers.SUBMIT.value,
+        'source': [States.INITIAL.value],
+        'dest': States.PENDING.value,
+        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_submit'],
+    },
+    {
+        'trigger': Triggers.SUBMIT.value,
+        'source': [States.PENDING.value, States.REJECTED.value],
+        'conditions': 'resubmission_allowed',
+        'dest': States.PENDING.value,
+        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_resubmit'],
+    },
+    {
+        'trigger': Triggers.ACCEPT.value,
+        'source': [States.PENDING.value, States.REJECTED.value],
+        'dest': States.ACCEPTED.value,
+        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_accept_reject'],
+    },
+    {
+        'trigger': Triggers.REJECT.value,
+        'source': [States.PENDING.value, States.ACCEPTED.value],
+        'dest': States.REJECTED.value,
+        'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_accept_reject'],
+    },
+    {
+        'trigger': Triggers.EDIT_COMMENT.value,
+        'source': [States.PENDING.value, States.REJECTED.value, States.ACCEPTED.value],
+        'dest': '=',
+        'after': ['save_action', 'save_changes', 'notify_edit_comment'],
+    },
+]
diff --git a/tests/test_notifications.py b/tests/test_notifications.py
index 17228be1862..54780474bcf 100644
--- a/tests/test_notifications.py
+++ b/tests/test_notifications.py
@@ -15,7 +15,6 @@
 from website.notifications import utils
 from website import mails, settings
 from website.project.signals import contributor_removed, node_deleted
-from website.reviews import listeners
 from website.util import api_url_for
 from website.util import web_url_for
 
@@ -23,6 +22,7 @@
 from tests.base import capture_signals
 from tests.base import OsfTestCase, NotificationTestCase
 
+from reviews.models import mixins
 
 
 class TestNotificationsModels(OsfTestCase):
@@ -1816,7 +1816,7 @@ def setUp(self):
             'provider_contact_email': 'contact@osf.io',
             'provider_support_email': 'support@osf.io',
         }
-        self.action = factories.ReviewActionFactory()
+        self.action = factories.ActionFactory()
         factories.NotificationSubscriptionFactory(
             _id=self.user._id + '_' + 'global_comments',
             user=self.user,
@@ -1842,10 +1842,10 @@ def test_reviews_base_notification(self):
 
     @mock.patch('website.mails.mails.send_mail')
     def test_reviews_submit_notification(self, mock_send_email):
-        listeners.reviews_submit_notification(self, context=self.context_info, recipients=[self.sender, self.user])
+        mixins.reviews_submit_notification(self, context=self.context_info, recipients=[self.sender, self.user])
         assert_true(mock_send_email.called)
 
     @mock.patch('website.notifications.emails.notify_global_event')
     def test_reviews_notification(self, mock_notify):
-        listeners.reviews_notification(self, creator=self.sender, context=self.context_info, action=self.action, template='test.html.mako')
+        mixins.reviews_notification(self, creator=self.sender, context=self.context_info, action=self.action, template='test.html.mako')
         assert_true(mock_notify.called)
diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py
deleted file mode 100644
index 07aad47cfd5..00000000000
--- a/website/reviews/listeners.py
+++ /dev/null
@@ -1,39 +0,0 @@
-
-from django.utils import timezone
-
-from website.mails import mails
-from website.notifications import emails, utils
-from website.reviews import signals as reviews_signals
-
-# Handle email notifications including: update comment, accept, and reject of submission.
-@reviews_signals.reviews_email.connect
-def reviews_notification(self, creator, template, context, action):
-    recipients = list(action.target.node.contributors)
-    time_now = action.date_created if action is not None else timezone.now()
-    node = action.target.node
-    emails.notify_global_event(
-        event='global_reviews',
-        sender_user=creator,
-        node=node,
-        timestamp=time_now,
-        recipients=recipients,
-        template=template,
-        context=context
-    )
-
-# Handle email notifications for a new submission.
-@reviews_signals.reviews_email_submit.connect
-def reviews_submit_notification(self, recipients, context):
-    event_type = utils.find_subscription_type('global_reviews')
-    for recipient in recipients:
-        user_subscriptions = emails.get_user_subscriptions(recipient, event_type)
-        context['no_future_emails'] = user_subscriptions['none']
-        context['is_creator'] = recipient == context['reviewable'].node.creator
-        context['provider_name'] = context['reviewable'].provider.name
-        mails.send_mail(
-            recipient.username,
-            mails.REVIEWS_SUBMISSION_CONFIRMATION,
-            mimetype='html',
-            user=recipient,
-            **context
-        )

From ac23194e3f1c08eb5f9631b60a39cdae2ce633b3 Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Mon, 27 Nov 2017 11:35:52 -0500
Subject: [PATCH 127/192] Remake migration

---
 ...istration_deleted.py => 0068_draftregistration_deleted.py} | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
 rename osf/migrations/{0066_draftregistration_deleted.py => 0068_draftregistration_deleted.py} (77%)

diff --git a/osf/migrations/0066_draftregistration_deleted.py b/osf/migrations/0068_draftregistration_deleted.py
similarity index 77%
rename from osf/migrations/0066_draftregistration_deleted.py
rename to osf/migrations/0068_draftregistration_deleted.py
index 9fde4aad0ef..ae7967c733b 100644
--- a/osf/migrations/0066_draftregistration_deleted.py
+++ b/osf/migrations/0068_draftregistration_deleted.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Generated by Django 1.11.7 on 2017-11-13 16:44
+# Generated by Django 1.11.7 on 2017-11-27 16:35
 from __future__ import unicode_literals
 
 from django.db import migrations
@@ -9,7 +9,7 @@
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('osf', '0065_preprintservice_original_publication_date'),
+        ('osf', '0067_auto_20171121_1050'),
     ]
 
     operations = [

From 34501f8413b538026fede1ef1658fa36a46818f9 Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Mon, 27 Nov 2017 12:05:04 -0500
Subject: [PATCH 128/192] Fix merge conflict

---
 website/settings/defaults.py | 147 +----------------------------------
 1 file changed, 1 insertion(+), 146 deletions(-)

diff --git a/website/settings/defaults.py b/website/settings/defaults.py
index 1167d6e4a90..afb7d52ffc9 100644
--- a/website/settings/defaults.py
+++ b/website/settings/defaults.py
@@ -459,7 +459,6 @@ class CeleryConfig:
         'scripts.premigrate_created_modified',
     )
 
-<<<<<<< HEAD
     # Modules that need metrics and release requirements
     # imports += (
     #     'scripts.osfstorage.glacier_inventory',
@@ -555,151 +554,7 @@ class CeleryConfig:
             'generate_sitemap': {
                 'task': 'scripts.generate_sitemap',
                 'schedule': crontab(minute=0, hour=0),  # Daily 12:00 a.m.
-            }
-=======
-    CELERY_DEFAULT_EXCHANGE_TYPE = 'direct'
-    CELERY_ROUTES = ('framework.celery_tasks.routers.CeleryRouter', )
-    CELERY_IGNORE_RESULT = True
-    CELERY_STORE_ERRORS_EVEN_IF_IGNORED = True
-
-# Default RabbitMQ broker
-RABBITMQ_USERNAME = os.environ.get('RABBITMQ_USERNAME', 'guest')
-RABBITMQ_PASSWORD = os.environ.get('RABBITMQ_PASSWORD', 'guest')
-RABBITMQ_HOST = os.environ.get('RABBITMQ_HOST', 'localhost')
-RABBITMQ_PORT = os.environ.get('RABBITMQ_PORT', '5672')
-RABBITMQ_VHOST = os.environ.get('RABBITMQ_VHOST', '/')
-
-BROKER_URL = os.environ.get('BROKER_URL', 'amqp://{}:{}@{}:{}/{}'.format(RABBITMQ_USERNAME, RABBITMQ_PASSWORD, RABBITMQ_HOST, RABBITMQ_PORT, RABBITMQ_VHOST))
-BROKER_USE_SSL = False
-
-# Default RabbitMQ backend
-CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', BROKER_URL)
-
-# Modules to import when celery launches
-CELERY_IMPORTS = (
-    'framework.celery_tasks',
-    'framework.email.tasks',
-    'website.mailchimp_utils',
-    'website.notifications.tasks',
-    'website.archiver.tasks',
-    'website.search.search',
-    'website.project.tasks',
-    'scripts.populate_new_and_noteworthy_projects',
-    'scripts.populate_popular_projects_and_registrations',
-    'scripts.refresh_addon_tokens',
-    'scripts.retract_registrations',
-    'scripts.embargo_registrations',
-    'scripts.approve_registrations',
-    'scripts.approve_embargo_terminations',
-    'scripts.triggered_mails',
-    'scripts.send_queued_mails',
-    'scripts.analytics.run_keen_summaries',
-    'scripts.analytics.run_keen_snapshots',
-    'scripts.analytics.run_keen_events',
-    'scripts.generate_sitemap',
-    'scripts.generate_prereg_csv'
-)
-
-# Modules that need metrics and release requirements
-# CELERY_IMPORTS += (
-#     'scripts.osfstorage.glacier_inventory',
-#     'scripts.osfstorage.glacier_audit',
-#     'scripts.osfstorage.usage_audit',
-#     'scripts.stuck_registration_audit',
-#     'scripts.osfstorage.files_audit',
-#     'scripts.analytics.tasks',
-#     'scripts.analytics.upload',
-# )
-
-# celery.schedule will not be installed when running invoke requirements the first time.
-try:
-    from celery.schedules import crontab
-except ImportError:
-    pass
-else:
-    #  Setting up a scheduler, essentially replaces an independent cron job
-    CELERYBEAT_SCHEDULE = {
-        '5-minute-emails': {
-            'task': 'website.notifications.tasks.send_users_email',
-            'schedule': crontab(minute='*/5'),
-            'args': ('email_transactional',),
-        },
-        'daily-emails': {
-            'task': 'website.notifications.tasks.send_users_email',
-            'schedule': crontab(minute=0, hour=0),
-            'args': ('email_digest',),
-        },
-        'refresh_addons': {
-            'task': 'scripts.refresh_addon_tokens',
-            'schedule': crontab(minute=0, hour=2),  # Daily 2:00 a.m
-            'kwargs': {'dry_run': False, 'addons': {
-                'box': 60,          # https://docs.box.com/docs/oauth-20#section-6-using-the-access-and-refresh-tokens
-                'googledrive': 14,  # https://developers.google.com/identity/protocols/OAuth2#expiration
-                'mendeley': 14      # http://dev.mendeley.com/reference/topics/authorization_overview.html
-            }},
-        },
-        'retract_registrations': {
-            'task': 'scripts.retract_registrations',
-            'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
-            'kwargs': {'dry_run': False},
-        },
-        'embargo_registrations': {
-            'task': 'scripts.embargo_registrations',
-            'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
-            'kwargs': {'dry_run': False},
-        },
-        'approve_registrations': {
-            'task': 'scripts.approve_registrations',
-            'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
-            'kwargs': {'dry_run': False},
-        },
-        'approve_embargo_terminations': {
-            'task': 'scripts.approve_embargo_terminations',
-            'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
-            'kwargs': {'dry_run': False},
-        },
-        'triggered_mails': {
-            'task': 'scripts.triggered_mails',
-            'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
-            'kwargs': {'dry_run': False},
-        },
-        'send_queued_mails': {
-            'task': 'scripts.send_queued_mails',
-            'schedule': crontab(minute=0, hour=12),  # Daily 12 p.m.
-            'kwargs': {'dry_run': False},
-        },
-        'new-and-noteworthy': {
-            'task': 'scripts.populate_new_and_noteworthy_projects',
-            'schedule': crontab(minute=0, hour=2, day_of_week=6),  # Saturday 2:00 a.m.
-            'kwargs': {'dry_run': False}
-        },
-        'update_popular_nodes': {
-            'task': 'scripts.populate_popular_projects_and_registrations',
-            'schedule': crontab(minute=0, hour=2),  # Daily 2:00 a.m.
-            'kwargs': {'dry_run': False}
-        },
-        'run_keen_summaries': {
-            'task': 'scripts.analytics.run_keen_summaries',
-            'schedule': crontab(minute=00, hour=1),  # Daily 1:00 a.m.
-            'kwargs': {'yesterday': True}
-        },
-        'run_keen_snapshots': {
-            'task': 'scripts.analytics.run_keen_snapshots',
-            'schedule': crontab(minute=0, hour=3),  # Daily 3:00 a.m.
-        },
-        'run_keen_events': {
-            'task': 'scripts.analytics.run_keen_events',
-            'schedule': crontab(minute=0, hour=4),  # Daily 4:00 a.m.
-            'kwargs': {'yesterday': True}
-        },
-        'generate_sitemap': {
-            'task': 'scripts.generate_sitemap',
-            'schedule': crontab(minute=0, hour=0),  # Daily 12:00 a.m.
-        },
-        'generate_prereg_csv': {
-            'task': 'scripts.generate_prereg_csv',
-            'schedule': crontab(minute=0, hour=15, day_of_week=0),  # Sunday 3:00 a.m.
->>>>>>> hotfix/0.124.13
+            },
         }
 
         # Tasks that need metrics and release requirements

From 327e371498c7efb211d4cf04877775662a32adb7 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 27 Nov 2017 12:05:06 -0600
Subject: [PATCH 129/192] Add preprint_doi_created field to PreprintService
 model to track when the preprint DOI was created.

---
 ...68_preprintservice_preprint_doi_created.py | 21 +++++++++++++++++++
 osf/models/preprint_service.py                |  1 +
 2 files changed, 22 insertions(+)
 create mode 100644 osf/migrations/0068_preprintservice_preprint_doi_created.py

diff --git a/osf/migrations/0068_preprintservice_preprint_doi_created.py b/osf/migrations/0068_preprintservice_preprint_doi_created.py
new file mode 100644
index 00000000000..19de40a4900
--- /dev/null
+++ b/osf/migrations/0068_preprintservice_preprint_doi_created.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.7 on 2017-11-27 17:19
+from __future__ import unicode_literals
+
+from django.db import migrations
+import osf.utils.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('osf', '0067_auto_20171121_1050'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='preprintservice',
+            name='preprint_doi_created',
+            field=osf.utils.fields.NonNaiveDateTimeField(blank=True, default=None, null=True),
+        ),
+    ]
diff --git a/osf/models/preprint_service.py b/osf/models/preprint_service.py
index b1c982a24c6..09669a5f430 100644
--- a/osf/models/preprint_service.py
+++ b/osf/models/preprint_service.py
@@ -43,6 +43,7 @@ class PreprintService(DirtyFieldsMixin, GuidMixin, IdentifierMixin, ReviewableMi
     subjects = models.ManyToManyField(blank=True, to='osf.Subject', related_name='preprint_services')
 
     identifiers = GenericRelation(Identifier, related_query_name='preprintservices')
+    preprint_doi_created = NonNaiveDateTimeField(default=None, null=True, blank=True)
 
     class Meta:
         unique_together = ('node', 'provider')

From 96f5e2a2920588941b3791bf64ffdabea9466667 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 27 Nov 2017 12:05:55 -0600
Subject: [PATCH 130/192] Remove preprint_doi_on_datacite SerializerMethodField
 and replace with preprint_doi_created field.

---
 api/preprints/serializers.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py
index 4455f251de0..6577169016d 100644
--- a/api/preprints/serializers.py
+++ b/api/preprints/serializers.py
@@ -84,7 +84,7 @@ class PreprintSerializer(JSONAPISerializer):
     description = ser.CharField(required=False, allow_blank=True, allow_null=True, source='node.description')
     tags = JSONAPIListField(child=NodeTagField(), required=False, source='node.tags')
     node_is_public = ser.BooleanField(read_only=True, source='node__is_public')
-    preprint_doi_on_datacite = ser.SerializerMethodField(read_only=True)
+    preprint_doi_created = DateByVersion(read_only=True)
 
     contributors = RelationshipField(
         related_view='nodes:node-contributors',
@@ -151,10 +151,6 @@ class PreprintSerializer(JSONAPISerializer):
     class Meta:
         type_ = 'preprints'
 
-    def get_preprint_doi_on_datacite(self, obj):
-        doi_identifier = obj.get_identifier('doi')
-        return doi_identifier is not None
-
     def get_subjects(self, obj):
         return [
             [

From bac06acb1e6255040f371232776f3da75fb9247a Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 27 Nov 2017 12:11:19 -0600
Subject: [PATCH 131/192] Add data migration to populate preprint_doi_created
 field on existing published preprints where DOI identifier exists.  Set to
 preprint date_published field.

---
 osf/migrations/0069_auto_20171127_1119.py | 51 +++++++++++++++++++++++
 1 file changed, 51 insertions(+)
 create mode 100644 osf/migrations/0069_auto_20171127_1119.py

diff --git a/osf/migrations/0069_auto_20171127_1119.py b/osf/migrations/0069_auto_20171127_1119.py
new file mode 100644
index 00000000000..7f4532f3bbc
--- /dev/null
+++ b/osf/migrations/0069_auto_20171127_1119.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.7 on 2017-11-27 17:19
+from __future__ import unicode_literals
+import logging
+
+from django.db import migrations
+from osf.models import PreprintService
+logger = logging.getLogger(__name__)
+
+def add_preprint_doi_created(apps, schema_editor):
+    """
+    Data migration that makes preprint_doi_created equal to date_published for existing published preprints.
+    """
+    null_preprint_doi_created = PreprintService.objects.filter(preprint_doi_created__isnull=True, date_published__isnull=False)
+    preprints_count = null_preprint_doi_created.count()
+    current_preprint = 0
+    logger.info('{} published preprints found with preprint_doi_created is null.'.format(preprints_count))
+
+    for preprint in null_preprint_doi_created:
+        current_preprint += 1
+        if preprint.get_identifier('doi'):
+            preprint.preprint_doi_created = preprint.date_published
+            preprint.save()
+            logger.info('Preprint ID {}, {}/{} preprint_doi_created field populated.'.format(preprint._id, current_preprint, preprints_count))
+        else:
+            logger.info('Preprint ID {}, {}/{} skipped because a DOI has not been created.'.format(preprint._id, current_preprint, preprints_count))
+
+def reverse_func(apps, schema_editor):
+    """
+    Reverses data migration. Sets preprint_doi_created field back to null.
+    """
+    preprint_doi_created_not_null = PreprintService.objects.filter(preprint_doi_created__isnull=False)
+    preprints_count = preprint_doi_created_not_null.count()
+    current_preprint = 0
+    logger.info('Reversing preprint_doi_created migration.')
+
+    for preprint in preprint_doi_created_not_null:
+        current_preprint += 1
+        preprint.preprint_doi_created = None
+        preprint.save()
+        logger.info('Preprint ID {}, {}/{} preprint_doi_created field set to None.'.format(preprint._id, current_preprint, preprints_count))
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('osf', '0068_preprintservice_preprint_doi_created'),
+    ]
+
+    operations = [
+         migrations.RunPython(add_preprint_doi_created, reverse_func)
+    ]

From c44500847d41cae61d51d916e57b62a70cf8880e Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 27 Nov 2017 12:24:33 -0600
Subject: [PATCH 132/192] Set preprint_doi_created to timezone.now() after doi
 and ark identifiers have been set.

---
 osf/models/preprint_service.py | 1 +
 website/preprints/tasks.py     | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/osf/models/preprint_service.py b/osf/models/preprint_service.py
index 09669a5f430..e7debae01e3 100644
--- a/osf/models/preprint_service.py
+++ b/osf/models/preprint_service.py
@@ -234,6 +234,7 @@ def set_preprint_license(self, license_detail, auth, save=False):
     def set_identifier_values(self, doi, ark, save=False):
         self.set_identifier_value('doi', doi)
         self.set_identifier_value('ark', ark)
+        self.preprint_doi_created = timezone.now()
 
         if save:
             self.save()
diff --git a/website/preprints/tasks.py b/website/preprints/tasks.py
index 53d8b980233..7f81759b650 100644
--- a/website/preprints/tasks.py
+++ b/website/preprints/tasks.py
@@ -185,7 +185,7 @@ def get_and_set_preprint_identifiers(preprint_id):
     if ezid_response is None:
         return
     id_dict = parse_identifiers(ezid_response)
-    preprint.set_identifier_values(doi=id_dict['doi'], ark=id_dict['ark'])
+    preprint.set_identifier_values(doi=id_dict['doi'], ark=id_dict['ark'], save=True)
 
 
 @celery_app.task(ignore_results=True)

From c907c11ca9de4cc01b47662c200c3aebc9bc033e Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 27 Nov 2017 12:46:50 -0600
Subject: [PATCH 133/192] Modify api preprint tests to reflect new
 preprint_doi_created field that is on published preprints where DOI
 identifier has been set.

---
 api_tests/preprints/views/test_preprint_detail.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/api_tests/preprints/views/test_preprint_detail.py b/api_tests/preprints/views/test_preprint_detail.py
index 8f5c8a68e0c..ecf24b5520f 100644
--- a/api_tests/preprints/views/test_preprint_detail.py
+++ b/api_tests/preprints/views/test_preprint_detail.py
@@ -108,7 +108,7 @@ def test_preprint_doi_link_absent_in_unpublished_preprints(self, app, user, unpu
         assert res.json['data']['id'] == unpublished_preprint._id
         assert res.json['data']['attributes']['is_published'] is False
         assert 'preprint_doi' not in res.json['data']['links'].keys()
-        assert res.json['data']['attributes']['preprint_doi_on_datacite'] is False
+        assert res.json['data']['attributes']['preprint_doi_created'] is None
 
     def test_published_preprint_doi_link_returned_before_datacite_request(self, app, user, unpublished_preprint, unpublished_url):
         unpublished_preprint.is_published = True
@@ -119,16 +119,17 @@ def test_published_preprint_doi_link_returned_before_datacite_request(self, app,
         assert 'preprint_doi' in res.json['data']['links'].keys()
         expected_doi = EZID_FORMAT.format(namespace=DOI_NAMESPACE, guid=unpublished_preprint._id).replace('doi:', '').upper()
         assert res.json['data']['links']['preprint_doi'] == 'https://dx.doi.org/{}'.format(expected_doi)
-        assert res.json['data']['attributes']['preprint_doi_on_datacite'] is False
+        assert res.json['data']['attributes']['preprint_doi_created'] is None
 
     def test_published_preprint_doi_link_returned_after_datacite_request(self, app, user, preprint, url):
+        expected_doi = EZID_FORMAT.format(namespace=DOI_NAMESPACE, guid=preprint._id).replace('doi:', '')
+        preprint.set_identifier_values(doi=expected_doi, ark='testark')
         res = app.get(url, auth=user.auth)
         assert res.json['data']['id'] == preprint._id
         assert res.json['data']['attributes']['is_published'] is True
         assert 'preprint_doi' in res.json['data']['links'].keys()
-        expected_doi = EZID_FORMAT.format(namespace=DOI_NAMESPACE, guid=preprint._id).replace('doi:', '')
         assert res.json['data']['links']['preprint_doi'] == 'https://dx.doi.org/{}'.format(expected_doi)
-        assert res.json['data']['attributes']['preprint_doi_on_datacite'] is True
+        assert res.json['data']['attributes']['preprint_doi_created'] is not None
 
 
 @pytest.mark.django_db

From c8cbd204bdf1cddce523d6aa08fb7f36cfe4dc3f Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 27 Nov 2017 12:59:18 -0600
Subject: [PATCH 134/192] Update field name documentation on preprints views to
 replace obsolete field.

---
 api/preprints/views.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/api/preprints/views.py b/api/preprints/views.py
index c6357512840..106c35771a0 100644
--- a/api/preprints/views.py
+++ b/api/preprints/views.py
@@ -78,7 +78,7 @@ class PreprintList(JSONAPIBaseView, generics.ListCreateAPIView, PreprintFilterMi
         is_preprint_orphan              boolean                             whether or not this preprint is orphaned
         subjects                        list of lists of dictionaries       ids of Subject in the BePress taxonomy. Dictionary, containing the subject text and subject ID
         doi                             string                              bare DOI for the manuscript, as entered by the user
-        preprint_doi_on_datacite        boolean                             whether or not the preprint doi has been added on datacite
+        preprint_doi_created            iso8601 timestamp                   timestamp that the preprint doi was created
 
     ##Relationships
 
@@ -202,7 +202,7 @@ class PreprintDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, Pre
         is_preprint_orphan              boolean                             whether or not this preprint is orphaned
         subjects                        array of tuples of dictionaries     ids of Subject in the BePress taxonomy. Dictionary, containing the subject text and subject ID
         doi                             string                              bare DOI for the manuscript, as entered by the user
-        preprint_doi_on_datacite        boolean                             whether or not the preprint doi has been added on datacite
+        preprint_doi_created            iso8601 timestamp                   timestamp that the preprint doi was created
 
 
     ##Relationships

From 5c2f55fd2150694cc4588f43cb9ffd24c99035b1 Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Mon, 27 Nov 2017 14:37:22 -0500
Subject: [PATCH 135/192] Fix docs for `inv maintenance`

h/t @icereval for pointing all the errors out
---
 tasks/__init__.py | 24 +++++++++++-------------
 1 file changed, 11 insertions(+), 13 deletions(-)

diff --git a/tasks/__init__.py b/tasks/__init__.py
index 5c2b004b59d..0249281b438 100755
--- a/tasks/__init__.py
+++ b/tasks/__init__.py
@@ -953,22 +953,20 @@ def set_maintenance(ctx, message='', level=1, start=None, end=None):
     from website.app import setup_django
     setup_django()
     from website.maintenance import set_maintenance
-    """Creates a maintenance notice.
+    """Display maintenance notice across OSF applications (incl. preprints, registries, etc.)
 
-    Message is required.
-    Level defaults to 1. Valid levels are 1 (info), 2 (warning), and 3 (danger).
-
-    Set the time period for the maintenance notice to be displayed.
-    If no start or end values are displayed, default to starting now
-    and ending 24 hours from now. If no timezone info is passed along,
-    everything will be converted to UTC.
-
-    If a given end time results in a start that is after the end, start
-    will be changed to be 24 hours before the end time.
+    start - Start time for the maintenance period
+    end - End time for the maintenance period
+        NOTE: If no start or end values are provided, default to starting now
+        and ending 24 hours from now.
+    message - Message to display. If omitted, will be:
+        "The site will undergo maintenance between <localized start time> and <localized end time>. Thank you
+        for your patience."
+    level - Severity level. Modifies the color of the displayed notice. Must be one of 1 (info), 2 (warning), 3 (danger).
 
     Examples:
-        invoke set_maintenance --message 'OSF down for scheduled maintenance.' --start 2016-03-16T15:41:00-04:00
-        invoke set_maintenance --message 'Apocalypse' --level 3 --end 2016-03-16T15:41:00-04:00
+        invoke set_maintenance --start 2016-03-16T15:41:00-04:00 --end 2016-03-16T15:42:00-04:00
+        invoke set_maintenance --message 'The OSF is experiencing issues connecting to a 3rd party service' --level 2 --start 2016-03-16T15:41:00-04:00 --end 2016-03-16T15:42:00-04:00
     """
     state = set_maintenance(message, level, start, end)
     print('Maintenance notice up {} to {}.'.format(state['start'], state['end']))

From 5b1913d376188de0d6e2c5519c6a15aa96d1f34a Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Mon, 27 Nov 2017 15:46:43 -0500
Subject: [PATCH 136/192] Fix GitLab bugs   - Don't show "Force Check In"   -
 Don't break the whole page for uninitiated/empty repos

[#OSF-8169]
---
 addons/gitlab/api.py                | 2 +-
 addons/gitlab/apps.py               | 2 +-
 website/static/js/filepage/index.js | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/addons/gitlab/api.py b/addons/gitlab/api.py
index fd9b2e8f785..cd72bf3a451 100644
--- a/addons/gitlab/api.py
+++ b/addons/gitlab/api.py
@@ -68,7 +68,7 @@ def branches(self, repo_id, branch=None):
         if branch:
             return self.gitlab.getbranch(repo_id, branch)
 
-        return self.gitlab.getbranches(repo_id)
+        return self.gitlab.getbranches(repo_id) or []
 
     def starball(self, user, repo, repo_id, ref='master'):
         """Get link for archive download.
diff --git a/addons/gitlab/apps.py b/addons/gitlab/apps.py
index 4a103949ac1..28eb908b8aa 100644
--- a/addons/gitlab/apps.py
+++ b/addons/gitlab/apps.py
@@ -40,7 +40,7 @@ def gitlab_hgrid_data(node_settings, auth, **kwargs):
         ref = ref_to_params(branch, sha)
         can_edit = check_permissions(node_settings, auth, connection, branch, sha, repo=repo)
     else:
-        ref = None
+        ref = ''
         can_edit = False
 
     permissions = {
diff --git a/website/static/js/filepage/index.js b/website/static/js/filepage/index.js
index ae2d419fde4..0888790106a 100644
--- a/website/static/js/filepage/index.js
+++ b/website/static/js/filepage/index.js
@@ -501,7 +501,7 @@ var FileViewPage = {
         var height = $('iframe').attr('height') ? $('iframe').attr('height') : '0px';
 
         m.render(document.getElementById('toggleBar'), m('.btn-toolbar.m-t-md', [
-            ctrl.context.currentUser.canEdit && (ctrl.file.provider !== 'bitbucket') && (!ctrl.canEdit()) && (ctrl.context.currentUser.isAdmin) ? m('.btn-group.m-l-xs.m-t-xs', [
+            ctrl.context.currentUser.canEdit && (!ctrl.canEdit()) && (ctrl.context.currentUser.isAdmin) && (ctrl.file.provider !== 'bitbucket') && (ctrl.file.provider !== 'gitlab') && (ctrl.file.provider !== 'onedrive') ? m('.btn-group.m-l-xs.m-t-xs', [
                 ctrl.isLatestVersion ? m('.btn.btn-sm.btn-default', {onclick: $(document).trigger.bind($(document), 'fileviewpage:force_checkin')}, 'Force check in') : null
             ]) : '',
             ctrl.canEdit() && (!ctrl.file.checkoutUser) && (ctrl.file.provider === 'osfstorage') ? m('.btn-group.m-l-xs.m-t-xs', [

From eb3fd1f1ca61238b88e6b210f5c4f0548901a8c7 Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Mon, 27 Nov 2017 16:24:48 -0500
Subject: [PATCH 137/192] Remove "Create Repo" functionality   - It serves no
 purpose and creates problems

---
 addons/gitlab/routes.py                       | 11 -----
 addons/gitlab/static/gitlabNodeConfig.js      | 42 -------------------
 .../templates/gitlab_node_settings.mako       |  1 -
 addons/gitlab/views.py                        | 24 -----------
 4 files changed, 78 deletions(-)

diff --git a/addons/gitlab/routes.py b/addons/gitlab/routes.py
index 8f6c1a15a79..af41c5cc3c3 100644
--- a/addons/gitlab/routes.py
+++ b/addons/gitlab/routes.py
@@ -101,17 +101,6 @@
             json_renderer,
         ),
 
-        Rule(
-            [
-                '/project/<pid>/gitlab/repo/create/',
-                '/project/<pid>/node/<nid>/gitlab/repo/create/',
-
-            ],
-            'post',
-            views.gitlab_create_repo,
-            json_renderer,
-        ),
-
         Rule(
             [
                 '/project/<pid>/gitlab/hgrid/root/',
diff --git a/addons/gitlab/static/gitlabNodeConfig.js b/addons/gitlab/static/gitlabNodeConfig.js
index 06ff30786a4..f79a83f89b4 100644
--- a/addons/gitlab/static/gitlabNodeConfig.js
+++ b/addons/gitlab/static/gitlabNodeConfig.js
@@ -36,44 +36,6 @@ var displayError = function(msg) {
         .fadeOut(100).fadeIn();
 };
 
-var createRepo = function() {
-
-    var $elm = $('#addonSettingsGitLab');
-    var $select = $elm.find('select');
-
-    bootbox.prompt({
-        title: 'Name your new repo',
-        placeholder: 'Repo name',
-        callback: function (repoName) {
-            // Return if cancelled
-            if (repoName === null) {
-                return;
-            }
-
-            if (repoName === '') {
-                displayError('Your repo must have a name');
-                return;
-            }
-
-            $osf.postJSON(
-                nodeApiUrl + 'gitlab/repo/create/',
-                {name: repoName, user: $("#gitlabUser").val()}
-            ).done(function (response) {
-                    $select.append('<option value="' + response.repo['id'] + '">' + $osf.htmlEscape(response.repo['path_with_namespace']) + '</option>');
-                    $select.val(response.repo['id']);
-                    updateHidden($select);
-                }).fail(function () {
-                    displayError('Could not create repository');
-                });
-        },
-        buttons:{
-            confirm:{
-                label: 'Save',
-                className:'btn-success'
-            }
-        }
-    });
-};
 
 var askImport = function() {
     $.get('/api/v1/settings/gitlab/accounts/'
@@ -136,10 +98,6 @@ $(document).ready(function() {
         }
     });
 
-    $('#gitlabCreateRepo').on('click', function() {
-        createRepo();
-    });
-
     $('#gitlabImportToken').on('click', function() {
         askImport();
     });
diff --git a/addons/gitlab/templates/gitlab_node_settings.mako b/addons/gitlab/templates/gitlab_node_settings.mako
index bc3f7eb9edd..686a54ae7db 100644
--- a/addons/gitlab/templates/gitlab_node_settings.mako
+++ b/addons/gitlab/templates/gitlab_node_settings.mako
@@ -63,7 +63,6 @@
                     <button class="btn btn-success addon-settings-submit">
                         Save
                     </button>
-                    <a id="gitlabCreateRepo" class="btn btn-success pull-right">Create Repo</a>
                 </div>
             </div>
             % elif gitlab_repo_full_name:
diff --git a/addons/gitlab/views.py b/addons/gitlab/views.py
index 732b2491c4a..c9fb066bb3a 100644
--- a/addons/gitlab/views.py
+++ b/addons/gitlab/views.py
@@ -251,30 +251,6 @@ def gitlab_root_folder(*args, **kwargs):
 # Repos #
 #########
 
-@must_have_addon(SHORT_NAME, 'user')
-@must_have_addon(SHORT_NAME, 'node')
-@must_be_addon_authorizer(SHORT_NAME)
-@must_have_permission('write')
-def gitlab_create_repo(**kwargs):
-    repo_name = request.json.get('name')
-    user = request.json.get('user')
-
-    if not repo_name:
-        raise HTTPError(http.BAD_REQUEST)
-
-    node_settings = kwargs['node_addon']
-    connection = GitLabClient(external_account=node_settings.external_account)
-
-    try:
-        repo = connection.create_repo(repo_name, auto_init=True)
-    except GitLabError:
-        raise HTTPError(http.BAD_REQUEST)
-
-    return {
-        'user': user,
-        'repo': repo,
-    }
-
 def add_hook_log(node, gitlab, action, path, date, committer, include_urls=False,
                  sha=None, save=False):
     """Add log event for commit from webhook payload.

From 0593767ccf0c406bc34dd85592a23d238de4fa21 Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Mon, 27 Nov 2017 16:36:51 -0500
Subject: [PATCH 138/192] Fix Flake

---
 addons/gitlab/views.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/addons/gitlab/views.py b/addons/gitlab/views.py
index c9fb066bb3a..eec0a06f556 100644
--- a/addons/gitlab/views.py
+++ b/addons/gitlab/views.py
@@ -12,7 +12,6 @@
 from addons.base import generic_views
 from addons.gitlab.api import GitLabClient
 from addons.gitlab.apps import gitlab_hgrid_data
-from addons.gitlab.exceptions import GitLabError
 from addons.gitlab.settings import DEFAULT_HOSTS
 from addons.gitlab.serializer import GitLabSerializer
 from addons.gitlab.utils import verify_hook_signature, MESSAGES

From 44a78a8dc9230557ba8171d44a162aeeaa7179a0 Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Thu, 13 Jul 2017 11:27:05 -0400
Subject: [PATCH 139/192] Rename ReverseManyToOneDescriptor for creator    
 -rename Comment.modified->edited

---
 osf/migrations/0042_auto_20170713_1025.py | 22 ++++++++++++++++++++++
 osf/models/node.py                        |  2 +-
 osf/models/user.py                        |  2 +-
 osf_tests/test_user.py                    |  2 +-
 4 files changed, 25 insertions(+), 3 deletions(-)
 create mode 100644 osf/migrations/0042_auto_20170713_1025.py

diff --git a/osf/migrations/0042_auto_20170713_1025.py b/osf/migrations/0042_auto_20170713_1025.py
new file mode 100644
index 00000000000..0ed0194acbe
--- /dev/null
+++ b/osf/migrations/0042_auto_20170713_1025.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 15:25
+from __future__ import unicode_literals
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('osf', '0041_auto_20170706_1024'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='abstractnode',
+            name='creator',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='nodes_created', to=settings.AUTH_USER_MODEL),
+        ),
+    ]
diff --git a/osf/models/node.py b/osf/models/node.py
index 3ae623db146..59a1c912679 100644
--- a/osf/models/node.py
+++ b/osf/models/node.py
@@ -288,7 +288,7 @@ def contributors(self):
 
     creator = models.ForeignKey(OSFUser,
                                 db_index=True,
-                                related_name='created',
+                                related_name='nodes_created',
                                 on_delete=models.SET_NULL,
                                 null=True, blank=True)
     date_created = NonNaiveDateTimeField(auto_now_add=True)
diff --git a/osf/models/user.py b/osf/models/user.py
index 47ba5f35888..ed26d7afcca 100644
--- a/osf/models/user.py
+++ b/osf/models/user.py
@@ -688,7 +688,7 @@ def merge_user(self, user):
         from osf.models import BaseFileNode
 
         # - projects where the user was the creator
-        user.created.filter(is_bookmark_collection=False).exclude(type=QuickFilesNode._typedmodels_type).update(creator=self)
+        user.nodes_created.filter(is_bookmark_collection=False).exclude(type=QuickFilesNode._typedmodels_type).update(creator=self)
 
         # - file that the user has checked_out, import done here to prevent import error
         for file_node in BaseFileNode.files_checked_out(user=user):
diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py
index 7065f02b604..a6cfcc67055 100644
--- a/osf_tests/test_user.py
+++ b/osf_tests/test_user.py
@@ -1447,7 +1447,7 @@ def test_created_property(self):
         # make sure there's at least one project
         ProjectFactory(creator=self.user)
         projects_created_by_user = AbstractNode.objects.filter(creator=self.user)
-        assert list(self.user.created.all()) == list(projects_created_by_user)
+        assert list(self.user.nodes_created.all()) == list(projects_created_by_user)
 
 
 # Copied from tests/models/test_user.py

From 907b89c3131ad9431b23037145747eca18d18e3f Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Thu, 13 Jul 2017 12:03:25 -0400
Subject: [PATCH 140/192] Fix addon model inheritance

---
 addons/base/models.py        | 2 +-
 addons/bitbucket/models.py   | 4 ++--
 addons/dataverse/models.py   | 2 +-
 addons/dropbox/models.py     | 2 +-
 addons/figshare/models.py    | 4 ++--
 addons/github/models.py      | 4 ++--
 addons/googledrive/models.py | 2 +-
 addons/osfstorage/models.py  | 2 +-
 addons/owncloud/models.py    | 2 +-
 addons/s3/models.py          | 2 +-
 10 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/addons/base/models.py b/addons/base/models.py
index 9043994e634..ab2c852aab7 100644
--- a/addons/base/models.py
+++ b/addons/base/models.py
@@ -542,7 +542,7 @@ class GenericRootNode(object):
     name = ''
 
 
-class BaseStorageAddon(BaseModel):
+class BaseStorageAddon(object):
     """
     Mixin class for traversing file trees of addons with files
     """
diff --git a/addons/bitbucket/models.py b/addons/bitbucket/models.py
index f8d266a7d9f..d5a158f9157 100644
--- a/addons/bitbucket/models.py
+++ b/addons/bitbucket/models.py
@@ -80,7 +80,7 @@ def fetch_access_token(self, force_refresh=False):
         return self.account.oauth_key
 
 
-class UserSettings(BaseStorageAddon, BaseOAuthUserSettings):
+class UserSettings(BaseOAuthUserSettings):
     """Stores user-specific bitbucket information
 
     Quirks::
@@ -101,7 +101,7 @@ def public_id(self):
         return None
 
 
-class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
+class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
     oauth_provider = BitbucketProvider
     serializer = BitbucketSerializer
 
diff --git a/addons/dataverse/models.py b/addons/dataverse/models.py
index 8b048e12e54..8658ff32fb5 100644
--- a/addons/dataverse/models.py
+++ b/addons/dataverse/models.py
@@ -74,7 +74,7 @@ class UserSettings(BaseOAuthUserSettings):
     serializer = DataverseSerializer
 
 
-class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
+class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
     oauth_provider = DataverseProvider
     serializer = DataverseSerializer
 
diff --git a/addons/dropbox/models.py b/addons/dropbox/models.py
index b88a9837573..197c7aaf636 100644
--- a/addons/dropbox/models.py
+++ b/addons/dropbox/models.py
@@ -121,7 +121,7 @@ def revoke_remote_oauth_access(self, external_account):
             pass
 
 
-class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
+class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
     oauth_provider = Provider
     serializer = DropboxSerializer
 
diff --git a/addons/figshare/models.py b/addons/figshare/models.py
index dfc465bff4d..4f3a7315d61 100644
--- a/addons/figshare/models.py
+++ b/addons/figshare/models.py
@@ -85,14 +85,14 @@ def handle_callback(self, response):
         }
 
 
-class UserSettings(BaseStorageAddon, BaseOAuthUserSettings):
+class UserSettings(BaseOAuthUserSettings):
     """Stores user-specific figshare information
     """
     oauth_provider = FigshareProvider
     serializer = FigshareSerializer
 
 
-class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
+class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
     oauth_provider = FigshareProvider
     serializer = FigshareSerializer
 
diff --git a/addons/github/models.py b/addons/github/models.py
index 43320a52c11..48994cbc04f 100644
--- a/addons/github/models.py
+++ b/addons/github/models.py
@@ -68,7 +68,7 @@ def handle_callback(self, response):
         }
 
 
-class UserSettings(BaseStorageAddon, BaseOAuthUserSettings):
+class UserSettings(BaseOAuthUserSettings):
     """Stores user-specific github information
     """
     oauth_provider = GitHubProvider
@@ -95,7 +95,7 @@ def public_id(self):
         return None
 
 
-class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
+class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
     oauth_provider = GitHubProvider
     serializer = GitHubSerializer
 
diff --git a/addons/googledrive/models.py b/addons/googledrive/models.py
index fba67a85c32..b1906903845 100644
--- a/addons/googledrive/models.py
+++ b/addons/googledrive/models.py
@@ -74,7 +74,7 @@ class UserSettings(BaseOAuthUserSettings):
     serializer = GoogleDriveSerializer
 
 
-class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
+class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
     oauth_provider = GoogleDriveProvider
     provider_name = 'googledrive'
 
diff --git a/addons/osfstorage/models.py b/addons/osfstorage/models.py
index 1746aba3094..c9be74c635a 100644
--- a/addons/osfstorage/models.py
+++ b/addons/osfstorage/models.py
@@ -381,7 +381,7 @@ def serialize(self, include_full=False, version=None):
         return ret
 
 
-class NodeSettings(BaseStorageAddon, BaseNodeSettings):
+class NodeSettings(BaseNodeSettings, BaseStorageAddon):
     # Required overrides
     complete = True
     has_auth = True
diff --git a/addons/owncloud/models.py b/addons/owncloud/models.py
index 62a0922f597..bcad90cea42 100644
--- a/addons/owncloud/models.py
+++ b/addons/owncloud/models.py
@@ -56,7 +56,7 @@ def to_json(self, user):
         return ret
 
 
-class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
+class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
     oauth_provider = OwnCloudProvider
     serializer = OwnCloudSerializer
 
diff --git a/addons/s3/models.py b/addons/s3/models.py
index 0f8debc0da9..9a564812e6e 100644
--- a/addons/s3/models.py
+++ b/addons/s3/models.py
@@ -31,7 +31,7 @@ class UserSettings(BaseOAuthUserSettings):
     serializer = S3Serializer
 
 
-class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
+class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
     oauth_provider = S3Provider
     serializer = S3Serializer
 

From 284bd3a192f6ebae538e2cfb7270f4d1e894d050 Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Thu, 13 Jul 2017 15:27:18 -0400
Subject: [PATCH 141/192] Add migrations

---
 .../migrations/0002_auto_20170808_1140.py     |  39 ++
 .../box/migrations/0003_auto_20170713_1125.py |  39 ++
 .../migrations/0003_auto_20170713_1125.py     |  39 ++
 .../migrations/0003_auto_20170713_1125.py     |  39 ++
 .../migrations/0003_auto_20170713_1125.py     |  39 ++
 .../migrations/0003_auto_20170713_1125.py     |  28 +
 .../migrations/0003_auto_20170713_1125.py     |  39 ++
 .../migrations/0003_auto_20170713_1125.py     |  39 ++
 .../migrations/0003_auto_20170713_1125.py     |  39 ++
 .../migrations/0003_auto_20170713_1125.py     |  28 +
 .../migrations/0003_auto_20170713_1125.py     |  39 ++
 .../s3/migrations/0003_auto_20170713_1125.py  |  39 ++
 .../migrations/0003_auto_20170713_1125.py     |  28 +
 .../migrations/0005_auto_20170713_1125.py     |  39 ++
 .../migrations/0003_auto_20170713_1125.py     |  39 ++
 ...25.py => 0065_creator_modified_renames.py} |   7 +-
 .../0066_skippable_created_modified.py        | 539 ++++++++++++++++++
 scripts/premigrate_created_modified.py        |   5 -
 18 files changed, 1097 insertions(+), 6 deletions(-)
 create mode 100644 addons/bitbucket/migrations/0002_auto_20170808_1140.py
 create mode 100644 addons/box/migrations/0003_auto_20170713_1125.py
 create mode 100644 addons/dataverse/migrations/0003_auto_20170713_1125.py
 create mode 100644 addons/dropbox/migrations/0003_auto_20170713_1125.py
 create mode 100644 addons/figshare/migrations/0003_auto_20170713_1125.py
 create mode 100644 addons/forward/migrations/0003_auto_20170713_1125.py
 create mode 100644 addons/github/migrations/0003_auto_20170713_1125.py
 create mode 100644 addons/googledrive/migrations/0003_auto_20170713_1125.py
 create mode 100644 addons/mendeley/migrations/0003_auto_20170713_1125.py
 create mode 100644 addons/osfstorage/migrations/0003_auto_20170713_1125.py
 create mode 100644 addons/owncloud/migrations/0003_auto_20170713_1125.py
 create mode 100644 addons/s3/migrations/0003_auto_20170713_1125.py
 create mode 100644 addons/twofactor/migrations/0003_auto_20170713_1125.py
 create mode 100644 addons/wiki/migrations/0005_auto_20170713_1125.py
 create mode 100644 addons/zotero/migrations/0003_auto_20170713_1125.py
 rename osf/migrations/{0042_auto_20170713_1025.py => 0065_creator_modified_renames.py} (76%)
 create mode 100644 osf/migrations/0066_skippable_created_modified.py

diff --git a/addons/bitbucket/migrations/0002_auto_20170808_1140.py b/addons/bitbucket/migrations/0002_auto_20170808_1140.py
new file mode 100644
index 00000000000..468fdbfa848
--- /dev/null
+++ b/addons/bitbucket/migrations/0002_auto_20170808_1140.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-08-08 16:40
+from __future__ import unicode_literals
+
+from django.db import migrations
+import django.utils.timezone
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_bitbucket', '0001_initial'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/box/migrations/0003_auto_20170713_1125.py b/addons/box/migrations/0003_auto_20170713_1125.py
new file mode 100644
index 00000000000..20c0830872c
--- /dev/null
+++ b/addons/box/migrations/0003_auto_20170713_1125.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 16:25
+from __future__ import unicode_literals
+
+import datetime
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_box', '0002_auto_20170323_1534'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/dataverse/migrations/0003_auto_20170713_1125.py b/addons/dataverse/migrations/0003_auto_20170713_1125.py
new file mode 100644
index 00000000000..31bba6403b4
--- /dev/null
+++ b/addons/dataverse/migrations/0003_auto_20170713_1125.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 16:25
+from __future__ import unicode_literals
+
+import datetime
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_dataverse', '0002_auto_20170323_1534'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/dropbox/migrations/0003_auto_20170713_1125.py b/addons/dropbox/migrations/0003_auto_20170713_1125.py
new file mode 100644
index 00000000000..31f9667de96
--- /dev/null
+++ b/addons/dropbox/migrations/0003_auto_20170713_1125.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 16:25
+from __future__ import unicode_literals
+
+import datetime
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_dropbox', '0002_auto_20170323_1534'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/figshare/migrations/0003_auto_20170713_1125.py b/addons/figshare/migrations/0003_auto_20170713_1125.py
new file mode 100644
index 00000000000..b05a7e2da5d
--- /dev/null
+++ b/addons/figshare/migrations/0003_auto_20170713_1125.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 16:25
+from __future__ import unicode_literals
+
+import datetime
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_figshare', '0002_auto_20170323_1534'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/forward/migrations/0003_auto_20170713_1125.py b/addons/forward/migrations/0003_auto_20170713_1125.py
new file mode 100644
index 00000000000..0f6fb9e6dd8
--- /dev/null
+++ b/addons/forward/migrations/0003_auto_20170713_1125.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 16:25
+from __future__ import unicode_literals
+
+import datetime
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_forward', '0002_nodesettings_owner'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/github/migrations/0003_auto_20170713_1125.py b/addons/github/migrations/0003_auto_20170713_1125.py
new file mode 100644
index 00000000000..912a107f16f
--- /dev/null
+++ b/addons/github/migrations/0003_auto_20170713_1125.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 16:25
+from __future__ import unicode_literals
+
+import datetime
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_github', '0002_auto_20170323_1534'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/googledrive/migrations/0003_auto_20170713_1125.py b/addons/googledrive/migrations/0003_auto_20170713_1125.py
new file mode 100644
index 00000000000..7617ec73569
--- /dev/null
+++ b/addons/googledrive/migrations/0003_auto_20170713_1125.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 16:25
+from __future__ import unicode_literals
+
+import datetime
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_googledrive', '0002_auto_20170323_1534'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/mendeley/migrations/0003_auto_20170713_1125.py b/addons/mendeley/migrations/0003_auto_20170713_1125.py
new file mode 100644
index 00000000000..23cdb22461b
--- /dev/null
+++ b/addons/mendeley/migrations/0003_auto_20170713_1125.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 16:25
+from __future__ import unicode_literals
+
+import datetime
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_mendeley', '0002_auto_20170323_1534'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/osfstorage/migrations/0003_auto_20170713_1125.py b/addons/osfstorage/migrations/0003_auto_20170713_1125.py
new file mode 100644
index 00000000000..9f3975674f8
--- /dev/null
+++ b/addons/osfstorage/migrations/0003_auto_20170713_1125.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 16:25
+from __future__ import unicode_literals
+
+import datetime
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_osfstorage', '0002_auto_20170323_1534'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/owncloud/migrations/0003_auto_20170713_1125.py b/addons/owncloud/migrations/0003_auto_20170713_1125.py
new file mode 100644
index 00000000000..453bf4460a4
--- /dev/null
+++ b/addons/owncloud/migrations/0003_auto_20170713_1125.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 16:25
+from __future__ import unicode_literals
+
+import datetime
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_owncloud', '0002_auto_20170323_1534'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/s3/migrations/0003_auto_20170713_1125.py b/addons/s3/migrations/0003_auto_20170713_1125.py
new file mode 100644
index 00000000000..d7ddb2b1578
--- /dev/null
+++ b/addons/s3/migrations/0003_auto_20170713_1125.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 16:25
+from __future__ import unicode_literals
+
+import datetime
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_s3', '0002_auto_20170323_1534'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/twofactor/migrations/0003_auto_20170713_1125.py b/addons/twofactor/migrations/0003_auto_20170713_1125.py
new file mode 100644
index 00000000000..8240ddfed43
--- /dev/null
+++ b/addons/twofactor/migrations/0003_auto_20170713_1125.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 16:25
+from __future__ import unicode_literals
+
+import datetime
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_twofactor', '0002_usersettings_owner'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='usersettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/wiki/migrations/0005_auto_20170713_1125.py b/addons/wiki/migrations/0005_auto_20170713_1125.py
new file mode 100644
index 00000000000..5428ae4f108
--- /dev/null
+++ b/addons/wiki/migrations/0005_auto_20170713_1125.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 16:25
+from __future__ import unicode_literals
+
+import datetime
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_wiki', '0004_remove_nodewikipage_guid_string'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='nodewikipage',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodewikipage',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/zotero/migrations/0003_auto_20170713_1125.py b/addons/zotero/migrations/0003_auto_20170713_1125.py
new file mode 100644
index 00000000000..5d18e064c14
--- /dev/null
+++ b/addons/zotero/migrations/0003_auto_20170713_1125.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2017-07-13 16:25
+from __future__ import unicode_literals
+
+import datetime
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_zotero', '0002_auto_20170323_1534'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/osf/migrations/0042_auto_20170713_1025.py b/osf/migrations/0065_creator_modified_renames.py
similarity index 76%
rename from osf/migrations/0042_auto_20170713_1025.py
rename to osf/migrations/0065_creator_modified_renames.py
index 0ed0194acbe..ca6af016b9d 100644
--- a/osf/migrations/0042_auto_20170713_1025.py
+++ b/osf/migrations/0065_creator_modified_renames.py
@@ -10,7 +10,7 @@
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('osf', '0041_auto_20170706_1024'),
+        ('osf', '0064_auto_20171019_0918'),
     ]
 
     operations = [
@@ -19,4 +19,9 @@ class Migration(migrations.Migration):
             name='creator',
             field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='nodes_created', to=settings.AUTH_USER_MODEL),
         ),
+        migrations.RenameField(
+            model_name='comment',
+            old_name='modified',
+            new_name='edited'
+        ),
     ]
diff --git a/osf/migrations/0066_skippable_created_modified.py b/osf/migrations/0066_skippable_created_modified.py
new file mode 100644
index 00000000000..d227acafad2
--- /dev/null
+++ b/osf/migrations/0066_skippable_created_modified.py
@@ -0,0 +1,539 @@
+# -*- coding: utf-8 -*-
+# Note:
+# This migration may be skipped if `scripts/premigrate_created_modified.py`
+# has been run: that script backfills the larger tables asynchronously, avoiding downtime.
+# These model changes must not be released until those beat tasks are essentially complete.
+from __future__ import unicode_literals
+
+from django.db import migrations
+import django.utils.timezone
+import django_extensions.db.fields
+import osf.utils.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('osf', '0065_creator_modified_renames'),
+    ]
+
+    operations = [
+        migrations.AlterModelOptions(
+            name='fileversion',
+            options={'ordering': ('-created',)},
+        ),
+        migrations.RenameField(
+            model_name='action',
+            old_name='date_modified',
+            new_name='modified',
+        ),
+        migrations.RenameField(
+            model_name='action',
+            old_name='date_created',
+            new_name='created',
+        ),
+        migrations.RenameField(
+            model_name='fileversion',
+            old_name='date_modified',
+            new_name='external_modified',
+        ),
+        migrations.RemoveField(
+            model_name='abstractnode',
+            name='date_created',
+        ),
+        migrations.RemoveField(
+            model_name='abstractnode',
+            name='date_modified',
+        ),
+        migrations.RemoveField(
+            model_name='apioauth2application',
+            name='date_created',
+        ),
+        migrations.RemoveField(
+            model_name='comment',
+            name='date_created',
+        ),
+        migrations.RemoveField(
+            model_name='comment',
+            name='date_modified',
+        ),
+        migrations.RemoveField(
+            model_name='fileversion',
+            name='date_created',
+        ),
+        migrations.RemoveField(
+            model_name='preprintservice',
+            name='date_created',
+        ),
+        migrations.RemoveField(
+            model_name='preprintservice',
+            name='date_modified',
+        ),
+        migrations.RemoveField(
+            model_name='privatelink',
+            name='date_created',
+        ),
+        migrations.RemoveField(
+            model_name='session',
+            name='date_created',
+        ),
+        migrations.RemoveField(
+            model_name='session',
+            name='date_modified',
+        ),
+        migrations.AddField(
+            model_name='abstractnode',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='abstractnode',
+            name='last_logged',
+            field=osf.utils.fields.NonNaiveDateTimeField(blank=True, db_index=True, default=django.utils.timezone.now, null=True),
+        ),
+        migrations.AddField(
+            model_name='abstractnode',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='apioauth2application',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='apioauth2application',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='apioauth2personaltoken',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='apioauth2personaltoken',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='apioauth2scope',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='apioauth2scope',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='archivejob',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='archivejob',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='archivetarget',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='archivetarget',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='basefilenode',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='basefilenode',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='blacklistguid',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='blacklistguid',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='citationstyle',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='citationstyle',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='comment',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='comment',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='conference',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='conference',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='draftregistration',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='draftregistration',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='draftregistrationapproval',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='draftregistrationapproval',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='draftregistrationlog',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='draftregistrationlog',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='embargo',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='embargo',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='embargoterminationapproval',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='embargoterminationapproval',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='externalaccount',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='externalaccount',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='fileversion',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='fileversion',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='guid',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='identifier',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='identifier',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='institution',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='institution',
+            name='last_logged',
+            field=osf.utils.fields.NonNaiveDateTimeField(blank=True, db_index=True, default=django.utils.timezone.now, null=True),
+        ),
+        migrations.AddField(
+            model_name='institution',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='mailrecord',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='mailrecord',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='metaschema',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='metaschema',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='nodelicense',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodelicense',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='nodelicenserecord',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodelicenserecord',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='nodelog',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodelog',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='noderelation',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='noderelation',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='notificationdigest',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='notificationdigest',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='notificationsubscription',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='notificationsubscription',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='osfuser',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='osfuser',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='pagecounter',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='pagecounter',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='preprintprovider',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='preprintprovider',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='preprintservice',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='preprintservice',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='privatelink',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='privatelink',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='queuedmail',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='queuedmail',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='registrationapproval',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='registrationapproval',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='retraction',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='retraction',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='session',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='session',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='subject',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='subject',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='tag',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='tag',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='useractivitycounter',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='useractivitycounter',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/scripts/premigrate_created_modified.py b/scripts/premigrate_created_modified.py
index 2bc6d734715..1af3d45cfc2 100644
--- a/scripts/premigrate_created_modified.py
+++ b/scripts/premigrate_created_modified.py
@@ -53,8 +53,6 @@
     "UPDATE osf_pagecounter SET created='epoch', modified='epoch' WHERE created IS NULL;",
     "ALTER TABLE osf_pagecounter ALTER COLUMN created SET NOT NULL;",
     "ALTER TABLE osf_pagecounter ALTER COLUMN modified SET NOT NULL;",
-    "ALTER TABLE osf_alternativecitation ADD COLUMN created timestamp with time zone;",
-    "ALTER TABLE osf_alternativecitation ADD COLUMN modified timestamp with time zone;",
     "ALTER TABLE osf_apioauth2application ADD COLUMN modified timestamp with time zone;",
     "ALTER TABLE osf_apioauth2personaltoken ADD COLUMN created timestamp with time zone;",
     "ALTER TABLE osf_apioauth2personaltoken ADD COLUMN modified timestamp with time zone;",
@@ -141,7 +139,6 @@
     SET last_logged=modified
     WHERE (SELECT COUNT(id) FROM osf_nodelog WHERE node_id = "osf_abstractnode"."id" LIMIT 1) = 0;
     """,
-    "UPDATE osf_alternativecitation SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
     "UPDATE osf_apioauth2application SET modified='epoch';",
     "UPDATE osf_apioauth2personaltoken SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
     "UPDATE osf_apioauth2scope SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
@@ -178,8 +175,6 @@
     "UPDATE osf_subject SET created=TO_TIMESTAMP(('x' || SUBSTR(_id, 1, 8))::bit(32)::int)::timestamptz, modified='epoch';",
     "UPDATE osf_tag SET created='epoch', modified='epoch';",
     "UPDATE osf_useractivitycounter SET created='epoch', modified='epoch';",
-    "ALTER TABLE osf_alternativecitation ALTER COLUMN created SET NOT NULL;",
-    "ALTER TABLE osf_alternativecitation ALTER COLUMN modified SET NOT NULL;",
     "ALTER TABLE osf_apioauth2application ALTER COLUMN modified SET NOT NULL;",
     "ALTER TABLE osf_apioauth2personaltoken ALTER COLUMN created SET NOT NULL;",
     "ALTER TABLE osf_apioauth2personaltoken ALTER COLUMN modified SET NOT NULL;",

From 9122acce2105a32e5272ade54badc065b2fb2aa7 Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Thu, 13 Jul 2017 15:27:31 -0400
Subject: [PATCH 142/192] Update app

---
 addons/osfstorage/models.py                   |  6 +-
 addons/osfstorage/tests/factories.py          |  2 +-
 addons/osfstorage/tests/test_models.py        | 14 ++--
 addons/osfstorage/tests/test_utils.py         |  4 +-
 addons/osfstorage/tests/test_views.py         |  5 +-
 addons/osfstorage/utils.py                    |  4 +-
 addons/osfstorage/views.py                    | 10 +--
 admin/institutions/views.py                   |  2 +-
 admin/nodes/serializers.py                    |  2 +-
 admin/nodes/views.py                          |  4 +-
 admin/preprints/serializers.py                |  4 +-
 admin/spam/serializers.py                     |  6 +-
 admin/users/views.py                          |  2 +-
 admin_tests/users/test_views.py               | 24 +++----
 api/applications/serializers.py               |  2 +-
 api/applications/views.py                     |  2 +-
 api/base/filters.py                           |  2 +-
 api/base/views.py                             | 12 ++--
 api/citations/views.py                        |  2 +-
 api/collections/serializers.py                |  4 +-
 api/collections/views.py                      | 12 ++--
 api/comments/serializers.py                   |  6 +-
 api/comments/views.py                         |  2 +-
 api/files/serializers.py                      | 10 +--
 api/institutions/views.py                     |  4 +-
 api/nodes/serializers.py                      |  6 +-
 api/nodes/views.py                            | 66 +++++++++----------
 api/preprint_providers/views.py               |  2 +-
 api/preprints/serializers.py                  |  4 +-
 api/preprints/views.py                        |  6 +-
 api/registrations/serializers.py              |  2 +-
 api/registrations/views.py                    | 16 ++---
 api/search/views.py                           |  2 +-
 api/users/serializers.py                      |  2 +-
 api/users/views.py                            | 12 ++--
 api/view_only_links/serializers.py            |  2 +-
 api/view_only_links/views.py                  |  2 +-
 api/wikis/views.py                            |  2 +-
 api_tests/base/test_serializers.py            | 16 ++---
 api_tests/collections/test_serializers.py     |  8 +--
 .../files/serializers/test_file_serializer.py | 18 ++---
 api_tests/files/views/test_file_detail.py     |  6 +-
 .../views/test_institution_nodes_list.py      | 12 ++--
 api_tests/nodes/filters/test_filters.py       | 18 ++---
 .../nodes/views/test_node_comments_list.py    |  4 +-
 api_tests/nodes/views/test_node_list.py       |  6 +-
 api_tests/preprints/filters/test_filters.py   | 21 +++---
 .../views/test_withdrawn_registrations.py     |  4 +-
 framework/sessions/__init__.py                |  2 +-
 .../commands/export_user_account.py           |  8 +--
 osf/management/commands/force_archive.py      |  6 +-
 .../commands/update_preprint_share_dates.py   |  4 +-
 osf/migrations/0053_add_quickfiles.py         |  2 +-
 osf/models/base.py                            |  8 ++-
 osf/models/comment.py                         | 15 ++---
 osf/models/files.py                           | 29 ++++----
 osf/models/mixins.py                          |  9 ++-
 osf/models/node.py                            | 12 ++--
 osf/models/oauth.py                           |  3 -
 osf/models/preprint_service.py                |  2 -
 osf/models/private_link.py                    |  5 +-
 osf/models/session.py                         |  3 -
 osf/models/user.py                            |  5 +-
 osf/utils/migrations.py                       | 10 +++
 osf_tests/factories.py                        |  2 +-
 osf_tests/test_node.py                        | 16 ++---
 osf_tests/test_oauth_application.py           |  2 +-
 osf_tests/test_preprint_summary.py            |  4 +-
 osf_tests/test_user.py                        |  4 +-
 osf_tests/test_utils.py                       | 10 +--
 scripts/analytics/institution_summary.py      |  2 +-
 scripts/analytics/node_summary.py             |  2 +-
 scripts/generate_sitemap.py                   |  6 +-
 scripts/osfstorage/glacier_audit.py           |  2 +-
 .../populate_new_and_noteworthy_projects.py   |  2 +-
 tests/test_auth.py                            |  2 +-
 tests/test_auth_basic_auth.py                 |  2 +-
 tests/test_identifiers.py                     |  2 +-
 tests/test_preprints.py                       | 10 +--
 tests/test_registrations/test_retractions.py  |  6 +-
 tests/test_websitefiles.py                    |  6 +-
 tests/test_webtests.py                        |  4 +-
 website/conferences/views.py                  |  2 +-
 website/identifiers/metadata.py               |  4 +-
 website/preprints/tasks.py                    |  2 +-
 website/project/views/node.py                 | 11 ++--
 website/search/elastic_search.py              |  2 +-
 website/search_migration/migrate.py           |  2 +-
 .../templates/public/pages/active_nodes.mako  | 12 ++--
 89 files changed, 311 insertions(+), 315 deletions(-)

diff --git a/addons/osfstorage/models.py b/addons/osfstorage/models.py
index c9be74c635a..4028a151cec 100644
--- a/addons/osfstorage/models.py
+++ b/addons/osfstorage/models.py
@@ -224,13 +224,13 @@ def serialize(self, include_full=None, version=None):
             ret['fullPath'] = self.materialized_path
 
         version = self.get_version(version)
-        earliest_version = self.versions.order_by('date_created').first()
+        earliest_version = self.versions.order_by('created').first()
         ret.update({
             'version': self.versions.count(),
             'md5': version.metadata.get('md5') if version else None,
             'sha256': version.metadata.get('sha256') if version else None,
-            'modified': version.date_created.isoformat() if version else None,
-            'created': earliest_version.date_created.isoformat() if version else None,
+            'modified': version.created.isoformat() if version else None,
+            'created': earliest_version.created.isoformat() if version else None,
         })
         return ret
 
diff --git a/addons/osfstorage/tests/factories.py b/addons/osfstorage/tests/factories.py
index b5ce6d4474f..67310e636e9 100644
--- a/addons/osfstorage/tests/factories.py
+++ b/addons/osfstorage/tests/factories.py
@@ -25,7 +25,7 @@ class Meta:
         model = models.FileVersion
 
     creator = SubFactory(AuthUserFactory)
-    date_modified = timezone.now()
+    modified = timezone.now()
     location = generic_location
     identifier = 0
 
diff --git a/addons/osfstorage/tests/test_models.py b/addons/osfstorage/tests/test_models.py
index ab9d5da88f8..c90782e0e82 100644
--- a/addons/osfstorage/tests/test_models.py
+++ b/addons/osfstorage/tests/test_models.py
@@ -91,13 +91,13 @@ def test_serialize(self):
         assert_equals(file.serialize(), {
             u'id': file._id,
             u'path': file.path,
-            u'created': version.date_created.isoformat(),
+            u'created': version.created.isoformat(),
             u'name': u'MOAR PYLONS',
             u'kind': u'file',
             u'version': 1,
             u'downloads': 0,
             u'size': 1234L,
-            u'modified': version.date_created.isoformat(),
+            u'modified': version.created.isoformat(),
             u'contentType': u'text/plain',
             u'checkout': None,
             u'md5': None,
@@ -112,7 +112,7 @@ def test_serialize(self):
         assert_equals(file.serialize(), {
             u'id': file._id,
             u'path': file.path,
-            u'created': version.date_created.isoformat(),
+            u'created': version.created.isoformat(),
             u'name': u'MOAR PYLONS',
             u'kind': u'file',
             u'version': 1,
@@ -120,7 +120,7 @@ def test_serialize(self):
             u'size': 1234L,
             # modified date is the creation date of latest version
             # see https://github.com/CenterForOpenScience/osf.io/pull/7155
-            u'modified': version.date_created.isoformat(),
+            u'modified': version.created.isoformat(),
             u'contentType': u'text/plain',
             u'checkout': None,
             u'md5': None,
@@ -234,7 +234,7 @@ def test_delete_file(self):
         child_storage['materialized_path'] = child.materialized_path
         assert_equal(trashed.path, '/' + child._id)
         trashed_field_names = [f.name for f in child._meta.get_fields() if not f.is_relation and
-                               f.name not in ['id', '_materialized_path', 'content_type_pk', '_path', 'deleted_on', 'deleted_by', 'type']]
+                               f.name not in ['id', '_materialized_path', 'content_type_pk', '_path', 'deleted_on', 'deleted_by', 'type', 'modified']]
         for f, value in child_data.iteritems():
             if f in trashed_field_names:
                 assert_equal(getattr(trashed, f), value)
@@ -564,7 +564,7 @@ def test_fields(self):
         version = factories.FileVersionFactory(
             size=1024,
             content_type='application/json',
-            date_modified=timezone.now(),
+            modified=timezone.now(),
         )
         retrieved = models.FileVersion.load(version._id)
         assert_true(retrieved.creator)
@@ -573,7 +573,7 @@ def test_fields(self):
         # sometimes identifiers are strings, so this always has to be a string, sql is funny about that.
         assert_equal(retrieved.identifier, u"0")
         assert_true(retrieved.content_type)
-        assert_true(retrieved.date_modified)
+        assert_true(retrieved.modified)
 
     def test_is_duplicate_true(self):
         version1 = factories.FileVersionFactory()
diff --git a/addons/osfstorage/tests/test_utils.py b/addons/osfstorage/tests/test_utils.py
index 563a93151dd..7675db8bf36 100644
--- a/addons/osfstorage/tests/test_utils.py
+++ b/addons/osfstorage/tests/test_utils.py
@@ -39,7 +39,7 @@ def test_serialize_revision(self):
                 'name': self.user.fullname,
                 'url': self.user.url,
             },
-            'date': self.versions[0].date_created.isoformat(),
+            'date': self.versions[0].created.isoformat(),
             'downloads': 2,
             'md5': None,
             'sha256': None,
@@ -63,7 +63,7 @@ def test_anon_revisions(self):
         expected = {
             'index': 2,
             'user': None,
-            'date': self.versions[0].date_created.isoformat(),
+            'date': self.versions[0].created.isoformat(),
             'downloads': 0,
             'md5': None,
             'sha256': None,
diff --git a/addons/osfstorage/tests/test_views.py b/addons/osfstorage/tests/test_views.py
index 7bbd1fc4458..8a06f7ef2d0 100644
--- a/addons/osfstorage/tests/test_views.py
+++ b/addons/osfstorage/tests/test_views.py
@@ -106,7 +106,6 @@ def test_children_metadata(self):
         assert_equal(res_date_created, expected_date_created)
         assert_equal(res_data, expected_data)
 
-
     def test_osf_storage_root(self):
         auth = Auth(self.project.creator)
         result = osf_storage_root(self.node_settings.config, self.node_settings, auth)
@@ -393,7 +392,7 @@ def send_metadata_hook(self, payload=None, **kwargs):
         )
 
     def test_callback(self):
-        self.version.date_modified = None
+        self.version.external_modified = None
         self.version.save()
         self.send_metadata_hook()
         self.version.reload()
@@ -404,7 +403,7 @@ def test_callback(self):
 
         #Test attributes are populated
         assert_equal(self.version.size, 123)
-        assert_true(isinstance(self.version.date_modified, datetime.datetime))
+        assert_true(isinstance(self.version.external_modified, datetime.datetime))
 
     def test_archived(self):
         self.send_metadata_hook({
diff --git a/addons/osfstorage/utils.py b/addons/osfstorage/utils.py
index 847f4f1fb8f..033357eeed9 100644
--- a/addons/osfstorage/utils.py
+++ b/addons/osfstorage/utils.py
@@ -55,7 +55,7 @@ def serialize_revision(node, record, version, index, anon=False):
     return {
         'user': user,
         'index': index + 1,
-        'date': version.date_created.isoformat(),
+        'date': version.created.isoformat(),
         'downloads': version._download_count if hasattr(version, '_download_count') else record.get_download_count(version=index),
         'md5': version.metadata.get('md5'),
         'sha256': version.metadata.get('sha256'),
@@ -86,7 +86,7 @@ def get_filename(version_idx, file_version, file_record):
     name, ext = os.path.splitext(file_record.name)
     return u'{name}-{date}{ext}'.format(
         name=name,
-        date=file_version.date_created.isoformat(),
+        date=file_version.created.isoformat(),
         ext=ext,
     )
 
diff --git a/addons/osfstorage/views.py b/addons/osfstorage/views.py
index 0a05f846107..313a7fb07bb 100644
--- a/addons/osfstorage/views.py
+++ b/addons/osfstorage/views.py
@@ -69,7 +69,7 @@ def osfstorage_get_revisions(file_node, node_addon, payload, **kwargs):
     version_count = file_node.versions.count()
     # Don't worry. The only % at the end of the LIKE clause, the index is still used
     counts = dict(PageCounter.objects.filter(_id__startswith=counter_prefix).values_list('_id', 'total'))
-    qs = FileVersion.includable_objects.filter(basefilenode__id=file_node.id).include('creator__guids').order_by('-date_created')
+    qs = FileVersion.includable_objects.filter(basefilenode__id=file_node.id).include('creator__guids').order_by('-created')
 
     for i, version in enumerate(qs):
         version._download_count = counts.get('{}{}'.format(counter_prefix, version_count - i - 1), 0)
@@ -143,8 +143,8 @@ def osfstorage_get_children(file_node, **kwargs):
                         , 'downloads',  COALESCE(DOWNLOAD_COUNT, 0)
                         , 'version', (SELECT COUNT(*) FROM osf_basefilenode_versions WHERE osf_basefilenode_versions.basefilenode_id = F.id)
                         , 'contentType', LATEST_VERSION.content_type
-                        , 'modified', LATEST_VERSION.date_created
-                        , 'created', EARLIEST_VERSION.date_created
+                        , 'modified', LATEST_VERSION.created
+                        , 'created', EARLIEST_VERSION.created
                         , 'checkout', CHECKOUT_GUID
                         , 'md5', LATEST_VERSION.metadata ->> 'md5'
                         , 'sha256', LATEST_VERSION.metadata ->> 'sha256'
@@ -163,14 +163,14 @@ def osfstorage_get_children(file_node, **kwargs):
                 SELECT * FROM osf_fileversion
                 JOIN osf_basefilenode_versions ON osf_fileversion.id = osf_basefilenode_versions.fileversion_id
                 WHERE osf_basefilenode_versions.basefilenode_id = F.id
-                ORDER BY date_created DESC
+                ORDER BY created DESC
                 LIMIT 1
             ) LATEST_VERSION ON TRUE
             LEFT JOIN LATERAL (
                 SELECT * FROM osf_fileversion
                 JOIN osf_basefilenode_versions ON osf_fileversion.id = osf_basefilenode_versions.fileversion_id
                 WHERE osf_basefilenode_versions.basefilenode_id = F.id
-                ORDER BY date_created ASC
+                ORDER BY created ASC
                 LIMIT 1
             ) EARLIEST_VERSION ON TRUE
             LEFT JOIN LATERAL (
diff --git a/admin/institutions/views.py b/admin/institutions/views.py
index fdfee5bcd52..45f63f75068 100644
--- a/admin/institutions/views.py
+++ b/admin/institutions/views.py
@@ -140,7 +140,7 @@ def get_context_data(self, *args, **kwargs):
 class InstitutionNodeList(PermissionRequiredMixin, ListView):
     template_name = 'institutions/node_list.html'
     paginate_by = 25
-    ordering = 'date_modified'
+    ordering = 'modified'
     permission_required = 'osf.view_node'
     raise_exception = True
     model = Node
diff --git a/admin/nodes/serializers.py b/admin/nodes/serializers.py
index b1e3060f1ff..ee8ce4d30e9 100644
--- a/admin/nodes/serializers.py
+++ b/admin/nodes/serializers.py
@@ -17,7 +17,7 @@ def serialize_node(node):
         'parent': node.parent_id,
         'root': node.root._id,
         'is_registration': node.is_registration,
-        'date_created': node.date_created,
+        'date_created': node.created,
         'withdrawn': node.is_retracted,
         'embargo': embargo,
         'contributors': [serialize_simple_user_and_node_permissions(node, user) for user in node.contributors],
diff --git a/admin/nodes/views.py b/admin/nodes/views.py
index 6cac7e6cad9..b273a50ae3f 100644
--- a/admin/nodes/views.py
+++ b/admin/nodes/views.py
@@ -210,7 +210,7 @@ class RegistrationListView(PermissionRequiredMixin, ListView):
     template_name = 'nodes/registration_list.html'
     paginate_by = 10
     paginate_orphans = 1
-    ordering = 'date_created'
+    ordering = 'created'
     context_object_name = '-node'
     permission_required = 'osf.view_registration'
     raise_exception = True
@@ -234,7 +234,7 @@ class NodeSpamList(PermissionRequiredMixin, ListView):
 
     paginate_by = 25
     paginate_orphans = 1
-    ordering = 'date_created'
+    ordering = 'created'
     context_object_name = '-node'
     permission_required = 'osf.view_spam'
     raise_exception = True
diff --git a/admin/preprints/serializers.py b/admin/preprints/serializers.py
index 3f447bf860a..94ac7dd7826 100644
--- a/admin/preprints/serializers.py
+++ b/admin/preprints/serializers.py
@@ -5,8 +5,8 @@ def serialize_preprint(preprint):
 
     return {
         'id': preprint._id,
-        'date_created': preprint.date_created,
-        'modified': preprint.date_modified,
+        'date_created': preprint.created,
+        'modified': preprint.modified,
         'provider': preprint.provider,
         'node': serialize_node(preprint.node),
         'is_published': preprint.is_published,
diff --git a/admin/spam/serializers.py b/admin/spam/serializers.py
index ec7a3ff7a58..b4a3f0cbc18 100644
--- a/admin/spam/serializers.py
+++ b/admin/spam/serializers.py
@@ -16,11 +16,11 @@ def serialize_comment(comment):
         'author': OSFUser.load(comment.user._id),
         'author_id': comment.user._id,
         'author_path': author_abs_url.url,
-        'date_created': comment.date_created,
-        'date_modified': comment.date_modified,
+        'date_created': comment.created,
+        'date_modified': comment.modified,
         'content': comment.content,
         'has_children': bool(getattr(comment, 'commented', [])),
-        'modified': comment.modified,
+        'modified': comment.edited,
         'is_deleted': comment.is_deleted,
         'spam_status': comment.spam_status,
         'reports': reports,
diff --git a/admin/users/views.py b/admin/users/views.py
index b0697944bbc..1d177a1097e 100644
--- a/admin/users/views.py
+++ b/admin/users/views.py
@@ -371,7 +371,7 @@ def get_user_logs_since_workshop(user, workshop_date):
     @staticmethod
     def get_user_nodes_since_workshop(user, workshop_date):
         query_date = workshop_date + timedelta(days=1)
-        return Node.objects.filter(creator=user, date_created__gt=query_date)
+        return Node.objects.filter(creator=user, created__gt=query_date)
 
     def parse(self, csv_file):
         """ Parse and add to csv file.
diff --git a/admin_tests/users/test_views.py b/admin_tests/users/test_views.py
index ea48624b5fe..787c18163c4 100644
--- a/admin_tests/users/test_views.py
+++ b/admin_tests/users/test_views.py
@@ -460,16 +460,16 @@ def _add_log(self, date):
         self.node.add_log('log_added', params={'project': self.node._id}, auth=self.auth, log_date=date, save=True)
 
     def test_correct_number_of_columns_added(self):
-        self._setup_workshop(self.node.date_created)
+        self._setup_workshop(self.node.created)
         added_columns = ['OSF ID', 'Logs Since Workshop', 'Nodes Created Since Workshop', 'Last Log Data']
         result_csv = self.view.parse(self.data)
         nt.assert_equal(len(self.data[0]) + len(added_columns), len(result_csv[0]))
 
     def test_user_activity_day_of_workshop_and_before(self):
-        self._setup_workshop(self.node.date_created)
+        self._setup_workshop(self.node.created)
         # add logs 0 to 48 hours back
         for time_mod in range(9):
-            self._add_log(self.node.date_created - timedelta(hours=(time_mod * 6)))
+            self._add_log(self.node.created - timedelta(hours=(time_mod * 6)))
         result_csv = self.view.parse(self.data)
         user_logs_since_workshop = result_csv[1][-3]
         user_nodes_created_since_workshop = result_csv[1][-2]
@@ -478,8 +478,8 @@ def test_user_activity_day_of_workshop_and_before(self):
         nt.assert_equal(user_nodes_created_since_workshop, 0)
 
     def test_user_activity_after_workshop(self):
-        self._setup_workshop(self.node.date_created - timedelta(hours=25))
-        self._add_log(self.node.date_created)
+        self._setup_workshop(self.node.created - timedelta(hours=25))
+        self._add_log(self.node.created)
 
         result_csv = self.view.parse(self.data)
         user_logs_since_workshop = result_csv[1][-3]
@@ -490,7 +490,7 @@ def test_user_activity_after_workshop(self):
         nt.assert_equal(user_nodes_created_since_workshop, 1)
 
         # Test workshop 30 days ago
-        self._setup_workshop(self.node.date_created - timedelta(days=30))
+        self._setup_workshop(self.node.created - timedelta(days=30))
 
         result_csv = self.view.parse(self.data)
         user_logs_since_workshop = result_csv[1][-3]
@@ -500,7 +500,7 @@ def test_user_activity_after_workshop(self):
         nt.assert_equal(user_nodes_created_since_workshop, 1)
 
         # Test workshop a year ago
-        self._setup_workshop(self.node.date_created - timedelta(days=365))
+        self._setup_workshop(self.node.created - timedelta(days=365))
 
         result_csv = self.view.parse(self.data)
         user_logs_since_workshop = result_csv[1][-3]
@@ -511,7 +511,7 @@ def test_user_activity_after_workshop(self):
 
     # Regression test for OSF-8089
     def test_utc_new_day(self):
-        node_date = self.node.date_created
+        node_date = self.node.created
         date = datetime(node_date.year, node_date.month, node_date.day, 0, tzinfo=pytz.utc) + timedelta(days=1)
         self._setup_workshop(date)
         self._add_log(self.workshop_date + timedelta(hours=25))
@@ -522,7 +522,7 @@ def test_utc_new_day(self):
 
     # Regression test for OSF-8089
     def test_utc_new_day_plus_hour(self):
-        node_date = self.node.date_created
+        node_date = self.node.created
         date = datetime(node_date.year, node_date.month, node_date.day, 0, tzinfo=pytz.utc) + timedelta(days=1, hours=1)
         self._setup_workshop(date)
         self._add_log(self.workshop_date + timedelta(hours=25))
@@ -533,7 +533,7 @@ def test_utc_new_day_plus_hour(self):
 
     # Regression test for OSF-8089
     def test_utc_new_day_minus_hour(self):
-        node_date = self.node.date_created
+        node_date = self.node.created
         date = datetime(node_date.year, node_date.month, node_date.day, 0, tzinfo=pytz.utc) + timedelta(days=1) - timedelta(hours=1)
         self._setup_workshop(date)
         self._add_log(self.workshop_date + timedelta(hours=25))
@@ -543,7 +543,7 @@ def test_utc_new_day_minus_hour(self):
         nt.assert_equal(user_logs_since_workshop, 1)
 
     def test_user_osf_account_not_found(self):
-        self._setup_workshop(self.node.date_created)
+        self._setup_workshop(self.node.created)
         result_csv = self.view.parse(self.user_not_found_data)
         user_id = result_csv[1][-4]
         last_log_date = result_csv[1][-1]
@@ -556,7 +556,7 @@ def test_user_osf_account_not_found(self):
         nt.assert_equal(user_nodes_created_since_workshop, 0)
 
     def test_user_found_by_name(self):
-        self._setup_workshop(self.node.date_created)
+        self._setup_workshop(self.node.created)
         result_csv = self.view.parse(self.user_exists_by_name_data)
         user_id = result_csv[1][-4]
         last_log_date = result_csv[1][-1]
diff --git a/api/applications/serializers.py b/api/applications/serializers.py
index 2abe7f06caa..dbd4322d7f4 100644
--- a/api/applications/serializers.py
+++ b/api/applications/serializers.py
@@ -67,7 +67,7 @@ class ApiOAuth2ApplicationSerializer(ApiOAuthApplicationBaseSerializer):
                           read_only=True,  # Don't let user register an application in someone else's name
                           source='owner._id')
 
-    date_created = DateByVersion(help_text='The date this application was generated (automatically filled in)',
+    date_created = DateByVersion(source='created', help_text='The date this application was generated (automatically filled in)',
                                      read_only=True)
 
     def create(self, validated_data):
diff --git a/api/applications/views.py b/api/applications/views.py
index 023229a16ff..de22f02ec5c 100644
--- a/api/applications/views.py
+++ b/api/applications/views.py
@@ -49,7 +49,7 @@ class ApplicationList(JSONAPIBaseView, generics.ListCreateAPIView, ListFilterMix
 
     renderer_classes = [JSONRendererWithESISupport, JSONAPIRenderer, ]  # Hide from web-browsable API tool
 
-    ordering = ('-date_created',)
+    ordering = ('-created',)
 
     def get_default_queryset(self):
         return ApiOAuth2Application.objects.filter(owner=self.request.user, is_active=True)
diff --git a/api/base/filters.py b/api/base/filters.py
index 0c448d87ced..1f5a88dc5a9 100644
--- a/api/base/filters.py
+++ b/api/base/filters.py
@@ -148,7 +148,7 @@ def _validate_operator(self, field, field_name, op):
     def _parse_date_param(self, field, source_field_name, op, value):
         """
         Allow for ambiguous date filters. This supports operations like finding Nodes created on a given day
-        even though Node.date_created is a specific datetime.
+        even though Node.created is a specific datetime.
 
         :return list<dict>: list of one (specific datetime) or more (date range) parsed query params
         """
diff --git a/api/base/views.py b/api/base/views.py
index deb91356cdb..adf177e2b3a 100644
--- a/api/base/views.py
+++ b/api/base/views.py
@@ -589,7 +589,7 @@ def root(request, format=None, **kwargs):
     ###Attribute Validation
 
     Endpoints that allow creation or modification of entities generally limit updates to certain attributes of the
-    entity.  If you attempt to set an attribute that does not permit updates (such as a `date_created` timestamp), the
+    entity.  If you attempt to set an attribute that does not permit updates (such as a `created` timestamp), the
     API will silently ignore that attribute.  This will not affect the response from the API: if the request would have
     succeeded without the updated attribute, it will still report as successful.  Likewise, if the request would have
     failed without the attribute update, the API will still report a failure.
@@ -808,7 +808,7 @@ def get_object(self):
 
 class BaseContributorList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     def get_default_queryset(self):
         node = self.get_node()
@@ -837,7 +837,7 @@ class BaseNodeLinksDetail(JSONAPIBaseView, generics.RetrieveAPIView):
 
 class BaseNodeLinksList(JSONAPIBaseView, generics.ListAPIView):
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     def get_queryset(self):
         auth = get_user_auth(self.request)
@@ -848,7 +848,7 @@ def get_queryset(self):
         return sorted([
             node_link for node_link in query
             if node_link.child.can_view(auth) and not node_link.child.is_retracted
-        ], key=lambda node_link: node_link.child.date_modified, reverse=True)
+        ], key=lambda node_link: node_link.child.modified, reverse=True)
 
 
 class BaseLinkedList(JSONAPIBaseView, generics.ListAPIView):
@@ -868,7 +868,7 @@ class BaseLinkedList(JSONAPIBaseView, generics.ListAPIView):
     view_category = None
     view_name = None
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     # TODO: This class no longer exists
     # model_class = Pointer
@@ -876,7 +876,7 @@ class BaseLinkedList(JSONAPIBaseView, generics.ListAPIView):
     def get_queryset(self):
         auth = get_user_auth(self.request)
 
-        return self.get_node().linked_nodes.filter(is_deleted=False).exclude(type='osf.collection').can_view(user=auth.user, private_link=auth.private_link).order_by('-date_modified')
+        return self.get_node().linked_nodes.filter(is_deleted=False).exclude(type='osf.collection').can_view(user=auth.user, private_link=auth.private_link).order_by('-modified')
 
 
 class WaterButlerMixin(object):
diff --git a/api/citations/views.py b/api/citations/views.py
index d1ef0fb7598..02e54b4b129 100644
--- a/api/citations/views.py
+++ b/api/citations/views.py
@@ -40,7 +40,7 @@ class CitationStyleList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
     view_category = 'citations'
     view_name = 'citation-list'
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     # overrides ListAPIView
     def get_default_queryset(self):
diff --git a/api/collections/serializers.py b/api/collections/serializers.py
index c2892c0472a..b7f57dec265 100644
--- a/api/collections/serializers.py
+++ b/api/collections/serializers.py
@@ -23,8 +23,8 @@ class CollectionSerializer(JSONAPISerializer):
     type = TypeField()
 
     title = ser.CharField(required=True)
-    date_created = DateByVersion(read_only=True)
-    date_modified = DateByVersion(read_only=True)
+    date_created = DateByVersion(source='created', read_only=True)
+    date_modified = DateByVersion(source='modified', read_only=True)
     bookmarks = ser.BooleanField(read_only=False, default=False, source='is_bookmark_collection')
 
     links = LinksField({})
diff --git a/api/collections/views.py b/api/collections/views.py
index 1c238ef8442..69a09466c55 100644
--- a/api/collections/views.py
+++ b/api/collections/views.py
@@ -59,7 +59,7 @@ def get_node(self, check_object_permissions=True):
 class CollectionList(JSONAPIBaseView, bulk_views.BulkUpdateJSONAPIView, bulk_views.BulkDestroyJSONAPIView, bulk_views.ListBulkCreateJSONAPIView, ListFilterMixin):
     """Organizer Collections organize projects and components. *Writeable*.
 
-    Paginated list of Project Organizer Collections ordered by their `date_modified`.
+    Paginated list of Project Organizer Collections ordered by their `modified`.
     Each resource contains the full representation of the project organizer collection, meaning additional
     requests to an individual Organizer Collection's detail view are not necessary.
 
@@ -130,7 +130,7 @@ class CollectionList(JSONAPIBaseView, bulk_views.BulkUpdateJSONAPIView, bulk_vie
     view_name = 'collection-list'
     model_class = Collection
 
-    ordering = ('-date_modified', )  # default ordering
+    ordering = ('-modified', )  # default ordering
 
     def get_default_queryset(self):
         user = self.request.user
@@ -340,7 +340,7 @@ class LinkedNodesList(BaseLinkedList, CollectionMixin):
     view_category = 'collections'
     view_name = 'linked-nodes'
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     def get_queryset(self):
         return super(LinkedNodesList, self).get_queryset().exclude(type='osf.registration')
@@ -365,7 +365,7 @@ class LinkedRegistrationsList(BaseLinkedList, CollectionMixin):
 
     Each resource contains the full representation of the registration, meaning additional requests to an individual
     registration's detail view are not necessary. A withdrawn registration will display a limited subset of information,
-    namely, title, description, date_created, registration, withdrawn, date_registered, withdrawal_justification, and
+    namely, title, description, created, registration, withdrawn, date_registered, withdrawal_justification, and
     registration supplement. All other fields will be displayed as null. Additionally, the only relationships permitted
     to be accessed for a withdrawn registration are the contributors - other relationships will return a 403.
 
@@ -423,7 +423,7 @@ class LinkedRegistrationsList(BaseLinkedList, CollectionMixin):
     view_category = 'collections'
     view_name = 'linked-registrations'
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     def get_queryset(self):
         return super(LinkedRegistrationsList, self).get_queryset().filter(type='osf.registration')
@@ -504,7 +504,7 @@ class NodeLinksList(JSONAPIBaseView, bulk_views.BulkDestroyJSONAPIView, bulk_vie
     view_name = 'node-pointers'
     model_class = NodeRelation
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     def get_queryset(self):
         return self.get_node().node_relations.select_related('child').filter(child__is_deleted=False).exclude(child__type='osf.collection')
diff --git a/api/comments/serializers.py b/api/comments/serializers.py
index c6a41fde942..669522dc0f4 100644
--- a/api/comments/serializers.py
+++ b/api/comments/serializers.py
@@ -43,9 +43,9 @@ class CommentSerializer(JSONAPISerializer):
     user = RelationshipField(related_view='users:user-detail', related_view_kwargs={'user_id': '<user._id>'})
     reports = RelationshipField(related_view='comments:comment-reports', related_view_kwargs={'comment_id': '<_id>'})
 
-    date_created = DateByVersion(read_only=True)
-    date_modified = DateByVersion(read_only=True)
-    modified = ser.BooleanField(read_only=True, default=False)
+    date_created = DateByVersion(source='created', read_only=True)
+    date_modified = DateByVersion(source='modified', read_only=True)
+    modified = ser.BooleanField(source='edited', read_only=True, default=False)
     deleted = ser.BooleanField(read_only=True, source='is_deleted', default=False)
     is_abuse = ser.SerializerMethodField(help_text='If the comment has been reported or confirmed.')
     is_ham = ser.SerializerMethodField(help_text='Comment has been confirmed as ham.')
diff --git a/api/comments/views.py b/api/comments/views.py
index 98d1015c9f8..e0228268ee1 100644
--- a/api/comments/views.py
+++ b/api/comments/views.py
@@ -254,7 +254,7 @@ class CommentReportsList(JSONAPIBaseView, generics.ListCreateAPIView, CommentMix
     view_category = 'comments'
     view_name = 'comment-reports'
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     def get_queryset(self):
         user_id = self.request.user._id
diff --git a/api/files/serializers.py b/api/files/serializers.py
index 1dd5e1b4538..19121790efd 100644
--- a/api/files/serializers.py
+++ b/api/files/serializers.py
@@ -203,10 +203,10 @@ def get_date_modified(self, obj):
         mod_dt = None
         if obj.provider == 'osfstorage' and obj.versions.exists():
             # Each time an osfstorage file is added or uploaded, a new version object is created with its
-            # date_created equal to the time of the update.  The date_modified is the modified date
+            # date_created equal to the time of the update.  The external_modified is the modified date
             # from the backend the file is stored on.  This field refers to the modified date on osfstorage,
-            # so prefer to use the date_created of the latest version.
-            mod_dt = obj.versions.first().date_created
+            # so prefer to use the created of the latest version.
+            mod_dt = obj.versions.first().created
         elif obj.provider != 'osfstorage' and obj.history:
             mod_dt = obj.history[-1].get('modified', None)
 
@@ -218,7 +218,7 @@ def get_date_modified(self, obj):
     def get_date_created(self, obj):
         creat_dt = None
         if obj.provider == 'osfstorage' and obj.versions.exists():
-            creat_dt = obj.versions.last().date_created
+            creat_dt = obj.versions.last().created
         elif obj.provider != 'osfstorage' and obj.history:
             # Non-osfstorage files don't store a created date, so instead get the modified date of the
             # earliest entry in the file history.
@@ -355,7 +355,7 @@ class FileVersionSerializer(JSONAPISerializer):
     id = ser.CharField(read_only=True, source='identifier')
     size = ser.IntegerField(read_only=True, help_text='The size of this file at this version')
     content_type = ser.CharField(read_only=True, help_text='The mime type of this file at this verison')
-    date_created = DateByVersion(read_only=True, help_text='The date that this version was created')
+    date_created = DateByVersion(source='created', read_only=True, help_text='The date that this version was created')
     links = LinksField({
         'self': 'self_url',
         'html': 'absolute_url'
diff --git a/api/institutions/views.py b/api/institutions/views.py
index bf33afa0bfa..ec0208bbfaa 100644
--- a/api/institutions/views.py
+++ b/api/institutions/views.py
@@ -152,7 +152,7 @@ class InstitutionNodeList(JSONAPIBaseView, generics.ListAPIView, InstitutionMixi
     view_category = 'institutions'
     view_name = 'institution-nodes'
 
-    ordering = ('-date_modified', )
+    ordering = ('-modified', )
 
     # overrides NodesFilterMixin
     def get_default_queryset(self):
@@ -217,7 +217,7 @@ class InstitutionRegistrationList(InstitutionNodeList):
     serializer_class = RegistrationSerializer
     view_name = 'institution-registrations'
 
-    ordering = ('-date_modified', )
+    ordering = ('-modified', )
 
     def get_default_queryset(self):
         institution = self.get_institution()
diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py
index 01e6ac0efee..8b85f1ebf03 100644
--- a/api/nodes/serializers.py
+++ b/api/nodes/serializers.py
@@ -160,8 +160,8 @@ class NodeSerializer(JSONAPISerializer):
     title = ser.CharField(required=True)
     description = ser.CharField(required=False, allow_blank=True, allow_null=True)
     category = ser.ChoiceField(choices=category_choices, help_text='Choices: ' + category_choices_string)
-    date_created = DateByVersion(read_only=True)
-    date_modified = DateByVersion(read_only=True)
+    date_created = DateByVersion(source='created', read_only=True)
+    date_modified = DateByVersion(source='last_logged', read_only=True)
     registration = ser.BooleanField(read_only=True, source='is_registration')
     preprint = ser.BooleanField(read_only=True, source='is_preprint')
     fork = ser.BooleanField(read_only=True, source='is_fork')
@@ -1188,7 +1188,7 @@ class NodeViewOnlyLinkSerializer(JSONAPISerializer):
 
     key = ser.CharField(read_only=True)
     id = IDField(source='_id', read_only=True)
-    date_created = DateByVersion(read_only=True)
+    date_created = DateByVersion(source='created', read_only=True)
     anonymous = ser.BooleanField(required=False, default=False)
     name = ser.CharField(required=False, default='Shared project link')
 
diff --git a/api/nodes/views.py b/api/nodes/views.py
index 9e4b42c66ec..d7fc3514ca9 100644
--- a/api/nodes/views.py
+++ b/api/nodes/views.py
@@ -173,7 +173,7 @@ def get_draft(self, draft_id=None):
 class NodeList(JSONAPIBaseView, bulk_views.BulkUpdateJSONAPIView, bulk_views.BulkDestroyJSONAPIView, bulk_views.ListBulkCreateJSONAPIView, NodesFilterMixin, WaterButlerMixin):
     """Nodes that represent projects and components. *Writeable*.
 
-    Paginated list of nodes ordered by their `date_modified`.  Each resource contains the full representation of the
+    Paginated list of nodes ordered by their `modified`.  Each resource contains the full representation of the
     node, meaning additional requests to an individual node's detail view are not necessary.  Registrations and withdrawn
     registrations cannot be accessed through this endpoint (see registration endpoints instead).
 
@@ -273,7 +273,7 @@ class NodeList(JSONAPIBaseView, bulk_views.BulkUpdateJSONAPIView, bulk_views.Bul
     view_category = 'nodes'
     view_name = 'node-list'
 
-    ordering = ('-date_modified', )  # default ordering
+    ordering = ('-modified', )  # default ordering
 
     # overrides NodesFilterMixin
     def get_default_queryset(self):
@@ -902,7 +902,7 @@ class NodeDraftRegistrationsList(JSONAPIBaseView, generics.ListCreateAPIView, No
     view_category = 'nodes'
     view_name = 'node-draft-registrations'
 
-    ordering = ('-datetime_updated',)
+    ordering = ('-modified',)
 
     # overrides ListCreateAPIView
     def get_queryset(self):
@@ -1030,7 +1030,7 @@ class NodeRegistrationsList(JSONAPIBaseView, generics.ListCreateAPIView, NodeMix
     <!--- Copied from RegistrationList -->
 
     A withdrawn registration will display a limited subset of information, namely, title, description,
-    date_created, registration, withdrawn, date_registered, withdrawal_justification, and registration supplement. All
+    created, registration, withdrawn, date_registered, withdrawal_justification, and registration supplement. All
     other fields will be displayed as null. Additionally, the only relationships permitted to be accessed for a withdrawn
     registration are the contributors - other relationships will return a 403. Each resource contains the full representation
     of the registration, meaning additional requests to an individual registrations's detail view are not necessary.
@@ -1127,7 +1127,7 @@ class NodeRegistrationsList(JSONAPIBaseView, generics.ListCreateAPIView, NodeMix
     view_category = 'nodes'
     view_name = 'node-registrations'
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     # overrides ListCreateAPIView
     # TODO: Filter out withdrawals by default
@@ -1237,7 +1237,7 @@ class NodeChildrenList(JSONAPIBaseView, bulk_views.ListBulkCreateJSONAPIView, No
     view_category = 'nodes'
     view_name = 'node-children'
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     def get_default_queryset(self):
         return default_node_list_queryset(model_cls=Node)
@@ -1248,7 +1248,7 @@ def get_queryset(self):
         auth = get_user_auth(self.request)
         node_pks = node.node_relations.filter(is_node_link=False).select_related('child')\
                 .values_list('child__pk', flat=True)
-        return self.get_queryset_from_request().filter(pk__in=node_pks).can_view(auth.user).order_by('-date_modified')
+        return self.get_queryset_from_request().filter(pk__in=node_pks).can_view(auth.user).order_by('-modified')
 
     # overrides ListBulkCreateJSONAPIView
     def perform_create(self, serializer):
@@ -1536,7 +1536,7 @@ class NodeForksList(JSONAPIBaseView, generics.ListCreateAPIView, NodeMixin, Node
         description                 string             description of the node
         category                    string             node category, must be one of the allowed values
         date_created                iso8601 timestamp  timestamp that the node was created
-        date_modified               iso8601 timestamp  timestamp when the node was last updated
+        modified                    iso8601 timestamp  timestamp when the node was last updated
         tags                        array of strings   list of tags that describe the node
         registration                boolean            has this project been registered? (always False)
         collection                  boolean            is this node a collection (always False)
@@ -1580,8 +1580,8 @@ class NodeForksList(JSONAPIBaseView, generics.ListCreateAPIView, NodeMixin, Node
 
     <!--- Copied Query Params from NodeList -->
 
-    Nodes may be filtered by their `title`, `category`, `description`, `public`, `registration`, `tags`, `date_created`,
-    `date_modified`, `root`, `parent`, and `contributors`. Most are string fields and will be filtered using simple
+    Nodes may be filtered by their `title`, `category`, `description`, `public`, `registration`, `tags`, `created`,
+    `modified`, `root`, `parent`, and `contributors`. Most are string fields and will be filtered using simple
     substring matching.  Others are booleans, and can be filtered using truthy values, such as `true`, `false`, `0`, or `1`.
     Note that quoting `true` or `false` in the query will cause the match to fail regardless. `tags` is an array of simple strings.
 
@@ -1706,8 +1706,8 @@ class NodeFilesList(JSONAPIBaseView, generics.ListAPIView, WaterButlerMixin, Lis
                                              for Google Drive, "box" for Box.com.
         last_touched      iso8601 timestamp  last time the metadata for the file was retrieved. only
                                              applies to non-OSF storage providers.
-        date_modified     iso8601 timestamp  timestamp of when this file was last updated*
-        date_created      iso8601 timestamp  timestamp of when this file was created*
+        modified          iso8601 timestamp  timestamp of when this file was last updated*
+        created           iso8601 timestamp  timestamp of when this file was created*
         extra             object             may contain additional data beyond what's described here,
                                              depending on the provider
           hashes          object
@@ -1715,7 +1715,7 @@ class NodeFilesList(JSONAPIBaseView, generics.ListAPIView, WaterButlerMixin, Lis
             sha256        string             SHA-256 hash of file, null for folders
           downloads       integer            number of times the file has been downloaded (for osfstorage files)
 
-    * A note on timestamps: for files stored in osfstorage, `date_created` refers to the time the file was
+    * A note on timestamps: for files stored in osfstorage, `created` refers to the time the file was
     first uploaded to osfstorage, and `date_modified` is the time the file was last updated while in osfstorage.
     Other providers may or may not provide this information, but if they do it will correspond to the provider's
     semantics for created/modified times.  These timestamps may also be stale; metadata retrieved via the File Detail
@@ -2521,7 +2521,7 @@ def get_queryset(self):
 class NodeCommentsList(JSONAPIBaseView, generics.ListCreateAPIView, ListFilterMixin, NodeMixin):
     """List of comments on a node. *Writeable*.
 
-    Paginated list of comments ordered by their `date_created.` Each resource contains the full representation of the
+    Paginated list of comments ordered by their `created.` Each resource contains the full representation of the
     comment, meaning additional requests to an individual comment's detail view are not necessary.
 
     Note that if an anonymous view_only key is being used, the user relationship will not be exposed.
@@ -2539,9 +2539,9 @@ class NodeCommentsList(JSONAPIBaseView, generics.ListCreateAPIView, ListFilterMi
         name           type               description
         =================================================================================
         content        string             content of the comment
-        date_created   iso8601 timestamp  timestamp that the comment was created
-        date_modified  iso8601 timestamp  timestamp when the comment was last updated
-        modified       boolean            has this comment been edited?
+        created        iso8601 timestamp  timestamp that the comment was created
+        modified       iso8601 timestamp  timestamp when the comment was last updated
+        edited         boolean            has this comment been edited?
         deleted        boolean            is this comment deleted?
         is_abuse       boolean            has this comment been reported by the current user?
         has_children   boolean            does this comment have replies?
@@ -2593,9 +2593,9 @@ class NodeCommentsList(JSONAPIBaseView, generics.ListCreateAPIView, ListFilterMi
     filtered using truthy values, such as `true`, `false`, `0`, or `1`. Note that quoting `true` or `false` in
     the query will cause the match to fail regardless.
 
-    + `filter[date_created][comparison_operator]=YYYY-MM-DDTH:M:S` -- filter comments based on date created.
+    + `filter[created][comparison_operator]=YYYY-MM-DDTH:M:S` -- filter comments based on date created.
 
-    Comments can also be filtered based on their `date_created` and `date_modified` fields. Possible comparison
+    Comments can also be filtered based on their `created` and `modified` fields. Possible comparison
     operators include 'gt' (greater than), 'gte'(greater than or equal to), 'lt' (less than) and 'lte'
     (less than or equal to). The date must be in the format YYYY-MM-DD and the time is optional.
 
@@ -2621,7 +2621,7 @@ class NodeCommentsList(JSONAPIBaseView, generics.ListCreateAPIView, ListFilterMi
     view_category = 'nodes'
     view_name = 'node-comments'
 
-    ordering = ('-date_created', )  # default ordering
+    ordering = ('-created', )  # default ordering
 
     def get_default_queryset(self):
         return Comment.objects.filter(node=self.get_node(), root_target__isnull=False)
@@ -2967,8 +2967,8 @@ class LinkedNodesList(BaseLinkedList, NodeMixin):
         title          string             title of project or component
         description    string             description of the node
         category       string             node category, must be one of the allowed values
-        date_created   iso8601 timestamp  timestamp that the node was created
-        date_modified  iso8601 timestamp  timestamp when the node was last updated
+        created        iso8601 timestamp  timestamp that the node was created
+        modified       iso8601 timestamp  timestamp when the node was last updated
         tags           array of strings   list of tags that describe the node
         registration   boolean            is this is a registration?
         collection     boolean            is this node a collection of other nodes?
@@ -3087,7 +3087,7 @@ class NodeLinkedRegistrationsList(BaseLinkedList, NodeMixin):
 
     Each resource contains the full representation of the registration, meaning additional requests to an individual
     registration's detail view are not necessary. A withdrawn registration will display a limited subset of information,
-    namely, title, description, date_created, registration, withdrawn, date_registered, withdrawal_justification, and
+    namely, title, description, created, registration, withdrawn, date_registered, withdrawal_justification, and
     registration supplement. All other fields will be displayed as null. Additionally, the only relationships permitted
     to be accessed for a withdrawn registration are the contributors - other relationships will return a 403.
 
@@ -3102,8 +3102,8 @@ class NodeLinkedRegistrationsList(BaseLinkedList, NodeMixin):
         title                           string             title of the registered project or component
         description                     string             description of the registered node
         category                        string             bode category, must be one of the allowed values
-        date_created                    iso8601 timestamp  timestamp that the node was created
-        date_modified                   iso8601 timestamp  timestamp when the node was last updated
+        created                         iso8601 timestamp  timestamp that the node was created
+        modified                        iso8601 timestamp  timestamp when the node was last updated
         tags                            array of strings   list of tags that describe the registered node
         current_user_can_comment        boolean            Whether the current user is allowed to post comments
         current_user_permissions        array of strings   list of strings representing the permissions for the current user on this node
@@ -3176,7 +3176,7 @@ class NodeViewOnlyLinksList(JSONAPIBaseView, generics.ListCreateAPIView, ListFil
         =================================================================================
         name            string                  name of the view only link
         anonymous       boolean                 whether the view only link has anonymized contributors
-        date_created    iso8601 timestamp       timestamp when the view only link was created
+        created         iso8601 timestamp       timestamp when the view only link was created
         key             string                  the view only link key
 
 
@@ -3209,7 +3209,7 @@ class NodeViewOnlyLinksList(JSONAPIBaseView, generics.ListCreateAPIView, ListFil
 
     + `filter[<fieldname>]=<Str>` -- fields and values to filter the search results on.
 
-    View only links may be filtered by their `name`, `anonymous`, and `date_created` attributes.
+    View only links may be filtered by their `name`, `anonymous`, and `created` attributes.
 
     #This Request/Response
     """
@@ -3227,7 +3227,7 @@ class NodeViewOnlyLinksList(JSONAPIBaseView, generics.ListCreateAPIView, ListFil
     view_category = 'nodes'
     view_name = 'node-view-only-links'
 
-    ordering = ('-date_created',)
+    ordering = ('-created',)
 
     def get_default_queryset(self):
         return self.get_node().private_links.filter(is_deleted=False)
@@ -3251,7 +3251,7 @@ class NodeViewOnlyLinkDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIV
         =================================================================================
         name            string                  name of the view only link
         anonymous       boolean                 whether the view only link has anonymized contributors
-        date_created    iso8601 timestamp       timestamp when the view only link was created
+        created         iso8601 timestamp       timestamp when the view only link was created
         key             string                  the view only key
 
 
@@ -3385,7 +3385,7 @@ class NodePreprintsList(JSONAPIBaseView, generics.ListAPIView, NodeMixin, Prepri
     ##Note
     **This API endpoint is under active development, and is subject to change in the future.**
 
-    Paginated list of preprints ordered by their `date_created`.  Each resource contains a representation of the
+    Paginated list of preprints ordered by their `created`.  Each resource contains a representation of the
     preprint.
 
     ##Preprint Attributes
@@ -3394,8 +3394,8 @@ class NodePreprintsList(JSONAPIBaseView, generics.ListAPIView, NodeMixin, Prepri
 
         name                            type                                description
         ====================================================================================
-        date_created                    iso8601 timestamp                   timestamp that the preprint was created
-        date_modified                   iso8601 timestamp                   timestamp that the preprint was last modified
+        created                         iso8601 timestamp                   timestamp that the preprint was created
+        modified                        iso8601 timestamp                   timestamp that the preprint was last modified
         date_published                  iso8601 timestamp                   timestamp when the preprint was published
         original_publication_date       iso8601 timestamp                   user-entered date of publication from external posting
         is_published                    boolean                             whether or not this preprint is published
@@ -3444,7 +3444,7 @@ class NodePreprintsList(JSONAPIBaseView, generics.ListAPIView, NodeMixin, Prepri
     view_category = 'nodes'
     view_name = 'node-preprints'
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     def get_default_queryset(self):
         auth = get_user_auth(self.request)
diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py
index 6322442ba55..e54917258e3 100644
--- a/api/preprint_providers/views.py
+++ b/api/preprint_providers/views.py
@@ -249,7 +249,7 @@ class PreprintProviderPreprintList(JSONAPIBaseView, generics.ListAPIView, Prepri
         PreprintPublishedOrAdmin,
     )
 
-    ordering = ('-date_created')
+    ordering = ('-created')
 
     serializer_class = PreprintSerializer
     model_class = AbstractNode
diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py
index dfc387f235e..8ec13703b2b 100644
--- a/api/preprints/serializers.py
+++ b/api/preprints/serializers.py
@@ -71,8 +71,8 @@ class PreprintSerializer(JSONAPISerializer):
 
     id = IDField(source='_id', read_only=True)
     subjects = ser.SerializerMethodField()
-    date_created = DateByVersion(read_only=True)
-    date_modified = DateByVersion(read_only=True)
+    date_created = DateByVersion(source='created', read_only=True)
+    date_modified = DateByVersion(source='modified', read_only=True)
     date_published = DateByVersion(read_only=True)
     original_publication_date = DateByVersion(required=False)
     doi = ser.CharField(source='article_doi', required=False, allow_null=True)
diff --git a/api/preprints/views.py b/api/preprints/views.py
index 0ebda342966..e67f5416186 100644
--- a/api/preprints/views.py
+++ b/api/preprints/views.py
@@ -61,7 +61,7 @@ def get_preprint(self, check_object_permissions=True):
 class PreprintList(JSONAPIBaseView, generics.ListCreateAPIView, PreprintFilterMixin):
     """Preprints that represent a special kind of preprint node. *Writeable*.
 
-    Paginated list of preprints ordered by their `date_created`.  Each resource contains a representation of the
+    Paginated list of preprints ordered by their `created`.  Each resource contains a representation of the
     preprint.
 
     ##Preprint Attributes
@@ -162,8 +162,8 @@ class PreprintList(JSONAPIBaseView, generics.ListCreateAPIView, PreprintFilterMi
 
     serializer_class = PreprintSerializer
 
-    ordering = ('-date_created')
-    ordering_fields = ('date_created', 'date_last_transitioned')
+    ordering = ('-created')
+    ordering_fields = ('created', 'date_last_transitioned')
     view_category = 'preprints'
     view_name = 'preprint-list'
 
diff --git a/api/registrations/serializers.py b/api/registrations/serializers.py
index ed45000694c..b98de487f6d 100644
--- a/api/registrations/serializers.py
+++ b/api/registrations/serializers.py
@@ -28,7 +28,7 @@ class BaseRegistrationSerializer(NodeSerializer):
     category_choices = NodeSerializer.category_choices
     category_choices_string = NodeSerializer.category_choices_string
     category = HideIfWithdrawal(ser.ChoiceField(read_only=True, choices=category_choices, help_text='Choices: ' + category_choices_string))
-    date_modified = DateByVersion(read_only=True)
+    date_modified = DateByVersion(source='last_logged', read_only=True)
     fork = HideIfWithdrawal(ser.BooleanField(read_only=True, source='is_fork'))
     collection = HideIfWithdrawal(ser.BooleanField(read_only=True, source='is_collection'))
     node_license = HideIfWithdrawal(NodeLicenseSerializer(read_only=True))
diff --git a/api/registrations/views.py b/api/registrations/views.py
index 40ab7168348..932435c69f6 100644
--- a/api/registrations/views.py
+++ b/api/registrations/views.py
@@ -82,7 +82,7 @@ class RegistrationList(JSONAPIBaseView, generics.ListAPIView, bulk_views.BulkUpd
 
     Registrations are read-only snapshots of a project. This view is a list of all current registrations for which a user
     has access.  A withdrawn registration will display a limited subset of information, namely, title, description,
-    date_created, registration, withdrawn, date_registered, withdrawal_justification, and registration supplement. All
+    created, registration, withdrawn, date_registered, withdrawal_justification, and registration supplement. All
     other fields will be displayed as null. Additionally, the only relationships permitted to be accessed for a withdrawn
     registration are the contributors - other relationships will return a 403.
 
@@ -153,7 +153,7 @@ class RegistrationList(JSONAPIBaseView, generics.ListAPIView, bulk_views.BulkUpd
     view_category = 'registrations'
     view_name = 'registration-list'
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
     model_class = Registration
 
     # overrides BulkUpdateJSONAPIView
@@ -211,7 +211,7 @@ class RegistrationDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView, Regist
 
     Each resource contains the full representation of the registration, meaning additional requests to an individual
     registration's detail view are not necessary. A withdrawn registration will display a limited subset of information,
-    namely, title, description, date_created, registration, withdrawn, date_registered, withdrawal_justification, and
+    namely, title, description, created, registration, withdrawn, date_registered, withdrawal_justification, and
     registration supplement. All other fields will be displayed as null. Additionally, the only relationships permitted
     to be accessed for a withdrawn registration are the contributors - other relationships will return a 403.
 
@@ -511,7 +511,7 @@ class RegistrationChildrenList(JSONAPIBaseView, generics.ListAPIView, ListFilter
 
     <!--- Copied Query Params from NodeList -->
 
-    Nodes may be filtered by their `id`, `title`, `category`, `description`, `public`, `tags`, `date_created`, `date_modified`,
+    Nodes may be filtered by their `id`, `title`, `category`, `description`, `public`, `tags`, `created`, `modified`,
     `root`, `parent`, and `contributors`.  Most are string fields and will be filtered using simple substring matching.  `public`
     is a boolean, and can be filtered using truthy values, such as `true`, `false`, `0`, or `1`.  Note that quoting `true`
     or `false` in the query will cause the match to fail regardless.  `tags` is an array of simple strings.
@@ -534,7 +534,7 @@ class RegistrationChildrenList(JSONAPIBaseView, generics.ListAPIView, ListFilter
     required_read_scopes = [CoreScopes.NODE_REGISTRATIONS_READ]
     required_write_scopes = [CoreScopes.NULL]
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     def get_default_queryset(self):
         return default_node_list_permission_queryset(user=self.request.user, model_cls=Registration)
@@ -542,7 +542,7 @@ def get_default_queryset(self):
     def get_queryset(self):
         registration = self.get_node()
         registration_pks = registration.node_relations.filter(is_node_link=False).select_related('child').values_list('child__pk', flat=True)
-        return self.get_queryset_from_request().filter(pk__in=registration_pks).can_view(self.request.user).order_by('-date_modified')
+        return self.get_queryset_from_request().filter(pk__in=registration_pks).can_view(self.request.user).order_by('-modified')
 
 
 class RegistrationCitationDetail(NodeCitationDetail, RegistrationMixin):
@@ -652,7 +652,7 @@ class RegistrationForksList(NodeForksList, RegistrationMixin):
     <!--- Copied Query Params from NodeList -->
 
     Nodes may be filtered by their `title`, `category`, `description`, `public`, `registration`, `tags`, `date_created`,
-    `date_modified`, `root`, `parent`, and `contributors`. Most are string fields and will be filtered using simple
+    `modified`, `root`, `parent`, and `contributors`. Most are string fields and will be filtered using simple
     substring matching.  Others are booleans, and can be filtered using truthy values, such as `true`, `false`, `0`, or `1`.
     Note that quoting `true` or `false` in the query will cause the match to fail regardless. `tags` is an array of simple strings.
 
@@ -949,7 +949,7 @@ class RegistrationLinkedRegistrationsList(NodeLinkedRegistrationsList, Registrat
 
     Each resource contains the full representation of the registration, meaning additional requests to an individual
     registration's detail view are not necessary. A withdrawn registration will display a limited subset of information,
-    namely, title, description, date_created, registration, withdrawn, date_registered, withdrawal_justification, and
+    namely, title, description, created, registration, withdrawn, date_registered, withdrawal_justification, and
     registration supplement. All other fields will be displayed as null. Additionally, the only relationships permitted
     to be accessed for a withdrawn registration are the contributors - other relationships will return a 403.
 
diff --git a/api/search/views.py b/api/search/views.py
index 86875c04f98..f869b0dcd00 100644
--- a/api/search/views.py
+++ b/api/search/views.py
@@ -442,7 +442,7 @@ class SearchRegistrations(BaseSearchView):
 
     Registrations are read-only snapshots of a project. This view is a list of all current registrations for which a user
     has access.  A withdrawn registration will display a limited subset of information, namely, title, description,
-    date_created, registration, withdrawn, date_registered, withdrawal_justification, and registration supplement. All
+    created, registration, withdrawn, date_registered, withdrawal_justification, and registration supplement. All
     other fields will be displayed as null. Additionally, the only relationships permitted to be accessed for a withdrawn
     registration are the contributors - other relationships will return a 403.
 
diff --git a/api/users/serializers.py b/api/users/serializers.py
index 9613573c0a4..aa2c0af0500 100644
--- a/api/users/serializers.py
+++ b/api/users/serializers.py
@@ -20,7 +20,7 @@ class QuickFilesRelationshipField(RelationshipField):
 
     def to_representation(self, value):
         relationship_links = super(QuickFilesRelationshipField, self).to_representation(value)
-        quickfiles_guid = value.created.filter(type=QuickFilesNode._typedmodels_type).values_list('guids___id', flat=True).get()
+        quickfiles_guid = value.nodes_created.filter(type=QuickFilesNode._typedmodels_type).values_list('guids___id', flat=True).get()
         upload_url = website_utils.waterbutler_api_url_for(quickfiles_guid, 'osfstorage')
         relationship_links['links']['upload'] = {
             'href': upload_url,
diff --git a/api/users/views.py b/api/users/views.py
index 162ad81f358..41f5e85984e 100644
--- a/api/users/views.py
+++ b/api/users/views.py
@@ -469,7 +469,7 @@ def get_object(self):
 class UserNodes(JSONAPIBaseView, generics.ListAPIView, UserMixin, NodesFilterMixin):
     """List of nodes that the user contributes to. *Read-only*.
 
-    Paginated list of nodes that the user contributes to ordered by `date_modified`.  User registrations are not available
+    Paginated list of nodes that the user contributes to ordered by `modified`.  User registrations are not available
     at this endpoint. Each resource contains the full representation of the node, meaning additional requests to an individual
     node's detail view are not necessary. If the user id in the path is the same as the logged-in user, all nodes will be
     visible.  Otherwise, you will only be able to see the other user's publicly-visible nodes.  The special user id `me`
@@ -533,7 +533,7 @@ class UserNodes(JSONAPIBaseView, generics.ListAPIView, UserMixin, NodesFilterMix
     view_category = 'users'
     view_name = 'user-nodes'
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     # overrides NodesFilterMixin
     def get_default_queryset(self):
@@ -547,7 +547,7 @@ def get_queryset(self):
         return (
             AbstractNode.objects.filter(id__in=set(self.get_queryset_from_request().values_list('id', flat=True)))
             .select_related('node_license')
-            .order_by('-date_modified', )
+            .order_by('-modified', )
             .include('contributor__user__guids', 'root__guids', limit_includes=10)
         )
 
@@ -589,7 +589,7 @@ class UserPreprints(JSONAPIBaseView, generics.ListAPIView, UserMixin, PreprintFi
         base_permissions.TokenHasScope,
     )
 
-    ordering = ('-date_created')
+    ordering = ('-created')
     model_class = AbstractNode
 
     required_read_scopes = [CoreScopes.USERS_READ, CoreScopes.NODE_PREPRINTS_READ]
@@ -648,7 +648,7 @@ class UserRegistrations(JSONAPIBaseView, generics.ListAPIView, UserMixin, NodesF
     logged-in user.
 
     A withdrawn registration will display a limited subset of information, namely, title, description,
-    date_created, registration, withdrawn, date_registered, withdrawal_justification, and registration supplement. All
+    created, registration, withdrawn, date_registered, withdrawal_justification, and registration supplement. All
     other fields will be displayed as null. Additionally, the only relationships permitted to be accessed for a withdrawn
     registration are the contributors - other relationships will return a 403.
 
@@ -733,7 +733,7 @@ class UserRegistrations(JSONAPIBaseView, generics.ListAPIView, UserMixin, NodesF
     view_category = 'users'
     view_name = 'user-registrations'
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     # overrides NodesFilterMixin
     def get_default_queryset(self):
diff --git a/api/view_only_links/serializers.py b/api/view_only_links/serializers.py
index c10842135c7..c85d9b5a643 100644
--- a/api/view_only_links/serializers.py
+++ b/api/view_only_links/serializers.py
@@ -15,7 +15,7 @@
 class ViewOnlyLinkDetailSerializer(JSONAPISerializer):
     key = ser.CharField(read_only=True)
     id = IDField(source='_id', read_only=True)
-    date_created = DateByVersion(read_only=True)
+    date_created = DateByVersion(source='created', read_only=True)
     anonymous = ser.BooleanField(required=False)
     name = ser.CharField(required=False)
 
diff --git a/api/view_only_links/views.py b/api/view_only_links/views.py
index c43a7b99e10..32651a0dce8 100644
--- a/api/view_only_links/views.py
+++ b/api/view_only_links/views.py
@@ -97,7 +97,7 @@ class ViewOnlyLinkNodes(JSONAPIBaseView, generics.ListAPIView):
     view_category = 'view-only-links'
     view_name = 'view-only-link-nodes'
 
-    ordering = ('-date_modified',)
+    ordering = ('-modified',)
 
     def get_serializer_class(self):
         if 'link_id' in self.kwargs:
diff --git a/api/wikis/views.py b/api/wikis/views.py
index e3895e1372b..eadc07a2c46 100644
--- a/api/wikis/views.py
+++ b/api/wikis/views.py
@@ -63,7 +63,7 @@ class WikiDetail(JSONAPIBaseView, generics.RetrieveAPIView, WikiMixin):
         name                        string             name of the wiki pag
         path                        string             the path of the wiki page
         materialized_path           string             the path of the wiki page
-        date_modified               iso8601 timestamp  timestamp when the wiki was last updated
+        date_modified               iso8601 timestamp  timestamp when the wiki was last updated
         content_type                string             MIME-type
         current_user_can_comment    boolean            Whether the current user is allowed to post comments
         extra                       object
diff --git a/api_tests/base/test_serializers.py b/api_tests/base/test_serializers.py
index 58b9dc7a34e..d0bbc2b8411 100644
--- a/api_tests/base/test_serializers.py
+++ b/api_tests/base/test_serializers.py
@@ -420,13 +420,13 @@ def setUp(self):
 
     def test_old_date_formats_to_old_format(self):
         req = make_drf_request_with_version(version='2.0')
-        setattr(self.node, 'date_modified', self.old_date)
+        setattr(self.node, 'last_logged', self.old_date)
         data = NodeSerializer(self.node, context={'request': req}).data['data']
         assert_equal(datetime.strftime(self.old_date, self.old_format), data['attributes']['date_modified'])
 
     def test_old_date_without_microseconds_formats_to_old_format(self):
         req = make_drf_request_with_version(version='2.0')
-        setattr(self.node, 'date_modified', self.old_date_without_microseconds)
+        setattr(self.node, 'last_logged', self.old_date_without_microseconds)
         data = NodeSerializer(self.node, context={'request': req}).data['data']
         assert_equal(
             datetime.strftime(self.old_date_without_microseconds, self.old_format_without_microseconds),
@@ -435,13 +435,13 @@ def test_old_date_without_microseconds_formats_to_old_format(self):
 
     def test_old_date_formats_to_new_format(self):
         req = make_drf_request_with_version(version='2.2')
-        setattr(self.node, 'date_modified', self.old_date)
+        setattr(self.node, 'last_logged', self.old_date)
         data = NodeSerializer(self.node, context={'request': req}).data['data']
         assert_equal(datetime.strftime(self.old_date, self.new_format), data['attributes']['date_modified'])
 
     def test_old_date_without_microseconds_formats_to_new_format(self):
         req = make_drf_request_with_version(version='2.2')
-        setattr(self.node, 'date_modified', self.old_date_without_microseconds)
+        setattr(self.node, 'last_logged', self.old_date_without_microseconds)
         data = NodeSerializer(self.node, context={'request': req}).data['data']
         assert_equal(
             datetime.strftime(self.old_date_without_microseconds, self.new_format),
@@ -450,13 +450,13 @@ def test_old_date_without_microseconds_formats_to_new_format(self):
 
     def test_new_date_formats_to_old_format(self):
         req = make_drf_request_with_version(version='2.0')
-        setattr(self.node, 'date_modified', self.new_date)
+        setattr(self.node, 'last_logged', self.new_date)
         data = NodeSerializer(self.node, context={'request': req}).data['data']
         assert_equal(datetime.strftime(self.new_date, self.old_format), data['attributes']['date_modified'])
 
     def test_new_date_without_microseconds_formats_to_old_format(self):
         req = make_drf_request_with_version(version='2.0')
-        setattr(self.node, 'date_modified', self.new_date_without_microseconds)
+        setattr(self.node, 'last_logged', self.new_date_without_microseconds)
         data = NodeSerializer(self.node, context={'request': req}).data['data']
         assert_equal(
             datetime.strftime(self.new_date_without_microseconds, self.old_format_without_microseconds),
@@ -465,13 +465,13 @@ def test_new_date_without_microseconds_formats_to_old_format(self):
 
     def test_new_date_formats_to_new_format(self):
         req = make_drf_request_with_version(version='2.2')
-        setattr(self.node, 'date_modified', self.new_date)
+        setattr(self.node, 'last_logged', self.new_date)
         data = NodeSerializer(self.node, context={'request': req}).data['data']
         assert_equal(datetime.strftime(self.new_date, self.new_format), data['attributes']['date_modified'])
 
     def test_new_date_without_microseconds_formats_to_new_format(self):
         req = make_drf_request_with_version(version='2.2')
-        setattr(self.node, 'date_modified', self.new_date_without_microseconds)
+        setattr(self.node, 'last_logged', self.new_date_without_microseconds)
         data = NodeSerializer(self.node, context={'request': req}).data['data']
         assert_equal(
             datetime.strftime(self.new_date_without_microseconds, self.new_format),
diff --git a/api_tests/collections/test_serializers.py b/api_tests/collections/test_serializers.py
index 3895cd12205..ccfd9188780 100644
--- a/api_tests/collections/test_serializers.py
+++ b/api_tests/collections/test_serializers.py
@@ -18,8 +18,8 @@ def test_collection_serialization(self):
             created_format = '%Y-%m-%dT%H:%M:%S.%fZ'
             modified_format = '%Y-%m-%dT%H:%M:%S.%fZ'
         else:
-            created_format = '%Y-%m-%dT%H:%M:%S.%f' if collection.date_created.microsecond else '%Y-%m-%dT%H:%M:%S'
-            modified_format = '%Y-%m-%dT%H:%M:%S.%f' if collection.date_modified.microsecond else '%Y-%m-%dT%H:%M:%S'
+            created_format = '%Y-%m-%dT%H:%M:%S.%f' if collection.created.microsecond else '%Y-%m-%dT%H:%M:%S'
+            modified_format = '%Y-%m-%dT%H:%M:%S.%f' if collection.modified.microsecond else '%Y-%m-%dT%H:%M:%S'
 
         result = CollectionSerializer(collection, context={'request': req}).data
         data = result['data']
@@ -28,8 +28,8 @@ def test_collection_serialization(self):
         # Attributes
         attributes = data['attributes']
         assert attributes['title'] == collection.title
-        assert attributes['date_created'] == collection.date_created.strftime(created_format)
-        assert attributes['date_modified'] == collection.date_modified.strftime(modified_format)
+        assert attributes['date_created'] == collection.created.strftime(created_format)
+        assert attributes['date_modified'] == collection.modified.strftime(modified_format)
         assert attributes['bookmarks'] == collection.is_bookmark_collection
 
         # Relationships
diff --git a/api_tests/files/serializers/test_file_serializer.py b/api_tests/files/serializers/test_file_serializer.py
index 9fcbe932905..65ae7f6f9b3 100644
--- a/api_tests/files/serializers/test_file_serializer.py
+++ b/api_tests/files/serializers/test_file_serializer.py
@@ -6,7 +6,7 @@
 from api.files.serializers import FileSerializer
 from api_tests import utils
 from osf_tests.factories import (
-    UserFactory, 
+    UserFactory,
     NodeFactory,
 )
 from tests.utils import make_drf_request_with_version
@@ -27,28 +27,28 @@ def file_one(self, node, user):
         return utils.create_test_file(node, user)
 
     def test_file_serializer(self, file_one):
-        date_created = file_one.versions.last().date_created
-        date_modified = file_one.versions.first().date_created
-        date_created_tz_aware = date_created.replace(tzinfo=utc)
-        date_modified_tz_aware = date_modified.replace(tzinfo=utc)
+        created = file_one.versions.last().created
+        modified = file_one.versions.first().created
+        created_tz_aware = created.replace(tzinfo=utc)
+        modified_tz_aware = modified.replace(tzinfo=utc)
         new_format = '%Y-%m-%dT%H:%M:%S.%fZ'
 
         # test_date_modified_formats_to_old_format
         req = make_drf_request_with_version(version='2.0')
         data = FileSerializer(file_one, context={'request': req}).data['data']
-        assert date_modified_tz_aware == data['attributes']['date_modified']
+        assert modified_tz_aware == data['attributes']['date_modified']
 
         # test_date_modified_formats_to_new_format
         req = make_drf_request_with_version(version='2.2')
         data = FileSerializer(file_one, context={'request': req}).data['data']
-        assert datetime.strftime(date_modified, new_format) == data['attributes']['date_modified']
+        assert datetime.strftime(modified, new_format) == data['attributes']['date_modified']
 
         # test_date_created_formats_to_old_format
         req = make_drf_request_with_version(version='2.0')
         data = FileSerializer(file_one, context={'request': req}).data['data']
-        assert date_created_tz_aware == data['attributes']['date_created']
+        assert created_tz_aware == data['attributes']['date_created']
 
         # test_date_created_formats_to_new_format
         req = make_drf_request_with_version(version='2.2')
         data = FileSerializer(file_one, context={'request': req}).data['data']
-        assert datetime.strftime(date_created, new_format) == data['attributes']['date_created']
+        assert datetime.strftime(created, new_format) == data['attributes']['date_created']
diff --git a/api_tests/files/views/test_file_detail.py b/api_tests/files/views/test_file_detail.py
index d91f45ca03b..aae9251b94d 100644
--- a/api_tests/files/views/test_file_detail.py
+++ b/api_tests/files/views/test_file_detail.py
@@ -127,8 +127,8 @@ def test_get_file(self, app, user, file_url, file):
         assert attributes['provider'] == file.provider
         assert attributes['size'] == file.versions.first().size
         assert attributes['current_version'] == len(file.history)
-        assert attributes['date_modified'] == _dt_to_iso8601(file.versions.first().date_created.replace(tzinfo=pytz.utc))
-        assert attributes['date_created'] == _dt_to_iso8601(file.versions.last().date_created.replace(tzinfo=pytz.utc))
+        assert attributes['date_modified'] == _dt_to_iso8601(file.versions.first().created.replace(tzinfo=pytz.utc))
+        assert attributes['date_created'] == _dt_to_iso8601(file.versions.last().created.replace(tzinfo=pytz.utc))
         assert attributes['extra']['hashes']['md5'] is None
         assert attributes['extra']['hashes']['sha256'] is None
         assert attributes['tags'] == []
@@ -153,7 +153,7 @@ def test_file_has_comments_link(self, app, user, file, file_url):
     def test_file_has_correct_unread_comments_count(self, app, user, file, node):
         contributor = AuthUserFactory()
         node.add_contributor(contributor, auth=Auth(user), save=True)
-        comment = CommentFactory(node=node, target=file.get_guid(create=True), user=contributor, page='files')
+        CommentFactory(node=node, target=file.get_guid(create=True), user=contributor, page='files')
         res = app.get('/{}files/{}/?related_counts=True'.format(API_BASE, file._id), auth=user.auth)
         assert res.status_code == 200
         unread_comments = res.json['data']['relationships']['comments']['links']['related']['meta']['unread']
diff --git a/api_tests/institutions/views/test_institution_nodes_list.py b/api_tests/institutions/views/test_institution_nodes_list.py
index 8c2f112c927..ada0e3104c8 100644
--- a/api_tests/institutions/views/test_institution_nodes_list.py
+++ b/api_tests/institutions/views/test_institution_nodes_list.py
@@ -194,24 +194,24 @@ def url(self, institution):
 
     @pytest.fixture()
     def node_may(self, user, institution):
-        node_may = ProjectFactory(creator=user, is_public = True)
-        node_may.date_created = '2016-05-01 00:00:00.000000+00:00'
+        node_may = ProjectFactory(creator=user, is_public=True)
+        node_may.created = '2016-05-01 00:00:00.000000+00:00'
         node_may.affiliated_institutions.add(institution)
         node_may.save()
         return node_may
 
     @pytest.fixture()
     def node_june(self, user, institution):
-        node_june = ProjectFactory(creator=user, is_public = True)
-        node_june.date_created = '2016-06-01 00:00:00.000000+00:00'
+        node_june = ProjectFactory(creator=user, is_public=True)
+        node_june.created = '2016-06-01 00:00:00.000000+00:00'
         node_june.affiliated_institutions.add(institution)
         node_june.save()
         return node_june
 
     @pytest.fixture()
     def node_july(self, user, institution):
-        node_july = ProjectFactory(creator=user, is_public = True)
-        node_july.date_created = '2016-07-01 00:00:00.000000+00:00'
+        node_july = ProjectFactory(creator=user, is_public=True)
+        node_july.created = '2016-07-01 00:00:00.000000+00:00'
         node_july.affiliated_institutions.add(institution)
         node_july.save()
         return node_july
diff --git a/api_tests/nodes/filters/test_filters.py b/api_tests/nodes/filters/test_filters.py
index 5edb86eb9fc..dfdcfcec571 100644
--- a/api_tests/nodes/filters/test_filters.py
+++ b/api_tests/nodes/filters/test_filters.py
@@ -143,38 +143,38 @@ def user(self):
     @pytest.fixture()
     def node_may(self, user):
         node_may = ProjectFactory(creator=user)
-        node_may.date_created = '2016-05-01 00:00:00.000000+00:00'
+        node_may.created = '2016-05-01 00:00:00.000000+00:00'
         node_may.save()
         return node_may
 
     @pytest.fixture()
     def node_june(self, user):
         node_june = ProjectFactory(creator=user)
-        node_june.date_created = '2016-06-01 00:00:00.000000+00:00'
+        node_june.created = '2016-06-01 00:00:00.000000+00:00'
         node_june.save()
         return node_june
 
     @pytest.fixture()
     def node_july(self, user):
         node_july = ProjectFactory(creator=user)
-        node_july.date_created = '2016-07-01 00:00:00.000000+00:00'
+        node_july.created = '2016-07-01 00:00:00.000000+00:00'
         node_july.save()
         return node_july
 
     @pytest.fixture()
-    def date_created_url(self, url):
+    def created_url(self, url):
         return '{}filter[date_created]='.format(url)
 
-    def test_node_list_date_filter(self, app, user, node_may, node_june, node_july, url, date_created_url):
+    def test_node_list_date_filter(self, app, user, node_may, node_june, node_july, url, created_url):
 
-    #   test_date_filter_equals
+        # test_date_filter_equals
         expected = []
-        res = app.get('{}{}'.format(date_created_url, '2016-04-01'), auth=user.auth)
+        res = app.get('{}{}'.format(created_url, '2016-04-01'), auth=user.auth)
         actual = [node['id'] for node in res.json['data']]
         assert expected == actual
 
         expected = [node_may._id]
-        res = app.get('{}{}'.format(date_created_url, node_may.date_created), auth=user.auth)
+        res = app.get('{}{}'.format(created_url, node_may.created), auth=user.auth)
         actual = [node['id'] for node in res.json['data']]
         assert expected == actual
 
@@ -239,7 +239,7 @@ def test_node_list_date_filter(self, app, user, node_may, node_june, node_july,
         assert expected == actual
 
         expected = [node_may._id]
-        res = app.get('{}{}'.format(res_url, node_may.date_created), auth=user.auth)
+        res = app.get('{}{}'.format(res_url, node_may.created), auth=user.auth)
         actual = [node['id'] for node in res.json['data']]
         assert expected == actual
 
diff --git a/api_tests/nodes/views/test_node_comments_list.py b/api_tests/nodes/views/test_node_comments_list.py
index 8f7c18d4a3d..96c514bd917 100644
--- a/api_tests/nodes/views/test_node_comments_list.py
+++ b/api_tests/nodes/views/test_node_comments_list.py
@@ -980,12 +980,12 @@ def url_base(self, project):
 
     @pytest.fixture()
     def date_created_formatted(self, comment):
-        return comment.date_created.strftime('%Y-%m-%dT%H:%M:%S.%f')
+        return comment.created.strftime('%Y-%m-%dT%H:%M:%S.%f')
 
     @pytest.fixture()
     def date_modified_formatted(self, user, comment):
         comment.edit('Edited comment', auth=core.Auth(user), save=True)
-        return comment.date_modified.strftime('%Y-%m-%dT%H:%M:%S.%f')
+        return comment.modified.strftime('%Y-%m-%dT%H:%M:%S.%f')
 
     def test_filtering(self, app, user, project, comment, comment_deleted, date_created_formatted, date_modified_formatted, url_base):
 
diff --git a/api_tests/nodes/views/test_node_list.py b/api_tests/nodes/views/test_node_list.py
index 7532354eb65..a8884c79602 100644
--- a/api_tests/nodes/views/test_node_list.py
+++ b/api_tests/nodes/views/test_node_list.py
@@ -120,7 +120,7 @@ def test_node_list_has_proper_root(self, app, user, url):
             assert project_json['embeds']['root']['data']['id'] == project.root._id
 
     def test_node_list_sorting(self, app, url):
-        res = app.get('{}?sort=-date_created'.format(url))
+        res = app.get('{}?sort=-created'.format(url))
         assert res.status_code == 200
 
         res = app.get('{}?sort=title'.format(url))
@@ -390,10 +390,10 @@ def test_filtering_multiple_fields(self, app, user_one):
         project_public_four = ProjectFactory(is_public=True, title='test', creator=user_one, description='test')
 
         for project in [project_public_one, project_public_two, project_public_three, project_private_one, project_private_two]:
-            project.date_created = '2016-10-25 00:00:00.000000+00:00'
+            project.created = '2016-10-25 00:00:00.000000+00:00'
             project.save()
 
-        project_public_four.date_created = '2016-10-28 00:00:00.000000+00:00'
+        project_public_four.created = '2016-10-28 00:00:00.000000+00:00'
         project_public_four.save()
 
         expected = [project_public_one._id, project_public_two._id, project_public_three._id]
diff --git a/api_tests/preprints/filters/test_filters.py b/api_tests/preprints/filters/test_filters.py
index 85457847a44..5255473e2cd 100644
--- a/api_tests/preprints/filters/test_filters.py
+++ b/api_tests/preprints/filters/test_filters.py
@@ -63,7 +63,7 @@ def preprint_one(self, user, project_one, provider_one, subject_one):
     @pytest.fixture()
     def preprint_two(self, user, project_two, provider_two, subject_two):
         preprint_two = PreprintFactory(creator=user, project=project_two, filename='howto_reason.txt', provider=provider_two, subjects=[[subject_two._id]])
-        preprint_two.date_created = '2013-12-11 10:09:08.070605+00:00'
+        preprint_two.created = '2013-12-11 10:09:08.070605+00:00'
         preprint_two.date_published = '2013-12-11 10:09:08.070605+00:00'
         preprint_two.original_publication_date = '2013-12-11 10:09:08.070605+00:00'
         preprint_two.save()
@@ -72,14 +72,13 @@ def preprint_two(self, user, project_two, provider_two, subject_two):
     @pytest.fixture()
     def preprint_three(self, user, project_three, provider_three, subject_one, subject_two):
         preprint_three = PreprintFactory(creator=user, project=project_three, filename='darn_reason.txt', provider=provider_three, subjects=[[subject_one._id], [subject_two._id]])
-        preprint_three.date_created = '2013-12-11 10:09:08.070605+00:00'
+        preprint_three.created = '2013-12-11 10:09:08.070605+00:00'
         preprint_three.date_published = '2013-12-11 10:09:08.070605+00:00'
         preprint_three.original_publication_date = '2013-12-11 10:09:08.070605+00:00'
         preprint_three.is_published = False
         preprint_three.save()
         return preprint_three
 
-
     @pytest.fixture()
     def provider_url(self, url):
         return '{}filter[provider]='.format(url)
@@ -89,7 +88,7 @@ def id_url(self, url):
         return '{}filter[id]='.format(url)
 
     @pytest.fixture()
-    def date_created_url(self, url):
+    def created_url(self, url):
         return '{}filter[date_created]='.format(url)
 
     @pytest.fixture()
@@ -138,21 +137,21 @@ def test_id_filter_equals_returns_one(self, app, user, preprint_two, id_url):
         actual = [preprint['id'] for preprint in res.json['data']]
         assert expected == actual
 
-    def test_date_created_filter_equals_returns_none(self, app, user, date_created_url):
+    def test_date_created_filter_equals_returns_none(self, app, user, created_url):
         expected = []
-        res = app.get('{}{}'.format(date_created_url, '2015-11-15 10:09:08.070605+00:00'), auth=user.auth)
+        res = app.get('{}{}'.format(created_url, '2015-11-15 10:09:08.070605+00:00'), auth=user.auth)
         actual = [preprint['id'] for preprint in res.json['data']]
         assert expected == actual
 
-    def test_date_created_filter_equals_returns_one(self, app, user, preprint_one, date_created_url):
+    def test_date_created_filter_equals_returns_one(self, app, user, preprint_one, created_url):
         expected = [preprint_one._id]
-        res = app.get('{}{}'.format(date_created_url, preprint_one.date_created), auth=user.auth)
+        res = app.get('{}{}'.format(created_url, preprint_one.created), auth=user.auth)
         actual = [preprint['id'] for preprint in res.json['data']]
         assert expected == actual
 
-    def test_date_created_filter_equals_returns_multiple(self, app, user, preprint_two, preprint_three, date_created_url):
+    def test_date_created_filter_equals_returns_multiple(self, app, user, preprint_two, preprint_three, created_url):
         expected = set([preprint_two._id, preprint_three._id])
-        res = app.get('{}{}'.format(date_created_url, preprint_two.date_created), auth=user.auth)
+        res = app.get('{}{}'.format(created_url, preprint_two.created), auth=user.auth)
         actual = set([preprint['id'] for preprint in res.json['data']])
         assert expected == actual
 
@@ -166,7 +165,7 @@ def test_date_modified_filter_equals_returns_none(self, app, user, date_modified
     # This test could hypothetically fail if the time between fixture creations splits a day (e.g., midnight)
     def test_date_modified_filter_equals_returns_multiple(self, app, user, preprint_one, preprint_two, preprint_three, date_modified_url):
         expected = set([preprint_one._id, preprint_two._id, preprint_three._id])
-        res = app.get('{}{}'.format(date_modified_url, preprint_one.date_modified), auth=user.auth)
+        res = app.get('{}{}'.format(date_modified_url, preprint_one.modified), auth=user.auth)
         actual = set([preprint['id'] for preprint in res.json['data']])
         assert expected == actual
 
diff --git a/api_tests/registrations/views/test_withdrawn_registrations.py b/api_tests/registrations/views/test_withdrawn_registrations.py
index ed68b065127..32f5c5be0fb 100644
--- a/api_tests/registrations/views/test_withdrawn_registrations.py
+++ b/api_tests/registrations/views/test_withdrawn_registrations.py
@@ -110,9 +110,9 @@ def test_withdrawn_registrations_display_limited_fields(self, app, user, registr
         expected_attributes = {
             'title': registration.title,
             'description': registration.description,
-            'date_created': registration.date_created.isoformat().replace('+00:00', 'Z'),
+            'date_created': registration.created.isoformat().replace('+00:00', 'Z'),
             'date_registered': registration.registered_date.isoformat().replace('+00:00', 'Z'),
-            'date_modified': registration.date_modified.isoformat().replace('+00:00', 'Z'),
+            'date_modified': registration.last_logged.isoformat().replace('+00:00', 'Z'),
             'date_withdrawn': registration.retraction.date_retracted.isoformat().replace('+00:00', 'Z'),
             'withdrawal_justification': registration.retraction.justification,
             'public': None,
diff --git a/framework/sessions/__init__.py b/framework/sessions/__init__.py
index 35d95a47b4c..e4d5d29d03e 100644
--- a/framework/sessions/__init__.py
+++ b/framework/sessions/__init__.py
@@ -163,7 +163,7 @@ def before_request():
             user_session = Session.load(session_id) or Session(_id=session_id)
         except itsdangerous.BadData:
             return
-        if not util_time.throttle_period_expired(user_session.date_created, settings.OSF_SESSION_TIMEOUT):
+        if not util_time.throttle_period_expired(user_session.created, settings.OSF_SESSION_TIMEOUT):
             # Update date last login when making non-api requests
             if user_session.data.get('auth_user_id') and 'api' not in request.url:
                 OSFUser = apps.get_model('osf.OSFUser')
diff --git a/osf/management/commands/export_user_account.py b/osf/management/commands/export_user_account.py
index 9bad2bf457b..62e31ad53c6 100644
--- a/osf/management/commands/export_user_account.py
+++ b/osf/management/commands/export_user_account.py
@@ -30,8 +30,8 @@
 
 PREPRINT_EXPORT_FIELDS = [
     'is_published',
-    'date_created',
-    'date_modified',
+    'created',
+    'modified',
     'date_published'
 ]
 
@@ -42,8 +42,8 @@
     'is_public',
     'description',
     'forked_date',
-    'date_created',
-    'date_modified'
+    'created',
+    'modified'
 ]
 
 REGISTRATION_EXPORT_FIELDS = NODE_EXPORT_FIELDS + [
diff --git a/osf/management/commands/force_archive.py b/osf/management/commands/force_archive.py
index 78d294377c9..86ce555d3c9 100644
--- a/osf/management/commands/force_archive.py
+++ b/osf/management/commands/force_archive.py
@@ -212,7 +212,7 @@ def modify_file_tree_recursive(reg_id, tree, file_obj, deleted=None, cached=Fals
                 'deleted': None,
                 'object': file_obj,
                 'name': file_obj.name,
-                'version': int(file_obj.versions.latest('date_created').identifier) if file_obj.versions.exists() else None
+                'version': int(file_obj.versions.latest('created').identifier) if file_obj.versions.exists() else None
             })
             cached = True
             if move_under:
@@ -244,7 +244,7 @@ def modify_file_tree_recursive(reg_id, tree, file_obj, deleted=None, cached=Fals
                     'object': file_obj,
                     'name': file_obj.name,
                     'deleted': file_obj.is_deleted,
-                    'version': int(file_obj.versions.latest('date_created').identifier) if file_obj.versions.exists() else None
+                    'version': int(file_obj.versions.latest('created').identifier) if file_obj.versions.exists() else None
                 })
             noop = False
         if filenode.get('children'):
@@ -315,7 +315,7 @@ def _recurse(file_obj, node):
             'object': file_obj,
             'name': file_obj.name,
             'deleted': file_obj.is_deleted,
-            'version': int(file_obj.versions.latest('date_created').identifier) if file_obj.versions.exists() else None
+            'version': int(file_obj.versions.latest('created').identifier) if file_obj.versions.exists() else None
         }
         if not file_obj.is_file:
             serialized['children'] = [_recurse(child, node) for child in node.files.filter(parent_id=file_obj.id)]
diff --git a/osf/management/commands/update_preprint_share_dates.py b/osf/management/commands/update_preprint_share_dates.py
index faebcc85907..7c689cd2f4f 100644
--- a/osf/management/commands/update_preprint_share_dates.py
+++ b/osf/management/commands/update_preprint_share_dates.py
@@ -11,7 +11,7 @@
 logger = logging.getLogger(__name__)
 
 def update_share_preprint_modified_dates(dry_run=False):
-    for preprint in PreprintService.objects.filter(date_modified__lt=F('node__date_modified')):
+    for preprint in PreprintService.objects.filter(date_modified__lt=F('node__modified')):
         if dry_run:
             logger.info('Would have sent ' + preprint._id + ' data to SHARE')
         else:
@@ -20,7 +20,7 @@ def update_share_preprint_modified_dates(dry_run=False):
 
 class Command(BaseCommand):
     """
-    Send more accurate preprint modified dates to SHARE (sends updates if preprint.date_modified < node.date_modified)
+    Send more accurate preprint modified dates to SHARE (sends updates if preprint.modified < node.modified)
     """
     def add_arguments(self, parser):
         super(Command, self).add_arguments(parser)
diff --git a/osf/migrations/0053_add_quickfiles.py b/osf/migrations/0053_add_quickfiles.py
index bd7e960bdd1..23f39590765 100644
--- a/osf/migrations/0053_add_quickfiles.py
+++ b/osf/migrations/0053_add_quickfiles.py
@@ -17,7 +17,7 @@
 
 
 def add_quickfiles(*args, **kwargs):
-    ids_without_quickfiles = list(OSFUser.objects.exclude(created__type=QuickFilesNode._typedmodels_type).values_list('id', flat=True))
+    ids_without_quickfiles = list(OSFUser.objects.exclude(nodes_created__type=QuickFilesNode._typedmodels_type).values_list('id', flat=True))
 
     users_without_quickfiles = OSFUser.objects.filter(id__in=ids_without_quickfiles).order_by('id')
     total_quickfiles_to_create = users_without_quickfiles.count()
diff --git a/osf/models/base.py b/osf/models/base.py
index 50fd4a326df..c3c6009cde5 100644
--- a/osf/models/base.py
+++ b/osf/models/base.py
@@ -11,7 +11,9 @@
 from django.db.models import ForeignKey
 from django.db.models.signals import post_save
 from django.dispatch import receiver
+from django_extensions.db.models import TimeStampedModel
 from include import IncludeQuerySet
+
 from osf.utils.caching import cached_property
 from osf.exceptions import ValidationError
 from osf.utils.fields import LowercaseCharField, NonNaiveDateTimeField
@@ -41,7 +43,7 @@ def generate_object_id():
     return str(bson.ObjectId())
 
 
-class BaseModel(models.Model):
+class BaseModel(TimeStampedModel):
     migration_page_size = 50000
 
     objects = models.QuerySet.as_manager()
@@ -52,8 +54,8 @@ class Meta:
     def __unicode__(self):
         return '{}'.format(self.id)
 
-    def to_storage(self):
-        local_django_fields = set([x.name for x in self._meta.concrete_fields])
+    def to_storage(self, include_auto_now=True):
+        local_django_fields = set([x.name for x in self._meta.concrete_fields if include_auto_now or not getattr(x, 'auto_now', False)])
         return {name: self.serializable_value(name) for name in local_django_fields}
 
     @classmethod
diff --git a/osf/models/comment.py b/osf/models/comment.py
index bc5e5801875..558af0be40c 100644
--- a/osf/models/comment.py
+++ b/osf/models/comment.py
@@ -11,7 +11,6 @@
 from osf.models import validators
 
 from framework.exceptions import PermissionsError
-from osf.utils.fields import NonNaiveDateTimeField
 from website import settings
 from website.util import api_v2_url
 from website.project import signals as project_signals
@@ -38,9 +37,7 @@ class Comment(GuidMixin, SpamMixin, CommentableMixin, BaseModel):
                                     related_name='child_comments',
                                     null=True, blank=True)
 
-    date_created = NonNaiveDateTimeField(auto_now_add=True)
-    date_modified = NonNaiveDateTimeField(auto_now=True)
-    modified = models.BooleanField(default=False)
+    edited = models.BooleanField(default=False)
     is_deleted = models.BooleanField(default=False)
     # The type of root_target: node/files
     page = models.CharField(max_length=255, blank=True)
@@ -127,7 +124,7 @@ def find_n_unread(cls, user, node, page, root_id=None):
 
             return cls.objects.filter(
                 Q(node=node) & ~Q(user=user) & Q(is_deleted=False) &
-                (Q(date_created__gt=view_timestamp) | Q(date_modified__gt=view_timestamp)) &
+                (Q(created__gt=view_timestamp) | Q(modified__gt=view_timestamp)) &
                 Q(root_target=root_target)
             ).count()
 
@@ -191,8 +188,8 @@ def edit(self, content, auth, save=False):
         }
         log_dict.update(self.root_target.referent.get_extra_log_params(self))
         self.content = content
-        self.modified = True
-        self.date_modified = timezone.now()
+        self.edited = True
+        self.modified = timezone.now()
         new_mentions = get_valid_mentioned_users_guids(self, self.node.contributors)
 
         if save:
@@ -219,7 +216,7 @@ def delete(self, auth, save=False):
         }
         self.is_deleted = True
         log_dict.update(self.root_target.referent.get_extra_log_params(self))
-        self.date_modified = timezone.now()
+        self.modified = timezone.now()
         if save:
             self.save()
             self.node.add_log(
@@ -241,7 +238,7 @@ def undelete(self, auth, save=False):
             'comment': self._id,
         }
         log_dict.update(self.root_target.referent.get_extra_log_params(self))
-        self.date_modified = timezone.now()
+        self.modified = timezone.now()
         if save:
             self.save()
             self.node.add_log(
diff --git a/osf/models/files.py b/osf/models/files.py
index df69c93a4f3..20cd24151f4 100644
--- a/osf/models/files.py
+++ b/osf/models/files.py
@@ -209,8 +209,8 @@ def get_file_guids(cls, materialized_path, provider, node):
 
         return guids
 
-    def to_storage(self):
-        storage = super(BaseFileNode, self).to_storage()
+    def to_storage(self, **kwargs):
+        storage = super(BaseFileNode, self).to_storage(**kwargs)
         if 'trashed' not in self.type.lower():
             for key in tuple(storage.keys()):
                 if 'deleted' in key:
@@ -480,8 +480,8 @@ def serialize(self):
             'checkout': self.checkout._id if self.checkout else None,
             'version': newest_version.identifier if newest_version else None,
             'contentType': newest_version.content_type if newest_version else None,
-            'modified': newest_version.date_modified.isoformat() if newest_version.date_modified else None,
-            'created': self.versions.all().first().date_modified.isoformat() if self.versions.all().first().date_modified else None,
+            'modified': newest_version.external_modified.isoformat() if newest_version.external_modified else None,
+            'created': self.versions.all().first().external_modified.isoformat() if self.versions.all().first().external_modified else None,
         })
 
     def restore(self, recursive=True, parent=None, save=True, deleted_on=None):
@@ -570,7 +570,7 @@ def restore(self, recursive=True, parent=None, save=True, deleted_on=None):
         self.recast(self._resolve_class(type_cls)._typedmodels_type)
 
         if save:
-            self.save()
+            self.save(update_modified=False)
 
         return self
 
@@ -616,21 +616,22 @@ class FileVersion(ObjectIDMixin, BaseModel):
     """A version of an OsfStorageFileNode. contains information
     about where the file is located, hashes and datetimes
     """
+    # Note on fields:
+    # `created`: Date version record was created. This is the date displayed to the user.
+    # `modified`: Date this object was last modified. Distinct from the date the file associated
+    #       with this object was last modified
+    # `external_modified`: Date file modified on third-party backend. Not displayed to user, since
+    #       this date may be earlier than the date of upload if the file already
+    #       exists on the backend
 
     creator = models.ForeignKey('OSFUser', null=True, blank=True, on_delete=models.CASCADE)
 
     identifier = models.CharField(max_length=100, blank=False, null=False)  # max length on staging was 51
 
-    # Date version record was created. This is the date displayed to the user.
-    date_created = NonNaiveDateTimeField(auto_now_add=True)
-
     size = models.BigIntegerField(default=-1, blank=True, null=True)
 
     content_type = models.CharField(max_length=100, blank=True, null=True)  # was 24 on staging
-    # Date file modified on third-party backend. Not displayed to user, since
-    # this date may be earlier than the date of upload if the file already
-    # exists on the backend
-    date_modified = NonNaiveDateTimeField(null=True, blank=True)
+    external_modified = NonNaiveDateTimeField(null=True, blank=True)
 
     metadata = DateTimeAwareJSONField(blank=True, default=dict)
     location = DateTimeAwareJSONField(default=None, blank=True, null=True, validators=[validate_location])
@@ -655,7 +656,7 @@ def update_metadata(self, metadata, save=True):
         self.size = self.metadata.get('size', self.size)
         self.content_type = self.metadata.get('contentType', self.content_type)
         if self.metadata.get('modified'):
-            self.date_modified = parse_date(self.metadata['modified'], ignoretz=False)
+            self.external_modified = parse_date(self.metadata['modified'], ignoretz=False)
 
         if save:
             self.save()
@@ -691,4 +692,4 @@ def _find_matching_archive(self, save=True):
         return True
 
     class Meta:
-        ordering = ('-date_created',)
+        ordering = ('-created',)
diff --git a/osf/models/mixins.py b/osf/models/mixins.py
index b2f31d5ca94..2ddac713756 100644
--- a/osf/models/mixins.py
+++ b/osf/models/mixins.py
@@ -2,10 +2,12 @@
 from django.apps import apps
 from django.db import models
 from django.core.exceptions import ObjectDoesNotExist
+from django.utils import timezone
 from framework.analytics import increment_user_activity_counters
 from osf.models.node_relation import NodeRelation
 from osf.models.nodelog import NodeLog
 from osf.models.tag import Tag
+from osf.utils.fields import NonNaiveDateTimeField
 from website.exceptions import NodeStateError
 from website import settings
 
@@ -51,7 +53,8 @@ class Meta:
 
 
 class Loggable(models.Model):
-    # TODO: This should be in the NodeLog model
+
+    last_logged = NonNaiveDateTimeField(db_index=True, null=True, blank=True, default=timezone.now)
 
     def add_log(self, action, params, auth, foreign_user=None, log_date=None, save=True, request=None):
         AbstractNode = apps.get_model('osf.AbstractNode')
@@ -73,9 +76,9 @@ def add_log(self, action, params, auth, foreign_user=None, log_date=None, save=T
         log.save()
 
         if self.logs.count() == 1:
-            self.date_modified = log.date.replace(tzinfo=pytz.utc)
+            self.last_logged = log.date.replace(tzinfo=pytz.utc)
         else:
-            self.date_modified = self.logs.first().date
+            self.last_logged = self.logs.first().date
 
         if save:
             self.save()
diff --git a/osf/models/node.py b/osf/models/node.py
index 59a1c912679..66d27994107 100644
--- a/osf/models/node.py
+++ b/osf/models/node.py
@@ -291,8 +291,6 @@ def contributors(self):
                                 related_name='nodes_created',
                                 on_delete=models.SET_NULL,
                                 null=True, blank=True)
-    date_created = NonNaiveDateTimeField(auto_now_add=True)
-    date_modified = NonNaiveDateTimeField(db_index=True, auto_now=True)
     deleted_date = NonNaiveDateTimeField(null=True, blank=True)
     description = models.TextField(blank=True, default='')
     file_guid_to_share_uuids = DateTimeAwareJSONField(default=dict, blank=True)
@@ -2015,8 +2013,8 @@ def use_as_template(self, auth, changes=None, top_level=True):
         if len(new.title) > 200:
             new.title = new.title[:200]
 
-        # Slight hack - date_created is a read-only field.
-        new.date_created = timezone.now()
+        # Slight hack - created is a read-only field.
+        new.created = timezone.now()
 
         new.save(suppress_log=True)
 
@@ -2035,7 +2033,7 @@ def use_as_template(self, auth, changes=None, top_level=True):
                 },
             },
             auth=auth,
-            log_date=new.date_created,
+            log_date=new.created,
             save=False,
         )
         new.save()
@@ -2064,7 +2062,7 @@ def next_descendants(self, auth, condition=lambda auth, node: True):
         returns a list of [(node, [children]), ...]
         """
         ret = []
-        for node in self._nodes.order_by('date_created').all():
+        for node in self._nodes.order_by('created').all():
             if condition(auth, node):
                 # base case
                 ret.append((node, []))
@@ -2980,7 +2978,7 @@ def add_project_created_log(sender, instance, created, **kwargs):
             log_action,
             params=log_params,
             auth=Auth(user=instance.creator),
-            log_date=instance.date_created,
+            log_date=instance.created,
             save=True,
         )
 
diff --git a/osf/models/oauth.py b/osf/models/oauth.py
index 7b281ca5058..66549eccc12 100644
--- a/osf/models/oauth.py
+++ b/osf/models/oauth.py
@@ -1,7 +1,6 @@
 import urlparse
 import uuid
 
-from osf.utils.fields import NonNaiveDateTimeField
 from website.util import api_v2_url
 
 from django.db import models
@@ -57,8 +56,6 @@ class ApiOAuth2Application(base.ObjectIDMixin, base.BaseModel):
     name = models.CharField(db_index=True, blank=False, null=False, max_length=200)
     description = models.CharField(blank=True, null=True, max_length=1000)
 
-    date_created = NonNaiveDateTimeField(auto_now_add=True)
-
     home_url = models.URLField(blank=False, null=False)
     callback_url = models.URLField(blank=False, null=False)
 
diff --git a/osf/models/preprint_service.py b/osf/models/preprint_service.py
index b1c982a24c6..7b8d2ba8c09 100644
--- a/osf/models/preprint_service.py
+++ b/osf/models/preprint_service.py
@@ -25,8 +25,6 @@
 from osf.models.identifiers import IdentifierMixin, Identifier
 
 class PreprintService(DirtyFieldsMixin, GuidMixin, IdentifierMixin, ReviewableMixin, BaseModel):
-    date_created = NonNaiveDateTimeField(auto_now_add=True)
-    date_modified = NonNaiveDateTimeField(auto_now=True)
     provider = models.ForeignKey('osf.PreprintProvider',
                                  on_delete=models.SET_NULL,
                                  related_name='preprint_services',
diff --git a/osf/models/private_link.py b/osf/models/private_link.py
index a690e498d90..d339656f236 100644
--- a/osf/models/private_link.py
+++ b/osf/models/private_link.py
@@ -1,17 +1,14 @@
-from django.utils import timezone
 from django.db import models
 from django.dispatch import receiver
 from django.core.exceptions import ValidationError
 
 from framework.utils import iso8601format
-from osf.utils.fields import NonNaiveDateTimeField
 from website.util import sanitize
 
 from osf.models.base import BaseModel, ObjectIDMixin
 
 
 class PrivateLink(ObjectIDMixin, BaseModel):
-    date_created = NonNaiveDateTimeField(default=timezone.now)
     key = models.CharField(max_length=512, null=False, unique=True, blank=False)
     name = models.CharField(max_length=255, blank=True, null=True)
     is_deleted = models.BooleanField(default=False)
@@ -35,7 +32,7 @@ def node_scale(self, node):
     def to_json(self):
         return {
             'id': self._id,
-            'date_created': iso8601format(self.date_created),
+            'date_created': iso8601format(self.created),
             'key': self.key,
             'name': sanitize.unescape_entities(self.name),
             'creator': {'fullname': self.creator.fullname, 'url': self.creator.profile_url},
diff --git a/osf/models/session.py b/osf/models/session.py
index f2f1b8bbe45..aa384997459 100644
--- a/osf/models/session.py
+++ b/osf/models/session.py
@@ -1,11 +1,8 @@
 from osf.models.base import BaseModel, ObjectIDMixin
 from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
-from osf.utils.fields import NonNaiveDateTimeField
 
 
 class Session(ObjectIDMixin, BaseModel):
-    date_created = NonNaiveDateTimeField(auto_now_add=True)
-    date_modified = NonNaiveDateTimeField(auto_now=True)
     data = DateTimeAwareJSONField(default=dict, blank=True)
 
     @property
diff --git a/osf/models/user.py b/osf/models/user.py
index ed26d7afcca..4fd0dc69e05 100644
--- a/osf/models/user.py
+++ b/osf/models/user.py
@@ -24,7 +24,6 @@
 from django.db import models
 from django.utils import timezone
 
-from django_extensions.db.models import TimeStampedModel
 from framework.auth import Auth, signals, utils
 from framework.auth.core import generate_verification_key
 from framework.auth.exceptions import (ChangePasswordError, ExpiredTokenError,
@@ -102,7 +101,7 @@ def create_superuser(self, username, password):
         return user
 
 
-class Email(BaseModel, TimeStampedModel):
+class Email(BaseModel):
     address = LowercaseEmailField(unique=True, db_index=True, validators=[validate_email])
     user = models.ForeignKey('OSFUser', related_name='emails', on_delete=models.CASCADE)
 
@@ -1433,7 +1432,7 @@ def get_or_create_cookie(self, secret=None):
         user_session = Session.objects.filter(
             data__auth_user_id=self._id
         ).order_by(
-            '-date_modified'
+            '-modified'
         ).first()
 
         if not user_session:
diff --git a/osf/utils/migrations.py b/osf/utils/migrations.py
index 707a200a157..2631bb5a2d8 100644
--- a/osf/utils/migrations.py
+++ b/osf/utils/migrations.py
@@ -37,6 +37,11 @@ def ensure_licenses(*args, **kwargs):
     """
     ninserted = 0
     nupdated = 0
+    try:
+        NodeLicense = args[0].get_model('osf', 'nodelicense')
+    except:
+        # Working outside a migration
+        from osf.models import NodeLicense
     with open(
             os.path.join(
                 settings.APP_PATH,
@@ -81,6 +86,11 @@ def ensure_schemas(*args):
     """Import meta-data schemas from JSON to database if not already loaded
     """
     schema_count = 0
+    try:
+        MetaSchema = args[0].get_model('osf', 'metaschema')
+    except:
+        # Working outside a migration
+        from osf.models import MetaSchema
     for schema in OSF_META_SCHEMAS:
         schema_obj, created = MetaSchema.objects.update_or_create(
             name=schema['name'],
diff --git a/osf_tests/factories.py b/osf_tests/factories.py
index be74dfc0a81..3a234307c60 100644
--- a/osf_tests/factories.py
+++ b/osf_tests/factories.py
@@ -177,7 +177,7 @@ def _create(cls, target_class, username, password, fullname):
 class BaseNodeFactory(DjangoModelFactory):
     title = factory.Faker('catch_phrase')
     description = factory.Faker('sentence')
-    date_created = factory.LazyFunction(timezone.now)
+    created = factory.LazyFunction(timezone.now)
     creator = factory.SubFactory(AuthUserFactory)
 
     class Meta:
diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py
index fe9af162b99..9b9cc085611 100644
--- a/osf_tests/test_node.py
+++ b/osf_tests/test_node.py
@@ -607,7 +607,7 @@ def test_project_factory(self):
         assert node.category == 'project'
         assert bool(node._id)
         # assert_almost_equal(
-        #     node.date_created, timezone.now(),
+        #     node.created, timezone.now(),
         #     delta=datetime.timedelta(seconds=5),
         # )
         assert node.is_public is False
@@ -671,8 +671,8 @@ def test_add_log(self, node, auth):
         # date is tzaware
         assert last_log.date.tzinfo == pytz.utc
 
-        # updates node.date_modified
-        assert_datetime_equal(node.date_modified, last_log.date)
+        # updates node.modified
+        assert_datetime_equal(node.modified, last_log.date)
 
 
 class TestTagging:
@@ -779,7 +779,7 @@ def test_project_created_log_is_added(self, fake):
         assert first_log.action == NodeLog.PROJECT_CREATED
         params = first_log.params
         assert params['node'] == node._id
-        assert_datetime_equal(first_log.date, node.date_created)
+        assert_datetime_equal(first_log.date, node.created)
 
 # Copied from tests/test_models.py
 class TestContributorMethods:
@@ -2615,8 +2615,8 @@ def _cmp_fork_original(self, fork_user, fork_date, fork, original,
         assert fork._id in [n._id for n in original.forks.all()]
         # Note: Must cast ForeignList to list for comparison
         assert list(fork.contributors.all()) == [fork_user]
-        assert (fork_date - fork.date_created) < datetime.timedelta(seconds=30)
-        assert fork.forked_date != original.date_created
+        assert (fork_date - fork.created) < datetime.timedelta(seconds=30)
+        assert fork.forked_date != original.created
 
         # Test that pointers were copied correctly
         assert(
@@ -3474,7 +3474,7 @@ def test_simple_template(self, project, auth):
         )
 
         assert new.title == self._default_title(project)
-        assert new.date_created != project.date_created
+        assert new.created != project.created
         self._verify_log(new)
 
     def test_simple_template_title_changed(self, project, auth):
@@ -3492,7 +3492,7 @@ def test_simple_template_title_changed(self, project, auth):
         )
 
         assert new.title == changed_title
-        assert new.date_created != project.date_created
+        assert new.created != project.created
         self._verify_log(new)
 
     def test_use_as_template_adds_default_addons(self, project, auth):
diff --git a/osf_tests/test_oauth_application.py b/osf_tests/test_oauth_application.py
index 29dfcfa7984..379e3f459db 100644
--- a/osf_tests/test_oauth_application.py
+++ b/osf_tests/test_oauth_application.py
@@ -37,7 +37,7 @@ def test_new_app_is_not_flagged_as_deleted(self):
     @pytest.mark.skip('Django\'s editable=False does not prevent edits')
     def test_cant_edit_creation_date(self):
         with pytest.raises(AttributeError):
-            self.api_app.date_created = timezone.now()
+            self.api_app.created = timezone.now()
 
     def test_invalid_home_url_raises_exception(self):
         with pytest.raises(ValidationError):
diff --git a/osf_tests/test_preprint_summary.py b/osf_tests/test_preprint_summary.py
index 5d00c301080..bde71fcc3df 100644
--- a/osf_tests/test_preprint_summary.py
+++ b/osf_tests/test_preprint_summary.py
@@ -45,10 +45,10 @@ def test_get_preprint_count(self, preprint, date):
         resp._content = '{"hits" : {"total" : 1}}'
         requests.post.return_value = resp
 
-        field = PreprintService._meta.get_field('date_created')
+        field = PreprintService._meta.get_field('created')
         field.auto_now_add = False  # We have to fudge the time because Keen doesn't allow same day queries.
 
-        preprint.date_created = date['preprint_date_created']
+        preprint.created = date['preprint_date_created']
         preprint.save()
 
         field.auto_now_add = True
diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py
index a6cfcc67055..4fa5bfa90f8 100644
--- a/osf_tests/test_user.py
+++ b/osf_tests/test_user.py
@@ -1290,8 +1290,8 @@ def test_is_disabled_idempotency(self):
 
     @mock.patch('website.mailchimp_utils.get_mailchimp_api')
     def test_disable_account_and_remove_sessions(self, mock_mail):
-        session1 = SessionFactory(user=self.user, date_created=(timezone.now() - dt.timedelta(seconds=settings.OSF_SESSION_TIMEOUT)))
-        session2 = SessionFactory(user=self.user, date_created=(timezone.now() - dt.timedelta(seconds=settings.OSF_SESSION_TIMEOUT)))
+        session1 = SessionFactory(user=self.user, created=(timezone.now() - dt.timedelta(seconds=settings.OSF_SESSION_TIMEOUT)))
+        session2 = SessionFactory(user=self.user, created=(timezone.now() - dt.timedelta(seconds=settings.OSF_SESSION_TIMEOUT)))
 
         self.user.mailchimp_mailing_lists[settings.MAILCHIMP_GENERAL_LIST] = True
         self.user.save()
diff --git a/osf_tests/test_utils.py b/osf_tests/test_utils.py
index 9d3a7ebe901..bec0c787a22 100644
--- a/osf_tests/test_utils.py
+++ b/osf_tests/test_utils.py
@@ -15,19 +15,19 @@ class TestDisableAutoNowContextManager:
 
     def test_auto_now_not_updated(self, node):
         # update, save, confirm date changes
-        original_date_modified = node.date_modified
+        original_date_modified = node.modified
         node.title = 'A'
         node.save()
-        assert node.date_modified != original_date_modified
+        assert node.modified != original_date_modified
 
         # update and save within context manager, confirm date doesn't change (i.e. auto_now was set to False)
-        new_date_modified = node.date_modified
+        new_date_modified = node.modified
         with disable_auto_now_fields(Node):
             node.title = 'AB'
             node.save()
-        assert node.date_modified == new_date_modified
+        assert node.modified == new_date_modified
 
         # update, save, confirm date changes (i.e. that auto_now was set back to True)
         node.title = 'ABC'
         node.save()
-        assert node.date_modified != new_date_modified
+        assert node.modified != new_date_modified
diff --git a/scripts/analytics/institution_summary.py b/scripts/analytics/institution_summary.py
index d6551aac71e..f3b1b2c3165 100644
--- a/scripts/analytics/institution_summary.py
+++ b/scripts/analytics/institution_summary.py
@@ -34,7 +34,7 @@ def get_events(self, date):
         for institution in institutions:
             node_query = (
                 Q(is_deleted=False) &
-                Q(date_created__lt=query_datetime)
+                Q(created__lt=query_datetime)
             )
 
             project_query = node_query
diff --git a/scripts/analytics/node_summary.py b/scripts/analytics/node_summary.py
index 70bbf48208d..32888f42b0d 100644
--- a/scripts/analytics/node_summary.py
+++ b/scripts/analytics/node_summary.py
@@ -27,7 +27,7 @@ def get_events(self, date):
         timestamp_datetime = datetime(date.year, date.month, date.day).replace(tzinfo=pytz.UTC)
         query_datetime = timestamp_datetime + timedelta(1)
 
-        node_query = {'is_deleted': False, 'date_created__lte': query_datetime}
+        node_query = {'is_deleted': False, 'created__lte': query_datetime}
         project_query = node_query
 
         public_query = {'is_public': True}
diff --git a/scripts/generate_sitemap.py b/scripts/generate_sitemap.py
index fe4128d660d..720834ab863 100644
--- a/scripts/generate_sitemap.py
+++ b/scripts/generate_sitemap.py
@@ -176,13 +176,13 @@ def generate(self):
         objs = (AbstractNode.objects
             .filter(is_public=True, is_deleted=False, retraction_id__isnull=True)
             .exclude(type__in=["osf.collection", "osf.quickfilesnode"])
-            .values('guids___id', 'date_modified'))
+            .values('guids___id', 'modified'))
         progress.start(objs.count(), 'NODE: ')
         for obj in objs:
             try:
                 config = settings.SITEMAP_NODE_CONFIG
                 config['loc'] = urlparse.urljoin(settings.DOMAIN, '/{}/'.format(obj['guids___id']))
-                config['lastmod'] = obj['date_modified'].strftime('%Y-%m-%d')
+                config['lastmod'] = obj['modified'].strftime('%Y-%m-%d')
                 self.add_url(config)
             except Exception as e:
                 self.log_errors('NODE', obj['guids___id'], e)
@@ -197,7 +197,7 @@ def generate(self):
         osf = PreprintProvider.objects.get(_id='osf')
         for obj in objs:
             try:
-                preprint_date = obj.date_modified.strftime('%Y-%m-%d')
+                preprint_date = obj.modified.strftime('%Y-%m-%d')
                 config = settings.SITEMAP_PREPRINT_CONFIG
                 preprint_url = obj.url
                 provider = obj.provider
diff --git a/scripts/osfstorage/glacier_audit.py b/scripts/osfstorage/glacier_audit.py
index 55c2ee2545e..39ee9bcc11d 100644
--- a/scripts/osfstorage/glacier_audit.py
+++ b/scripts/osfstorage/glacier_audit.py
@@ -66,7 +66,7 @@ def get_job(vault, job_id=None):
 
 def get_targets(date):
     return FileVersion.objects.filter(
-        date_created__lt=date - DELTA_DATE, metadata__has_key='archive', location__isnull=False
+        created__lt=date - DELTA_DATE, metadata__has_key='archive', location__isnull=False
     ).iterator()
 
 
diff --git a/scripts/populate_new_and_noteworthy_projects.py b/scripts/populate_new_and_noteworthy_projects.py
index c6b23dc0c14..27c5d52e5c9 100644
--- a/scripts/populate_new_and_noteworthy_projects.py
+++ b/scripts/populate_new_and_noteworthy_projects.py
@@ -56,7 +56,7 @@ def get_new_and_noteworthy_nodes(noteworthy_links_node):
     """
     today = timezone.now()
     last_month = (today - dateutil.relativedelta.relativedelta(months=1))
-    data = Node.objects.filter(Q(date_created__gte=last_month) & Q(is_public=True) & Q(is_deleted=False)).get_roots()
+    data = Node.objects.filter(Q(created__gte=last_month) & Q(is_public=True) & Q(is_deleted=False)).get_roots()
     nodes = []
     for node in data:
         unique_actions = NodeLog.objects.filter(node=node.pk).order_by('action').distinct('action').count()
diff --git a/tests/test_auth.py b/tests/test_auth.py
index 270618621f9..294eef913aa 100644
--- a/tests/test_auth.py
+++ b/tests/test_auth.py
@@ -109,7 +109,7 @@ def test_confirm_email(self, mock_mail):
         assert_equal(res.status_code, 302)
         assert_equal('/', urlparse.urlparse(res.location).path)
         assert_equal(len(mock_mail.call_args_list), 1)
-        session = Session.objects.filter(data__auth_user_id=user._id).order_by('-date_modified').first()
+        session = Session.objects.filter(data__auth_user_id=user._id).order_by('-modified').first()
         assert_equal(len(session.data['status']), 1)
 
     def test_get_user_by_id(self):
diff --git a/tests/test_auth_basic_auth.py b/tests/test_auth_basic_auth.py
index 41dc133fee0..0f24a654261 100644
--- a/tests/test_auth_basic_auth.py
+++ b/tests/test_auth_basic_auth.py
@@ -93,7 +93,7 @@ def test_expired_cookie(self):
         with connection.cursor() as cursor:
             cursor.execute("""
                 UPDATE osf_session
-                SET date_created = %s
+                SET created = %s
                 WHERE id = %s
             """, [(timezone.now() - timedelta(seconds=settings.OSF_SESSION_TIMEOUT)), self.session.id])
         cookie = self.user1.get_or_create_cookie()
diff --git a/tests/test_identifiers.py b/tests/test_identifiers.py
index 73c3b0d1fc4..ec541826270 100644
--- a/tests/test_identifiers.py
+++ b/tests/test_identifiers.py
@@ -99,7 +99,7 @@ def test_metadata_for_preprint_has_correct_structure(self):
         assert pub_year.text == str(preprint.date_published.year)
 
         dates = root.find('{%s}dates' % metadata.NAMESPACE).getchildren()[0]
-        assert dates.text == preprint.date_modified.isoformat()
+        assert dates.text == preprint.modified.isoformat()
         assert dates.attrib['dateType'] == 'Updated'
 
         alternate_identifier = root.find('{%s}alternateIdentifiers' % metadata.NAMESPACE).getchildren()[0]
diff --git a/tests/test_preprints.py b/tests/test_preprints.py
index 9dc4fce2269..c167d154ed8 100644
--- a/tests/test_preprints.py
+++ b/tests/test_preprints.py
@@ -133,8 +133,8 @@ def test_preprint_created_date(self):
         self.preprint.set_primary_file(self.file, auth=self.auth, save=True)
         assert_equal(self.project.preprint_file._id, self.file._id)
 
-        assert(self.preprint.date_created)
-        assert_not_equal(self.project.date_created, self.preprint.date_created)
+        assert(self.preprint.created)
+        assert_not_equal(self.project.created, self.preprint.created)
 
     def test_non_admin_update_file(self):
         self.preprint.set_primary_file(self.file, auth=self.auth, save=True)
@@ -375,7 +375,7 @@ def test_format_preprint(self):
         assert preprint['title'] == self.preprint.node.title
         assert preprint['description'] == self.preprint.node.description
         assert preprint['is_deleted'] == (not self.preprint.is_published or not self.preprint.node.is_public or self.preprint.node.is_preprint_orphan)
-        assert preprint['date_updated'] == self.preprint.date_modified.isoformat()
+        assert preprint['date_updated'] == self.preprint.modified.isoformat()
         assert preprint['date_published'] == self.preprint.date_published.isoformat()
 
         tags = [nodes.pop(k) for k, v in nodes.items() if v['@type'] == 'tag']
@@ -481,7 +481,7 @@ def test_format_preprint_date_modified_node_updated(self):
         res = format_preprint(self.preprint, self.preprint.provider.share_publish_type)
         nodes = dict(enumerate(res))
         preprint = nodes.pop(next(k for k, v in nodes.items() if v['@type'] == 'preprint'))
-        assert preprint['date_updated'] == self.preprint.node.date_modified.isoformat()
+        assert preprint['date_updated'] == self.preprint.node.modified.isoformat()
 
     def test_format_preprint_nones(self):
         self.preprint.node.tags = []
@@ -499,7 +499,7 @@ def test_format_preprint_nones(self):
         assert preprint['title'] == self.preprint.node.title
         assert preprint['description'] == self.preprint.node.description
         assert preprint['is_deleted'] == (not self.preprint.is_published or not self.preprint.node.is_public or self.preprint.node.is_preprint_orphan)
-        assert preprint['date_updated'] == self.preprint.date_modified.isoformat()
+        assert preprint['date_updated'] == self.preprint.modified.isoformat()
         assert preprint.get('date_published') is None
 
         people = sorted([nodes.pop(k) for k, v in nodes.items() if v['@type'] == 'person'], key=lambda x: x['given_name'])
diff --git a/tests/test_registrations/test_retractions.py b/tests/test_registrations/test_retractions.py
index d03d01ed56d..7bf7f668446 100644
--- a/tests/test_registrations/test_retractions.py
+++ b/tests/test_registrations/test_retractions.py
@@ -730,9 +730,9 @@ def setUp(self):
             title='Subcomponent'
         )
         self.registration = RegistrationFactory(is_public=True, project=self.project)
-        self.component_registration = self.registration._nodes.order_by('date_created').first()
-        self.subproject_registration = list(self.registration._nodes.order_by('date_created'))[1]
-        self.subproject_component_registration = self.subproject_registration._nodes.order_by('date_created').first()
+        self.component_registration = self.registration._nodes.order_by('created').first()
+        self.subproject_registration = list(self.registration._nodes.order_by('created'))[1]
+        self.subproject_component_registration = self.subproject_registration._nodes.order_by('created').first()
 
     def test_POST_retraction_to_component_returns_HTTPError_BAD_REQUEST(self):
         res = self.app.post_json(
diff --git a/tests/test_websitefiles.py b/tests/test_websitefiles.py
index 390d47d5463..4329af45710 100644
--- a/tests/test_websitefiles.py
+++ b/tests/test_websitefiles.py
@@ -417,8 +417,8 @@ def build_tree(acc=None, parent=None, atleastone=False):
         round1 = build_tree(parent=branch, atleastone=True)
         round2 = build_tree(parent=parent, atleastone=True)
 
-        stay_deleted = [branch.to_storage()] + [child.to_storage() for child in round1]
-        get_restored = [parent.to_storage()] + [child.to_storage() for child in round2]
+        stay_deleted = [branch.to_storage(include_auto_now=False)] + [child.to_storage(include_auto_now=False) for child in round1]
+        get_restored = [parent.to_storage(include_auto_now=False)] + [child.to_storage(include_auto_now=False) for child in round2]
 
         branch.delete()
 
@@ -440,7 +440,7 @@ def build_tree(acc=None, parent=None, atleastone=False):
 
         for data in get_restored:
             assert_is(models.TrashedFileNode.load(data['_id']), None)
-            assert TestFileNode.load(data['_id']).to_storage() == data
+            assert TestFileNode.load(data['_id']).to_storage(include_auto_now=False) == data
 
     def test_metadata_url(self):
         pass
diff --git a/tests/test_webtests.py b/tests/test_webtests.py
index cd0d419673b..772c12ff4c7 100644
--- a/tests/test_webtests.py
+++ b/tests/test_webtests.py
@@ -808,14 +808,14 @@ def test_new_and_noteworthy_and_popular_nodes_show_in_explore_activity(self):
 
         # New and Noteworthy
         assert_in(str(self.project.title), res)
-        assert_in(str(self.project.date_created.date()), res)
+        assert_in(str(self.project.created.date()), res)
         assert_in(str(self.registration.title), res)
         assert_in(str(self.registration.registered_date.date()), res)
         assert_not_in(str(self.private_project.title), res)
 
         # Popular Projects and Registrations
         assert_in(str(self.popular_project.title), res)
-        assert_in(str(self.popular_project.date_created.date()), res)
+        assert_in(str(self.popular_project.created.date()), res)
         assert_in(str(self.popular_registration.title), res)
         assert_in(str(self.popular_registration.registered_date.date()), res)
 
diff --git a/website/conferences/views.py b/website/conferences/views.py
index a6a24d0b638..7bda4c391fd 100644
--- a/website/conferences/views.py
+++ b/website/conferences/views.py
@@ -174,7 +174,7 @@ def _render_conference_node(node, idx, conf):
         'category': conf.field_names['submission1'] if conf.field_names['submission1'] in tags else conf.field_names['submission2'],
         'download': download_count,
         'downloadUrl': download_url,
-        'dateCreated': node.date_created.isoformat(),
+        'dateCreated': node.created.isoformat(),
         'confName': conf.name,
         'confUrl': web_url_for('conference_results', meeting=conf.endpoint),
         'tags': ' '.join(tags)
diff --git a/website/identifiers/metadata.py b/website/identifiers/metadata.py
index 0ab8aa867df..9643ae7e689 100644
--- a/website/identifiers/metadata.py
+++ b/website/identifiers/metadata.py
@@ -65,7 +65,7 @@ def datacite_metadata_for_node(node, doi, pretty_print=False):
         title=node.title,
         creators=creators,
         publisher='Open Science Framework',
-        publication_year=getattr(node.registered_date or node.date_created, 'year'),
+        publication_year=getattr(node.registered_date or node.created, 'year'),
         pretty_print=pretty_print
     )
 
@@ -118,7 +118,7 @@ def datacite_metadata_for_preprint(preprint, doi, pretty_print=False):
         E.titles(E.title(remove_control_characters(preprint.node.title))),
         E.publisher(preprint.provider.name),
         E.publicationYear(str(getattr(preprint.date_published, 'year'))),
-        E.dates(E.date(preprint.date_modified.isoformat(), dateType='Updated')),
+        E.dates(E.date(preprint.modified.isoformat(), dateType='Updated')),
         E.alternateIdentifiers(E.alternateIdentifier(settings.DOMAIN + preprint._id, alternateIdentifierType='URL')),
         E.descriptions(E.description(remove_control_characters(preprint.node.description), descriptionType='Abstract')),
     )
diff --git a/website/preprints/tasks.py b/website/preprints/tasks.py
index 53d8b980233..b42cfcbf23f 100644
--- a/website/preprints/tasks.py
+++ b/website/preprints/tasks.py
@@ -122,7 +122,7 @@ def format_preprint(preprint, share_type, old_subjects=None):
         # If we send a date_updated that is <= the one we previously sent, SHARE will ignore any changes
         # because it looks like a race condition that arose from preprints being resent to SHARE on
         # every step of preprint creation.
-        'date_updated': max(preprint.date_modified, preprint.node.date_modified).isoformat(),
+        'date_updated': max(preprint.modified, preprint.node.modified).isoformat(),
         'date_published': preprint.date_published.isoformat() if preprint.date_published else None
     })
 
diff --git a/website/project/views/node.py b/website/project/views/node.py
index c71f183d3e2..15438d37e2c 100644
--- a/website/project/views/node.py
+++ b/website/project/views/node.py
@@ -542,9 +542,8 @@ def update_node(auth, node, **kwargs):
     updated_fields_dict = {
         key: getattr(node, key) if key != 'tags' else [str(tag) for tag in node.tags]
         for key in updated_field_names
-        if key != 'logs' and key != 'date_modified'
+        if key != 'logs' and key != 'modified' and key != 'last_logged'
     }
-    node.save()
     return {'updated_fields': updated_fields_dict}
 
 @must_be_valid_project
@@ -698,8 +697,8 @@ def _view_project(node, auth, primary=False,
             'in_dashboard': in_bookmark_collection,
             'is_public': node.is_public,
             'is_archiving': node.archiving,
-            'date_created': iso8601format(node.date_created),
-            'date_modified': iso8601format(node.logs.latest().date) if node.logs.exists() else '',
+            'date_created': iso8601format(node.created),
+            'date_modified': iso8601format(node.last_logged) if node.last_logged else '',
             'tags': list(node.tags.filter(system=False).values_list('name', flat=True)),
             'children': node.nodes_active.exists(),
             'child_exists': Node.objects.get_children(node, active=True).exists(),
@@ -1077,8 +1076,8 @@ def _serialize_node_search(node):
         data['title'] += ' (registration)'
         data['dateRegistered'] = node.registered_date.isoformat()
     else:
-        data['dateCreated'] = node.date_created.isoformat()
-        data['dateModified'] = node.date_modified.isoformat()
+        data['dateCreated'] = node.created.isoformat()
+        data['dateModified'] = node.modified.isoformat()
 
     first_author = node.visible_contributors[0]
     data['firstAuthor'] = first_author.family_name or first_author.given_name or first_author.fullname
diff --git a/website/search/elastic_search.py b/website/search/elastic_search.py
index 7c00393b4b1..ac855e76d46 100644
--- a/website/search/elastic_search.py
+++ b/website/search/elastic_search.py
@@ -370,7 +370,7 @@ def serialize_node(node, category):
         'registered_date': node.registered_date,
         'wikis': {},
         'parent_id': parent_id,
-        'date_created': node.date_created,
+        'date_created': node.created,
         'license': serialize_node_license_record(node.license),
         'affiliated_institutions': list(node.affiliated_institutions.values_list('name', flat=True)),
         'boost': int(not node.is_registration) + 1,  # This is for making registered projects less relevant
diff --git a/website/search_migration/migrate.py b/website/search_migration/migrate.py
index afb68556295..f49923aeab3 100644
--- a/website/search_migration/migrate.py
+++ b/website/search_migration/migrate.py
@@ -76,7 +76,7 @@ def migrate(delete, index=None, app=None):
     set_up_alias(index, new_index)
 
     # migrate nodes modified since start
-    migrate_nodes(new_index, query=Q(date_modified__gte=start_time))
+    migrate_nodes(new_index, query=Q(modified__gte=start_time))
 
     if delete:
         delete_old(new_index)
diff --git a/website/templates/public/pages/active_nodes.mako b/website/templates/public/pages/active_nodes.mako
index 48a6c720978..dddb56a56b6 100644
--- a/website/templates/public/pages/active_nodes.mako
+++ b/website/templates/public/pages/active_nodes.mako
@@ -29,7 +29,7 @@
             <section id='newNoteworthyProjects'>
                 <h3 class='anchor'>New and noteworthy projects</h3>
                 <div class='project-list'>
-                    ${node_list(new_and_noteworthy_projects, prefix='newest_public', metric='date_created')}
+                  ${node_list(new_and_noteworthy_projects, prefix='newest_public', metric='date_created')}
                 </div>
             </section>
             <section id='newPublicRegistrations' class="m-t-lg">
@@ -41,7 +41,7 @@
             <section id='popularPublicProjects' class="m-t-lg">
                 <h3 class='anchor'>Popular public projects</h3>
                 <div class='project-list'>
-                    ${node_list(popular_public_projects, prefix='most_viewed', metric='date_created')}
+                  ${node_list(popular_public_projects, prefix='most_viewed', metric='date_created')}
                 </div>
             </section>
             <section id='popularPublicRegistrations' class="m-t-lg">
@@ -65,8 +65,8 @@
                     )
                 else:
                     explicit_date = '{month} {dt.day} {dt.year}'.format(
-                    dt=node.date_created.date(),
-                    month=node.date_created.date().strftime('%B')
+                    dt=node.created.date(),
+                    month=node.created.date().strftime('%B')
                 )
 
             %>
@@ -78,9 +78,9 @@
                         </h4>
                     </div>
                     <div class="col-md-2">
-                        % if metric == 'date_created':
+                      % if metric == 'date_created':
                             <span class="project-meta pull-right" rel='tooltip' data-original-title='Created: ${explicit_date}'>
-                                ${node.date_created.date()}
+                              ${node.created.date()}
                             </span>
                         % elif metric == 'registered_date':
                             <span class="project-meta pull-right" rel='tooltip' data-original-title='Registered: ${explicit_date}'>

From d031dba83060e17ab66d258931f156167ba7fe1d Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Fri, 21 Jul 2017 16:54:00 -0400
Subject: [PATCH 143/192] Allow migration finalization     -remove premigrate
 celery artifacts

---
 scripts/premigrate_created_modified.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/scripts/premigrate_created_modified.py b/scripts/premigrate_created_modified.py
index 1af3d45cfc2..7d7604376d2 100644
--- a/scripts/premigrate_created_modified.py
+++ b/scripts/premigrate_created_modified.py
@@ -303,7 +303,6 @@ def main():
         if pargs.start:
             add_columns()
         elif pargs.finish:
-            raise Exception('Not until data is migrated')
             finalize_migration()
         else:
             raise Exception('Must specify start or finish')

From 29c81a0fc930b28f748438c6701c58c8a18c7e3d Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Wed, 25 Oct 2017 15:26:45 -0400
Subject: [PATCH 144/192] Update for Moderation changes   -bump migrations

---
 .../migrations/0002_auto_20170808_1140.py     |  7 ++--
 .../box/migrations/0003_auto_20170713_1125.py |  5 ++-
 .../migrations/0003_auto_20170713_1125.py     |  5 ++-
 .../migrations/0003_auto_20170713_1125.py     |  5 ++-
 .../migrations/0003_auto_20170713_1125.py     |  5 ++-
 .../migrations/0003_auto_20170713_1125.py     |  3 +-
 .../migrations/0003_auto_20170713_1125.py     |  5 ++-
 .../migrations/0002_auto_20171121_1426.py     | 40 +++++++++++++++++++
 addons/gitlab/models.py                       |  4 +-
 .../migrations/0003_auto_20170713_1125.py     |  5 ++-
 .../migrations/0003_auto_20170713_1125.py     |  5 ++-
 .../migrations/0002_auto_20171121_1426.py     | 40 +++++++++++++++++++
 .../migrations/0003_auto_20170713_1125.py     |  3 +-
 .../migrations/0003_auto_20170713_1125.py     |  5 ++-
 .../s3/migrations/0003_auto_20170713_1125.py  |  5 ++-
 .../migrations/0003_auto_20170713_1125.py     |  3 +-
 .../migrations/0005_auto_20170713_1125.py     |  5 ++-
 .../migrations/0003_auto_20170713_1125.py     |  5 ++-
 api/actions/serializers.py                    |  4 +-
 api/preprints/views.py                        |  2 +-
 api/users/views.py                            |  2 +-
 api_tests/reviews/mixins/filter_mixins.py     |  8 ++--
 api_tests/users/views/test_user_actions.py    |  6 +--
 .../commands/add_notification_subscription.py | 13 ++++--
 ...1_add_reviews_notification_subscription.py |  4 +-
 ...es.py => 0068_creator_modified_renames.py} |  2 +-
 ....py => 0069_skippable_created_modified.py} | 12 +++++-
 osf/models/action.py                          |  3 --
 reviews/models/mixins.py                      |  6 +--
 scripts/premigrate_created_modified.py        |  2 +
 30 files changed, 164 insertions(+), 55 deletions(-)
 create mode 100644 addons/gitlab/migrations/0002_auto_20171121_1426.py
 create mode 100644 addons/onedrive/migrations/0002_auto_20171121_1426.py
 rename osf/migrations/{0065_creator_modified_renames.py => 0068_creator_modified_renames.py} (94%)
 rename osf/migrations/{0066_skippable_created_modified.py => 0069_skippable_created_modified.py} (98%)

diff --git a/addons/bitbucket/migrations/0002_auto_20170808_1140.py b/addons/bitbucket/migrations/0002_auto_20170808_1140.py
index 468fdbfa848..de53c330e55 100644
--- a/addons/bitbucket/migrations/0002_auto_20170808_1140.py
+++ b/addons/bitbucket/migrations/0002_auto_20170808_1140.py
@@ -1,9 +1,10 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-08-08 16:40
 from __future__ import unicode_literals
+import datetime
+import pytz
 
 from django.db import migrations
-import django.utils.timezone
 import django_extensions.db.fields
 
 
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodesettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
@@ -28,7 +29,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='usersettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/addons/box/migrations/0003_auto_20170713_1125.py b/addons/box/migrations/0003_auto_20170713_1125.py
index 20c0830872c..9dab29b4df2 100644
--- a/addons/box/migrations/0003_auto_20170713_1125.py
+++ b/addons/box/migrations/0003_auto_20170713_1125.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-13 16:25
 from __future__ import unicode_literals
+import pytz
 
 import datetime
 from django.db import migrations
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodesettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
@@ -28,7 +29,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='usersettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/addons/dataverse/migrations/0003_auto_20170713_1125.py b/addons/dataverse/migrations/0003_auto_20170713_1125.py
index 31bba6403b4..e17632303bd 100644
--- a/addons/dataverse/migrations/0003_auto_20170713_1125.py
+++ b/addons/dataverse/migrations/0003_auto_20170713_1125.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-13 16:25
 from __future__ import unicode_literals
+import pytz
 
 import datetime
 from django.db import migrations
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodesettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
@@ -28,7 +29,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='usersettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/addons/dropbox/migrations/0003_auto_20170713_1125.py b/addons/dropbox/migrations/0003_auto_20170713_1125.py
index 31f9667de96..ce0fcbb8958 100644
--- a/addons/dropbox/migrations/0003_auto_20170713_1125.py
+++ b/addons/dropbox/migrations/0003_auto_20170713_1125.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-13 16:25
 from __future__ import unicode_literals
+import pytz
 
 import datetime
 from django.db import migrations
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodesettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
@@ -28,7 +29,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='usersettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/addons/figshare/migrations/0003_auto_20170713_1125.py b/addons/figshare/migrations/0003_auto_20170713_1125.py
index b05a7e2da5d..a132859a90b 100644
--- a/addons/figshare/migrations/0003_auto_20170713_1125.py
+++ b/addons/figshare/migrations/0003_auto_20170713_1125.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-13 16:25
 from __future__ import unicode_literals
+import pytz
 
 import datetime
 from django.db import migrations
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodesettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
@@ -28,7 +29,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='usersettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/addons/forward/migrations/0003_auto_20170713_1125.py b/addons/forward/migrations/0003_auto_20170713_1125.py
index 0f6fb9e6dd8..add7aa751da 100644
--- a/addons/forward/migrations/0003_auto_20170713_1125.py
+++ b/addons/forward/migrations/0003_auto_20170713_1125.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-13 16:25
 from __future__ import unicode_literals
+import pytz
 
 import datetime
 from django.db import migrations
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodesettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/addons/github/migrations/0003_auto_20170713_1125.py b/addons/github/migrations/0003_auto_20170713_1125.py
index 912a107f16f..084e1b73a7d 100644
--- a/addons/github/migrations/0003_auto_20170713_1125.py
+++ b/addons/github/migrations/0003_auto_20170713_1125.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-13 16:25
 from __future__ import unicode_literals
+import pytz
 
 import datetime
 from django.db import migrations
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodesettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
@@ -28,7 +29,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='usersettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/addons/gitlab/migrations/0002_auto_20171121_1426.py b/addons/gitlab/migrations/0002_auto_20171121_1426.py
new file mode 100644
index 00000000000..3f0839bfb6c
--- /dev/null
+++ b/addons/gitlab/migrations/0002_auto_20171121_1426.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.7 on 2017-11-21 20:26
+from __future__ import unicode_literals
+import datetime
+import pytz
+
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_gitlab', '0001_initial'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/gitlab/models.py b/addons/gitlab/models.py
index 14d27d8fc0b..e88bda39e2f 100644
--- a/addons/gitlab/models.py
+++ b/addons/gitlab/models.py
@@ -53,12 +53,12 @@ def __repr__(self):
         )
 
 
-class UserSettings(BaseStorageAddon, BaseOAuthUserSettings):
+class UserSettings(BaseOAuthUserSettings):
     oauth_provider = GitLabProvider
     serializer = GitLabSerializer
 
 
-class NodeSettings(BaseStorageAddon, BaseOAuthNodeSettings):
+class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
     oauth_provider = GitLabProvider
     serializer = GitLabSerializer
 
diff --git a/addons/googledrive/migrations/0003_auto_20170713_1125.py b/addons/googledrive/migrations/0003_auto_20170713_1125.py
index 7617ec73569..8dde20244ca 100644
--- a/addons/googledrive/migrations/0003_auto_20170713_1125.py
+++ b/addons/googledrive/migrations/0003_auto_20170713_1125.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-13 16:25
 from __future__ import unicode_literals
+import pytz
 
 import datetime
 from django.db import migrations
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodesettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
@@ -28,7 +29,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='usersettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/addons/mendeley/migrations/0003_auto_20170713_1125.py b/addons/mendeley/migrations/0003_auto_20170713_1125.py
index 23cdb22461b..89b92735338 100644
--- a/addons/mendeley/migrations/0003_auto_20170713_1125.py
+++ b/addons/mendeley/migrations/0003_auto_20170713_1125.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-13 16:25
 from __future__ import unicode_literals
+import pytz
 
 import datetime
 from django.db import migrations
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodesettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
@@ -28,7 +29,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='usersettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/addons/onedrive/migrations/0002_auto_20171121_1426.py b/addons/onedrive/migrations/0002_auto_20171121_1426.py
new file mode 100644
index 00000000000..df83d82562a
--- /dev/null
+++ b/addons/onedrive/migrations/0002_auto_20171121_1426.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.7 on 2017-11-21 20:26
+from __future__ import unicode_literals
+import datetime
+import pytz
+
+from django.db import migrations
+import django_extensions.db.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('addons_onedrive', '0001_initial'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='nodesettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='nodesettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='usersettings',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
+    ]
diff --git a/addons/osfstorage/migrations/0003_auto_20170713_1125.py b/addons/osfstorage/migrations/0003_auto_20170713_1125.py
index 9f3975674f8..4805d3e8a47 100644
--- a/addons/osfstorage/migrations/0003_auto_20170713_1125.py
+++ b/addons/osfstorage/migrations/0003_auto_20170713_1125.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-13 16:25
 from __future__ import unicode_literals
+import pytz
 
 import datetime
 from django.db import migrations
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodesettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/addons/owncloud/migrations/0003_auto_20170713_1125.py b/addons/owncloud/migrations/0003_auto_20170713_1125.py
index 453bf4460a4..e67eedf6af8 100644
--- a/addons/owncloud/migrations/0003_auto_20170713_1125.py
+++ b/addons/owncloud/migrations/0003_auto_20170713_1125.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-13 16:25
 from __future__ import unicode_literals
+import pytz
 
 import datetime
 from django.db import migrations
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodesettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
@@ -28,7 +29,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='usersettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/addons/s3/migrations/0003_auto_20170713_1125.py b/addons/s3/migrations/0003_auto_20170713_1125.py
index d7ddb2b1578..c7dbe0dda5c 100644
--- a/addons/s3/migrations/0003_auto_20170713_1125.py
+++ b/addons/s3/migrations/0003_auto_20170713_1125.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-13 16:25
 from __future__ import unicode_literals
+import pytz
 
 import datetime
 from django.db import migrations
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodesettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
@@ -28,7 +29,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='usersettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/addons/twofactor/migrations/0003_auto_20170713_1125.py b/addons/twofactor/migrations/0003_auto_20170713_1125.py
index 8240ddfed43..bdadf69975d 100644
--- a/addons/twofactor/migrations/0003_auto_20170713_1125.py
+++ b/addons/twofactor/migrations/0003_auto_20170713_1125.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-13 16:25
 from __future__ import unicode_literals
+import pytz
 
 import datetime
 from django.db import migrations
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='usersettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/addons/wiki/migrations/0005_auto_20170713_1125.py b/addons/wiki/migrations/0005_auto_20170713_1125.py
index 5428ae4f108..ebbef480b7c 100644
--- a/addons/wiki/migrations/0005_auto_20170713_1125.py
+++ b/addons/wiki/migrations/0005_auto_20170713_1125.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-13 16:25
 from __future__ import unicode_literals
+import pytz
 
 import datetime
 from django.db import migrations
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodesettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
@@ -28,7 +29,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodewikipage',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/addons/zotero/migrations/0003_auto_20170713_1125.py b/addons/zotero/migrations/0003_auto_20170713_1125.py
index 5d18e064c14..48e9ab82886 100644
--- a/addons/zotero/migrations/0003_auto_20170713_1125.py
+++ b/addons/zotero/migrations/0003_auto_20170713_1125.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-13 16:25
 from __future__ import unicode_literals
+import pytz
 
 import datetime
 from django.db import migrations
@@ -17,7 +18,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='nodesettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
@@ -28,7 +29,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='usersettings',
             name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0), verbose_name='created'),
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=datetime.datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc), verbose_name='created'),
             preserve_default=False,
         ),
         migrations.AddField(
diff --git a/api/actions/serializers.py b/api/actions/serializers.py
index c3a5fc5267b..ec73823dc73 100644
--- a/api/actions/serializers.py
+++ b/api/actions/serializers.py
@@ -80,8 +80,8 @@ class ActionSerializer(JSONAPISerializer):
     from_state = ser.ChoiceField(choices=States.choices(), read_only=True)
     to_state = ser.ChoiceField(choices=States.choices(), read_only=True)
 
-    date_created = ser.DateTimeField(read_only=True)
-    date_modified = ser.DateTimeField(read_only=True)
+    date_created = ser.DateTimeField(source='created', read_only=True)
+    date_modified = ser.DateTimeField(source='modified', read_only=True)
 
     provider = RelationshipField(
         read_only=True,
diff --git a/api/preprints/views.py b/api/preprints/views.py
index e67f5416186..e49e63b109d 100644
--- a/api/preprints/views.py
+++ b/api/preprints/views.py
@@ -457,7 +457,7 @@ class PreprintActionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin,
     serializer_class = ActionSerializer
     model_class = Action
 
-    ordering = ('-date_created',)
+    ordering = ('-created',)
     view_category = 'preprints'
     view_name = 'preprint-action-list'
 
diff --git a/api/users/views.py b/api/users/views.py
index 41f5e85984e..d3a966aefc6 100644
--- a/api/users/views.py
+++ b/api/users/views.py
@@ -839,7 +839,7 @@ class UserActionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin, Use
     serializer_class = ActionSerializer
     model_class = Action
 
-    ordering = ('-date_created',)
+    ordering = ('-created',)
     view_category = 'users'
     view_name = 'user-action-list'
 
diff --git a/api_tests/reviews/mixins/filter_mixins.py b/api_tests/reviews/mixins/filter_mixins.py
index e1cca87a4eb..7ef01b43502 100644
--- a/api_tests/reviews/mixins/filter_mixins.py
+++ b/api_tests/reviews/mixins/filter_mixins.py
@@ -110,20 +110,20 @@ def test_filter_actions(self, app, url, user, expected_actions):
 
         # filter by date_created
         expected = set([l._id for l in expected_actions])
-        actual = get_actual(app, url, user, date_created=action.date_created)
+        actual = get_actual(app, url, user, date_created=action.created)
         assert expected == actual
 
         expected = set()
-        actual = get_actual(app, url, user, date_created=action.date_created - timedelta(days=1))
+        actual = get_actual(app, url, user, date_created=action.created - timedelta(days=1))
         assert expected == actual
 
         # filter by date_modified
         expected = set([l._id for l in expected_actions])
-        actual = get_actual(app, url, user, date_modified=action.date_modified)
+        actual = get_actual(app, url, user, date_modified=action.modified)
         assert expected == actual
 
         expected = set()
-        actual = get_actual(app, url, user, date_modified=action.date_modified - timedelta(days=1))
+        actual = get_actual(app, url, user, date_modified=action.modified - timedelta(days=1))
         assert expected == actual
 
         # filter by target
diff --git a/api_tests/users/views/test_user_actions.py b/api_tests/users/views/test_user_actions.py
index 1286e666279..4501d986edf 100644
--- a/api_tests/users/views/test_user_actions.py
+++ b/api_tests/users/views/test_user_actions.py
@@ -225,14 +225,14 @@ def test_valid_transitions(self, mock_ezid, app, url, preprint, provider, modera
                 res = app.post_json_api(url, payload, auth=moderator.auth)
                 assert res.status_code == 201
 
-                action = preprint.actions.order_by('-date_created').first()
+                action = preprint.actions.order_by('-created').first()
                 assert action.trigger == trigger
 
                 preprint.refresh_from_db()
                 assert preprint.reviews_state == to_state
                 if preprint.in_public_reviews_state:
                     assert preprint.is_published
-                    assert preprint.date_published == action.date_created
+                    assert preprint.date_published == action.created
                     assert mock_ezid.called
                     mock_ezid.reset_mock()
                 else:
@@ -243,4 +243,4 @@ def test_valid_transitions(self, mock_ezid, app, url, preprint, provider, modera
                 if trigger == 'edit_comment':
                     assert preprint.date_last_transitioned is None
                 else:
-                    assert preprint.date_last_transitioned == action.date_created
+                    assert preprint.date_last_transitioned == action.created
diff --git a/osf/management/commands/add_notification_subscription.py b/osf/management/commands/add_notification_subscription.py
index 790a0dabe42..ea0ed8a954e 100644
--- a/osf/management/commands/add_notification_subscription.py
+++ b/osf/management/commands/add_notification_subscription.py
@@ -12,8 +12,6 @@
 from django.core.management.base import BaseCommand
 from django.db import transaction
 
-from osf.models import OSFUser, NotificationSubscription
-
 from website.notifications.utils import to_subscription_key
 
 from scripts import utils as script_utils
@@ -21,7 +19,13 @@
 logger = logging.getLogger(__name__)
 
 
-def add_reviews_notification_setting(notification_type):
+def add_reviews_notification_setting(notification_type, state=None):
+    if state:
+        OSFUser = state.get_model('osf', 'OSFUser')
+        NotificationSubscription = state.get_model('osf', 'NotificationSubscription')
+    else:
+        from osf.models import OSFUser, NotificationSubscription
+
     active_users = OSFUser.objects.filter(date_confirmed__isnull=False).exclude(date_disabled__isnull=False).exclude(is_active=False).order_by('id')
     total_active_users = active_users.count()
 
@@ -66,9 +70,10 @@ def add_arguments(self, parser):
 
     def handle(self, *args, **options):
         dry_run = options.get('dry_run', False)
+        state = options.get('state', None)
         if not dry_run:
             script_utils.add_file_logger(logger, __file__)
         with transaction.atomic():
-            add_reviews_notification_setting(notification_type=options['notification'])
+            add_reviews_notification_setting(notification_type=options['notification'], state=state)
             if dry_run:
                 raise RuntimeError('Dry run, transaction rolled back.')
diff --git a/osf/migrations/0061_add_reviews_notification_subscription.py b/osf/migrations/0061_add_reviews_notification_subscription.py
index 5338da23856..d40df94e4c9 100644
--- a/osf/migrations/0061_add_reviews_notification_subscription.py
+++ b/osf/migrations/0061_add_reviews_notification_subscription.py
@@ -6,8 +6,8 @@
 from django.core.management import call_command
 
 
-def add_reviews_notification_subscription(apps, schema_editor):
-    call_command('add_notification_subscription', '--notification=global_reviews')
+def add_reviews_notification_subscription(state, schema_editor):
+    call_command('add_notification_subscription', '--notification=global_reviews', state=state)
 
 class Migration(migrations.Migration):
 
diff --git a/osf/migrations/0065_creator_modified_renames.py b/osf/migrations/0068_creator_modified_renames.py
similarity index 94%
rename from osf/migrations/0065_creator_modified_renames.py
rename to osf/migrations/0068_creator_modified_renames.py
index ca6af016b9d..11b56a3d2cf 100644
--- a/osf/migrations/0065_creator_modified_renames.py
+++ b/osf/migrations/0068_creator_modified_renames.py
@@ -10,7 +10,7 @@
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('osf', '0064_auto_20171019_0918'),
+        ('osf', '0067_auto_20171121_1050'),
     ]
 
     operations = [
diff --git a/osf/migrations/0066_skippable_created_modified.py b/osf/migrations/0069_skippable_created_modified.py
similarity index 98%
rename from osf/migrations/0066_skippable_created_modified.py
rename to osf/migrations/0069_skippable_created_modified.py
index d227acafad2..e2a22eede3b 100644
--- a/osf/migrations/0066_skippable_created_modified.py
+++ b/osf/migrations/0069_skippable_created_modified.py
@@ -14,7 +14,7 @@
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('osf', '0065_creator_modified_renames'),
+        ('osf', '0068_creator_modified_renames'),
     ]
 
     operations = [
@@ -536,4 +536,14 @@ class Migration(migrations.Migration):
             name='modified',
             field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
         ),
+        migrations.AlterField(
+            model_name='action',
+            name='created',
+            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created'),
+        ),
+        migrations.AlterField(
+            model_name='action',
+            name='modified',
+            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+        ),
     ]
diff --git a/osf/models/action.py b/osf/models/action.py
index f3c712bf93d..30454659ef1 100644
--- a/osf/models/action.py
+++ b/osf/models/action.py
@@ -9,7 +9,6 @@
 from reviews.workflow import States
 
 from osf.models.base import BaseModel, ObjectIDMixin
-from osf.utils.fields import NonNaiveDateTimeField
 
 
 class Action(ObjectIDMixin, BaseModel):
@@ -26,5 +25,3 @@ class Action(ObjectIDMixin, BaseModel):
     comment = models.TextField(blank=True)
 
     is_deleted = models.BooleanField(default=False)
-    date_created = NonNaiveDateTimeField(auto_now_add=True)
-    date_modified = NonNaiveDateTimeField(auto_now=True)
diff --git a/reviews/models/mixins.py b/reviews/models/mixins.py
index 47abbb10a18..f219d43bdf3 100644
--- a/reviews/models/mixins.py
+++ b/reviews/models/mixins.py
@@ -169,13 +169,13 @@ def save_action(self, ev):
         )
 
     def update_last_transitioned(self, ev):
-        now = self.action.date_created if self.action is not None else timezone.now()
+        now = self.action.created if self.action is not None else timezone.now()
         self.reviewable.date_last_transitioned = now
 
     def save_changes(self, ev):
         node = self.reviewable.node
         node._has_abandoned_preprint = False
-        now = self.action.date_created if self.action is not None else timezone.now()
+        now = self.action.created if self.action is not None else timezone.now()
         should_publish = self.reviewable.in_public_reviews_state
         if should_publish and not self.reviewable.is_published:
             if not (self.reviewable.node.preprint_file and self.reviewable.node.preprint_file.node == self.reviewable.node):
@@ -247,7 +247,7 @@ def get_context(self):
 @reviews_signals.reviews_email.connect
 def reviews_notification(self, creator, template, context, action):
     recipients = list(action.target.node.contributors)
-    time_now = action.date_created if action is not None else timezone.now()
+    time_now = action.created if action is not None else timezone.now()
     node = action.target.node
     emails.notify_global_event(
         event='global_reviews',
diff --git a/scripts/premigrate_created_modified.py b/scripts/premigrate_created_modified.py
index 7d7604376d2..fff3c91251e 100644
--- a/scripts/premigrate_created_modified.py
+++ b/scripts/premigrate_created_modified.py
@@ -115,6 +115,8 @@
     "ALTER TABLE osf_useractivitycounter ADD COLUMN modified timestamp with time zone;",
     "ALTER TABLE osf_abstractnode RENAME COLUMN date_created TO created;",
     "ALTER TABLE osf_abstractnode RENAME COLUMN date_modified TO modified;",
+    "ALTER TABLE osf_action RENAME COLUMN date_created TO created;",
+    "ALTER TABLE osf_action RENAME COLUMN date_modified TO modified;",
     "ALTER TABLE osf_apioauth2application RENAME COLUMN date_created TO created;",
     "ALTER TABLE osf_comment RENAME COLUMN date_created TO created;",
     "ALTER TABLE osf_comment RENAME COLUMN date_modified TO modified;",

From 977d2f545ac6f9d33ace98a1725b008462cf70e9 Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Tue, 21 Nov 2017 18:34:12 -0500
Subject: [PATCH 145/192] Simplify deployment

---
 .../0069_skippable_created_modified.py        | 1021 ++++++++---------
 osf/migrations/0070_auto_20171121_1805.py     |   76 ++
 2 files changed, 567 insertions(+), 530 deletions(-)
 create mode 100644 osf/migrations/0070_auto_20171121_1805.py

diff --git a/osf/migrations/0069_skippable_created_modified.py b/osf/migrations/0069_skippable_created_modified.py
index e2a22eede3b..9b6bc3bdfc7 100644
--- a/osf/migrations/0069_skippable_created_modified.py
+++ b/osf/migrations/0069_skippable_created_modified.py
@@ -4,12 +4,502 @@
 # is utilized. It allows the larger of these tables to be updated asynchronously without downtime.
 # It requires not releasing these model changes until the beat tasks are approximately complete.
 from __future__ import unicode_literals
+import logging
 
 from django.db import migrations
 import django.utils.timezone
 import django_extensions.db.fields
 import osf.utils.fields
+from website import settings
 
+logger = logging.getLogger(__file__)
+
+PREMIGRATED = '1-minute-incremental-migrations' in settings.CeleryConfig.beat_schedule
+
+def finalize_premigrated(state, schema):
+    from scripts.premigrate_created_modified import finalize_migration
+    logger.info('Finalizing pre-migration')
+    finalize_migration()
+
+OPERATIONS = [
+    migrations.AlterModelOptions(
+        name='fileversion',
+        options={'ordering': ('-created',)},
+    ),
+    migrations.RenameField(
+        model_name='action',
+        old_name='date_modified',
+        new_name='modified',
+    ),
+    migrations.RenameField(
+        model_name='action',
+        old_name='date_created',
+        new_name='created',
+    ),
+    migrations.RenameField(
+        model_name='fileversion',
+        old_name='date_modified',
+        new_name='external_modified',
+    ),
+    migrations.RenameField(
+        model_name='abstractnode',
+        old_name='date_created',
+        new_name='created',
+    ),
+    migrations.RenameField(
+        model_name='abstractnode',
+        old_name='date_modified',
+        new_name='last_logged',
+    ),
+    migrations.RenameField(
+        model_name='apioauth2application',
+        old_name='date_created',
+        new_name='created',
+    ),
+    migrations.RenameField(
+        model_name='comment',
+        old_name='date_created',
+        new_name='created',
+    ),
+    migrations.RenameField(
+        model_name='comment',
+        old_name='date_modified',
+        new_name='modified',
+    ),
+    migrations.RenameField(
+        model_name='fileversion',
+        old_name='date_created',
+        new_name='created',
+    ),
+    migrations.RenameField(
+        model_name='preprintservice',
+        old_name='date_created',
+        new_name='created',
+    ),
+    migrations.RenameField(
+        model_name='preprintservice',
+        old_name='date_modified',
+        new_name='modified'
+    ),
+    migrations.RenameField(
+        model_name='privatelink',
+        old_name='date_created',
+        new_name='created',
+    ),
+    migrations.RenameField(
+        model_name='session',
+        old_name='date_created',
+        new_name='created',
+    ),
+    migrations.RenameField(
+        model_name='session',
+        old_name='date_modified',
+        new_name='modified',
+    ),
+    migrations.AddField(
+        model_name='abstractnode',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='apioauth2application',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='apioauth2personaltoken',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='apioauth2personaltoken',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='apioauth2scope',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='apioauth2scope',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='archivejob',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='archivejob',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='archivetarget',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='archivetarget',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='basefilenode',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='basefilenode',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='blacklistguid',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='blacklistguid',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='citationstyle',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='citationstyle',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='conference',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='conference',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='draftregistration',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='draftregistration',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='draftregistrationapproval',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='draftregistrationapproval',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='draftregistrationlog',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='draftregistrationlog',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='embargo',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='embargo',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='embargoterminationapproval',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='embargoterminationapproval',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='externalaccount',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='externalaccount',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='fileversion',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='guid',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='identifier',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='identifier',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='institution',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='institution',
+        name='last_logged',
+        field=osf.utils.fields.NonNaiveDateTimeField(blank=True, db_index=True, default=django.utils.timezone.now, null=True),
+    ),
+    migrations.AddField(
+        model_name='institution',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='mailrecord',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='mailrecord',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='metaschema',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='metaschema',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='nodelicense',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='nodelicense',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='nodelicenserecord',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='nodelicenserecord',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='nodelog',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='nodelog',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='noderelation',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='noderelation',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='notificationdigest',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='notificationdigest',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='notificationsubscription',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='notificationsubscription',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='osfuser',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='osfuser',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='pagecounter',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='pagecounter',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='preprintprovider',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='preprintprovider',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='privatelink',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='queuedmail',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='queuedmail',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='registrationapproval',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='registrationapproval',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='retraction',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='retraction',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='subject',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='subject',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='tag',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='tag',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AddField(
+        model_name='useractivitycounter',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
+        preserve_default=False,
+    ),
+    migrations.AddField(
+        model_name='useractivitycounter',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AlterField(
+        model_name='action',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created'),
+    ),
+    migrations.AlterField(
+        model_name='action',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+] if not PREMIGRATED else [migrations.RunPython(finalize_premigrated)]
 
 class Migration(migrations.Migration):
 
@@ -17,533 +507,4 @@ class Migration(migrations.Migration):
         ('osf', '0068_creator_modified_renames'),
     ]
 
-    operations = [
-        migrations.AlterModelOptions(
-            name='fileversion',
-            options={'ordering': ('-created',)},
-        ),
-        migrations.RenameField(
-            model_name='action',
-            old_name='date_modified',
-            new_name='modified',
-        ),
-        migrations.RenameField(
-            model_name='action',
-            old_name='date_created',
-            new_name='created',
-        ),
-        migrations.RenameField(
-            model_name='fileversion',
-            old_name='date_modified',
-            new_name='external_modified',
-        ),
-        migrations.RemoveField(
-            model_name='abstractnode',
-            name='date_created',
-        ),
-        migrations.RemoveField(
-            model_name='abstractnode',
-            name='date_modified',
-        ),
-        migrations.RemoveField(
-            model_name='apioauth2application',
-            name='date_created',
-        ),
-        migrations.RemoveField(
-            model_name='comment',
-            name='date_created',
-        ),
-        migrations.RemoveField(
-            model_name='comment',
-            name='date_modified',
-        ),
-        migrations.RemoveField(
-            model_name='fileversion',
-            name='date_created',
-        ),
-        migrations.RemoveField(
-            model_name='preprintservice',
-            name='date_created',
-        ),
-        migrations.RemoveField(
-            model_name='preprintservice',
-            name='date_modified',
-        ),
-        migrations.RemoveField(
-            model_name='privatelink',
-            name='date_created',
-        ),
-        migrations.RemoveField(
-            model_name='session',
-            name='date_created',
-        ),
-        migrations.RemoveField(
-            model_name='session',
-            name='date_modified',
-        ),
-        migrations.AddField(
-            model_name='abstractnode',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='abstractnode',
-            name='last_logged',
-            field=osf.utils.fields.NonNaiveDateTimeField(blank=True, db_index=True, default=django.utils.timezone.now, null=True),
-        ),
-        migrations.AddField(
-            model_name='abstractnode',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='apioauth2application',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='apioauth2application',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='apioauth2personaltoken',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='apioauth2personaltoken',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='apioauth2scope',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='apioauth2scope',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='archivejob',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='archivejob',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='archivetarget',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='archivetarget',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='basefilenode',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='basefilenode',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='blacklistguid',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='blacklistguid',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='citationstyle',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='citationstyle',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='comment',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='comment',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='conference',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='conference',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='draftregistration',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='draftregistration',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='draftregistrationapproval',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='draftregistrationapproval',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='draftregistrationlog',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='draftregistrationlog',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='embargo',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='embargo',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='embargoterminationapproval',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='embargoterminationapproval',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='externalaccount',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='externalaccount',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='fileversion',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='fileversion',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='guid',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='identifier',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='identifier',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='institution',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='institution',
-            name='last_logged',
-            field=osf.utils.fields.NonNaiveDateTimeField(blank=True, db_index=True, default=django.utils.timezone.now, null=True),
-        ),
-        migrations.AddField(
-            model_name='institution',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='mailrecord',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='mailrecord',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='metaschema',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='metaschema',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='nodelicense',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='nodelicense',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='nodelicenserecord',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='nodelicenserecord',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='nodelog',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='nodelog',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='noderelation',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='noderelation',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='notificationdigest',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='notificationdigest',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='notificationsubscription',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='notificationsubscription',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='osfuser',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='osfuser',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='pagecounter',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='pagecounter',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='preprintprovider',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='preprintprovider',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='preprintservice',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='preprintservice',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='privatelink',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='privatelink',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='queuedmail',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='queuedmail',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='registrationapproval',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='registrationapproval',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='retraction',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='retraction',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='session',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='session',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='subject',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='subject',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='tag',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='tag',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AddField(
-            model_name='useractivitycounter',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='created'),
-            preserve_default=False,
-        ),
-        migrations.AddField(
-            model_name='useractivitycounter',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-        migrations.AlterField(
-            model_name='action',
-            name='created',
-            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created'),
-        ),
-        migrations.AlterField(
-            model_name='action',
-            name='modified',
-            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
-        ),
-    ]
+    operations = OPERATIONS
diff --git a/osf/migrations/0070_auto_20171121_1805.py b/osf/migrations/0070_auto_20171121_1805.py
new file mode 100644
index 00000000000..ba32ebd665a
--- /dev/null
+++ b/osf/migrations/0070_auto_20171121_1805.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.7 on 2017-11-22 00:05
+from __future__ import unicode_literals
+
+from django.db import migrations
+import django.utils.timezone
+import django_extensions.db.fields
+import osf.utils.fields
+from website import settings
+
+PREMIGRATED = '1-minute-incremental-migrations' in settings.CeleryConfig.beat_schedule
+OPERATIONS = [
+    migrations.AlterField(
+        model_name='abstractnode',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created'),
+    ),
+    migrations.AlterField(
+        model_name='abstractnode',
+        name='last_logged',
+        field=osf.utils.fields.NonNaiveDateTimeField(blank=True, db_index=True, default=django.utils.timezone.now, null=True),
+    ),
+    migrations.AlterField(
+        model_name='apioauth2application',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created'),
+    ),
+    migrations.AlterField(
+        model_name='comment',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created'),
+    ),
+    migrations.AlterField(
+        model_name='comment',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AlterField(
+        model_name='fileversion',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created'),
+    ),
+    migrations.AlterField(
+        model_name='preprintservice',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created'),
+    ),
+    migrations.AlterField(
+        model_name='preprintservice',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+    migrations.AlterField(
+        model_name='privatelink',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created'),
+    ),
+    migrations.AlterField(
+        model_name='session',
+        name='created',
+        field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created'),
+    ),
+    migrations.AlterField(
+        model_name='session',
+        name='modified',
+        field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
+    ),
+] if not PREMIGRATED else []
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('osf', '0069_skippable_created_modified'),
+    ]
+
+    operations = OPERATIONS

From 44ad50b37ddf452c85dfb3555ac293cfb4feaffb Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 27 Nov 2017 17:30:50 -0600
Subject: [PATCH 146/192] Remove extra space.

---
 api/preprints/views.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/api/preprints/views.py b/api/preprints/views.py
index 106c35771a0..277276fcd90 100644
--- a/api/preprints/views.py
+++ b/api/preprints/views.py
@@ -204,7 +204,6 @@ class PreprintDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, Pre
         doi                             string                              bare DOI for the manuscript, as entered by the user
         preprint_doi_created            iso8601 timestamp                   timestamp that the preprint doi was created
 
-
     ##Relationships
 
     ###Node

From 0cbf20b013ebe00fefbebd1c190ef9ee68773588 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 27 Nov 2017 19:28:39 -0600
Subject: [PATCH 147/192] Improve comment.

---
 osf/migrations/0069_auto_20171127_1119.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/osf/migrations/0069_auto_20171127_1119.py b/osf/migrations/0069_auto_20171127_1119.py
index 7f4532f3bbc..2dabb365c22 100644
--- a/osf/migrations/0069_auto_20171127_1119.py
+++ b/osf/migrations/0069_auto_20171127_1119.py
@@ -9,7 +9,7 @@
 
 def add_preprint_doi_created(apps, schema_editor):
     """
-    Data migration that makes preprint_doi_created equal to date_published for existing published preprints.
+    Sets preprint_doi_created equal to date_published for existing published preprints.
     """
     null_preprint_doi_created = PreprintService.objects.filter(preprint_doi_created__isnull=True, date_published__isnull=False)
     preprints_count = null_preprint_doi_created.count()

From 02e9cf74baed66c65aa1e84c89af5ccb095e9f8f Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Mon, 27 Nov 2017 22:33:14 -0600
Subject: [PATCH 148/192] Recent changes to develop required a merge migration.

---
 osf/migrations/0070_merge_20171127_2232.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)
 create mode 100644 osf/migrations/0070_merge_20171127_2232.py

diff --git a/osf/migrations/0070_merge_20171127_2232.py b/osf/migrations/0070_merge_20171127_2232.py
new file mode 100644
index 00000000000..732a7e42606
--- /dev/null
+++ b/osf/migrations/0070_merge_20171127_2232.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.7 on 2017-11-28 04:32
+from __future__ import unicode_literals
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('osf', '0068_draftregistration_deleted'),
+        ('osf', '0069_auto_20171127_1119'),
+    ]
+
+    operations = [
+    ]

From 7ec5bff7808860e9753d67320ce6fe932bef8bad Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Tue, 28 Nov 2017 10:52:11 -0500
Subject: [PATCH 149/192] Add merge migration

---
 osf/migrations/0071_merge_20171128_0950.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)
 create mode 100644 osf/migrations/0071_merge_20171128_0950.py

diff --git a/osf/migrations/0071_merge_20171128_0950.py b/osf/migrations/0071_merge_20171128_0950.py
new file mode 100644
index 00000000000..75f0f7d9d3f
--- /dev/null
+++ b/osf/migrations/0071_merge_20171128_0950.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.7 on 2017-11-28 15:50
+from __future__ import unicode_literals
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('osf', '0068_draftregistration_deleted'),
+        ('osf', '0070_auto_20171121_1805'),
+    ]
+
+    operations = [
+    ]

From ba6f38c324b4c8d9ac97fd595c54e612ee32a1b0 Mon Sep 17 00:00:00 2001
From: Dawn Pattison <pattison.dawn.r@gmail.com>
Date: Tue, 28 Nov 2017 10:16:25 -0600
Subject: [PATCH 150/192] Use update in reverse migration function to set
 preprint_doi_created back to None.

---
 osf/migrations/0069_auto_20171127_1119.py | 10 +---------
 1 file changed, 1 insertion(+), 9 deletions(-)

diff --git a/osf/migrations/0069_auto_20171127_1119.py b/osf/migrations/0069_auto_20171127_1119.py
index 2dabb365c22..670df08f746 100644
--- a/osf/migrations/0069_auto_20171127_1119.py
+++ b/osf/migrations/0069_auto_20171127_1119.py
@@ -29,16 +29,8 @@ def reverse_func(apps, schema_editor):
     """
     Reverses data migration. Sets preprint_doi_created field back to null.
     """
-    preprint_doi_created_not_null = PreprintService.objects.filter(preprint_doi_created__isnull=False)
-    preprints_count = preprint_doi_created_not_null.count()
-    current_preprint = 0
     logger.info('Reversing preprint_doi_created migration.')
-
-    for preprint in preprint_doi_created_not_null:
-        current_preprint += 1
-        preprint.preprint_doi_created = None
-        preprint.save()
-        logger.info('Preprint ID {}, {}/{} preprint_doi_created field set to None.'.format(preprint._id, current_preprint, preprints_count))
+    PreprintService.objects.filter(preprint_doi_created__isnull=False).update(preprint_doi_created=None)
 
 class Migration(migrations.Migration):
 

From bc77982309e48856576e5da5d5aa5fe92733e453 Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Tue, 28 Nov 2017 11:18:46 -0500
Subject: [PATCH 151/192] Add merge migration

---
 osf/migrations/0072_merge_20171128_1018.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)
 create mode 100644 osf/migrations/0072_merge_20171128_1018.py

diff --git a/osf/migrations/0072_merge_20171128_1018.py b/osf/migrations/0072_merge_20171128_1018.py
new file mode 100644
index 00000000000..2a601725405
--- /dev/null
+++ b/osf/migrations/0072_merge_20171128_1018.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.7 on 2017-11-28 16:18
+from __future__ import unicode_literals
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('osf', '0070_merge_20171127_2232'),
+        ('osf', '0071_merge_20171128_0950'),
+    ]
+
+    operations = [
+    ]

From c2734b5e9a6894868b646b4fb9991eeca53c9289 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Tue, 28 Nov 2017 11:37:45 -0500
Subject: [PATCH 152/192] Downgrade for now to 3.6.3, as there is a browsable
 API breaking change in 3.6.4

[OSF-8966]
---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 23fd61884a2..42e4e65ae32 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -59,7 +59,7 @@ raven==5.32.0
 
 # API requirements
 Django==1.11.7  # pyup: <2.0 # Remove this when we're on Py3
-djangorestframework==3.6.4
+djangorestframework==3.6.3
 django-cors-headers==1.3.1
 djangorestframework-bulk==0.2.1
 pyjwt==1.5.3

From 7262bb7082ccbac28965b8aae2b0c667c82b932f Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Tue, 28 Nov 2017 11:57:31 -0500
Subject: [PATCH 153/192] Don't call ban_object_from_cache

---
 api/caching/listeners.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/api/caching/listeners.py b/api/caching/listeners.py
index 792d82a414a..356ab659a7e 100644
--- a/api/caching/listeners.py
+++ b/api/caching/listeners.py
@@ -1,11 +1,10 @@
-from django.dispatch import receiver
-from django.db.models.signals import post_save
-
 from api.caching.tasks import ban_url
 from framework.postcommit_tasks.handlers import enqueue_postcommit_task
 
-
-@receiver(post_save)
-def ban_object_from_cache(sender, instance, created, **kwargs):
+# unused for now
+# from django.dispatch import receiver
+# from django.db.models.signals import post_save
+# @receiver(post_save)
+def ban_object_from_cache(sender, instance, **kwargs):
     if hasattr(instance, 'absolute_api_v2_url'):
         enqueue_postcommit_task(ban_url, (instance, ), {}, celery=False, once_per_request=True)

From 75d7097070d60093e6fe1a640b017affd56d6fa3 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Tue, 28 Nov 2017 10:57:26 -0500
Subject: [PATCH 154/192] Add regression test for browsable API issue h/t
 @alexschiller

---
 api_tests/users/views/test_user_detail.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/api_tests/users/views/test_user_detail.py b/api_tests/users/views/test_user_detail.py
index cf78eb92be8..b438d9712fd 100644
--- a/api_tests/users/views/test_user_detail.py
+++ b/api_tests/users/views/test_user_detail.py
@@ -105,6 +105,12 @@ def test_nodes_relationship_is_absent(self, app, user_one):
         res = app.get(url, auth=user_one)
         assert 'node' not in res.json['data']['relationships'].keys()
 
+    # Regression test for https://openscience.atlassian.net/browse/OSF-8966
+    def test_browsable_api_for_user_detail(self, app, user_one):
+        url = "/{}users/{}/?format=api".format(API_BASE, user_one._id)
+        res = app.get(url, auth=user_one.auth)
+        assert res.status_code == 200
+
 
 @pytest.mark.django_db
 class TestUserRoutesNodeRoutes:

From 2097d168875526d3175c014e9415c5709848411d Mon Sep 17 00:00:00 2001
From: BRosenblatt <Rebecca@cos.io>
Date: Wed, 29 Nov 2017 10:07:58 -0500
Subject: [PATCH 155/192] Update support.mako

---
 website/templates/public/pages/support.mako | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/website/templates/public/pages/support.mako b/website/templates/public/pages/support.mako
index ae096366f67..2897e82aec2 100644
--- a/website/templates/public/pages/support.mako
+++ b/website/templates/public/pages/support.mako
@@ -69,7 +69,7 @@
             </div>
             <div class="col-sm-4">
                 <h5 class="m-t-md f-w-xl"> Are you experiencing downtime with our services? </h5>
-                <p> Check out our <a href="https://status.cos.io"> status page</a> for updates on how our services are operating.</p>
+                <p> Check out our<a href="https://status.cos.io"> status page</a> for updates on how our services are operating.</p>
             </div>
             <div class="col-sm-4">
                 <h5 class="m-t-md f-w-xl"> Are you looking for statistics consultations?</h5>

From f059bacc9a28badb1868421e5037be0b5505c5ee Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Wed, 29 Nov 2017 12:11:39 -0500
Subject: [PATCH 156/192] Remove unused result argument to archive_addon

[#OSF-8987]
---
 website/archiver/tasks.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/website/archiver/tasks.py b/website/archiver/tasks.py
index e90962cd5e0..1fe740381db 100644
--- a/website/archiver/tasks.py
+++ b/website/archiver/tasks.py
@@ -175,7 +175,7 @@ def make_waterbutler_payload(dst_id, rename):
 
 @celery_app.task(base=ArchiverTask, ignore_result=False)
 @logged('archive_addon')
-def archive_addon(addon_short_name, job_pk, stat_result):
+def archive_addon(addon_short_name, job_pk):
     """Archive the contents of an addon by making a copy request to the
     WaterBulter API
 
@@ -243,8 +243,7 @@ def archive_node(stat_results, job_pk):
             else:
                 archive_addon.delay(
                     addon_short_name=result.target_name,
-                    job_pk=job_pk,
-                    stat_result=result,
+                    job_pk=job_pk
                 )
         project_signals.archive_callback.send(dst)
 

From fd6975d71dc40c9472f089e959a08e010142f4f3 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Wed, 29 Nov 2017 12:14:18 -0500
Subject: [PATCH 157/192] Remove extra arg from changed archive_addon

---
 osf_tests/test_archiver.py | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py
index dc52fec7929..1d299705e8f 100644
--- a/osf_tests/test_archiver.py
+++ b/osf_tests/test_archiver.py
@@ -444,8 +444,7 @@ def test_archive_node_pass(self, mock_archive_addon):
             archive_node(results, self.archive_job._id)
         archive_osfstorage_signature = archive_addon.si(
             'osfstorage',
-            self.archive_job._id,
-            results
+            self.archive_job._id
         )
         assert(mock_group.called_with(archive_osfstorage_signature))
 
@@ -492,15 +491,13 @@ def test_archive_node_no_archive_size_limit(self, mock_archive_addon):
             archive_node(results, self.archive_job._id)
         archive_dropbox_signature = archive_addon.si(
             'dropbox',
-            self.archive_job._id,
-            results
+            self.archive_job._id
         )
         assert(mock_group.called_with(archive_dropbox_signature))
 
     @mock.patch('website.archiver.tasks.make_copy_request.delay')
     def test_archive_addon(self, mock_make_copy_request):
-        result = archiver_utils.aggregate_file_tree_metadata('osfstorage', FILE_TREE, self.user)
-        archive_addon('osfstorage', self.archive_job._id, result)
+        archive_addon('osfstorage', self.archive_job._id)
         assert_equal(self.archive_job.get_target('osfstorage').status, ARCHIVER_INITIATED)
         cookie = self.user.get_or_create_cookie()
         assert(mock_make_copy_request.called_with(

From 44ce4a6302bd71bc4e12cd084e540a09d9fffcc5 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Wed, 29 Nov 2017 15:40:57 -0500
Subject: [PATCH 158/192] Make StatResult and AggregateStatResult subclasses
 of dict for serialization

[#OSF-8987]

These are used in celery tasks, which serialize things to JSON. Before, we were passing around the raw objects, which was breaking in the upgraded celery - hence making them JSON serializable by subclassing dict!
---
 website/archiver/__init__.py | 30 +++++++++++++-----------------
 website/archiver/tasks.py    |  6 +++---
 2 files changed, 16 insertions(+), 20 deletions(-)

diff --git a/website/archiver/__init__.py b/website/archiver/__init__.py
index be3ef281b15..75f76373992 100644
--- a/website/archiver/__init__.py
+++ b/website/archiver/__init__.py
@@ -24,7 +24,10 @@
 
 NO_ARCHIVE_LIMIT = 'high_upload_limit'
 
-class StatResult(object):
+# StatResult and AggregateStatResult are dict subclasses because they are used
+# in celery tasks, and celery serializes to JSON by default
+
+class StatResult(dict):
     """
     Helper class to collect metadata about a single file
     """
@@ -35,18 +38,15 @@ def __init__(self, target_id, target_name, disk_usage=0):
         self.target_name = target_name
         self.disk_usage = float(disk_usage)
 
-    def __str__(self):
-        return str(self._to_dict())
-
-    def _to_dict(self):
-        return {
+        self.update({
             'target_id': self.target_id,
             'target_name': self.target_name,
             'disk_usage': self.disk_usage,
-        }
+            'num_files': self.num_files
+        })
 
 
-class AggregateStatResult(object):
+class AggregateStatResult(dict):
     """
     Helper class to collect metadata about arbitrary depth file/addon/node file trees
     """
@@ -56,25 +56,21 @@ def __init__(self, target_id, target_name, targets=None):
         targets = targets or []
         self.targets = [target for target in targets if target]
 
-    def __str__(self):
-        return str(self._to_dict())
-
-    def _to_dict(self):
-        return {
+        self.update({
             'target_id': self.target_id,
             'target_name': self.target_name,
             'targets': [
-                target._to_dict()
+                target
                 for target in self.targets
             ],
             'num_files': self.num_files,
             'disk_usage': self.disk_usage,
-        }
+        })
 
     @property
     def num_files(self):
-        return sum([value.num_files for value in self.targets])
+        return sum([value['num_files'] for value in self.targets])
 
     @property
     def disk_usage(self):
-        return sum([value.disk_usage for value in self.targets])
+        return sum([value['disk_usage'] for value in self.targets])
diff --git a/website/archiver/tasks.py b/website/archiver/tasks.py
index 1fe740381db..31995be132a 100644
--- a/website/archiver/tasks.py
+++ b/website/archiver/tasks.py
@@ -238,11 +238,11 @@ def archive_node(stat_results, job_pk):
             job.status = ARCHIVER_SUCCESS
             job.save()
         for result in stat_result.targets:
-            if not result.num_files:
-                job.update_target(result.target_name, ARCHIVER_SUCCESS)
+            if not result['num_files']:
+                job.update_target(result['target_name'], ARCHIVER_SUCCESS)
             else:
                 archive_addon.delay(
-                    addon_short_name=result.target_name,
+                    addon_short_name=result['target_name'],
                     job_pk=job_pk
                 )
         project_signals.archive_callback.send(dst)

From c6e495e136d2cbe8b3fd26d60e1a44018b33cbb8 Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Wed, 29 Nov 2017 16:07:06 -0500
Subject: [PATCH 159/192] Serialize nodes < 2 levels from the given node.

- Add `grid_root` comparison to determine if serialization should
continue.
- Add docstring and comment.
---
 website/util/rubeus.py | 18 +++++++++++-------
 1 file changed, 11 insertions(+), 7 deletions(-)

diff --git a/website/util/rubeus.py b/website/util/rubeus.py
index cf87fccdadb..cc035044c05 100644
--- a/website/util/rubeus.py
+++ b/website/util/rubeus.py
@@ -161,10 +161,12 @@ def __init__(self, node, auth, **kwargs):
         self.can_edit = self.node.can_edit(auth) and not self.node.is_registration
 
     def to_hgrid(self):
-        """Return the Rubeus.JS representation of the node's file data, including
-        addons and components
         """
-        root = self._get_nodes(self.node)
+        Returns a representation of the node's file data, including
+        addons and components. For efficiency, only the children and
+        grandchildren of the node are serialized.
+        """
+        root = self._get_nodes(self.node, grid_root=self.node)
         return [root]
 
     def _get_node_name(self, node, can_view, is_pointer=False):
@@ -183,12 +185,14 @@ def _get_node_name(self, node, can_view, is_pointer=False):
 
         return node_name
 
-    def _serialize_node(self, node, parent=None, children=[]):
+    def _serialize_node(self, node, parent=None, grid_root=None, children=[]):
         is_pointer = parent and node.is_linked_node
         can_view = node.can_view(auth=self.auth)
         can_edit = node.has_write_perm if hasattr(node, 'has_write_perm') else node.can_edit(auth=self.auth)
 
-        if parent and parent.root.title == parent.title:
+        # Determines if `node` is within two levels of `grid_root`
+        # Used to prevent complete serialization of deeply nested projects
+        if parent and grid_root and parent == grid_root:
             children = self._get_nodes(node)['children']
 
         return {
@@ -211,7 +215,7 @@ def _serialize_node(self, node, parent=None, children=[]):
             'nodeID': node._id,
         }
 
-    def _get_nodes(self, node):
+    def _get_nodes(self, node, grid_root=None):
         AbstractNode = apps.get_model('osf.AbstractNode')
         Contributor = apps.get_model('osf.Contributor')
 
@@ -225,7 +229,7 @@ def _get_nodes(self, node):
                         .annotate(is_linked_node=Exists(linked_node_sqs))
                         .annotate(has_write_perm=Exists(has_write_perm_sqs))
                         )
-            serialized_children = [self._serialize_node(child, parent=node) for child in children]
+            serialized_children = [self._serialize_node(child, parent=node, grid_root=grid_root) for child in children]
             data = serialized_addons + serialized_children
         return self._serialize_node(node, children=data)
 

From b75cbb7fbc2b5ffdcccdc215427de3a66c4a658d Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Thu, 30 Nov 2017 10:00:42 -0500
Subject: [PATCH 160/192] Silence a couple 3rd-party loggers

---
 api_tests/conftest.py | 2 ++
 osf_tests/conftest.py | 2 ++
 tests/base.py         | 2 ++
 3 files changed, 6 insertions(+)

diff --git a/api_tests/conftest.py b/api_tests/conftest.py
index 1db6971a0f4..bcf49df0161 100644
--- a/api_tests/conftest.py
+++ b/api_tests/conftest.py
@@ -18,6 +18,8 @@
     'website.search.elastic_search',
     'website.search_migration.migrate',
     'website.util.paths',
+    'transitions.core',
+    'MARKDOWN',
 ]
 for logger_name in SILENT_LOGGERS:
     logging.getLogger(logger_name).setLevel(logging.CRITICAL)
diff --git a/osf_tests/conftest.py b/osf_tests/conftest.py
index fdadbb96e8f..57fef5b4ac1 100644
--- a/osf_tests/conftest.py
+++ b/osf_tests/conftest.py
@@ -27,6 +27,8 @@
     'requests_oauthlib.oauth2_session',
     'raven.base.Client',
     'raven.contrib.django.client.DjangoClient',
+    'transitions.core',
+    'MARKDOWN',
 ]
 for logger_name in SILENT_LOGGERS:
     logging.getLogger(logger_name).setLevel(logging.CRITICAL)
diff --git a/tests/base.py b/tests/base.py
index 33425b92151..829c3b5f713 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -68,6 +68,8 @@ def get_default_metaschema():
     'requests_oauthlib.oauth2_session',
     'raven.base.Client',
     'raven.contrib.django.client.DjangoClient',
+    'transitions.core',
+    'MARKDOWN',
 ]
 for logger_name in SILENT_LOGGERS:
     logging.getLogger(logger_name).setLevel(logging.CRITICAL)

From b219fa5a2778ad3fc3e46318a155bc69363e53a8 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Wed, 29 Nov 2017 17:30:38 -0500
Subject: [PATCH 161/192] Deserialize signatures in postcommit_celery_queue

[#OSF-8992]

developed with @sloria
---
 framework/postcommit_tasks/handlers.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/framework/postcommit_tasks/handlers.py b/framework/postcommit_tasks/handlers.py
index ccb712066e8..be2eba7aae9 100644
--- a/framework/postcommit_tasks/handlers.py
+++ b/framework/postcommit_tasks/handlers.py
@@ -9,6 +9,7 @@
 import os
 
 from celery import chain
+from celery.canvas import Signature
 from framework.celery_tasks import app
 from celery.local import PromiseProxy
 from gevent.pool import Pool
@@ -36,7 +37,9 @@ def postcommit_before_request():
 def postcommit_celery_task_wrapper(queue):
     # chain.apply calls the tasks synchronously without re-enqueuing each one
     # http://stackoverflow.com/questions/34177131/how-to-solve-python-celery-error-when-using-chain-encodeerrorruntimeerrormaxi?answertab=votes#tab-top
-    chain(*queue.values()).apply()
+    # celery serializes signatures into dictionaries, so we need to deserialize here
+    # https://sentry.cos.io/sentry/osf-iy/issues/289209/
+    chain([Signature.from_dict(task_dict) for task_dict in queue.values()]).apply()
 
 def postcommit_after_request(response, base_status_error_code=500):
     if response.status_code >= base_status_error_code:

From 390e8c20a284d80e82ebf28e16648c52e8515eef Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Thu, 30 Nov 2017 13:10:31 -0500
Subject: [PATCH 162/192] Check is_public when determining read access

---
 website/project/views/node.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/website/project/views/node.py b/website/project/views/node.py
index 15438d37e2c..063dfa2075d 100644
--- a/website/project/views/node.py
+++ b/website/project/views/node.py
@@ -909,7 +909,8 @@ def _get_readable_descendants(auth, node, permission=None):
 def serialize_child_tree(child_list, user, nested):
     serialized_children = []
     for child in child_list:
-        if child.has_read_perm or child.has_permission_on_children(user, READ):
+        can_read = child.is_public or child.has_read_perm
+        if can_read or child.has_permission_on_children(user, READ):
             contributors = [{
                 'id': contributor.user._id,
                 'is_admin': contributor.admin,
@@ -920,8 +921,8 @@ def serialize_child_tree(child_list, user, nested):
             serialized_children.append({
                 'node': {
                     'id': child._id,
-                    'url': child.url if child.has_read_perm else '',
-                    'title': child.title if child.has_read_perm else 'Private Project',
+                    'url': child.url if can_read else '',
+                    'title': child.title if can_read else 'Private Project',
                     'is_public': child.is_public,
                     'contributors': contributors,
                     'is_admin': child.has_admin_perm,
@@ -931,7 +932,7 @@ def serialize_child_tree(child_list, user, nested):
                 'nodeType': 'project' if not child.parentnode_id else 'component',
                 'category': child.category,
                 'permissions': {
-                    'view': child.has_read_perm,
+                    'view': can_read,
                     'is_admin': child.has_admin_perm
                 }
             })

From 3f4ee523ae4f725bb792db0670662fc0f4e1b781 Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Thu, 30 Nov 2017 13:19:04 -0500
Subject: [PATCH 163/192] Remove trailing whitespace

---
 tasks/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tasks/__init__.py b/tasks/__init__.py
index 4363a720b34..5bf7137670b 100755
--- a/tasks/__init__.py
+++ b/tasks/__init__.py
@@ -567,7 +567,7 @@ def test_travis_else(ctx, numprocesses=None):
 def test_travis_api1_and_js(ctx, numprocesses=None):
     flake(ctx)
     jshint(ctx)
-    # TODO: Uncomment when https://github.com/travis-ci/travis-ci/issues/8836 is resolved 
+    # TODO: Uncomment when https://github.com/travis-ci/travis-ci/issues/8836 is resolved
     # karma(ctx)
     test_api1(ctx, numprocesses=numprocesses)
 

From ba9f9b48ac4640d03f39e914bb28f77cb33e7049 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 30 Nov 2017 15:33:06 -0500
Subject: [PATCH 164/192] Update error output and be sure to set
 BEPRESS_PROVIDER in validate method

[#OSF-8559]
---
 osf/management/commands/populate_custom_taxonomies.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/osf/management/commands/populate_custom_taxonomies.py b/osf/management/commands/populate_custom_taxonomies.py
index 12f553966e2..c3818ae6718 100644
--- a/osf/management/commands/populate_custom_taxonomies.py
+++ b/osf/management/commands/populate_custom_taxonomies.py
@@ -15,6 +15,12 @@
 BEPRESS_PROVIDER = None
 
 def validate_input(custom_provider, data, copy=False, add_missing=False):
+
+    # This function may be run outside of this command (e.g. in the admin app) so we
+    # need to make sure that BEPRESS_PROVIDER is set
+    global BEPRESS_PROVIDER
+    BEPRESS_PROVIDER = PreprintProvider.objects.filter(_id='osf').first()
+
     logger.info('Validating data')
     includes = data.get('include', [])
     excludes = data.get('exclude', [])
@@ -50,7 +56,7 @@ def validate_input(custom_provider, data, copy=False, add_missing=False):
     included_subjects = included_subjects | Subject.objects.filter(text__in=merges.keys())
     missing_subjects = Subject.objects.filter(id__in=set([hier[-1].id for ps in PreprintService.objects.filter(provider=custom_provider) for hier in ps.subject_hierarchy])).exclude(id__in=included_subjects.values_list('id', flat=True))
     if not add_missing:
-        assert not missing_subjects.exists(), 'Incomplete mapping -- following subjects in use but not included:\n{}'.format(missing_subjects.all())
+        assert not missing_subjects.exists(), 'Incomplete mapping -- following subjects in use but not included:\n{}'.format(list(missing_subjects.values_list('text', flat=True)))
     logger.info('Successfully validated mapping completeness')
     return list(missing_subjects) if add_missing else None
 
@@ -154,7 +160,8 @@ def migrate(provider=None, share_title=None, data=None, dry_run=False, copy=Fals
     # This function may be run outside of this command (e.g. in the admin app) so we
     # need to make sure that BEPRESS_PROVIDER is set
     global BEPRESS_PROVIDER
-    BEPRESS_PROVIDER = PreprintProvider.objects.filter(_id='osf').first()
+    if not BEPRESS_PROVIDER:
+        BEPRESS_PROVIDER = PreprintProvider.objects.filter(_id='osf').first()
     custom_provider = PreprintProvider.objects.filter(_id=provider).first()
     assert custom_provider, 'Unable to find specified provider: {}'.format(provider)
     assert custom_provider.id != BEPRESS_PROVIDER.id, 'Cannot add custom mapping to BePress provider'

From 9627902aaeaa57872588358b2b8d754fd04a2312 Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Thu, 30 Nov 2017 15:47:06 -0500
Subject: [PATCH 165/192] Show correct GitLab file version

[#OSF-8169]
---
 addons/gitlab/models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/addons/gitlab/models.py b/addons/gitlab/models.py
index e88bda39e2f..92a9a9cd5eb 100644
--- a/addons/gitlab/models.py
+++ b/addons/gitlab/models.py
@@ -30,7 +30,7 @@ class GitLabFolder(GitLabFileNode, Folder):
 
 
 class GitLabFile(GitLabFileNode, File):
-    version_identifier = 'branch'
+    version_identifier = 'commitSha'
 
     def touch(self, auth_header, revision=None, ref=None, branch=None, **kwargs):
         revision = revision or ref or branch

From d92f6368e0399de93fac9fabb28e18eeba3a48a3 Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Thu, 30 Nov 2017 15:47:39 -0500
Subject: [PATCH 166/192] Fix GL url on user settings

---
 addons/gitlab/templates/gitlab_user_settings.mako | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/addons/gitlab/templates/gitlab_user_settings.mako b/addons/gitlab/templates/gitlab_user_settings.mako
index aea509e46c6..eaf6caace08 100644
--- a/addons/gitlab/templates/gitlab_user_settings.mako
+++ b/addons/gitlab/templates/gitlab_user_settings.mako
@@ -20,7 +20,7 @@
             <table class="table table-hover">
                 <thead>
                     <tr class="user-settings-addon-auth">
-                        <th class="text-muted default-authorized-by">Authorized on <a data-bind="attr: {href: gitlabHost}"><em data-bind="text: gitlabHost"></em></a></th>
+                        <th class="text-muted default-authorized-by">Authorized on <a data-bind="attr: {href: gitlabUrl}"><em data-bind="text: gitlabHost"></em></a></th>
                     </tr>
                 </thead>
                 <!-- ko if: connectedNodes().length > 0 -->

From a9f722a389dbce526f4cec9f10b16830f8f114e4 Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Thu, 30 Nov 2017 15:48:02 -0500
Subject: [PATCH 167/192] Prevent KeyErrors and allow GL file comments

---
 website/notifications/constants.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/website/notifications/constants.py b/website/notifications/constants.py
index 51a12ee343e..1f35af24238 100644
--- a/website/notifications/constants.py
+++ b/website/notifications/constants.py
@@ -29,6 +29,7 @@
     'dropbox': 'Dropbox',
     'figshare': 'figshare',
     'github': 'GitHub',
+    'gitlab': 'GitLab',
     'bitbucket': 'Bitbucket',
     'googledrive': 'Google Drive',
     'owncloud': 'ownCloud',

From b777502b797cf6c66b8f8588ad90badfbdbbce8e Mon Sep 17 00:00:00 2001
From: Matt Frazier <maf7sm@virginia.edu>
Date: Thu, 30 Nov 2017 15:53:05 -0500
Subject: [PATCH 168/192] Fix drag/drop behavior for read only addons

---
 website/static/js/fangorn.js | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/website/static/js/fangorn.js b/website/static/js/fangorn.js
index e16ab6c9266..4ee4005378d 100644
--- a/website/static/js/fangorn.js
+++ b/website/static/js/fangorn.js
@@ -48,6 +48,7 @@ var STATE_MAP = {
 };
 
 var SYNC_UPLOAD_ADDONS = ['github', 'dataverse'];
+var READ_ONLY_ADDONS = ['bitbucket', 'gitlab', 'onedrive'];
 
 
 var OPERATIONS = {
@@ -2520,7 +2521,9 @@ function isInvalidDropFolder(folder) {
         !folder.data.provider ||
         folder.data.status ||
         // cannot add to published dataverse
-        (folder.data.provider === 'dataverse' && folder.data.dataverseIsPublished)
+        (folder.data.provider === 'dataverse' && folder.data.dataverseIsPublished) ||
+        // no dropping into read-only providers
+        (READ_ONLY_ADDONS.indexOf(folder.data.provider) !== -1)
     ) {
         return true;
     }
@@ -2556,7 +2559,7 @@ function allowedToMove(folder, item, mustBeIntra) {
         item.data.permissions.edit &&
         (!mustBeIntra || (item.data.provider === folder.data.provider && item.data.nodeId === folder.data.nodeId)) &&
         !(item.data.provider === 'figshare' && item.data.extra && item.data.extra.status === 'public') &&
-        (item.data.provider !== 'bitbucket') && (item.data.provider !== 'gitlab') && (item.data.provider !== 'onedrive')
+        (READ_ONLY_ADDONS.indexOf(item.data.provider) === -1) && (READ_ONLY_ADDONS.indexOf(folder.data.provider) === -1)
     );
 }
 

From ea3db2f69a9ffa2e902b971d6992826da1b55cd3 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 30 Nov 2017 15:35:36 -0500
Subject: [PATCH 169/192] Add merge, add missing; improve UI a bit for better
 error handling

---
 admin/preprint_providers/forms.py             |  8 +++-
 admin/preprint_providers/views.py             | 16 +++++--
 .../preprint_providers/preprintProviders.js   | 29 +++++++++++-
 .../enter_custom_taxonomy.html                | 47 +++++++++++++++++++
 4 files changed, 92 insertions(+), 8 deletions(-)

diff --git a/admin/preprint_providers/forms.py b/admin/preprint_providers/forms.py
index 998b5129579..a0e7d42bcf7 100644
--- a/admin/preprint_providers/forms.py
+++ b/admin/preprint_providers/forms.py
@@ -62,7 +62,8 @@ def clean_footer_links(self, *args, **kwargs):
 
 
 class PreprintProviderCustomTaxonomyForm(forms.Form):
-    custom_taxonomy_json = forms.CharField(widget=forms.Textarea, initial='{"include": [], "exclude": [], "custom": {}}', required=False)
+    add_missing = forms.BooleanField(required=False)
+    custom_taxonomy_json = forms.CharField(widget=forms.Textarea, initial='{"include": [], "exclude": [], "custom": {}, "merge": {}}', required=False)
     provider_id = forms.IntegerField(widget=forms.HiddenInput())
     include = forms.ChoiceField(choices=[], required=False)
     exclude = forms.ChoiceField(choices=[], required=False)
@@ -70,9 +71,12 @@ class PreprintProviderCustomTaxonomyForm(forms.Form):
     custom_parent = forms.CharField(required=False)
     bepress = forms.ChoiceField(choices=[], required=False)
 
+    merge_from = forms.ChoiceField(choices=[], required=False)
+    merge_into = forms.ChoiceField(choices=[], required=False)
+
     def __init__(self, *args, **kwargs):
         super(PreprintProviderCustomTaxonomyForm, self).__init__(*args, **kwargs)
-        subject_choices = [(x, x) for x in Subject.objects.all().values_list('text', flat=True)]
+        subject_choices = [(x, x) for x in Subject.objects.filter(bepress_subject__isnull=True).values_list('text', flat=True)]
         for name, field in self.fields.iteritems():
             if hasattr(field, 'choices'):
                 if field.choices == []:
diff --git a/admin/preprint_providers/views.py b/admin/preprint_providers/views.py
index 34ea25760a4..a80b44ee70e 100644
--- a/admin/preprint_providers/views.py
+++ b/admin/preprint_providers/views.py
@@ -186,14 +186,17 @@ def post(self, request, *args, **kwargs):
                 if request.is_ajax():
                     # An ajax request is for validation only, so run that validation!
                     try:
-                        response_data = validate_input(custom_provider=provider, data=taxonomy_json)
+                        response_data = validate_input(custom_provider=provider, data=taxonomy_json, add_missing=provider_form.cleaned_data['add_missing'])
+                        if response_data:
+                            added_subjects = [subject.text for subject in response_data]
+                            response_data = {'message': 'Custom taxonomy validated with added subjects: {}'.format(added_subjects), 'feedback_type': 'success'}
                     except (RuntimeError, AssertionError) as script_feedback:
                         response_data = {'message': script_feedback.message, 'feedback_type': 'error'}
                     if not response_data:
                         response_data = {'message': 'Custom taxonomy validated!', 'feedback_type': 'success'}
                 else:
                     # Actually do the migration of the custom taxonomies
-                    migrate(provider=provider._id, data=taxonomy_json)
+                    migrate(provider=provider._id, data=taxonomy_json, add_missing=provider_form.cleaned_data['add_missing'])
                     return redirect('preprint_providers:detail', preprint_provider_id=provider.id)
 
             except ValueError as error:
@@ -201,8 +204,13 @@ def post(self, request, *args, **kwargs):
                     'message': 'There is an error with the submitted JSON. Here are some details: ' + error.message,
                     'feedback_type': 'error'
                 }
-            # Return a JsonResponse with the JSON error or the validation error if it's not doing an actual migration
-            return JsonResponse(response_data)
+        else:
+            response_data = {
+                'message': 'There is a problem with the form. Here are some details: ' + unicode(provider_form.errors),
+                'feedback_type': 'error'
+            }
+        # Return a JsonResponse with the JSON error or the validation error if it's not doing an actual migration
+        return JsonResponse(response_data)
 
 class ExportPreprintProvider(PermissionRequiredMixin, View):
     permission_required = 'osf.change_preprintprovider'
diff --git a/admin/static/js/preprint_providers/preprintProviders.js b/admin/static/js/preprint_providers/preprintProviders.js
index 24d3cb002ca..b0e1ae340cb 100644
--- a/admin/static/js/preprint_providers/preprintProviders.js
+++ b/admin/static/js/preprint_providers/preprintProviders.js
@@ -160,9 +160,17 @@ $(document).ready(function() {
         });
     });
 
+    var getContent = function(taxonomyTextField) {
+        currentCustomTaxonomyContent = taxonomyTextField.val();
+        if (currentCustomTaxonomyContent === "") {
+            currentCustomTaxonomyContent = '{\"include\": [], \"exclude\": [], \"custom\": {}, \"merge\": {}}'
+        }
+        return JSON.parse(currentCustomTaxonomyContent);
+    };
+
     $( ".taxonomy-action-button" ).click(function() {
         var taxonomyTextField=$("#id_custom_taxonomy_json");
-        var content = JSON.parse(taxonomyTextField.val());
+        var content = getContent(taxonomyTextField);
         var value = $("#" + $(this).attr("value")).val();
         var subjects = content[$(this).attr("id")];
         if (subjects.indexOf(value) == -1) {
@@ -176,7 +184,7 @@ $(document).ready(function() {
         var name = $("#id_custom_name").val();
         var parent = $("#id_custom_parent").val();
         var bepress = $("#id_bepress").val();
-        var content = JSON.parse(taxonomyTextField.val());
+        var content = getContent(taxonomyTextField);
         if (content["custom"][name] === undefined) {
             content["custom"][name] = {
                 "parent": parent,
@@ -187,6 +195,21 @@ $(document).ready(function() {
         taxonomyTextField.val(JSON.stringify(content, undefined, 4));
     });
 
+
+    $( "#id-add-merge" ).click(function() {
+        var taxonomyTextField=$("#id_custom_taxonomy_json");
+        var merge_from = $("#id_merge_from").val();
+        var merge_into = $("#id_merge_into").val();
+        var content = getContent(taxonomyTextField);
+
+        if (content["merge"][merge_from] === undefined) {
+            content["merge"][merge_from] = merge_into
+        }
+
+        taxonomyTextField.val(JSON.stringify(content, undefined, 4));
+    });
+
+
     $("#id-validate-custom").on("click", function(event) {
        checkTaxonomy();
     });
@@ -212,5 +235,7 @@ $(document).ready(function() {
     $("#id_include").select2();
     $("#id_exclude").select2();
     $("#id_bepress").select2();
+    $("#id_merge_from").select2();
+    $("#id_merge_into").select2();
 
 });
diff --git a/admin/templates/preprint_providers/enter_custom_taxonomy.html b/admin/templates/preprint_providers/enter_custom_taxonomy.html
index e7e278b59ab..579ee937898 100644
--- a/admin/templates/preprint_providers/enter_custom_taxonomy.html
+++ b/admin/templates/preprint_providers/enter_custom_taxonomy.html
@@ -21,6 +21,17 @@ <h2>Custom Taxonomy</h2>
             {{ taxonomy_form.provider_id }}
 
             <div>
+                <div class="fieldWrapper">
+                    {{ taxonomy_form.add_missing.errors }}
+                    <div class="row">
+                        <div class="col-md-2">
+                            {{ taxonomy_form.add_missing.label_tag }}
+                        </div>
+                        <div class="col-md-10">
+                            {{ taxonomy_form.add_missing }}
+                        </div>
+                    </div>
+                </div>
                 <div class="fieldWrapper">
                     {{ taxonomy_form.include.errors }}
                     <div class="row">
@@ -72,6 +83,29 @@ <h2>Custom Taxonomy</h2>
                     </div>
                 </div>
             </div>
+
+            <div>
+                <div><b>Merge:</b></div>
+                <div class="panel panel-default">
+                    <div class="panel-body">
+                        {{ taxonomy_form.merge_from.errors }}
+                        <p>
+                            {{ taxonomy_form.merge_from.label_tag }}
+                            {{ taxonomy_form.merge_from }}
+                        </p>
+                        {{ taxonomy_form.merge_into.errors }}
+                        <p>
+                            {{ taxonomy_form.merge_into.label_tag }}
+                            {{ taxonomy_form.merge_into }}
+                        </p>
+                        <div class="pull-right">
+                            <button type="button" id="id-add-merge">Add</button>
+                        </div>
+                    </div>
+                </div>
+            </div>
+
+
             <div>
                 <div class="fieldWrapper">
                     {{ taxonomy_form.custom_taxonomy_json.errors }}
@@ -109,6 +143,7 @@ <h4 class="modal-title" id="exampleModalLabel">Custom Taxonomy Example</h4>
             <div class="modal-body">
                 <div>
                     <ul>
+                        <li><b>add missing</b>: Choose "add missing" to automatically include subjects that are already in use on this preprint provider.</li>
                         <li><b>include</b>: These subjects, and their children will be included in the custom taxonomy.
                             If a second level subject is included, that second level will become the top of the subject tree, and its parent will not be in the taxonomy.</li>
                         <li><b>exclude</b>: These subjects, and their children will *not* be included in the custom taxonomy. Useful to include a top level subject, but then specify
@@ -123,6 +158,14 @@ <h4 class="modal-title" id="exampleModalLabel">Custom Taxonomy Example</h4>
+                                <li>bepress: the existing subject that you would like to replace with the subject listed in the custom name field.</li>
                             </ul>
                         </li>
+                        <li><b>merge</b>: The existing Bepress subjects you would like to "merge" together.
+                            <ul>
+                                <li>merge from: The subject to merge from</li>
+                                <li>merge into: The subject you would like the original subject to become, or merge into.
+                                *Note*: The subject you select for merge into must be included explicitly, and cannot just be included implicitly by selecting "Add missing"
+                                </li>
+                            </ul>
+                        </li>
                     </ul>
                 </div>
                 <pre>
@@ -159,6 +202,10 @@ <h4 class="modal-title" id="exampleModalLabel">Custom Taxonomy Example</h4>
             "parent": "",
             "bepress": "Engineering"
         }
+    },
+    merge: {
+        "Dance": "Arts and Humanities",
+        "Hydrology": "Climate"
     }
 }
                 </pre>

From e38885f4779a061ab32f4b78df75d8f254a66450 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 30 Nov 2017 16:06:57 -0500
Subject: [PATCH 170/192] Add validation error about share-title that will show
 in form. Add message for missing share-title to form in validate or in JSON
 on save

---
 admin/preprint_providers/views.py                     | 5 ++---
 osf/management/commands/populate_custom_taxonomies.py | 2 ++
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/admin/preprint_providers/views.py b/admin/preprint_providers/views.py
index a80b44ee70e..a8fa87997ea 100644
--- a/admin/preprint_providers/views.py
+++ b/admin/preprint_providers/views.py
@@ -198,10 +198,9 @@ def post(self, request, *args, **kwargs):
                     # Actually do the migration of the custom taxonomies
                     migrate(provider=provider._id, data=taxonomy_json, add_missing=provider_form.cleaned_data['add_missing'])
                     return redirect('preprint_providers:detail', preprint_provider_id=provider.id)
-
-            except ValueError as error:
+            except (ValueError, RuntimeError) as error:
                 response_data = {
-                    'message': 'There is an error with the submitted JSON. Here are some details: ' + error.message,
+                    'message': 'There is an error with the submitted JSON or the provider. Here are some details: ' + error.message,
                     'feedback_type': 'error'
                 }
         else:
diff --git a/osf/management/commands/populate_custom_taxonomies.py b/osf/management/commands/populate_custom_taxonomies.py
index c3818ae6718..ce7bbd3b909 100644
--- a/osf/management/commands/populate_custom_taxonomies.py
+++ b/osf/management/commands/populate_custom_taxonomies.py
@@ -57,6 +57,8 @@ def validate_input(custom_provider, data, copy=False, add_missing=False):
     missing_subjects = Subject.objects.filter(id__in=set([hier[-1].id for ps in PreprintService.objects.filter(provider=custom_provider) for hier in ps.subject_hierarchy])).exclude(id__in=included_subjects.values_list('id', flat=True))
     if not add_missing:
         assert not missing_subjects.exists(), 'Incomplete mapping -- following subjects in use but not included:\n{}'.format(list(missing_subjects.values_list('text', flat=True)))
+    assert custom_provider.share_title not in [None, '', 'bepress'], 'share title not set; please set the share title on this provider before creating a custom taxonomy.'
+
     logger.info('Successfully validated mapping completeness')
     return list(missing_subjects) if add_missing else None
 

From 3c76a7c0f7fbae87ba9dd6915ae162945d8df858 Mon Sep 17 00:00:00 2001
From: John Tordoff <john@cos.io>
Date: Thu, 30 Nov 2017 17:00:39 -0500
Subject: [PATCH 171/192] Add useful info to docs for configuring local
 OneDrive addon

---
 addons/onedrive/README.md | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/addons/onedrive/README.md b/addons/onedrive/README.md
index 212f33e1ce8..ca934b59e2d 100644
--- a/addons/onedrive/README.md
+++ b/addons/onedrive/README.md
@@ -3,5 +3,6 @@
 Enabling the addon for development
 
 1. If `addons/onedrive/settings/local.py` does not yet exist, create a local onedrive settings file with `cp addons/onedrive/settings/local-dist.py addons/onedrive/settings/local.py`
-2. Register the addon with Microsoft (https://account.live.com/developers/applications/index) and enter http://localhost:5000/oauth/callback/onedrive/ as the Redirect URL.
-3. Enter your OneDrive `client_id` and `client_secret` as `ONEDRIVE_KEY` and `ONEDRIVE_SECRET` in `addons/onedrive/settings/local.py`.
+2. Register the addon with Microsoft at https://account.live.com/developers/applications/index it should be a 'Web' platform, not 'Web API'
+3. Enter the Redirect URL as http://localhost:5000/oauth/callback/onedrive/
+4. Click 'Generate New Password' and put that string as the `ONEDRIVE_SECRET` in `addons/onedrive/settings/local.py` and put the Application Id as `ONEDRIVE_KEY`

From d8efa5d0953ec9b44911f0f8b3cd35e84a2ceb10 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 7 Sep 2017 10:10:17 -0400
Subject: [PATCH 172/192] Add quickfiles js files along with calling them from
 profile-page

[#OSF-8369]

- Add quickFiles.js for rendering quickfiles with the right data from
the API
- Add file icon to the iconmap
- Call new quickFiles js from profile page js
---
 website/static/js/components/quickFiles.js | 275 +++++++++++++++++++++
 website/static/js/iconmap.js               |   3 +-
 website/static/js/pages/profile-page.js    |   2 +
 3 files changed, 279 insertions(+), 1 deletion(-)
 create mode 100644 website/static/js/components/quickFiles.js

diff --git a/website/static/js/components/quickFiles.js b/website/static/js/components/quickFiles.js
new file mode 100644
index 00000000000..1cbed8c1776
--- /dev/null
+++ b/website/static/js/components/quickFiles.js
@@ -0,0 +1,275 @@
+'use strict';
+
+var m = require('mithril'); // exposes mithril methods, useful for redraw etc.
+var $osf = require('js/osfHelpers');
+var iconmap = require('js/iconmap');
+var lodashFind = require('lodash.find');
+var mHelpers = require('js/mithrilHelpers');
+var Raven = require('raven-js');
+
+var MAX_PAGES_ON_PAGINATOR = 7;
+var MAX_PAGES_ON_PAGINATOR_SIDE = 5;
+var QUICKFILES_PAGE_SIZE = 10;
+
+var _buildUrl = function(page, user) {
+
+    var query = {
+        'page[size]': QUICKFILES_PAGE_SIZE,
+        'page': page || 1,
+        'version': '2.2',
+    };
+
+    return $osf.apiV2Url('users/' + user +  '/quickfiles/', { query: query});
+};
+
+
+var QuickFile = {
+
+    controller: function(options) {
+        var self = this;
+        self.file = options.file;
+        self.icon =  iconmap.file;
+    },
+
+    view: function(ctrl)  {
+        var viewBase = window.location.origin + '/quickfiles';
+        var viewUrl = ctrl.file.attributes.guid ? viewBase + '/' + ctrl.file.attributes.guid : viewBase + ctrl.file.attributes.path;
+        return m('div', [
+            m('li.project list-group-item list-group-item-node cite-container', [
+                m('h4.list-group-item-heading', [
+                    m('span.component-overflow.f-w-lg', {style: 'line-height: 1.5; width: 100%'}, [
+                        m('span.project-statuses-lg', {style: 'width: 50%; float:left'}, [
+                            m('span', {class: ctrl.icon, style: 'padding-right: 5px;'}, ''),
+                            m('a', {'href': viewUrl}, ctrl.file.attributes.name),
+                        ]),
+                        m('div', {style: 'width: 50%; float:right; font-size:small; line-height:2;'}, 'downloads: ' + ctrl.file.attributes.extra.downloads)
+                    ])
+                ])
+            ])
+        ]);
+    }
+};
+
+var QuickFiles = {
+
+    controller: function (options) {
+        var self = this;
+        self.user = options.user._id;
+        self.isProfile = options.user.is_profile;
+
+        self.quickFiles = m.prop([]);
+        self.requestPending = m.prop(false);
+
+        self.failed = false;
+        self.paginators = m.prop([]);
+        self.nextPage = m.prop('');
+        self.prevPage = m.prop('');
+        self.totalPages = m.prop(0);
+        self.currentPage = m.prop(0);
+        self.pageToGet = m.prop(0);
+
+        self.getQuickFiles = function _getQuickFiles(url) {
+            if (self.requestPending()) {
+                return;
+            }
+            self.quickFiles([]);
+            self.requestPending(true);
+
+            function _processResults(result) {
+
+                self.quickFiles(result.data);
+                self.nextPage(result.links.next);
+                self.prevPage(result.links.prev);
+
+                var params = $osf.urlParams(url);
+                var page = params.page || 1;
+
+                self.currentPage(parseInt(page));
+                self.totalPages(Math.ceil(result.meta.total / result.meta.per_page));
+
+                m.redraw();
+            }
+
+            var promise = m.request({
+                method: 'GET',
+                url: url,
+                background: true,
+                config: mHelpers.apiV2Config({withCredentials: window.contextVars.isOnRootDomain})
+            });
+
+            promise.then(
+                function (result) {
+                    self.requestPending(false);
+                    _processResults(result);
+                    return promise;
+                }, function (xhr, textStatus, error) {
+                    self.failed = true;
+                    self.requestPending(false);
+                    m.redraw();
+                    Raven.captureMessage('Error retrieving quickfiles', {
+                        extra: {
+                            url: url,
+                            textStatus: textStatus,
+                            error: error
+                        }
+                    });
+                }
+            );
+        };
+
+        self.getCurrentQuickFiles = function _getCurrentQuickFiles(page) {
+            if (!self.requestPending()) {
+                var url = _buildUrl(page, self.user);
+                return self.getQuickFiles(url);
+            }
+        };
+        self.getCurrentQuickFiles();
+    },
+
+    view: function (ctrl) {
+        var i;
+        ctrl.paginators([]);
+        if (ctrl.totalPages() > 1) {
+            // previous page
+            ctrl.paginators().push({
+                url: function() { return ctrl.prevPage(); },
+                text: '<'
+            });
+            // first page
+            ctrl.paginators().push({
+                text: 1,
+                url: function() {
+                    ctrl.pageToGet(1);
+                    if(ctrl.pageToGet() !== ctrl.currentPage()) {
+                        return _buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                    }
+                }
+            });
+            // no ellipses
+            if (ctrl.totalPages() <= MAX_PAGES_ON_PAGINATOR) {
+                for (i = 2; i < ctrl.totalPages(); i++) {
+                    ctrl.paginators().push({
+                        text: i,
+                        url: function() {
+                            ctrl.pageToGet(parseInt(this.text));
+                            if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                                return _buildUrl(ctrl.pageToGet(), ctrl.user);
+                            }
+                        }
+                    });/* jshint ignore:line */
+                    // function defined inside loop
+                }
+            }
+            // one ellipse at the end
+            else if (ctrl.currentPage() < MAX_PAGES_ON_PAGINATOR_SIDE - 1) {
+                for (i = 2; i < MAX_PAGES_ON_PAGINATOR_SIDE; i++) {
+                    ctrl.paginators().push({
+                        text: i,
+                        url: function() {
+                            ctrl.pageToGet(parseInt(this.text));
+                            if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                                return _buildUrl(ctrl.pageToGet(), ctrl.user);
+                            }
+                        }
+                    });/* jshint ignore:line */
+                    // function defined inside loop
+                }
+                ctrl.paginators().push({
+                    text: '...',
+                    url: function() { }
+                });
+            }
+            // one ellipse at the beginning
+            else if (ctrl.currentPage() > ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2) {
+                ctrl.paginators().push({
+                    text: '...',
+                    url: function() { }
+                });
+                for (i = ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2; i <= ctrl.totalPages() - 1; i++) {
+                    ctrl.paginators().push({
+                        text: i,
+                        url: function() {
+                            ctrl.pageToGet(parseInt(this.text));
+                            if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                                return _buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                            }
+                        }
+                    });/* jshint ignore:line */
+                    // function defined inside loop
+                }
+            }
+            // two ellipses
+            else {
+                ctrl.paginators().push({
+                    text: '...',
+                    url: function() { }
+                });
+                for (i = parseInt(ctrl.currentPage()) - 1; i <= parseInt(ctrl.currentPage()) + 1; i++) {
+                    ctrl.paginators().push({
+                        text: i,
+                        url: function() {
+                            ctrl.pageToGet(parseInt(this.text));
+                            if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                                return _buildUrl(ctrl.pageToGet(), ctrl.user);
+                            }
+                        }
+                    });/* jshint ignore:line */
+                    // function defined inside loop
+                }
+                ctrl.paginators().push({
+                    text: '...',
+                    url: function() { }
+                });
+            }
+            // last page
+            ctrl.paginators().push({
+                text: ctrl.totalPages(),
+                url: function() {
+                    ctrl.pageToGet(ctrl.totalPages());
+                    if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                        return _buildUrl(ctrl.pageToGet(), ctrl.user);
+                    }
+                }
+            });
+            // next page
+            ctrl.paginators().push({
+                url: function() { return ctrl.nextPage(); },
+                text: '>'
+            });
+        }
+
+        return m('ul.list-group m-md', [
+            // Error message if the request fails
+            ctrl.failed ? m('p', [
+                'Unable to retrieve quickfiles at this time. Please refresh the page or contact ',
+                m('a', {'href': 'mailto:support@osf.io'}, 'support@osf.io'),
+                ' if the problem persists.'
+            ]) :
+
+            // Show loading icon while there is a pending request
+            ctrl.requestPending() ?  m('.ball-pulse.ball-scale-blue.text-center', [m(''), m(''), m('')]) :
+
+            // Display each quickfile
+            [
+                ctrl.quickFiles().length !== 0 ? ctrl.quickFiles().map(function(file) {
+                    return m.component(QuickFile, {file: file});
+                }) : ctrl.isProfile ?
+                    m('div.help-block', {}, 'You have no public quickfiles')
+                : m('div.help-block', {}, 'This user has no public quickfiles.'),
+
+                // Pagination
+                m('.db-activity-nav.text-center', {style: 'margin-top: 5px; margin-bottom: -10px;'}, [
+                    ctrl.paginators() ? ctrl.paginators().map(function(page) {
+                        return page.url() ? m('.btn.btn-sm.btn-link', { onclick : function() {
+                            ctrl.getQuickFiles(page.url());
+                        }}, page.text) : m('.btn.btn-sm.btn-link.disabled', {style: 'color: black'}, page.text);
+                    }) : ''
+                ])
+            ]
+        ]);
+    }
+};
+
+module.exports = {
+    QuickFiles: QuickFiles
+};
diff --git a/website/static/js/iconmap.js b/website/static/js/iconmap.js
index 61406c4438d..aa2a75590c8 100644
--- a/website/static/js/iconmap.js
+++ b/website/static/js/iconmap.js
@@ -24,5 +24,6 @@ module.exports = {
     info: 'fa fa-info-circle',
     smaller: 'iconmap-smaller',
     clickable: 'iconmap-clickable',
-    private : 'fa fa-lock'
+    private : 'fa fa-lock',
+    file: 'fa fa-file-o',
 };
diff --git a/website/static/js/pages/profile-page.js b/website/static/js/pages/profile-page.js
index 2df42fb3aaa..68672ebb109 100644
--- a/website/static/js/pages/profile-page.js
+++ b/website/static/js/pages/profile-page.js
@@ -10,6 +10,7 @@ require('../project.js'); // Needed for nodelists to work
 require('../components/logFeed.js'); // Needed for nodelists to work
 var profile = require('../profile.js'); // Social, Job, Education classes
 var publicNodes = require('../components/publicNodes.js');
+var quickFiles = require('../components/quickFiles.js');
 
 var ctx = window.contextVars;
 // Instantiate all the profile modules
@@ -20,5 +21,6 @@ new profile.Schools('#schools', ctx.schoolsUrls, ['view'], false);
 $(document).ready(function () {
     m.mount(document.getElementById('publicProjects'), m.component(publicNodes.PublicNodes, {user: ctx.user, nodeType: 'projects'}));
     m.mount(document.getElementById('publicComponents'), m.component(publicNodes.PublicNodes, {user: ctx.user, nodeType: 'components'}));
+    m.mount(document.getElementById('quickFiles'), m.component(quickFiles.QuickFiles, {user: ctx.user}));
 });
 

From 87fcb786f19c6522a1455fa60defa66937cf9d5c Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 7 Sep 2017 10:13:15 -0400
Subject: [PATCH 173/192] Add quickfiles to profile mako, pass has_quickfiles
 from view

---
 website/profile/views.py       |  4 ++-
 website/templates/profile.mako | 48 +++++++++++++++++++++++++++-------
 2 files changed, 41 insertions(+), 11 deletions(-)

diff --git a/website/profile/views.py b/website/profile/views.py
index d8412ba94ef..56648e7f776 100644
--- a/website/profile/views.py
+++ b/website/profile/views.py
@@ -21,7 +21,7 @@
 from framework.flask import redirect  # VOL-aware redirect
 from framework.status import push_status_message
 
-from osf.models import ApiOAuth2Application, ApiOAuth2PersonalToken, OSFUser
+from osf.models import ApiOAuth2Application, ApiOAuth2PersonalToken, OSFUser, QuickFilesNode
 from website import mails
 from website import mailchimp_utils
 from website import settings
@@ -211,6 +211,7 @@ def _profile_view(profile, is_profile=False, include_node_counts=False):
         raise HTTPError(http.GONE)
 
     if profile:
+        profile_quickfilesnode = QuickFilesNode.objects.get_for_user(profile)
         profile_user_data = profile_utils.serialize_user(profile, full=True, is_profile=is_profile, include_node_counts=include_node_counts)
         ret = {
             'profile': profile_user_data,
@@ -219,6 +220,7 @@ def _profile_view(profile, is_profile=False, include_node_counts=False):
                 'is_profile': is_profile,
                 'can_edit': None,  # necessary for rendering nodes
                 'permissions': [],  # necessary for rendering nodes
+                'has_quickfiles': profile_quickfilesnode.files.filter(type='osf.osfstoragefile').exists()
             },
         }
         return ret
diff --git a/website/templates/profile.mako b/website/templates/profile.mako
index 657ac34715a..bf719667fdd 100644
--- a/website/templates/profile.mako
+++ b/website/templates/profile.mako
@@ -98,25 +98,52 @@
 <hr />
 <div class="row">
     <div class="col-sm-6">
-        <div class="panel panel-default">
-            <div class="panel-heading clearfix">
-              <h3 class="panel-title" >Public projects</h3>
+        % if user['has_quickfiles']:
+        <div class="row">
+            <div class="col-sm-12">
+        %endif
+                <div class="panel panel-default">
+                    <div class="panel-heading clearfix">
+                      <h3 class="panel-title" >Public projects</h3>
+                    </div>
+                    <div class="panel-body clearfix" id="publicProjects">
+                        <div class="ball-pulse ball-scale-blue text-center">
+                          <div></div>
+                          <div></div>
+                          <div></div>
+                        </div>
+                    </div>
+                </div>
             </div>
-            <div class="panel-body clearfix" id="publicProjects">
-                <div class="ball-pulse ball-scale-blue text-center">
-                  <div></div>
-                  <div></div>
-                  <div></div>
+        % if user['has_quickfiles']:
+        </div>
+        <div class="row">
+            <div class="col-sm-12">
+        %else:
+            <div class="col-sm-6">
+            %endif
+                <div class="panel panel-default">
+                    <div class="panel-heading clearfix">
+                        <h3 class="panel-title">Public components</h3>
+                    </div>
+                    <div class="panel-body clearfix" id="publicComponents">
+                      <div class="ball-pulse ball-scale-blue text-center">
+                        <div></div>
+                        <div></div>
+                        <div></div>
+                      </div>
+                    </div>
                 </div>
             </div>
         </div>
     </div>
+    % if user['has_quickfiles']:
     <div class="col-sm-6">
         <div class="panel panel-default">
             <div class="panel-heading clearfix">
-                <h3 class="panel-title">Public components</h3>
+                <h3 class="panel-title">Quick Files</h3>
             </div>
-            <div class="panel-body clearfix" id="publicComponents">
+            <div class="panel-body clearfix" id="quickFiles">
               <div class="ball-pulse ball-scale-blue text-center">
                 <div></div>
                 <div></div>
@@ -125,6 +152,7 @@
             </div>
         </div>
     </div>
+    % endif
 </div><!-- end row -->
 
 <%include file="include/profile/social.mako" />

From 1b92ea534905f97be8fedefb26fd94993762820a Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 7 Sep 2017 11:23:05 -0400
Subject: [PATCH 174/192] Test to check widget is gone for users with no
 QuickFiles

---
 tests/test_views.py | 20 +++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)

diff --git a/tests/test_views.py b/tests/test_views.py
index 60f18043133..d539d9998f5 100644
--- a/tests/test_views.py
+++ b/tests/test_views.py
@@ -65,10 +65,11 @@
     assert_datetime_equal,
 )
 from tests.base import test_app as mock_app
+from api_tests.utils import create_test_file
 
 pytestmark = pytest.mark.django_db
 
-from osf.models import NodeRelation
+from osf.models import NodeRelation, QuickFilesNode
 from osf_tests.factories import (
     fake_email,
     ApiOAuth2ApplicationFactory,
@@ -1393,6 +1394,23 @@ def test_unsubscribe_mailchimp_not_called_if_user_not_subscribed(self, mock_get_
         assert_equal(mock_client.lists.subscribe.call_count, 0)
         handlers.celery_teardown_request()
 
+    def test_user_with_quickfiles(self):
+        quickfiles_node = QuickFilesNode.objects.get_for_user(self.user)
+        create_test_file(quickfiles_node, self.user, filename='skrr_skrrrrrrr.pdf')
+
+        url = web_url_for('profile_view_id', uid=self.user._id)
+        res = self.app.get(url, auth=self.user.auth)
+
+        assert_in('Quick Files', res.body)
+
+    def test_user_with_no_quickfiles(self):
+        assert(not QuickFilesNode.objects.first().files.filter(type='osf.osfstoragefile').exists())
+
+        url = web_url_for('profile_view_id', uid=self.user._primary_key)
+        res = self.app.get(url, auth=self.user.auth)
+
+        assert_not_in('Quick Files', res.body)
+
 
 class TestUserProfileApplicationsPage(OsfTestCase):
 

From bddb348c795d429fde3e329fa83fb704f9760f21 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Thu, 7 Sep 2017 12:25:09 -0400
Subject: [PATCH 175/192] Break out pagination for public Nodes and quickfiles

---
 .../static/js/components/profilePagination.js | 129 ++++++++++++++++++
 website/static/js/components/publicNodes.js   | 115 +---------------
 website/static/js/components/quickFiles.js    | 117 +---------------
 3 files changed, 137 insertions(+), 224 deletions(-)
 create mode 100644 website/static/js/components/profilePagination.js

diff --git a/website/static/js/components/profilePagination.js b/website/static/js/components/profilePagination.js
new file mode 100644
index 00000000000..e96b92c84cc
--- /dev/null
+++ b/website/static/js/components/profilePagination.js
@@ -0,0 +1,129 @@
+'use strict';
+
+var m = require('mithril'); // exposes mithril methods, useful for redraw etc.
+var $osf = require('js/osfHelpers');
+var iconmap = require('js/iconmap');
+var lodashFind = require('lodash.find');
+var mHelpers = require('js/mithrilHelpers');
+var Raven = require('raven-js');
+
+
+var MAX_PAGES_ON_PAGINATOR = 7;
+var MAX_PAGES_ON_PAGINATOR_SIDE = 5;
+
+var ProfilePagination = function(ctrl, buildUrlFunction) {
+    var i;
+    ctrl.paginators([]);
+    if (ctrl.totalPages() > 1) {
+        // previous page
+        ctrl.paginators().push({
+            url: function() { return ctrl.prevPage(); },
+            text: '<'
+        });
+        // first page
+        ctrl.paginators().push({
+            text: 1,
+            url: function() {
+                ctrl.pageToGet(1);
+                if(ctrl.pageToGet() !== ctrl.currentPage()) {
+                    return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                }
+            }
+        });
+        // no ellipses
+        if (ctrl.totalPages() <= MAX_PAGES_ON_PAGINATOR) {
+            for (i = 2; i < ctrl.totalPages(); i++) {
+                ctrl.paginators().push({
+                    text: i,
+                    url: function() {
+                        ctrl.pageToGet(parseInt(this.text));
+                        if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                            return buildUrlFunction(ctrl.pageToGet(), ctrl.user);
+                        }
+                    }
+                });/* jshint ignore:line */
+                // function defined inside loop
+            }
+        }
+        // one ellipse at the end
+        else if (ctrl.currentPage() < MAX_PAGES_ON_PAGINATOR_SIDE - 1) {
+            for (i = 2; i < MAX_PAGES_ON_PAGINATOR_SIDE; i++) {
+                ctrl.paginators().push({
+                    text: i,
+                    url: function() {
+                        ctrl.pageToGet(parseInt(this.text));
+                        if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                            return buildUrlFunction(ctrl.pageToGet(), ctrl.user);
+                        }
+                    }
+                });/* jshint ignore:line */
+                // function defined inside loop
+            }
+            ctrl.paginators().push({
+                text: '...',
+                url: function() { }
+            });
+        }
+        // one ellipse at the beginning
+        else if (ctrl.currentPage() > ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2) {
+            ctrl.paginators().push({
+                text: '...',
+                url: function() { }
+            });
+            for (i = ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2; i <= ctrl.totalPages() - 1; i++) {
+                ctrl.paginators().push({
+                    text: i,
+                    url: function() {
+                        ctrl.pageToGet(parseInt(this.text));
+                        if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                            return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                        }
+                    }
+                });/* jshint ignore:line */
+                // function defined inside loop
+            }
+        }
+        // two ellipses
+        else {
+            ctrl.paginators().push({
+                text: '...',
+                url: function() { }
+            });
+            for (i = parseInt(ctrl.currentPage()) - 1; i <= parseInt(ctrl.currentPage()) + 1; i++) {
+                ctrl.paginators().push({
+                    text: i,
+                    url: function() {
+                        ctrl.pageToGet(parseInt(this.text));
+                        if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                            return buildUrlFunction(ctrl.pageToGet(), ctrl.user);
+                        }
+                    }
+                });/* jshint ignore:line */
+                // function defined inside loop
+            }
+            ctrl.paginators().push({
+                text: '...',
+                url: function() { }
+            });
+        }
+        // last page
+        ctrl.paginators().push({
+            text: ctrl.totalPages(),
+            url: function() {
+                ctrl.pageToGet(ctrl.totalPages());
+                if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                    return buildUrlFunction(ctrl.pageToGet(), ctrl.user);
+                }
+            }
+        });
+        // next page
+        ctrl.paginators().push({
+            url: function() { return ctrl.nextPage(); },
+            text: '>'
+        });
+    }
+};
+
+module.exports = {
+    ProfilePagination: ProfilePagination
+};
diff --git a/website/static/js/components/publicNodes.js b/website/static/js/components/publicNodes.js
index 412b104e6b7..6bf9c383d92 100644
--- a/website/static/js/components/publicNodes.js
+++ b/website/static/js/components/publicNodes.js
@@ -6,9 +6,9 @@ var iconmap = require('js/iconmap');
 var lodashFind = require('lodash.find');
 var mHelpers = require('js/mithrilHelpers');
 var Raven = require('raven-js');
+var profilePagination = require('js/components/profilePagination.js');
+
 
-var MAX_PAGES_ON_PAGINATOR = 7;
-var MAX_PAGES_ON_PAGINATOR_SIDE = 5;
 var PROJECTS_PAGE_SIZE = 5;
 
 var _buildUrl = function(page, user, nodeType) {
@@ -198,116 +198,7 @@ var PublicNodes = {
 
     view : function (ctrl) {
 
-        var i;
-        ctrl.paginators([]);
-        if (ctrl.totalPages() > 1) {
-            // previous page
-            ctrl.paginators().push({
-                url: function() { return ctrl.prevPage(); },
-                text: '<'
-            });
-            // first page
-            ctrl.paginators().push({
-                text: 1,
-                url: function() {
-                    ctrl.pageToGet(1);
-                    if(ctrl.pageToGet() !== ctrl.currentPage()) {
-                        return _buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
-                    }
-                }
-            });
-            // no ellipses
-            if (ctrl.totalPages() <= MAX_PAGES_ON_PAGINATOR) {
-                for (i = 2; i < ctrl.totalPages(); i++) {
-                    ctrl.paginators().push({
-                        text: i,
-                        url: function() {
-                            ctrl.pageToGet(parseInt(this.text));
-                            if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                                return _buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
-                            }
-                        }
-                    });/* jshint ignore:line */
-                    // function defined inside loop
-                }
-            }
-            // one ellipse at the end
-            else if (ctrl.currentPage() < MAX_PAGES_ON_PAGINATOR_SIDE - 1) {
-                for (i = 2; i < MAX_PAGES_ON_PAGINATOR_SIDE; i++) {
-                    ctrl.paginators().push({
-                        text: i,
-                        url: function() {
-                            ctrl.pageToGet(parseInt(this.text));
-                            if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                                return _buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
-                            }
-                        }
-                    });/* jshint ignore:line */
-                    // function defined inside loop
-                }
-                ctrl.paginators().push({
-                    text: '...',
-                    url: function() { }
-                });
-            }
-            // one ellipse at the beginning
-            else if (ctrl.currentPage() > ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2) {
-                ctrl.paginators().push({
-                    text: '...',
-                    url: function() { }
-                });
-                for (i = ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2; i <= ctrl.totalPages() - 1; i++) {
-                    ctrl.paginators().push({
-                        text: i,
-                        url: function() {
-                            ctrl.pageToGet(parseInt(this.text));
-                            if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                                return _buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
-                            }
-                        }
-                    });/* jshint ignore:line */
-                    // function defined inside loop
-                }
-            }
-            // two ellipses
-            else {
-                ctrl.paginators().push({
-                    text: '...',
-                    url: function() { }
-                });
-                for (i = parseInt(ctrl.currentPage()) - 1; i <= parseInt(ctrl.currentPage()) + 1; i++) {
-                    ctrl.paginators().push({
-                        text: i,
-                        url: function() {
-                            ctrl.pageToGet(parseInt(this.text));
-                            if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                                return _buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
-                            }
-                        }
-                    });/* jshint ignore:line */
-                    // function defined inside loop
-                }
-                ctrl.paginators().push({
-                    text: '...',
-                    url: function() { }
-                });
-            }
-            // last page
-            ctrl.paginators().push({
-                text: ctrl.totalPages(),
-                url: function() {
-                    ctrl.pageToGet(ctrl.totalPages());
-                    if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                        return _buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
-                    }
-                }
-            });
-            // next page
-            ctrl.paginators().push({
-                url: function() { return ctrl.nextPage(); },
-                text: '>'
-            });
-        }
+        profilePagination.ProfilePagination(ctrl, _buildUrl);
 
         return m('ul.list-group m-md', [
             // Error message if the request fails
diff --git a/website/static/js/components/quickFiles.js b/website/static/js/components/quickFiles.js
index 1cbed8c1776..30360c712d6 100644
--- a/website/static/js/components/quickFiles.js
+++ b/website/static/js/components/quickFiles.js
@@ -7,8 +7,9 @@ var lodashFind = require('lodash.find');
 var mHelpers = require('js/mithrilHelpers');
 var Raven = require('raven-js');
 
-var MAX_PAGES_ON_PAGINATOR = 7;
-var MAX_PAGES_ON_PAGINATOR_SIDE = 5;
+var profilePagination = require('js/components/profilePagination.js');
+
+
 var QUICKFILES_PAGE_SIZE = 10;
 
 var _buildUrl = function(page, user) {
@@ -127,116 +128,8 @@ var QuickFiles = {
     },
 
     view: function (ctrl) {
-        var i;
-        ctrl.paginators([]);
-        if (ctrl.totalPages() > 1) {
-            // previous page
-            ctrl.paginators().push({
-                url: function() { return ctrl.prevPage(); },
-                text: '<'
-            });
-            // first page
-            ctrl.paginators().push({
-                text: 1,
-                url: function() {
-                    ctrl.pageToGet(1);
-                    if(ctrl.pageToGet() !== ctrl.currentPage()) {
-                        return _buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
-                    }
-                }
-            });
-            // no ellipses
-            if (ctrl.totalPages() <= MAX_PAGES_ON_PAGINATOR) {
-                for (i = 2; i < ctrl.totalPages(); i++) {
-                    ctrl.paginators().push({
-                        text: i,
-                        url: function() {
-                            ctrl.pageToGet(parseInt(this.text));
-                            if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                                return _buildUrl(ctrl.pageToGet(), ctrl.user);
-                            }
-                        }
-                    });/* jshint ignore:line */
-                    // function defined inside loop
-                }
-            }
-            // one ellipse at the end
-            else if (ctrl.currentPage() < MAX_PAGES_ON_PAGINATOR_SIDE - 1) {
-                for (i = 2; i < MAX_PAGES_ON_PAGINATOR_SIDE; i++) {
-                    ctrl.paginators().push({
-                        text: i,
-                        url: function() {
-                            ctrl.pageToGet(parseInt(this.text));
-                            if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                                return _buildUrl(ctrl.pageToGet(), ctrl.user);
-                            }
-                        }
-                    });/* jshint ignore:line */
-                    // function defined inside loop
-                }
-                ctrl.paginators().push({
-                    text: '...',
-                    url: function() { }
-                });
-            }
-            // one ellipse at the beginning
-            else if (ctrl.currentPage() > ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2) {
-                ctrl.paginators().push({
-                    text: '...',
-                    url: function() { }
-                });
-                for (i = ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2; i <= ctrl.totalPages() - 1; i++) {
-                    ctrl.paginators().push({
-                        text: i,
-                        url: function() {
-                            ctrl.pageToGet(parseInt(this.text));
-                            if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                                return _buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
-                            }
-                        }
-                    });/* jshint ignore:line */
-                    // function defined inside loop
-                }
-            }
-            // two ellipses
-            else {
-                ctrl.paginators().push({
-                    text: '...',
-                    url: function() { }
-                });
-                for (i = parseInt(ctrl.currentPage()) - 1; i <= parseInt(ctrl.currentPage()) + 1; i++) {
-                    ctrl.paginators().push({
-                        text: i,
-                        url: function() {
-                            ctrl.pageToGet(parseInt(this.text));
-                            if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                                return _buildUrl(ctrl.pageToGet(), ctrl.user);
-                            }
-                        }
-                    });/* jshint ignore:line */
-                    // function defined inside loop
-                }
-                ctrl.paginators().push({
-                    text: '...',
-                    url: function() { }
-                });
-            }
-            // last page
-            ctrl.paginators().push({
-                text: ctrl.totalPages(),
-                url: function() {
-                    ctrl.pageToGet(ctrl.totalPages());
-                    if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                        return _buildUrl(ctrl.pageToGet(), ctrl.user);
-                    }
-                }
-            });
-            // next page
-            ctrl.paginators().push({
-                url: function() { return ctrl.nextPage(); },
-                text: '>'
-            });
-        }
+
+        profilePagination.ProfilePagination(ctrl, _buildUrl);
 
         return m('ul.list-group m-md', [
             // Error message if the request fails

From 4b8c15f2c03cbff51eef1001be2d54b5e223faf2 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Wed, 25 Oct 2017 15:09:04 -0400
Subject: [PATCH 176/192] Track every time someone clicks a file from the
 profile page

---
 website/static/js/components/quickFiles.js | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/website/static/js/components/quickFiles.js b/website/static/js/components/quickFiles.js
index 30360c712d6..0cdbcf9d8db 100644
--- a/website/static/js/components/quickFiles.js
+++ b/website/static/js/components/quickFiles.js
@@ -41,7 +41,11 @@ var QuickFile = {
                     m('span.component-overflow.f-w-lg', {style: 'line-height: 1.5; width: 100%'}, [
                         m('span.project-statuses-lg', {style: 'width: 50%; float:left'}, [
                             m('span', {class: ctrl.icon, style: 'padding-right: 5px;'}, ''),
-                            m('a', {'href': viewUrl}, ctrl.file.attributes.name),
+                            m('a', {'href': viewUrl,
+                                onclick : function () {
+                                    $osf.trackClick('QuickFiles', 'view', 'view-quickfile-from-profile-page');
+                                }
+                            }, ctrl.file.attributes.name),
                         ]),
                         m('div', {style: 'width: 50%; float:right; font-size:small; line-height:2;'}, 'downloads: ' + ctrl.file.attributes.extra.downloads)
                     ])

From 45880afc308fe98ca1dad8de23b857efd4b76f3a Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Mon, 20 Nov 2017 21:08:57 -0500
Subject: [PATCH 177/192] Change case for Quick files on profile page

---
 tests/test_views.py            | 4 ++--
 website/templates/profile.mako | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/test_views.py b/tests/test_views.py
index d539d9998f5..41b3e626d43 100644
--- a/tests/test_views.py
+++ b/tests/test_views.py
@@ -1401,7 +1401,7 @@ def test_user_with_quickfiles(self):
         url = web_url_for('profile_view_id', uid=self.user._id)
         res = self.app.get(url, auth=self.user.auth)
 
-        assert_in('Quick Files', res.body)
+        assert_in('Quick files', res.body)
 
     def test_user_with_no_quickfiles(self):
         assert(not QuickFilesNode.objects.first().files.filter(type='osf.osfstoragefile').exists())
@@ -1409,7 +1409,7 @@ def test_user_with_no_quickfiles(self):
         url = web_url_for('profile_view_id', uid=self.user._primary_key)
         res = self.app.get(url, auth=self.user.auth)
 
-        assert_not_in('Quick Files', res.body)
+        assert_not_in('Quick files', res.body)
 
 
 class TestUserProfileApplicationsPage(OsfTestCase):
diff --git a/website/templates/profile.mako b/website/templates/profile.mako
index bf719667fdd..b95d8747218 100644
--- a/website/templates/profile.mako
+++ b/website/templates/profile.mako
@@ -141,7 +141,7 @@
     <div class="col-sm-6">
         <div class="panel panel-default">
             <div class="panel-heading clearfix">
-                <h3 class="panel-title">Quick Files</h3>
+                <h3 class="panel-title">Quick files</h3>
             </div>
             <div class="panel-body clearfix" id="quickFiles">
               <div class="ball-pulse ball-scale-blue text-center">

From f6ef5a1425f6f73831b7820291b31704c8676103 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Mon, 27 Nov 2017 12:02:40 -0500
Subject: [PATCH 178/192] Refactor pagination into entirely separate module for
 use in Qfs and public nodes

---
 website/static/js/components/pagination.js  | 175 ++++++++++++++++++++
 website/static/js/components/publicNodes.js |  51 ++----
 website/static/js/components/quickFiles.js  |  58 ++-----
 3 files changed, 200 insertions(+), 84 deletions(-)
 create mode 100644 website/static/js/components/pagination.js

diff --git a/website/static/js/components/pagination.js b/website/static/js/components/pagination.js
new file mode 100644
index 00000000000..25f2558ad51
--- /dev/null
+++ b/website/static/js/components/pagination.js
@@ -0,0 +1,175 @@
+'use strict';
+
+var m = require('mithril'); // exposes mithril methods, useful for redraw etc.
+var $osf = require('js/osfHelpers');
+var iconmap = require('js/iconmap');
+var lodashFind = require('lodash.find');
+var mHelpers = require('js/mithrilHelpers');
+var Raven = require('raven-js');
+
+var MAX_PAGES_ON_PAGINATOR = 7;
+var MAX_PAGES_ON_PAGINATOR_SIDE = 5;
+
+
+var WithPagination = function(component, buildUrlFunction) {
+    /**
+     * Wrapper around another controller to add pagination functionality.
+     * Assumes that the wrapped controller has a function getNextItems which
+     * takes care of making the appropriate request and calling this wrapper's
+     * updatePagination function.
+     */
+
+    return {
+        controller: function (options) {
+            var self = this;
+
+            self.failed = false;
+            self.paginators = m.prop([]);
+            self.nextPage = m.prop('');
+            self.prevPage = m.prop('');
+            self.totalPages = m.prop(0);
+            self.currentPage = m.prop(0);
+            self.pageToGet = m.prop(0);
+
+            options.updatePagination = function (result, url) {
+                self.nextPage(result.links.next);
+                self.prevPage(result.links.prev);
+                var params = $osf.urlParams(url);
+                var page = params.page || 1;
+                self.currentPage(parseInt(page));
+                self.totalPages(Math.ceil(result.meta.total / result.meta.per_page));
+            };
+
+            self.user = options.user._id;
+            self.nodeType = options.nodeType;
+            self.innerCtrl = new component.controller(options);
+        },
+        view: function (ctrl) {
+            var i;
+            ctrl.paginators([]);
+            if (ctrl.totalPages() > 1) {
+                // previous page
+                ctrl.paginators().push({
+                    url: function() { return ctrl.prevPage(); },
+                    text: '<'
+                });
+                // first page
+                ctrl.paginators().push({
+                    text: 1,
+                    url: function() {
+                        ctrl.pageToGet(1);
+                        if(ctrl.pageToGet() !== ctrl.currentPage()) {
+                            return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                        }
+                    }
+                });
+                // no ellipses
+                if (ctrl.totalPages() <= MAX_PAGES_ON_PAGINATOR) {
+                    for (i = 2; i < ctrl.totalPages(); i++) {
+                        ctrl.paginators().push({
+                            text: i,
+                            url: function() {
+                                ctrl.pageToGet(parseInt(this.text));
+                                if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                                    return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                                }
+                            }
+                        });/* jshint ignore:line */
+                        // function defined inside loop
+                    }
+                }
+                // one ellipse at the end
+                else if (ctrl.currentPage() < MAX_PAGES_ON_PAGINATOR_SIDE - 1) {
+                    for (i = 2; i < MAX_PAGES_ON_PAGINATOR_SIDE; i++) {
+                        self.paginators().push({
+                            text: i,
+                            url: function() {
+                                ctrl.pageToGet(parseInt(this.text));
+                                if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                                    return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                                }
+                            }
+                        });/* jshint ignore:line */
+                        // function defined inside loop
+                    }
+                    ctrl.paginators().push({
+                        text: '...',
+                        url: function() { }
+                    });
+                }
+                // one ellipse at the beginning
+                else if (ctrl.currentPage() > ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2) {
+                    ctrl.paginators().push({
+                        text: '...',
+                        url: function() { }
+                    });
+                    for (i = ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2; i <= ctrl.totalPages() - 1; i++) {
+                        ctrl.paginators().push({
+                            text: i,
+                            url: function() {
+                                ctrl.pageToGet(parseInt(this.text));
+                                if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                                    return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                                }
+                            }
+                        });/* jshint ignore:line */
+                        // function defined inside loop
+                    }
+                }
+                // two ellipses
+                else {
+                    ctrl.paginators().push({
+                        text: '...',
+                        url: function() { }
+                    });
+                    for (i = parseInt(ctrl.currentPage()) - 1; i <= parseInt(ctrl.currentPage()) + 1; i++) {
+                        ctrl.paginators().push({
+                            text: i,
+                            url: function() {
+                                ctrl.pageToGet(parseInt(this.text));
+                                if (ctrl.pageToGet() !== self.currentPage()) {
+                                    return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                                }
+                            }
+                        });/* jshint ignore:line */
+                        // function defined inside loop
+                    }
+                    ctrl.paginators().push({
+                        text: '...',
+                        url: function() { }
+                    });
+                }
+                // last page
+                ctrl.paginators().push({
+                    text: ctrl.totalPages(),
+                    url: function() {
+                        ctrl.pageToGet(ctrl.totalPages());
+                        if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                            return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                        }
+                    }
+                });
+                // next page
+                ctrl.paginators().push({
+                    url: function() { return ctrl.nextPage(); },
+                    text: '>'
+                });
+            }
+
+            return m('.db-activity-nav.text-center', {style: 'margin-top: 5px; margin-bottom: -10px;'}, [
+                component.view.call(this, ctrl.innerCtrl),
+                ctrl.paginators() ? ctrl.paginators().map(function(page) {
+                    return page.url() ? m('.btn.btn-sm.btn-link', { onclick : function() {
+                        ctrl.innerCtrl.getNextItems(page.url());
+                    }}, page.text) : m('.btn.btn-sm.btn-link.disabled', {style: 'color: black'}, page.text);
+                }) : ''
+
+            ]);
+        }
+    };
+
+};
+
+module.exports = {
+    withPagination: WithPagination
+};
diff --git a/website/static/js/components/publicNodes.js b/website/static/js/components/publicNodes.js
index 6bf9c383d92..f40cb287923 100644
--- a/website/static/js/components/publicNodes.js
+++ b/website/static/js/components/publicNodes.js
@@ -6,7 +6,7 @@ var iconmap = require('js/iconmap');
 var lodashFind = require('lodash.find');
 var mHelpers = require('js/mithrilHelpers');
 var Raven = require('raven-js');
-var profilePagination = require('js/components/profilePagination.js');
+var withPagination = require('js/components/pagination.js').withPagination;
 
 
 var PROJECTS_PAGE_SIZE = 5;
@@ -33,6 +33,7 @@ var _buildUrl = function(page, user, nodeType) {
         query.embed.push('parent');
     }
 
+    var urlToReturn = $osf.apiV2Url('users/' + user +  '/nodes/', { query: query});
     return $osf.apiV2Url('users/' + user +  '/nodes/', { query: query});
 };
 
@@ -133,16 +134,7 @@ var PublicNodes = {
         self.publicProjects = m.prop([]);
         self.requestPending = m.prop(false);
 
-        self.failed = false;
-        self.paginators = m.prop([]);
-        self.nextPage = m.prop('');
-        self.prevPage = m.prop('');
-        self.totalPages = m.prop(0);
-        self.currentPage = m.prop(0);
-        self.pageToGet = m.prop(0);
-
-        self.getProjects = function _getProjects (url) {
-
+        self.getNextItems = function _getProjects (url) {
             if(self.requestPending()) {
                 return;
             }
@@ -150,21 +142,6 @@ var PublicNodes = {
             self.publicProjects([]);
             self.requestPending(true);
 
-            function _processResults (result){
-
-                self.publicProjects(result.data);
-                self.nextPage(result.links.next);
-                self.prevPage(result.links.prev);
-
-                var params = $osf.urlParams(url);
-                var page = params.page || 1;
-
-                self.currentPage(parseInt(page));
-                self.totalPages(Math.ceil(result.meta.total / result.meta.per_page));
-
-                m.redraw();
-            }
-
             var promise = m.request({
                 method : 'GET',
                 url : url,
@@ -175,7 +152,9 @@ var PublicNodes = {
             promise.then(
                 function(result) {
                     self.requestPending(false);
-                    _processResults(result);
+                    self.publicProjects(result.data);
+                    options.updatePagination(result, url);
+                    m.redraw();
                     return promise;
                 }, function(xhr, textStatus, error) {
                     self.failed = true;
@@ -189,7 +168,7 @@ var PublicNodes = {
         self.getCurrentProjects = function _getCurrentProjects (page){
             if(!self.requestPending()) {
                 var url = _buildUrl(page, self.user, self.nodeType);
-                return self.getProjects(url);
+                return self.getNextItems(url);
             }
         };
 
@@ -198,8 +177,6 @@ var PublicNodes = {
 
     view : function (ctrl) {
 
-        profilePagination.ProfilePagination(ctrl, _buildUrl);
-
         return m('ul.list-group m-md', [
             // Error message if the request fails
             ctrl.failed ? m('p', [
@@ -224,22 +201,16 @@ var PublicNodes = {
                             '.'
                         ])
                     ])
-                : m('div.help-block', {}, 'This user has no public ' + ctrl.nodeType + '.'),
-
-                // Pagination
-                m('.db-activity-nav.text-center', {style: 'margin-top: 5px; margin-bottom: -10px;'}, [
-                    ctrl.paginators() ? ctrl.paginators().map(function(page) {
-                        return page.url() ? m('.btn.btn-sm.btn-link', { onclick : function() {
-                            ctrl.getProjects(page.url());
-                        }}, page.text) : m('.btn.btn-sm.btn-link.disabled', {style: 'color: black'}, page.text);
-                    }) : ''
-                ])
+                : m('div.help-block', {}, 'This user has no public ' + ctrl.nodeType + '.')
+
             ]
         ]);
 
     }
 };
 
+PublicNodes = withPagination(PublicNodes, _buildUrl);
+
 module.exports = {
     PublicNodes: PublicNodes
 };
diff --git a/website/static/js/components/quickFiles.js b/website/static/js/components/quickFiles.js
index 0cdbcf9d8db..b0b610c8a87 100644
--- a/website/static/js/components/quickFiles.js
+++ b/website/static/js/components/quickFiles.js
@@ -7,11 +7,11 @@ var lodashFind = require('lodash.find');
 var mHelpers = require('js/mithrilHelpers');
 var Raven = require('raven-js');
 
-var profilePagination = require('js/components/profilePagination.js');
-
+var withPagination = require('js/components/pagination').withPagination;
 
 var QUICKFILES_PAGE_SIZE = 10;
 
+
 var _buildUrl = function(page, user) {
 
     var query = {
@@ -65,36 +65,13 @@ var QuickFiles = {
         self.quickFiles = m.prop([]);
         self.requestPending = m.prop(false);
 
-        self.failed = false;
-        self.paginators = m.prop([]);
-        self.nextPage = m.prop('');
-        self.prevPage = m.prop('');
-        self.totalPages = m.prop(0);
-        self.currentPage = m.prop(0);
-        self.pageToGet = m.prop(0);
-
-        self.getQuickFiles = function _getQuickFiles(url) {
+        self.getNextItems = function _getQuickFiles(url) {
             if (self.requestPending()) {
                 return;
             }
             self.quickFiles([]);
             self.requestPending(true);
 
-            function _processResults(result) {
-
-                self.quickFiles(result.data);
-                self.nextPage(result.links.next);
-                self.prevPage(result.links.prev);
-
-                var params = $osf.urlParams(url);
-                var page = params.page || 1;
-
-                self.currentPage(parseInt(page));
-                self.totalPages(Math.ceil(result.meta.total / result.meta.per_page));
-
-                m.redraw();
-            }
-
             var promise = m.request({
                 method: 'GET',
                 url: url,
@@ -105,7 +82,9 @@ var QuickFiles = {
             promise.then(
                 function (result) {
                     self.requestPending(false);
-                    _processResults(result);
+                    self.quickFiles(result.data);
+                    options.updatePagination(result, url);
+                    m.redraw();
                     return promise;
                 }, function (xhr, textStatus, error) {
                     self.failed = true;
@@ -125,7 +104,7 @@ var QuickFiles = {
         self.getCurrentQuickFiles = function _getCurrentQuickFiles(page) {
             if (!self.requestPending()) {
                 var url = _buildUrl(page, self.user);
-                return self.getQuickFiles(url);
+                return self.getNextItems(url);
             }
         };
         self.getCurrentQuickFiles();
@@ -133,15 +112,13 @@ var QuickFiles = {
 
     view: function (ctrl) {
 
-        profilePagination.ProfilePagination(ctrl, _buildUrl);
-
         return m('ul.list-group m-md', [
             // Error message if the request fails
             ctrl.failed ? m('p', [
-                'Unable to retrieve quickfiles at this time. Please refresh the page or contact ',
-                m('a', {'href': 'mailto:support@osf.io'}, 'support@osf.io'),
-                ' if the problem persists.'
-            ]) :
+                    'Unable to retrieve quickfiles at this time. Please refresh the page or contact ',
+                    m('a', {'href': 'mailto:support@osf.io'}, 'support@osf.io'),
+                    ' if the problem persists.'
+                ]) :
 
             // Show laoding icon while there is a pending request
             ctrl.requestPending() ?  m('.ball-pulse.ball-scale-blue.text-center', [m(''), m(''), m('')]) :
@@ -152,21 +129,14 @@ var QuickFiles = {
                     return m.component(QuickFile, {file: file});
                 }) : ctrl.isProfile ?
                     m('div.help-block', {}, 'You have no public quickfiles')
-                : m('div.help-block', {}, 'This user has no public quickfiles.'),
-
-                // Pagination
-                m('.db-activity-nav.text-center', {style: 'margin-top: 5px; margin-bottom: -10px;'}, [
-                    ctrl.paginators() ? ctrl.paginators().map(function(page) {
-                        return page.url() ? m('.btn.btn-sm.btn-link', { onclick : function() {
-                            ctrl.getQuickFiles(page.url());
-                        }}, page.text) : m('.btn.btn-sm.btn-link.disabled', {style: 'color: black'}, page.text);
-                    }) : ''
-                ])
+                : m('div.help-block', {}, 'This user has no public quickfiles.')
             ]
         ]);
     }
 };
 
+QuickFiles = withPagination(QuickFiles, _buildUrl);
+
 module.exports = {
     QuickFiles: QuickFiles
 };

From 0191347dffeff701eb8878553c9cb0a64115cbb4 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Mon, 27 Nov 2017 12:41:46 -0500
Subject: [PATCH 179/192] Refactor paginationjs a bit to be a wrapper builder

Also remove old profile-pagination as it was renamed to pagination
---
 website/static/js/components/pagination.js    | 285 +++++++++---------
 .../static/js/components/profilePagination.js | 129 --------
 website/static/js/components/publicNodes.js   |   7 +-
 website/static/js/components/quickFiles.js    |   7 +-
 4 files changed, 162 insertions(+), 266 deletions(-)
 delete mode 100644 website/static/js/components/profilePagination.js

diff --git a/website/static/js/components/pagination.js b/website/static/js/components/pagination.js
index 25f2558ad51..fcfc95e3b6a 100644
--- a/website/static/js/components/pagination.js
+++ b/website/static/js/components/pagination.js
@@ -11,163 +11,178 @@ var MAX_PAGES_ON_PAGINATOR = 7;
 var MAX_PAGES_ON_PAGINATOR_SIDE = 5;
 
 
-var WithPagination = function(component, buildUrlFunction) {
-    /**
-     * Wrapper around another controller to add pagination functionality.
-     * Assumes that the wrapped controller has a function getNextItems which
-     * takes care of making the appropriate request and calling this wrapper's
-     * updatePagination function.
-     */
+var WithPagination = function(options) {
+    return function PaginationWrapper(component) {
+        /**
+         * Wrapper around another controller to add pagination functionality.
+         * Assumes that the wrapped controller has a function getNextItems which
+         * takes care of making the appropriate request and calling this wrapper's
+         * updatePagination function.
+         */
+        return {
+            controller: function (ctrlOptions) {
+                var self = this;
 
-    return {
-        controller: function (options) {
-            var self = this;
+                self.failed = false;
+                self.paginators = m.prop([]);
+                self.nextPage = m.prop('');
+                self.prevPage = m.prop('');
+                self.totalPages = m.prop(0);
+                self.currentPage = m.prop(0);
+                self.pageToGet = m.prop(0);
 
-            self.failed = false;
-            self.paginators = m.prop([]);
-            self.nextPage = m.prop('');
-            self.prevPage = m.prop('');
-            self.totalPages = m.prop(0);
-            self.currentPage = m.prop(0);
-            self.pageToGet = m.prop(0);
+                ctrlOptions.updatePagination = function (result, url) {
+                    self.nextPage(result.links.next);
+                    self.prevPage(result.links.prev);
+                    var params = $osf.urlParams(url);
+                    var page = params.page || 1;
+                    self.currentPage(parseInt(page));
+                    self.totalPages(Math.ceil(result.meta.total / result.meta.per_page));
+                };
 
-            options.updatePagination = function (result, url) {
-                self.nextPage(result.links.next);
-                self.prevPage(result.links.prev);
-                var params = $osf.urlParams(url);
-                var page = params.page || 1;
-                self.currentPage(parseInt(page));
-                self.totalPages(Math.ceil(result.meta.total / result.meta.per_page));
-            };
-
-            self.user = options.user._id;
-            self.nodeType = options.nodeType;
-            self.innerCtrl = new component.controller(options);
-        },
-        view: function (ctrl) {
-            var i;
-            ctrl.paginators([]);
-            if (ctrl.totalPages() > 1) {
-                // previous page
-                ctrl.paginators().push({
-                    url: function() { return ctrl.prevPage(); },
-                    text: '<'
-                });
-                // first page
-                ctrl.paginators().push({
-                    text: 1,
-                    url: function() {
-                        ctrl.pageToGet(1);
-                        if(ctrl.pageToGet() !== ctrl.currentPage()) {
-                            return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                self.buildUrl = options.buildUrl;
+                self.user = ctrlOptions.user._id;
+                self.nodeType = ctrlOptions.nodeType;
+                self.innerCtrl = new component.controller(ctrlOptions);
+            },
+            view: function (ctrl) {
+                var i;
+                ctrl.paginators([]);
+                if (ctrl.totalPages() > 1) {
+                    // previous page
+                    ctrl.paginators().push({
+                        url: function () {
+                            return ctrl.prevPage();
+                        },
+                        text: '<'
+                    });
+                    // first page
+                    ctrl.paginators().push({
+                        text: 1,
+                        url: function () {
+                            ctrl.pageToGet(1);
+                            if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                                return ctrl.buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                            }
                         }
-                    }
-                });
-                // no ellipses
-                if (ctrl.totalPages() <= MAX_PAGES_ON_PAGINATOR) {
-                    for (i = 2; i < ctrl.totalPages(); i++) {
-                        ctrl.paginators().push({
-                            text: i,
-                            url: function() {
-                                ctrl.pageToGet(parseInt(this.text));
-                                if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                                    return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                    });
+                    // no ellipses
+                    if (ctrl.totalPages() <= MAX_PAGES_ON_PAGINATOR) {
+                        for (i = 2; i < ctrl.totalPages(); i++) {
+                            ctrl.paginators().push({
+                                text: i,
+                                url: function () {
+                                    ctrl.pageToGet(parseInt(this.text));
+                                    if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                                        return ctrl.buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                                    }
                                 }
-                            }
-                        });/* jshint ignore:line */
-                        // function defined inside loop
+                            });
+                            /* jshint ignore:line */
+                            // function defined inside loop
+                        }
                     }
-                }
-                // one ellipse at the end
-                else if (ctrl.currentPage() < MAX_PAGES_ON_PAGINATOR_SIDE - 1) {
-                    for (i = 2; i < MAX_PAGES_ON_PAGINATOR_SIDE; i++) {
-                        self.paginators().push({
-                            text: i,
-                            url: function() {
-                                ctrl.pageToGet(parseInt(this.text));
-                                if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                                    return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                    // one ellipse at the end
+                    else if (ctrl.currentPage() < MAX_PAGES_ON_PAGINATOR_SIDE - 1) {
+                        for (i = 2; i < MAX_PAGES_ON_PAGINATOR_SIDE; i++) {
+                            ctrl.paginators().push({
+                                text: i,
+                                url: function () {
+                                    ctrl.pageToGet(parseInt(this.text));
+                                    if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                                        return ctrl.buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                                    }
                                 }
+                            });
+                            /* jshint ignore:line */
+                            // function defined inside loop
+                        }
+                        ctrl.paginators().push({
+                            text: '...',
+                            url: function () {
                             }
-                        });/* jshint ignore:line */
-                        // function defined inside loop
+                        });
                     }
-                    ctrl.paginators().push({
-                        text: '...',
-                        url: function() { }
-                    });
-                }
-                // one ellipse at the beginning
-                else if (ctrl.currentPage() > ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2) {
-                    ctrl.paginators().push({
-                        text: '...',
-                        url: function() { }
-                    });
-                    for (i = ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2; i <= ctrl.totalPages() - 1; i++) {
+                    // one ellipse at the beginning
+                    else if (ctrl.currentPage() > ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2) {
                         ctrl.paginators().push({
-                            text: i,
-                            url: function() {
-                                ctrl.pageToGet(parseInt(this.text));
-                                if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                                    return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
-                                }
+                            text: '...',
+                            url: function () {
                             }
-                        });/* jshint ignore:line */
-                        // function defined inside loop
+                        });
+                        for (i = ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2; i <= ctrl.totalPages() - 1; i++) {
+                            ctrl.paginators().push({
+                                text: i,
+                                url: function () {
+                                    ctrl.pageToGet(parseInt(this.text));
+                                    if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                                        return ctrl.buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                                    }
+                                }
+                            });
+                            /* jshint ignore:line */
+                            // function defined inside loop
+                        }
                     }
-                }
-                // two ellipses
-                else {
-                    ctrl.paginators().push({
-                        text: '...',
-                        url: function() { }
-                    });
-                    for (i = parseInt(ctrl.currentPage()) - 1; i <= parseInt(ctrl.currentPage()) + 1; i++) {
+                    // two ellipses
+                    else {
                         ctrl.paginators().push({
-                            text: i,
-                            url: function() {
-                                ctrl.pageToGet(parseInt(this.text));
-                                if (ctrl.pageToGet() !== self.currentPage()) {
-                                    return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                            text: '...',
+                            url: function () {
+                            }
+                        });
+                        for (i = parseInt(ctrl.currentPage()) - 1; i <= parseInt(ctrl.currentPage()) + 1; i++) {
+                            ctrl.paginators().push({
+                                text: i,
+                                url: function () {
+                                    ctrl.pageToGet(parseInt(this.text));
+                                    if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                                        return ctrl.buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                                    }
                                 }
+                            });
+                            /* jshint ignore:line */
+                            // function defined inside loop
+                        }
+                        ctrl.paginators().push({
+                            text: '...',
+                            url: function () {
                             }
-                        });/* jshint ignore:line */
-                        // function defined inside loop
+                        });
                     }
+                    // last page
+                    ctrl.paginators().push({
+                        text: ctrl.totalPages(),
+                        url: function () {
+                            ctrl.pageToGet(ctrl.totalPages());
+                            if (ctrl.pageToGet() !== ctrl.currentPage()) {
+                                return ctrl.buildUrl(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
+                            }
+                        }
+                    });
+                    // next page
                     ctrl.paginators().push({
-                        text: '...',
-                        url: function() { }
+                        url: function () {
+                            return ctrl.nextPage();
+                        },
+                        text: '>'
                     });
                 }
-                // last page
-                ctrl.paginators().push({
-                    text: ctrl.totalPages(),
-                    url: function() {
-                        ctrl.pageToGet(ctrl.totalPages());
-                        if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                            return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
-                        }
-                    }
-                });
-                // next page
-                ctrl.paginators().push({
-                    url: function() { return ctrl.nextPage(); },
-                    text: '>'
-                });
-            }
 
-            return m('.db-activity-nav.text-center', {style: 'margin-top: 5px; margin-bottom: -10px;'}, [
-                component.view.call(this, ctrl.innerCtrl),
-                ctrl.paginators() ? ctrl.paginators().map(function(page) {
-                    return page.url() ? m('.btn.btn-sm.btn-link', { onclick : function() {
-                        ctrl.innerCtrl.getNextItems(page.url());
-                    }}, page.text) : m('.btn.btn-sm.btn-link.disabled', {style: 'color: black'}, page.text);
-                }) : ''
+                return m('.db-activity-nav.text-center', {style: 'margin-top: 5px; margin-bottom: -10px;'}, [
+                    component.view.call(this, ctrl.innerCtrl),
+                    ctrl.paginators() ? ctrl.paginators().map(function (page) {
+                        return page.url() ? m('.btn.btn-sm.btn-link', {
+                            onclick: function () {
+                                ctrl.innerCtrl.getNextItems(page.url());
+                            }
+                        }, page.text) : m('.btn.btn-sm.btn-link.disabled', {style: 'color: black'}, page.text);
+                    }) : ''
 
-            ]);
-        }
+                ]);
+            }
+        };
     };
-
 };
 
 module.exports = {
diff --git a/website/static/js/components/profilePagination.js b/website/static/js/components/profilePagination.js
deleted file mode 100644
index e96b92c84cc..00000000000
--- a/website/static/js/components/profilePagination.js
+++ /dev/null
@@ -1,129 +0,0 @@
-'use strict';
-
-var m = require('mithril'); // exposes mithril methods, useful for redraw etc.
-var $osf = require('js/osfHelpers');
-var iconmap = require('js/iconmap');
-var lodashFind = require('lodash.find');
-var mHelpers = require('js/mithrilHelpers');
-var Raven = require('raven-js');
-
-
-var MAX_PAGES_ON_PAGINATOR = 7;
-var MAX_PAGES_ON_PAGINATOR_SIDE = 5;
-
-var ProfilePagination = function(ctrl, buildUrlFunction) {
-    var i;
-    ctrl.paginators([]);
-    if (ctrl.totalPages() > 1) {
-        // previous page
-        ctrl.paginators().push({
-            url: function() { return ctrl.prevPage(); },
-            text: '<'
-        });
-        // first page
-        ctrl.paginators().push({
-            text: 1,
-            url: function() {
-                ctrl.pageToGet(1);
-                if(ctrl.pageToGet() !== ctrl.currentPage()) {
-                    return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
-                }
-            }
-        });
-        // no ellipses
-        if (ctrl.totalPages() <= MAX_PAGES_ON_PAGINATOR) {
-            for (i = 2; i < ctrl.totalPages(); i++) {
-                ctrl.paginators().push({
-                    text: i,
-                    url: function() {
-                        ctrl.pageToGet(parseInt(this.text));
-                        if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                            return buildUrlFunction(ctrl.pageToGet(), ctrl.user);
-                        }
-                    }
-                });/* jshint ignore:line */
-                // function defined inside loop
-            }
-        }
-        // one ellipse at the end
-        else if (ctrl.currentPage() < MAX_PAGES_ON_PAGINATOR_SIDE - 1) {
-            for (i = 2; i < MAX_PAGES_ON_PAGINATOR_SIDE; i++) {
-                ctrl.paginators().push({
-                    text: i,
-                    url: function() {
-                        ctrl.pageToGet(parseInt(this.text));
-                        if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                            return buildUrlFunction(ctrl.pageToGet(), ctrl.user);
-                        }
-                    }
-                });/* jshint ignore:line */
-                // function defined inside loop
-            }
-            ctrl.paginators().push({
-                text: '...',
-                url: function() { }
-            });
-        }
-        // one ellipse at the beginning
-        else if (ctrl.currentPage() > ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2) {
-            ctrl.paginators().push({
-                text: '...',
-                url: function() { }
-            });
-            for (i = ctrl.totalPages() - MAX_PAGES_ON_PAGINATOR_SIDE + 2; i <= ctrl.totalPages() - 1; i++) {
-                ctrl.paginators().push({
-                    text: i,
-                    url: function() {
-                        ctrl.pageToGet(parseInt(this.text));
-                        if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                            return buildUrlFunction(ctrl.pageToGet(), ctrl.user, ctrl.nodeType);
-                        }
-                    }
-                });/* jshint ignore:line */
-                // function defined inside loop
-            }
-        }
-        // two ellipses
-        else {
-            ctrl.paginators().push({
-                text: '...',
-                url: function() { }
-            });
-            for (i = parseInt(ctrl.currentPage()) - 1; i <= parseInt(ctrl.currentPage()) + 1; i++) {
-                ctrl.paginators().push({
-                    text: i,
-                    url: function() {
-                        ctrl.pageToGet(parseInt(this.text));
-                        if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                            return buildUrlFunction(ctrl.pageToGet(), ctrl.user);
-                        }
-                    }
-                });/* jshint ignore:line */
-                // function defined inside loop
-            }
-            ctrl.paginators().push({
-                text: '...',
-                url: function() { }
-            });
-        }
-        // last page
-        ctrl.paginators().push({
-            text: ctrl.totalPages(),
-            url: function() {
-                ctrl.pageToGet(ctrl.totalPages());
-                if (ctrl.pageToGet() !== ctrl.currentPage()) {
-                    return buildUrlFunction(ctrl.pageToGet(), ctrl.user);
-                }
-            }
-        });
-        // next page
-        ctrl.paginators().push({
-            url: function() { return ctrl.nextPage(); },
-            text: '>'
-        });
-    }
-};
-
-module.exports = {
-    ProfilePagination: ProfilePagination
-};
diff --git a/website/static/js/components/publicNodes.js b/website/static/js/components/publicNodes.js
index f40cb287923..f2ed4815b51 100644
--- a/website/static/js/components/publicNodes.js
+++ b/website/static/js/components/publicNodes.js
@@ -209,7 +209,12 @@ var PublicNodes = {
     }
 };
 
-PublicNodes = withPagination(PublicNodes, _buildUrl);
+var PaginationWrapper = withPagination({
+    buildUrl: _buildUrl
+});
+
+PublicNodes = new PaginationWrapper(PublicNodes);
+
 
 module.exports = {
     PublicNodes: PublicNodes
diff --git a/website/static/js/components/quickFiles.js b/website/static/js/components/quickFiles.js
index b0b610c8a87..0dbbb6ab933 100644
--- a/website/static/js/components/quickFiles.js
+++ b/website/static/js/components/quickFiles.js
@@ -135,7 +135,12 @@ var QuickFiles = {
     }
 };
 
-QuickFiles = withPagination(QuickFiles, _buildUrl);
+var PaginationWrapper = withPagination({
+    buildUrl: _buildUrl
+});
+
+QuickFiles = new PaginationWrapper(QuickFiles);
+
 
 module.exports = {
     QuickFiles: QuickFiles

From dcf1cd06b6653531e0d65f1013414374aa49775e Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Mon, 27 Nov 2017 16:30:14 -0500
Subject: [PATCH 180/192] Refactor PaginationWrapper to also take getNextItems
 function so no assumptions are made

---
 website/static/js/components/pagination.js  |  9 +--
 website/static/js/components/publicNodes.js | 69 +++++++++---------
 website/static/js/components/quickFiles.js  | 79 +++++++++++----------
 3 files changed, 82 insertions(+), 75 deletions(-)

diff --git a/website/static/js/components/pagination.js b/website/static/js/components/pagination.js
index fcfc95e3b6a..b953287c575 100644
--- a/website/static/js/components/pagination.js
+++ b/website/static/js/components/pagination.js
@@ -15,9 +15,8 @@ var WithPagination = function(options) {
     return function PaginationWrapper(component) {
         /**
          * Wrapper around another controller to add pagination functionality.
-         * Assumes that the wrapped controller has a function getNextItems which
-         * takes care of making the appropriate request and calling this wrapper's
-         * updatePagination function.
+         * options should include a buildUrl function to control how to create a new
+         * URL for pagination, and a getNextItems function to handle requests
          */
         return {
             controller: function (ctrlOptions) {
@@ -40,6 +39,8 @@ var WithPagination = function(options) {
                     self.totalPages(Math.ceil(result.meta.total / result.meta.per_page));
                 };
 
+                self.updatePagination = ctrlOptions.updatePagination;
+                self.getNextItems = options.getNextItems;
                 self.buildUrl = options.buildUrl;
                 self.user = ctrlOptions.user._id;
                 self.nodeType = ctrlOptions.nodeType;
@@ -174,7 +175,7 @@ var WithPagination = function(options) {
                     ctrl.paginators() ? ctrl.paginators().map(function (page) {
                         return page.url() ? m('.btn.btn-sm.btn-link', {
                             onclick: function () {
-                                ctrl.innerCtrl.getNextItems(page.url());
+                                ctrl.getNextItems(ctrl.innerCtrl, page.url(), ctrl.updatePagination);
                             }
                         }, page.text) : m('.btn.btn-sm.btn-link.disabled', {style: 'color: black'}, page.text);
                     }) : ''
diff --git a/website/static/js/components/publicNodes.js b/website/static/js/components/publicNodes.js
index f2ed4815b51..0bdc79fc87d 100644
--- a/website/static/js/components/publicNodes.js
+++ b/website/static/js/components/publicNodes.js
@@ -37,6 +37,39 @@ var _buildUrl = function(page, user, nodeType) {
     return $osf.apiV2Url('users/' + user +  '/nodes/', { query: query});
 };
 
+var _getNextItems = function(ctrl, url, updatePagination) {
+    if(ctrl.requestPending()) {
+        return;
+    }
+
+    ctrl.publicProjects([]);
+    ctrl.requestPending(true);
+
+    var promise = m.request({
+        method : 'GET',
+        url : url,
+        background : true,
+        config: mHelpers.apiV2Config({withCredentials: window.contextVars.isOnRootDomain})
+    });
+
+    promise.then(
+        function(result) {
+            ctrl.requestPending(false);
+            ctrl.publicProjects(result.data);
+            updatePagination(result, url);
+            m.redraw();
+            return promise;
+        }, function(xhr, textStatus, error) {
+            ctrl.failed = true;
+            ctrl.requestPending(false);
+            m.redraw();
+            Raven.captureMessage('Error retrieving projects', {extra: {url: url, textStatus: textStatus, error: error}});
+        }
+    );
+};
+
+
+
 function _formatContributors(item) {
 
     var contributorList = item.embeds.contributors.data;
@@ -134,41 +167,10 @@ var PublicNodes = {
         self.publicProjects = m.prop([]);
         self.requestPending = m.prop(false);
 
-        self.getNextItems = function _getProjects (url) {
-            if(self.requestPending()) {
-                return;
-            }
-
-            self.publicProjects([]);
-            self.requestPending(true);
-
-            var promise = m.request({
-                method : 'GET',
-                url : url,
-                background : true,
-                config: mHelpers.apiV2Config({withCredentials: window.contextVars.isOnRootDomain})
-            });
-
-            promise.then(
-                function(result) {
-                    self.requestPending(false);
-                    self.publicProjects(result.data);
-                    options.updatePagination(result, url);
-                    m.redraw();
-                    return promise;
-                }, function(xhr, textStatus, error) {
-                    self.failed = true;
-                    self.requestPending(false);
-                    m.redraw();
-                    Raven.captureMessage('Error retrieving projects', {extra: {url: url, textStatus: textStatus, error: error}});
-                }
-            );
-        };
-
         self.getCurrentProjects = function _getCurrentProjects (page){
             if(!self.requestPending()) {
                 var url = _buildUrl(page, self.user, self.nodeType);
-                return self.getNextItems(url);
+                return _getNextItems(self, url, options.updatePagination);
             }
         };
 
@@ -210,7 +212,8 @@ var PublicNodes = {
 };
 
 var PaginationWrapper = withPagination({
-    buildUrl: _buildUrl
+    buildUrl: _buildUrl,
+    getNextItems: _getNextItems
 });
 
 PublicNodes = new PaginationWrapper(PublicNodes);
diff --git a/website/static/js/components/quickFiles.js b/website/static/js/components/quickFiles.js
index 0dbbb6ab933..2e5e6b7e3a4 100644
--- a/website/static/js/components/quickFiles.js
+++ b/website/static/js/components/quickFiles.js
@@ -24,6 +24,44 @@ var _buildUrl = function(page, user) {
 };
 
 
+var _getNextItems = function(ctrl, url, updatePagination) {
+    if(ctrl.requestPending()) {
+        return;
+    }
+
+    ctrl.quickFiles([]);
+    ctrl.requestPending(true);
+
+    var promise = m.request({
+        method : 'GET',
+        url : url,
+        background : true,
+        config: mHelpers.apiV2Config({withCredentials: window.contextVars.isOnRootDomain})
+    });
+
+    promise.then(
+        function(result) {
+            ctrl.requestPending(false);
+            ctrl.quickFiles(result.data);
+            updatePagination(result, url);
+            m.redraw();
+            return promise;
+        }, function(xhr, textStatus, error) {
+            ctrl.failed = true;
+            ctrl.requestPending(false);
+            m.redraw();
+            Raven.captureMessage('Error retrieving quickfiles', {
+                extra: {
+                    url: url,
+                    textStatus: textStatus,
+                    error: error
+                }
+            });
+        }
+    );
+};
+
+
 var QuickFile = {
 
     controller: function(options) {
@@ -65,46 +103,10 @@ var QuickFiles = {
         self.quickFiles = m.prop([]);
         self.requestPending = m.prop(false);
 
-        self.getNextItems = function _getQuickFiles(url) {
-            if (self.requestPending()) {
-                return;
-            }
-            self.quickFiles([]);
-            self.requestPending(true);
-
-            var promise = m.request({
-                method: 'GET',
-                url: url,
-                background: true,
-                config: mHelpers.apiV2Config({withCredentials: window.contextVars.isOnRootDomain})
-            });
-
-            promise.then(
-                function (result) {
-                    self.requestPending(false);
-                    self.quickFiles(result.data);
-                    options.updatePagination(result, url);
-                    m.redraw();
-                    return promise;
-                }, function (xhr, textStatus, error) {
-                    self.failed = true;
-                    self.requestPending(false);
-                    m.redraw();
-                    Raven.captureMessage('Error retrieving quickfiles', {
-                        extra: {
-                            url: url,
-                            textStatus: textStatus,
-                            error: error
-                        }
-                    });
-                }
-            );
-        };
-
         self.getCurrentQuickFiles = function _getCurrentQuickFiles(page) {
             if (!self.requestPending()) {
                 var url = _buildUrl(page, self.user);
-                return self.getNextItems(url);
+                return _getNextItems(self, url, options.updatePagination);
             }
         };
         self.getCurrentQuickFiles();
@@ -136,7 +138,8 @@ var QuickFiles = {
 };
 
 var PaginationWrapper = withPagination({
-    buildUrl: _buildUrl
+    buildUrl: _buildUrl,
+    getNextItems: _getNextItems
 });
 
 QuickFiles = new PaginationWrapper(QuickFiles);

From 399628a1bad35c7e8bb924993632abcb62724875 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Tue, 28 Nov 2017 14:51:14 -0500
Subject: [PATCH 181/192] change capitalization on pagination component wrapper
 for consistency

---
 website/static/js/components/pagination.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/website/static/js/components/pagination.js b/website/static/js/components/pagination.js
index b953287c575..b8ed8f8da6a 100644
--- a/website/static/js/components/pagination.js
+++ b/website/static/js/components/pagination.js
@@ -11,7 +11,7 @@ var MAX_PAGES_ON_PAGINATOR = 7;
 var MAX_PAGES_ON_PAGINATOR_SIDE = 5;
 
 
-var WithPagination = function(options) {
+var withPagination = function(options) {
     return function PaginationWrapper(component) {
         /**
          * Wrapper around another controller to add pagination functionality.
@@ -187,5 +187,5 @@ var WithPagination = function(options) {
 };
 
 module.exports = {
-    withPagination: WithPagination
+    withPagination: withPagination
 };

From 7a01c09fe44922b701181bd47c432092a2c3a3df Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Tue, 28 Nov 2017 15:08:40 -0500
Subject: [PATCH 182/192] Updates to style in quickfile download view

---
 website/static/js/components/quickFiles.js | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/website/static/js/components/quickFiles.js b/website/static/js/components/quickFiles.js
index 2e5e6b7e3a4..adcff37c8d3 100644
--- a/website/static/js/components/quickFiles.js
+++ b/website/static/js/components/quickFiles.js
@@ -76,8 +76,8 @@ var QuickFile = {
         return m('div', [
             m('li.project list-group-item list-group-item-node cite-container', [
                 m('h4.list-group-item-heading', [
-                    m('span.component-overflow.f-w-lg', {style: 'line-height: 1.5; width: 100%'}, [
-                        m('span.project-statuses-lg', {style: 'width: 50%; float:left'}, [
+                    m('span.component-overflow.f-w-lg', {style: {lineHeight: 1.5, width: '100%'}}, [
+                        m('span.project-statuses-lg', {style: {width: '50%', float:'left'}}, [
                             m('span', {class: ctrl.icon, style: 'padding-right: 5px;'}, ''),
                             m('a', {'href': viewUrl,
                                 onclick : function () {
@@ -85,7 +85,7 @@ var QuickFile = {
                                 }
                             }, ctrl.file.attributes.name),
                         ]),
-                        m('div', {style: 'width: 50%; float:right; font-size:small; line-height:2;'}, 'downloads: ' + ctrl.file.attributes.extra.downloads)
+                        m('div', {style: {width: '50%', float: 'right', fontSize: 'small', marginTop: '2%'}}, 'downloads: ' + ctrl.file.attributes.extra.downloads)
                     ])
                 ])
             ])

From 21f743b5748a0152bc2b3d43c0c59baa6a543e28 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Tue, 28 Nov 2017 15:12:05 -0500
Subject: [PATCH 183/192] Initialize ctrl.failed in controllers for QFs and
 public nodes

---
 website/static/js/components/pagination.js  | 1 -
 website/static/js/components/publicNodes.js | 1 +
 website/static/js/components/quickFiles.js  | 1 +
 3 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/website/static/js/components/pagination.js b/website/static/js/components/pagination.js
index b8ed8f8da6a..fae0dc6eec2 100644
--- a/website/static/js/components/pagination.js
+++ b/website/static/js/components/pagination.js
@@ -22,7 +22,6 @@ var withPagination = function(options) {
             controller: function (ctrlOptions) {
                 var self = this;
 
-                self.failed = false;
                 self.paginators = m.prop([]);
                 self.nextPage = m.prop('');
                 self.prevPage = m.prop('');
diff --git a/website/static/js/components/publicNodes.js b/website/static/js/components/publicNodes.js
index 0bdc79fc87d..5bc38db576b 100644
--- a/website/static/js/components/publicNodes.js
+++ b/website/static/js/components/publicNodes.js
@@ -160,6 +160,7 @@ var PublicNodes = {
 
     controller: function(options) {
         var self = this;
+        self.failed = false;
         self.user = options.user._id;
         self.isProfile = options.user.is_profile;
         self.nodeType = options.nodeType;
diff --git a/website/static/js/components/quickFiles.js b/website/static/js/components/quickFiles.js
index adcff37c8d3..3651001e02f 100644
--- a/website/static/js/components/quickFiles.js
+++ b/website/static/js/components/quickFiles.js
@@ -97,6 +97,7 @@ var QuickFiles = {
 
     controller: function (options) {
         var self = this;
+        self.failed = false;
         self.user = options.user._id;
         self.isProfile = options.user.is_profile;
 

From 859d0aae067b02012e09958fb80fd0ed815204a6 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Fri, 1 Dec 2017 09:52:47 -0500
Subject: [PATCH 184/192] Now more responsive! And a capital D!

---
 website/static/js/components/quickFiles.js | 20 +++++++++++---------
 1 file changed, 11 insertions(+), 9 deletions(-)

diff --git a/website/static/js/components/quickFiles.js b/website/static/js/components/quickFiles.js
index 3651001e02f..283f6de58d5 100644
--- a/website/static/js/components/quickFiles.js
+++ b/website/static/js/components/quickFiles.js
@@ -77,15 +77,17 @@ var QuickFile = {
             m('li.project list-group-item list-group-item-node cite-container', [
                 m('h4.list-group-item-heading', [
                     m('span.component-overflow.f-w-lg', {style: {lineHeight: 1.5, width: '100%'}}, [
-                        m('span.project-statuses-lg', {style: {width: '50%', float:'left'}}, [
-                            m('span', {class: ctrl.icon, style: 'padding-right: 5px;'}, ''),
-                            m('a', {'href': viewUrl,
-                                onclick : function () {
-                                    $osf.trackClick('QuickFiles', 'view', 'view-quickfile-from-profile-page');
-                                }
-                            }, ctrl.file.attributes.name),
-                        ]),
-                        m('div', {style: {width: '50%', float: 'right', fontSize: 'small', marginTop: '2%'}}, 'downloads: ' + ctrl.file.attributes.extra.downloads)
+                        m('div.row',
+                            m('div.col-md-8.project-statuses-lg', [
+                                m('span', {class: ctrl.icon, style: 'padding-right: 5px;'}, ''),
+                                m('a', {'href': viewUrl,
+                                    onclick : function () {
+                                        $osf.trackClick('QuickFiles', 'view', 'view-quickfile-from-profile-page');
+                                    }
+                                }, ctrl.file.attributes.name),
+                            ]),
+                            m('div.col-md-4', {style: {fontSize: 'small', marginTop: '1%'}}, 'Downloads: ' + ctrl.file.attributes.extra.downloads)
+                        )
                     ])
                 ])
             ])

From cee1cb3ac1d76c505c9e0ff5dcd22f3ccb788769 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Fri, 1 Dec 2017 11:35:25 -0500
Subject: [PATCH 185/192] Only apply center-align to pagination in wrapper

---
 website/static/js/components/pagination.js | 19 ++++++++++---------
 1 file changed, 10 insertions(+), 9 deletions(-)

diff --git a/website/static/js/components/pagination.js b/website/static/js/components/pagination.js
index fae0dc6eec2..ae747a35fba 100644
--- a/website/static/js/components/pagination.js
+++ b/website/static/js/components/pagination.js
@@ -169,16 +169,17 @@ var withPagination = function(options) {
                     });
                 }
 
-                return m('.db-activity-nav.text-center', {style: 'margin-top: 5px; margin-bottom: -10px;'}, [
+                return m('span', [
                     component.view.call(this, ctrl.innerCtrl),
-                    ctrl.paginators() ? ctrl.paginators().map(function (page) {
-                        return page.url() ? m('.btn.btn-sm.btn-link', {
-                            onclick: function () {
-                                ctrl.getNextItems(ctrl.innerCtrl, page.url(), ctrl.updatePagination);
-                            }
-                        }, page.text) : m('.btn.btn-sm.btn-link.disabled', {style: 'color: black'}, page.text);
-                    }) : ''
-
+                    m('.db-activity-nav.text-center', {style: 'margin-top: 5px; margin-bottom: -10px;'}, [
+                        ctrl.paginators() ? ctrl.paginators().map(function (page) {
+                            return page.url() ? m('.btn.btn-sm.btn-link', {
+                                onclick: function () {
+                                    ctrl.getNextItems(ctrl.innerCtrl, page.url(), ctrl.updatePagination);
+                                }
+                            }, page.text) : m('.btn.btn-sm.btn-link.disabled', {style: 'color: black'}, page.text);
+                        }) : ''
+                    ])
                 ]);
             }
         };

From 77b8bd04dc597654d5099bd95e9fbf6b0e467e56 Mon Sep 17 00:00:00 2001
From: Casey Rollins <case.rollins@gmail.com>
Date: Fri, 1 Dec 2017 10:57:13 -0500
Subject: [PATCH 186/192] Remove unnecessary can_read permission checks.

- Add docstrings.
---
 website/project/views/node.py | 147 +++++++++++++++++++---------------
 1 file changed, 84 insertions(+), 63 deletions(-)

diff --git a/website/project/views/node.py b/website/project/views/node.py
index 063dfa2075d..61862f86ddb 100644
--- a/website/project/views/node.py
+++ b/website/project/views/node.py
@@ -822,6 +822,12 @@ def get_affiliated_institutions(obj):
     return ret
 
 def serialize_children(child_list, nested, indent=0):
+    """
+    Returns the serialized representation of a list of child nodes.
+
+    This is a helper function for _get_children and as such it does not
+    redundantly check permissions.
+    """
     results = []
     for child in child_list:
         results.append({
@@ -836,6 +842,10 @@ def serialize_children(child_list, nested, indent=0):
     return results
 
 def _get_children(node, auth):
+    """
+    Returns the serialized representation of the given node and all of its children
+    for which the given user has ADMIN permission.
+    """
     is_admin = Contributor.objects.filter(node=OuterRef('pk'), admin=True, user=auth.user)
     parent_node_sqs = NodeRelation.objects.filter(child=OuterRef('pk'), is_node_link=False).values('parent__guids___id')
     children = (Node.objects.get_children(node)
@@ -907,58 +917,73 @@ def _get_readable_descendants(auth, node, permission=None):
     return descendants, all_readable
 
 def serialize_child_tree(child_list, user, nested):
+    """
+    Recursively serializes and returns a list of child nodes.
+
+    This is a helper function for node_child_tree and as such it does not
+    redundantly check permissions.
+    """
     serialized_children = []
     for child in child_list:
-        can_read = child.is_public or child.has_read_perm
-        if can_read or child.has_permission_on_children(user, READ):
-            contributors = [{
-                'id': contributor.user._id,
-                'is_admin': contributor.admin,
-                'is_confirmed': contributor.user.is_confirmed,
-                'visible': contributor.visible
-            } for contributor in child.contributor_set.all()]
-
-            serialized_children.append({
-                'node': {
-                    'id': child._id,
-                    'url': child.url if can_read else '',
-                    'title': child.title if can_read else 'Private Project',
-                    'is_public': child.is_public,
-                    'contributors': contributors,
-                    'is_admin': child.has_admin_perm,
-                },
-                'user_id': user._id,
-                'children': serialize_child_tree(nested.get(child._id), user, nested) if child._id in nested.keys() else [],
-                'nodeType': 'project' if not child.parentnode_id else 'component',
-                'category': child.category,
-                'permissions': {
-                    'view': can_read,
-                    'is_admin': child.has_admin_perm
-                }
-            })
+        contributors = [{
+            'id': contributor.user._id,
+            'is_admin': contributor.admin,
+            'is_confirmed': contributor.user.is_confirmed,
+            'visible': contributor.visible
+        } for contributor in child.contributor_set.all()]
+
+        serialized_children.append({
+            'node': {
+                'id': child._id,
+                'url': child.url,
+                'title': child.title,
+                'is_public': child.is_public,
+                'contributors': contributors,
+                'is_admin': child.has_admin_perm,
+            },
+            'user_id': user._id,
+            'children': serialize_child_tree(nested.get(child._id), user, nested) if child._id in nested.keys() else [],
+            'nodeType': 'project' if not child.parentnode_id else 'component',
+            'category': child.category,
+            'permissions': {
+                'view': True,
+                'is_admin': child.has_admin_perm
+            }
+        })
 
     return sorted(serialized_children, key=lambda k: len(k['children']), reverse=True)
 
 def node_child_tree(user, node):
-    """ Format data to test for node privacy settings for use in treebeard.
-    :param user: user object
-    :param node: parent project node object
+    """
+    Returns the serialized representation (for treebeard) of a given node and its children.
+    The given user must have ADMIN access on the given node, and therefore the given user has
+implicit read permission on all of node's children (i.e. read permissions aren't checked here)
+
+    :param user: OSFUser object
+    :param node: parent project Node object
     :return: treebeard-formatted data
     """
     serialized_nodes = []
+    is_contrib = node.is_contributor(user)
 
     assert node, '{} is not a valid Node.'.format(node._id)
 
-    is_admin_sqs = Contributor.objects.filter(node=OuterRef('pk'), admin=True, user=user)
-    can_read_sqs = Contributor.objects.filter(node=OuterRef('pk'), read=True, user=user)
-    parent_node_sqs = NodeRelation.objects.filter(child=OuterRef('pk'), is_node_link=False).values('parent__guids___id')
-    children = (Node.objects.get_children(node)
-                .filter(is_deleted=False)
-                .annotate(parentnode_id=Subquery(parent_node_sqs[:1]))
-                .annotate(has_admin_perm=Exists(is_admin_sqs))
-                .annotate(has_read_perm=Exists(can_read_sqs))
-                .include('contributor__user__guids')
-                )
+    if not is_contrib:
+        return []
+
+    is_admin = node.has_permission(user, ADMIN)
+
+    if is_admin:
+        is_admin_sqs = Contributor.objects.filter(node=OuterRef('pk'), admin=True, user=user)
+        parent_node_sqs = NodeRelation.objects.filter(child=OuterRef('pk'), is_node_link=False).values('parent__guids___id')
+        children = (Node.objects.get_children(node)
+                    .filter(is_deleted=False)
+                    .annotate(parentnode_id=Subquery(parent_node_sqs[:1]))
+                    .annotate(has_admin_perm=Exists(is_admin_sqs))
+                    .include('contributor__user__guids')
+                    )
+    else:
+        children = []
 
     nested = defaultdict(list)
     for child in children:
@@ -971,29 +996,25 @@ def node_child_tree(user, node):
         'visible': contributor.visible
     } for contributor in node.contributor_set.all().include('user__guids')]
 
-    can_read = node.has_permission(user, READ)
-    is_admin = node.has_permission(user, ADMIN)
-
-    if can_read or node.has_permission_on_children(user, READ):
-        serialized_nodes.append({
-            'node': {
-                'id': node._id,
-                'url': node.url if can_read else '',
-                'title': node.title if can_read else 'Private Project',
-                'is_public': node.is_public,
-                'contributors': contributors,
-                'is_admin': is_admin
-            },
-            'user_id': user._id,
-            'children': serialize_child_tree(nested.get(node._id), user, nested) if node._id in nested.keys() else [],
-            'kind': 'folder' if not node.parent_node or not node.parent_node.has_permission(user, 'read') else 'node',
-            'nodeType': node.project_or_component,
-            'category': node.category,
-            'permissions': {
-                'view': can_read,
-                'is_admin': is_admin
-            }
-        })
+    serialized_nodes.append({
+        'node': {
+            'id': node._id,
+            'url': node.url,
+            'title': node.title,
+            'is_public': node.is_public,
+            'contributors': contributors,
+            'is_admin': is_admin
+        },
+        'user_id': user._id,
+        'children': serialize_child_tree(nested.get(node._id), user, nested) if node._id in nested.keys() else [],
+        'kind': 'folder' if not node.parent_node or not node.parent_node.has_permission(user, 'read') else 'node',
+        'nodeType': node.project_or_component,
+        'category': node.category,
+        'permissions': {
+            'view': True,
+            'is_admin': is_admin
+        }
+    })
 
     return serialized_nodes
 

From 1622254c25f1a5ac1be6721b3f497ff3159ef057 Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Fri, 1 Dec 2017 13:06:14 -0500
Subject: [PATCH 187/192] Remove download counts, where did I get that

---
 website/static/js/components/quickFiles.js | 19 ++++++++-----------
 1 file changed, 8 insertions(+), 11 deletions(-)

diff --git a/website/static/js/components/quickFiles.js b/website/static/js/components/quickFiles.js
index 283f6de58d5..ba9887d974d 100644
--- a/website/static/js/components/quickFiles.js
+++ b/website/static/js/components/quickFiles.js
@@ -77,17 +77,14 @@ var QuickFile = {
             m('li.project list-group-item list-group-item-node cite-container', [
                 m('h4.list-group-item-heading', [
                     m('span.component-overflow.f-w-lg', {style: {lineHeight: 1.5, width: '100%'}}, [
-                        m('div.row',
-                            m('div.col-md-8.project-statuses-lg', [
-                                m('span', {class: ctrl.icon, style: 'padding-right: 5px;'}, ''),
-                                m('a', {'href': viewUrl,
-                                    onclick : function () {
-                                        $osf.trackClick('QuickFiles', 'view', 'view-quickfile-from-profile-page');
-                                    }
-                                }, ctrl.file.attributes.name),
-                            ]),
-                            m('div.col-md-4', {style: {fontSize: 'small', marginTop: '1%'}}, 'Downloads: ' + ctrl.file.attributes.extra.downloads)
-                        )
+                        m('span.col-md-8.project-statuses-lg', [
+                            m('span', {class: ctrl.icon, style: 'padding-right: 5px;'}, ''),
+                            m('a', {'href': viewUrl,
+                                onclick : function () {
+                                    $osf.trackClick('QuickFiles', 'view', 'view-quickfile-from-profile-page');
+                                }
+                            }, ctrl.file.attributes.name),
+                        ])
                     ])
                 ])
             ])

From e7777635a7c4f858a87906a29e2141be8c07a86a Mon Sep 17 00:00:00 2001
From: erinspace <erin.braswell@gmail.com>
Date: Fri, 1 Dec 2017 15:40:06 -0500
Subject: [PATCH 188/192] Add a note about non-included custom mappings

[#OSF-8559]
---
 admin/templates/preprint_providers/enter_custom_taxonomy.html | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/admin/templates/preprint_providers/enter_custom_taxonomy.html b/admin/templates/preprint_providers/enter_custom_taxonomy.html
index 579ee937898..70668b31818 100644
--- a/admin/templates/preprint_providers/enter_custom_taxonomy.html
+++ b/admin/templates/preprint_providers/enter_custom_taxonomy.html
@@ -152,8 +152,8 @@ <h4 class="modal-title" id="exampleModalLabel">Custom Taxonomy Example</h4>
                             <ul>
                                 <li>custom name: the new name for the subject</li>
                                 <li>custom parent: the parent of the subject. Leave blank if it is a toplevel subject.
-                                    *Note*: if adding a new child of an existing bepress parent, you must also add a 'custom' parent with the same name that maps to the existing
-                                    bepress subject. See JSON below for an example.
+                                    *Note*: if adding a new child of an existing bepress parent that hasn't already been included, you must also add a 'custom' parent with the same name that maps to the existing
+                                    bepress subject. See JSON below for an example. This is only necessary because "Engineering" wasn't explicitly included.
                                 </li>
                                 <li>bepress: the existing subject that you would like to repalce with the subject listed in the custom name field.</li>
                             </ul>

From 645fe304b420144a3453837d5314719825f785d5 Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Mon, 4 Dec 2017 16:57:47 -0500
Subject: [PATCH 189/192] Pass a list instead of generators of signatures to
 celery.group

as an attempt to fix OSF-8996
---
 website/archiver/tasks.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/website/archiver/tasks.py b/website/archiver/tasks.py
index 31995be132a..cdf74f50db0 100644
--- a/website/archiver/tasks.py
+++ b/website/archiver/tasks.py
@@ -263,13 +263,13 @@ def archive(job_pk):
     logger.info('Received archive task for Node: {0} into Node: {1}'.format(src._id, dst._id))
     return celery.chain(
         [
-            celery.group(
+            celery.group([
                 stat_addon.si(
                     addon_short_name=target.name,
                     job_pk=job_pk,
                 )
                 for target in job.target_addons.all()
-            ),
+            ]),
             archive_node.s(
                 job_pk=job_pk
             )

From fb040e0521d349d872f96adaa4095c33e7369628 Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Mon, 4 Dec 2017 23:05:18 -0500
Subject: [PATCH 190/192] Use UTC in celery; update cron schedules to use UTC
 timezones

See OSF-8996
---
 website/settings/defaults.py | 48 +++++++++++++++++++-----------------
 1 file changed, 25 insertions(+), 23 deletions(-)

diff --git a/website/settings/defaults.py b/website/settings/defaults.py
index 59ee5d155e6..0fe29860d43 100644
--- a/website/settings/defaults.py
+++ b/website/settings/defaults.py
@@ -364,6 +364,7 @@ class CeleryConfig:
     Celery Configuration
     http://docs.celeryproject.org/en/latest/userguide/configuration.html
     """
+    timezone = 'UTC'
 
     task_default_queue = 'celery'
     task_low_queue = 'low'
@@ -478,6 +479,7 @@ class CeleryConfig:
         pass
     else:
         #  Setting up a scheduler, essentially replaces an independent cron job
+        # Note: these times must be in UTC
         beat_schedule = {
             '5-minute-emails': {
                 'task': 'website.notifications.tasks.send_users_email',
@@ -486,12 +488,12 @@ class CeleryConfig:
             },
             'daily-emails': {
                 'task': 'website.notifications.tasks.send_users_email',
-                'schedule': crontab(minute=0, hour=0),
+                'schedule': crontab(minute=0, hour=5),  # Daily at 12 a.m. EST
                 'args': ('email_digest',),
             },
             'refresh_addons': {
                 'task': 'scripts.refresh_addon_tokens',
-                'schedule': crontab(minute=0, hour=2),  # Daily 2:00 a.m
+                'schedule': crontab(minute=0, hour=7),  # Daily 2:00 a.m
                 'kwargs': {'dry_run': False, 'addons': {
                     'box': 60,          # https://docs.box.com/docs/oauth-20#section-6-using-the-access-and-refresh-tokens
                     'googledrive': 14,  # https://developers.google.com/identity/protocols/OAuth2#expiration
@@ -500,66 +502,66 @@ class CeleryConfig:
             },
             'retract_registrations': {
                 'task': 'scripts.retract_registrations',
-                'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
+                'schedule': crontab(minute=0, hour=5),  # Daily 12 a.m
                 'kwargs': {'dry_run': False},
             },
             'embargo_registrations': {
                 'task': 'scripts.embargo_registrations',
-                'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
+                'schedule': crontab(minute=0, hour=5),  # Daily 12 a.m
                 'kwargs': {'dry_run': False},
             },
             'add_missing_identifiers_to_preprints': {
                 'task': 'scripts.add_missing_identifiers_to_preprints',
-                'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
+                'schedule': crontab(minute=0, hour=5),  # Daily 12 a.m
                 'kwargs': {'dry_run': False},
             },
             'approve_registrations': {
                 'task': 'scripts.approve_registrations',
-                'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
+                'schedule': crontab(minute=0, hour=5),  # Daily 12 a.m
                 'kwargs': {'dry_run': False},
             },
             'approve_embargo_terminations': {
                 'task': 'scripts.approve_embargo_terminations',
-                'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
+                'schedule': crontab(minute=0, hour=5),  # Daily 12 a.m
                 'kwargs': {'dry_run': False},
             },
             'triggered_mails': {
                 'task': 'scripts.triggered_mails',
-                'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
+                'schedule': crontab(minute=0, hour=5),  # Daily 12 a.m
                 'kwargs': {'dry_run': False},
             },
             'send_queued_mails': {
                 'task': 'scripts.send_queued_mails',
-                'schedule': crontab(minute=0, hour=12),  # Daily 12 p.m.
+                'schedule': crontab(minute=0, hour=17),  # Daily 12 p.m.
                 'kwargs': {'dry_run': False},
             },
             'new-and-noteworthy': {
                 'task': 'scripts.populate_new_and_noteworthy_projects',
-                'schedule': crontab(minute=0, hour=2, day_of_week=6),  # Saturday 2:00 a.m.
+                'schedule': crontab(minute=0, hour=7, day_of_week=6),  # Saturday 2:00 a.m.
                 'kwargs': {'dry_run': False}
             },
             'update_popular_nodes': {
                 'task': 'scripts.populate_popular_projects_and_registrations',
-                'schedule': crontab(minute=0, hour=2),  # Daily 2:00 a.m.
+                'schedule': crontab(minute=0, hour=7),  # Daily 2:00 a.m.
                 'kwargs': {'dry_run': False}
             },
             'run_keen_summaries': {
                 'task': 'scripts.analytics.run_keen_summaries',
-                'schedule': crontab(minute=00, hour=1),  # Daily 1:00 a.m.
+                'schedule': crontab(minute=0, hour=6),  # Daily 1:00 a.m.
                 'kwargs': {'yesterday': True}
             },
             'run_keen_snapshots': {
                 'task': 'scripts.analytics.run_keen_snapshots',
-                'schedule': crontab(minute=0, hour=3),  # Daily 3:00 a.m.
+                'schedule': crontab(minute=0, hour=8),  # Daily 3:00 a.m.
             },
             'run_keen_events': {
                 'task': 'scripts.analytics.run_keen_events',
-                'schedule': crontab(minute=0, hour=4),  # Daily 4:00 a.m.
+                'schedule': crontab(minute=0, hour=9),  # Daily 4:00 a.m.
                 'kwargs': {'yesterday': True}
             },
             'generate_sitemap': {
                 'task': 'scripts.generate_sitemap',
-                'schedule': crontab(minute=0, hour=0),  # Daily 12:00 a.m.
+                'schedule': crontab(minute=0, hour=5),  # Daily 12:00 a.m.
             },
         }
 
@@ -567,42 +569,42 @@ class CeleryConfig:
         # beat_schedule.update({
         #     'usage_audit': {
         #         'task': 'scripts.osfstorage.usage_audit',
-        #         'schedule': crontab(minute=0, hour=0),  # Daily 12 a.m
+        #         'schedule': crontab(minute=0, hour=5),  # Daily 12 a.m
         #         'kwargs': {'send_mail': True},
         #     },
         #     'stuck_registration_audit': {
         #         'task': 'scripts.stuck_registration_audit',
-        #         'schedule': crontab(minute=0, hour=6),  # Daily 6 a.m
+        #         'schedule': crontab(minute=0, hour=11),  # Daily 6 a.m
         #         'kwargs': {},
         #     },
         #     'glacier_inventory': {
         #         'task': 'scripts.osfstorage.glacier_inventory',
-        #         'schedule': crontab(minute=0, hour= 0, day_of_week=0),  # Sunday 12:00 a.m.
+        #         'schedule': crontab(minute=0, hour=5, day_of_week=0),  # Sunday 12:00 a.m.
         #         'args': (),
         #     },
         #     'glacier_audit': {
         #         'task': 'scripts.osfstorage.glacier_audit',
-        #         'schedule': crontab(minute=0, hour=6, day_of_week=0),  # Sunday 6:00 a.m.
+        #         'schedule': crontab(minute=0, hour=11, day_of_week=0),  # Sunday 6:00 a.m.
         #         'kwargs': {'dry_run': False},
         #     },
         #     'files_audit_0': {
         #         'task': 'scripts.osfstorage.files_audit.0',
-        #         'schedule': crontab(minute=0, hour=2, day_of_week=0),  # Sunday 2:00 a.m.
+        #         'schedule': crontab(minute=0, hour=7, day_of_week=0),  # Sunday 2:00 a.m.
         #         'kwargs': {'num_of_workers': 4, 'dry_run': False},
         #     },
         #     'files_audit_1': {
         #         'task': 'scripts.osfstorage.files_audit.1',
-        #         'schedule': crontab(minute=0, hour=2, day_of_week=0),  # Sunday 2:00 a.m.
+        #         'schedule': crontab(minute=0, hour=7, day_of_week=0),  # Sunday 2:00 a.m.
         #         'kwargs': {'num_of_workers': 4, 'dry_run': False},
         #     },
         #     'files_audit_2': {
         #         'task': 'scripts.osfstorage.files_audit.2',
-        #         'schedule': crontab(minute=0, hour=2, day_of_week=0),  # Sunday 2:00 a.m.
+        #         'schedule': crontab(minute=0, hour=7, day_of_week=0),  # Sunday 2:00 a.m.
         #         'kwargs': {'num_of_workers': 4, 'dry_run': False},
         #     },
         #     'files_audit_3': {
         #         'task': 'scripts.osfstorage.files_audit.3',
-        #         'schedule': crontab(minute=0, hour=2, day_of_week=0),  # Sunday 2:00 a.m.
+        #         'schedule': crontab(minute=0, hour=7, day_of_week=0),  # Sunday 2:00 a.m.
         #         'kwargs': {'num_of_workers': 4, 'dry_run': False},
         #     },
         # })

From 3acb0aecec7067b71b3f6ca2f70ee809f4d47fef Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Tue, 5 Dec 2017 19:52:08 -0500
Subject: [PATCH 191/192] Prevent updating auto_now fields in preprint
 migration

---
 osf/migrations/0069_auto_20171127_1119.py | 19 +++++++++++--------
 1 file changed, 11 insertions(+), 8 deletions(-)

diff --git a/osf/migrations/0069_auto_20171127_1119.py b/osf/migrations/0069_auto_20171127_1119.py
index 670df08f746..002aec80d25 100644
--- a/osf/migrations/0069_auto_20171127_1119.py
+++ b/osf/migrations/0069_auto_20171127_1119.py
@@ -5,6 +5,8 @@
 
 from django.db import migrations
 from osf.models import PreprintService
+from osf.utils.migrations import disable_auto_now_fields
+
 logger = logging.getLogger(__name__)
 
 def add_preprint_doi_created(apps, schema_editor):
@@ -16,14 +18,15 @@ def add_preprint_doi_created(apps, schema_editor):
     current_preprint = 0
     logger.info('{} published preprints found with preprint_doi_created is null.'.format(preprints_count))
 
-    for preprint in null_preprint_doi_created:
-        current_preprint += 1
-        if preprint.get_identifier('doi'):
-            preprint.preprint_doi_created = preprint.date_published
-            preprint.save()
-            logger.info('Preprint ID {}, {}/{} preprint_doi_created field populated.'.format(preprint._id, current_preprint, preprints_count))
-        else:
-            logger.info('Preprint ID {}, {}/{} skipped because a DOI has not been created.'.format(preprint._id, current_preprint, preprints_count))
+    with disable_auto_now_fields(PreprintService):
+        for preprint in null_preprint_doi_created:
+            current_preprint += 1
+            if preprint.get_identifier('doi'):
+                preprint.preprint_doi_created = preprint.date_published
+                preprint.save()
+                logger.info('Preprint ID {}, {}/{} preprint_doi_created field populated.'.format(preprint._id, current_preprint, preprints_count))
+            else:
+                logger.info('Preprint ID {}, {}/{} skipped because a DOI has not been created.'.format(preprint._id, current_preprint, preprints_count))
 
 def reverse_func(apps, schema_editor):
     """

From b0588895891e4b0185524d28205fb42d7e1a32f7 Mon Sep 17 00:00:00 2001
From: Steven Loria <sloria1@gmail.com>
Date: Tue, 5 Dec 2017 19:52:27 -0500
Subject: [PATCH 192/192] Bump version and update changelog

---
 CHANGELOG    | 17 +++++++++++++++++
 package.json |  2 +-
 2 files changed, 18 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG b/CHANGELOG
index 73c6d1c31ef..15d2cb79ca8 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -2,6 +2,23 @@
 Changelog
 *********
 
+0.125.0 (2017-12-05)
+====================
+
+- Add GitLab addon
+- Add OneDrive addon
+- Upgrade a number of Python dependencies
+- Use django-celery-beat for managing periodic tasks
+- Optimizations for files grid and adding contributors
+- APIv2 optimizations
+- Admin: Add custom taxonomy UI
+- Quick files UI on profile pages
+- APIv2 immediately returns preprint DOIs
+- Add `created` and `modified` fields to all models
+- Address Django deprecation warnings
+- Remove modularmodm as a dependency
+- Remove redundant osf.utils.auth module
+
 0.124.0 (2017-11-13)
 ====================
 
diff --git a/package.json b/package.json
index efe2645d473..4d71452bb52 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "OSF",
-  "version": "0.124.0",
+  "version": "0.125.0",
   "description": "Facilitating Open Science",
   "repository": "https://github.com/CenterForOpenScience/osf.io",
   "author": "Center for Open Science",