Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions notifications.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -904,3 +904,10 @@ notification_types:
object_content_type_model_name: abstractnode
template: 'website/templates/add_sso_email_osf4i.html.mako'
tests: []

- name: empty
subject: '{subject}'
__docs__: ...
object_content_type_model_name: abstractnode
template: 'website/templates/empty.html.mako'
tests: []
15 changes: 15 additions & 0 deletions osf/email/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from html import unescape
from typing import Optional
from mako.template import Template as MakoTemplate
import base64


import waffle
Expand Down Expand Up @@ -278,6 +279,20 @@ def send_email_with_send_grid(to_addr, notification_type, context, email_context
if cats:
payload['categories'] = cats

if email_context:
attachment_name = email_context.get('attachment_name')
attachment_content = email_context.get('attachment_content')
if attachment_name and attachment_content:

encoded = base64.b64encode(attachment_content).decode('ascii')

item = {
'content': encoded,
'filename': attachment_name,
'disposition': 'attachment',
}

payload['attachments'] = [item]
try:
sg = SendGridAPIClient(settings.SENDGRID_API_KEY)
resp = sg.client.mail.send.post(request_body=payload)
Expand Down
76 changes: 76 additions & 0 deletions osf/management/commands/add_colon_delim_to_s3_buckets.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
import logging

from django.core.management.base import BaseCommand
from django.apps import apps
from django.db.models import F, Value
from django.db.models.functions import Concat, Replace

logger = logging.getLogger(__name__)


class Command(BaseCommand):
    """
    Adds colon (':') delineators to s3 buckets to separate them from their subfolder,
    so `<bucket_name>` becomes `<bucket_name>:/`, the root path. Folder names are also
    updated to maintain consistency. Pass `--reverse` to undo the migration.
    """

    def add_arguments(self, parser):
        super().add_arguments(parser)
        parser.add_argument(
            '--reverse',
            action='store_true',
            dest='reverse',
            # Fixed: help text previously said 'Unsets date_retraction', copy-pasted
            # from the backfill_date_retracted command.
            help='Removes the colon delineators from s3 folder names and ids'
        )

    def handle(self, *args, **options):
        # Dispatch to the module-level helpers defined below.
        reverse = options.get('reverse', False)
        if reverse:
            reverse_update_folder_names()
        else:
            update_folder_names()


def update_folder_names():
    """Append the ':/' root-path delineator to s3 NodeSettings folder ids and names.

    Order matters: folder_id is updated first, while folder_name still reflects the
    pre-migration state and serves as the "not yet migrated" guard for all three updates.
    """
    NodeSettings = apps.get_model('addons_s3', 'NodeSettings')

    # Update folder_id for all records
    # NOTE(review): the guard checks folder_name (not folder_id) for ':/' — this keeps
    # re-runs from double-appending, but confirm folder_name is the intended guard column.
    NodeSettings.objects.exclude(
        folder_name__contains=':/'
    ).update(
        folder_id=Concat(F('folder_id'), Value(':/'))
    )

    # Update folder_name for records containing '('
    # presumably names of the form 'bucket (suffix)' become 'bucket:/ (suffix)' — verify
    NodeSettings.objects.filter(
        folder_name__contains=' ('
    ).exclude(
        folder_name__contains=':/'
    ).update(
        folder_name=Replace(F('folder_name'), Value(' ('), Value(':/ ('))
    )
    # Remaining names (no ' (' suffix) simply get ':/' appended.
    NodeSettings.objects.exclude(
        folder_name__contains=':/'
    ).exclude(
        folder_name__contains=' ('
    ).update(
        folder_name=Concat(F('folder_name'), Value(':/'))
    )
    logger.info('Update Folder Names/IDs complete')


def reverse_update_folder_names():
    """Strip the ':/' delineator back out of s3 NodeSettings folder ids and names."""
    node_settings_model = apps.get_model('addons_s3', 'NodeSettings')

    # Remove the delineator from every folder_id unconditionally.
    node_settings_model.objects.update(
        folder_id=Replace(F('folder_id'), Value(':/'), Value(''))
    )

    # Names containing ':/ (' must be handled before the bare ':/' pass below,
    # otherwise that pass would strip the delineator and lose the ' (' spacing.
    suffixed = node_settings_model.objects.filter(folder_name__contains=':/ (')
    suffixed.update(folder_name=Replace(F('folder_name'), Value(':/ ('), Value(' (')))

    remaining = node_settings_model.objects.filter(folder_name__contains=':/')
    remaining.update(folder_name=Replace(F('folder_name'), Value(':/'), Value('')))

    logger.info('Reverse Update Folder Names/IDs complete')
29 changes: 29 additions & 0 deletions osf/management/commands/add_egap_registration_schema.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
import logging

from django.core.management.base import BaseCommand
from osf.models import RegistrationSchema
from website.project.metadata.schemas import ensure_schema_structure, from_json

logger = logging.getLogger(__name__)


class Command(BaseCommand):
    """Add egap-registration schema to the db.
    For now, doing this outside of a migration so it can be individually added to
    a staging environment for preview.
    """

    def handle(self, *args, **options):
        # Normalize the on-disk JSON schema into the canonical structure.
        egap_registration_schema = ensure_schema_structure(from_json('egap-registration-3.json'))
        # Idempotent: re-running updates the existing row keyed by (name, schema_version).
        # The previous version bound the schema object to an unused local; only the
        # `created` flag is needed.
        _, created = RegistrationSchema.objects.update_or_create(
            name=egap_registration_schema['name'],
            schema_version=egap_registration_schema.get('version', 1),
            defaults={
                'schema': egap_registration_schema,
            }
        )

        # f-strings for consistency with the file's other management commands.
        schema_name = egap_registration_schema['name']
        if created:
            logger.info(f'Added schema {schema_name} to the database')
        else:
            logger.info(f'updated existing schema {schema_name}')
19 changes: 19 additions & 0 deletions osf/management/commands/add_institution_perm_groups.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
import logging

from django.core.management.base import BaseCommand
from osf.models import Institution

logger = logging.getLogger(__name__)


class Command(BaseCommand):
    """A new permissions group was created for Institutions, which will be created upon each new Institution,
    but the old institutions will not have this group. This management command creates those groups for the
    existing institutions.
    """

    def handle(self, *args, **options):
        # Backfill the permissions group onto every existing institution.
        for institution in Institution.objects.all():
            institution.update_group_permissions()
            logger.info(f'Added perms to {institution.name}.')
96 changes: 96 additions & 0 deletions osf/management/commands/addon_deleted_date.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
import datetime
import logging

from django.core.management.base import BaseCommand
from django.db import connection, transaction
from framework.celery_tasks import app as celery_app

logger = logging.getLogger(__name__)

# Addon settings tables whose `deleted` column is backfilled from `modified`
# for rows already flagged `is_deleted`.
TABLES_TO_POPULATE_WITH_MODIFIED = [
    'addons_zotero_usersettings',
    'addons_dropbox_usersettings',
    'addons_dropbox_nodesettings',
    'addons_figshare_nodesettings',
    'addons_figshare_usersettings',
    'addons_forward_nodesettings',
    'addons_github_nodesettings',
    'addons_github_usersettings',
    'addons_gitlab_nodesettings',
    'addons_gitlab_usersettings',
    'addons_googledrive_nodesettings',
    'addons_googledrive_usersettings',
    'addons_mendeley_nodesettings',
    'addons_mendeley_usersettings',
    'addons_onedrive_nodesettings',
    'addons_onedrive_usersettings',
    'addons_osfstorage_nodesettings',
    'addons_osfstorage_usersettings',
    'addons_bitbucket_nodesettings',
    'addons_bitbucket_usersettings',
    'addons_owncloud_nodesettings',
    'addons_box_nodesettings',
    'addons_owncloud_usersettings',
    'addons_box_usersettings',
    'addons_dataverse_nodesettings',
    'addons_dataverse_usersettings',
    'addons_s3_nodesettings',
    'addons_s3_usersettings',
    'addons_twofactor_usersettings',
    'addons_wiki_nodesettings',
    'addons_zotero_nodesettings'
]

# Batched UPDATE template: format args are (table, table, page_size). Copies
# `modified` into `deleted` for soft-deleted rows not yet backfilled; RETURNING id
# lets the caller detect whether any rows matched this batch.
UPDATE_DELETED_WITH_MODIFIED = """UPDATE {} SET deleted=modified
WHERE id IN (SELECT id FROM {} WHERE is_deleted AND deleted IS NULL LIMIT {}) RETURNING id;"""

@celery_app.task(name='management.commands.addon_deleted_date')
def populate_deleted(dry_run=False, page_size=1000):
    """Backfill the `deleted` column for every addon settings table.

    Each table processes at most `page_size` rows per invocation (the SQL LIMIT),
    so the task may need to be run repeatedly until no table reports leftover rows.
    A dry run raises inside the atomic block so the whole transaction rolls back.
    """
    with transaction.atomic():
        for table in TABLES_TO_POPULATE_WITH_MODIFIED:
            run_statements(UPDATE_DELETED_WITH_MODIFIED, page_size, table)
        if dry_run:
            raise RuntimeError('Dry Run -- Transaction rolled back')

def run_statements(statement, page_size, table):
    """Run one batched backfill `statement` against `table`, logging progress.

    Logs a follow-up message when the batch returned rows, i.e. when another
    pass over this table is still needed.
    """
    logger.info(f'Populating deleted column in table {table}')
    with connection.cursor() as cursor:
        cursor.execute(statement.format(table, table, page_size))
        updated_ids = cursor.fetchall()
    if updated_ids:
        logger.info(f'Table {table} still has rows to populate')

class Command(BaseCommand):
    help = '''Populates new deleted field for various models. Ensure you have run migrations
before running this script.'''

    def add_arguments(self, parser):
        parser.add_argument(
            '--dry_run',
            # Fixed: was `type=bool`, which is an argparse trap — bool('False') is
            # True, so any supplied value (even "False") enabled the dry run.
            action='store_true',
            default=False,
            help='Run queries but do not write files',
        )
        parser.add_argument(
            '--page_size',
            type=int,
            default=1000,
            help='How many rows to process at a time',
        )

    def handle(self, *args, **options):
        # Record wall-clock runtime for the whole backfill.
        script_start_time = datetime.datetime.now()
        logger.info(f'Script started time: {script_start_time}')
        logger.debug(options)

        dry_run = options['dry_run']
        page_size = options['page_size']

        if dry_run:
            logger.info('DRY RUN')

        # Runs synchronously here; also registered as a celery task above.
        populate_deleted(dry_run, page_size)

        script_finish_time = datetime.datetime.now()
        logger.info(f'Script finished time: {script_finish_time}')
        logger.info(f'Run time {script_finish_time - script_start_time}')
89 changes: 89 additions & 0 deletions osf/management/commands/backfill_date_retracted.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
# This is a management command, rather than a migration script, for two primary reasons:
# 1. It makes no changes to database structure (e.g. AlterField), only database content.
# 2. It may need to be ran more than once, as it skips failed registrations.

from datetime import timedelta
import logging

import django
django.setup()

from django.core.management.base import BaseCommand
from django.db import transaction

from osf.models import Registration, Retraction, Sanction
from scripts import utils as script_utils

logger = logging.getLogger(__name__)

def set_date_retracted(*args):
    """Backfill Retraction.date_retracted from the node's `retraction_approved` log.

    Skips failed registrations (no `registered_from`). Raises if the expected log is
    missing, or if multiple logs exist whose dates differ by more than 5 seconds.
    """
    registrations = (
        Registration.objects.filter(retraction__state=Sanction.APPROVED, retraction__date_retracted=None)
        .select_related('retraction')
        .prefetch_related('registered_from__logs')
        .prefetch_related('registered_from__guids')
    )
    total = registrations.count()
    logger.info(f'Migrating {total} retractions.')

    for registration in registrations:
        if not registration.registered_from:
            logger.warning(f'Skipping failed registration {registration._id}')
            continue
        retraction_logs = registration.registered_from.logs.filter(
            action='retraction_approved', params__retraction_id=registration.retraction._id)
        log_count = retraction_logs.count()
        # Fixed: the old combined condition dereferenced `retraction_logs.first().date`
        # even when no logs existed, crashing with AttributeError before the intended
        # descriptive exception could be raised.
        if log_count == 0:
            raise Exception(
                'There should be a retraction_approved log for retraction {} on node {}. No retraction_approved log found.'.format(
                    registration.retraction._id, registration.registered_from._id))
        # Multiple logs are tolerated only when they are effectively simultaneous.
        if log_count > 1 and retraction_logs.first().date - retraction_logs.last().date > timedelta(seconds=5):
            raise Exception(
                'There should only be one retraction_approved log for retraction {} on node {}. Multiple logs found.'.format(
                    registration.retraction._id, registration.registered_from._id))
        date_retracted = retraction_logs[0].date
        logger.info(
            'Setting date_retracted for retraction {} to be {}, from retraction_approved node log {}.'.format(
                registration.retraction._id, date_retracted, retraction_logs[0]._id
            )
        )
        registration.retraction.date_retracted = date_retracted
        registration.retraction.save()

def unset_date_retracted(*args):
    """Reverse migration: clear date_retracted on every approved retraction."""
    approved = Retraction.objects.filter(state=Sanction.APPROVED).exclude(date_retracted=None)
    logger.info(f'Migrating {approved.count()} retractions.')

    for retraction in approved:
        retraction.date_retracted = None
        retraction.save()


class Command(BaseCommand):
    """
    Backfill Retraction.date_retracted with `RETRACTION_APPROVED` log date.
    """

    def add_arguments(self, parser):
        super().add_arguments(parser)
        parser.add_argument(
            '--dry',
            action='store_true',
            dest='dry_run',
            help='Run migration and roll back changes to db',
        )
        parser.add_argument(
            '--reverse',
            action='store_true',
            dest='reverse',
            help='Unsets date_retraction',
        )

    def handle(self, *args, **options):
        dry_run = options.get('dry_run', False)
        reverse = options.get('reverse', False)
        # Real runs additionally log to a file; dry runs stay console-only.
        if not dry_run:
            script_utils.add_file_logger(logger, __file__)
        with transaction.atomic():
            migrate = unset_date_retracted if reverse else set_date_retracted
            migrate()
            if dry_run:
                # Raising inside the atomic block rolls the whole migration back.
                raise RuntimeError('Dry run, transaction rolled back.')
Loading
Loading