UI: Web Assets v2 upgrade #85
@@ -1,16 +0,0 @@
-"""
-ASGI config for blender_extensions project.
-
-It exposes the ASGI callable as a module-level variable named ``application``.
-
-For more information on this file, see
-https://docs.djangoproject.com/en/4.0/howto/deployment/asgi/
-"""
-
-import os
-
-from django.core.asgi import get_asgi_application
-
-os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'blender_extensions.settings')
-
-application = get_asgi_application()
@@ -120,6 +120,7 @@ WSGI_APPLICATION = 'blender_extensions.wsgi.application'
 DATABASES = {
     'default': dj_database_url.config(default='sqlite:///{}'.format(BASE_DIR / 'db.sqlite3')),
 }
+DATABASES['default']['CONN_MAX_AGE'] = None
 
 # Password validation
 # https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators
@@ -325,3 +326,7 @@ EMAIL_HOST_PASSWORD = os.getenv('EMAIL_HOST_PASSWORD')
 ACTSTREAM_SETTINGS = {
     'MANAGER': 'actstream.managers.ActionManager',
 }
+
+# Require file validation for other file processing (e.g. thumbnails).
+# Should be set for staging/production.
+REQUIRE_FILE_VALIDATION = os.getenv('REQUIRE_FILE_VALIDATION', False)
@@ -1,5 +1,4 @@
 from typing import Set, Tuple, Mapping, Any
-import copy
 import logging
 
 from django.contrib.admin.models import DELETION
@@ -21,6 +20,13 @@ See TrackChangesMixin.pre_save_record().
 """
 
 
+def _get_object_state(obj: object, fields=None, include_pk=False) -> dict:
+    data = serializers.serialize('python', [obj], fields=fields)[0]
+    if include_pk:
+        data['fields']['pk'] = data['pk']
+    return data['fields']
+
+
 class CreatedModifiedMixin(models.Model):
     """Add standard date fields to a model."""
 
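
Note: `_get_object_state()` centralises the serialisation that was previously duplicated between `RecordDeletionMixin.serialise()` and `TrackChangesMixin` (both updated below). A minimal usage sketch, with field names and values borrowed from the `File` tests later in this diff; the concrete values are illustrative only:

    # Sketch only: values are made up, the call shape follows the helper above.
    state = _get_object_state(file, fields={'status', 'hash', 'size_bytes'})
    # -> {'status': 2, 'hash': 'foobar', 'size_bytes': 7149}
    state_with_pk = _get_object_state(file, include_pk=True)
    # -> all serialisable fields, plus the 'pk' key
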
@@ -48,11 +54,6 @@ class CreatedModifiedMixin(models.Model):
 
 
 class RecordDeletionMixin:
-    def serialise(self) -> dict:
-        data = serializers.serialize('python', [self])[0]
-        data['fields']['pk'] = data['pk']
-        return data['fields']
-
     def record_deletion(self):
         """Create a LogEntry describing a deletion of this object."""
         msg_args = {'type': type(self), 'pk': self.pk}
@@ -63,7 +64,7 @@ class RecordDeletionMixin:
             # This shouldn't happen: prior validation steps should have taken care of this.
             msg_args['reasons'] = cannot_be_deleted_reasons
             logger.error("%(type)s pk=%(pk)s is being deleted but it %(reasons)s", msg_args)
-        state = self.serialise()
+        state = _get_object_state(self, include_pk=True)
         message = [
             {
                 'deleted': {
@@ -123,9 +124,7 @@ class TrackChangesMixin(RecordDeletionMixin, models.Model):
 
         update_fields = kwargs.get('update_fields')
         was_modified = self._was_modified(db_instance, update_fields=update_fields)
-        old_instance_data = {
-            attr: copy.deepcopy(getattr(db_instance, attr)) for attr in self.track_changes_to_fields
-        }
+        old_instance_data = _get_object_state(db_instance, fields=self.track_changes_to_fields)
         return was_modified, old_instance_data
 
     def record_status_change(self, was_changed, old_state, **kwargs):
@@ -151,8 +150,9 @@ class TrackChangesMixin(RecordDeletionMixin, models.Model):
         if not was_changed or not self.pk:
             return
 
+        new_state = _get_object_state(self, fields=self.track_changes_to_fields)
         changed_fields = {
-            field for field in old_state.keys() if getattr(self, field) != old_state[field]
+            field for field in old_state.keys() if new_state.get(field) != old_state.get(field)
         }
         message = [
             {
@@ -6,7 +6,7 @@ from mdgen import MarkdownPostProvider
 import factory
 import factory.fuzzy
 
-from extensions.models import Extension, Version, Tag
+from extensions.models import Extension, Version, Tag, Preview
 from ratings.models import Rating
 
 fake_markdown = Faker()
@@ -35,7 +35,7 @@ class ExtensionFactory(DjangoModelFactory):
 
         if extracted:
             for _ in extracted:
-                _.extension_preview.create(caption='Media Caption', extension=self)
+                Preview.objects.create(file=_, caption='Media Caption', extension=self)
 
     @factory.post_generation
     def process_extension_id(self, created, extracted, **kwargs):
@@ -100,3 +100,10 @@ ABUSE_TYPE = Choices(
     ('ABUSE_USER', ABUSE_TYPE_USER, "User"),
     ('ABUSE_RATING', ABUSE_TYPE_RATING, "Rating"),
 )
+
+# **N.B.**: thumbnail sizes are not intended to be changed on the fly:
+# thumbnails of existing images must exist in MEDIA_ROOT before
+# the code expecting thumbnails of new dimensions can be deployed!
+THUMBNAIL_SIZES = {'1080p': [1920, 1080], '360p': [640, 360]}
+THUMBNAIL_FORMAT = 'PNG'
+THUMBNAIL_QUALITY = 83
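
Note: these constants are consumed by the thumbnail pipeline in `files/utils.py` and `files/tasks.py`; the utils implementation itself is not part of this diff. A minimal sketch of how such settings could drive Pillow-based resizing, under the assumption that Pillow backs `files.utils.make_thumbnails` (the tests later in this diff patch `files.utils.Image` and `files.utils.resize_image`); the function name and output paths below are illustrative:

    from pathlib import Path
    from PIL import Image  # assumption: Pillow backs files.utils.make_thumbnails

    def make_thumbnails_sketch(source_path: str) -> dict:
        # Returns {'<size_key>': {'path': ..., 'size': [w, h]}}, the shape expected
        # by File.metadata['thumbnails'] later in this diff.
        results = {}
        for size_key, (max_w, max_h) in THUMBNAIL_SIZES.items():
            with Image.open(source_path) as im:
                im.thumbnail((max_w, max_h))  # resize in place, keeping aspect ratio
                out = Path(source_path).with_suffix(f'.{size_key}.png')
                im.save(out, format=THUMBNAIL_FORMAT, quality=THUMBNAIL_QUALITY)
                results[size_key] = {'path': str(out), 'size': list(im.size)}
        return results
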
@@ -1,3 +1,4 @@
+from django.core.exceptions import ValidationError
 from semantic_version.django_fields import VersionField as SemanticVersionField
 from semantic_version import Version
 import json
@@ -11,7 +12,10 @@ class VersionStringField(SemanticVersionField):
             return value
         if value is None:
             return value
-        return str(Version(value))
+        try:
+            return str(Version(value))
+        except Exception as e:
+            raise ValidationError(e)
 
     def from_db_value(self, value, expression, connection):
         return self.to_python(value)
@@ -66,24 +66,14 @@ class AddPreviewFileForm(forms.ModelForm):
 
     def save(self, *args, **kwargs):
         """Save Preview from the cleaned form data."""
-        # If file with this hash was already uploaded by the same user, return it
-        hash_ = self.instance.generate_hash(self.instance.source)
-        model = self.instance.__class__
-        existing_image = model.objects.filter(original_hash=hash_, user=self.request.user).first()
-        if (
-            existing_image
-            and not existing_image.extension_preview.filter(extension_id=self.extension.id).count()
-        ):
-            logger.warning('Found an existing %s pk=%s', model, existing_image.pk)
-            self.instance = existing_image
-
         # Fill in missing fields from request and the source file
         self.instance.user = self.request.user
 
         instance = super().save(*args, **kwargs)
 
         # Create extension preview and save caption to it
-        instance.extension_preview.create(
+        extensions.models.Preview.objects.create(
+            file=instance,
             caption=self.cleaned_data['caption'],
             extension=self.extension,
         )
extensions/migrations/0027_unique_preview_files.py (new file, 20 lines)
@@ -0,0 +1,20 @@
+# Generated by Django 4.2.11 on 2024-04-23 11:56
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('files', '0007_alter_file_status'),
+        ('extensions', '0026_remove_extension_date_deleted_and_more'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='preview',
+            name='file',
+            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='files.file'),
+        ),
+    ]
@@ -33,7 +33,13 @@ log = logging.getLogger(__name__)
 class RatingMixin:
     @property
     def text_ratings_count(self) -> int:
-        return self.ratings.listed_texts.count()
+        return len(
+            [
+                r
+                for r in self.ratings.all()
+                if r.text is not None and r.is_listed and r.reply_to is None
+            ]
+        )
 
     @property
     def total_ratings_count(self) -> int:
@@ -274,10 +280,9 @@ class Extension(CreatedModifiedMixin, RatingMixin, TrackChangesMixin, models.Mod
     def get_previews(self):
         """Get preview files, sorted by Preview.position.
 
-        TODO: Might be better to query Previews directly instead of going
-        for the reverse relationship.
+        Avoid triggering additional querysets, rely on prefetch_related in the view.
         """
-        return self.previews.listed.order_by('extension_preview__position')
+        return [p.file for p in self.preview_set.all() if p.file.is_listed]
 
     @property
     def valid_file_statuses(self) -> List[int]:
@@ -288,14 +293,13 @@ class Extension(CreatedModifiedMixin, RatingMixin, TrackChangesMixin, models.Mod
     @property
     def latest_version(self):
         """Retrieve the latest version."""
-        return (
-            self.versions.filter(
-                file__status__in=self.valid_file_statuses,
-                file__isnull=False,
-            )
-            .order_by('date_created')
-            .last()
-        )
+        versions = [
+            v for v in self.versions.all() if v.file and v.file.status in self.valid_file_statuses
+        ]
+        if not versions:
+            return None
+        versions = sorted(versions, key=lambda v: v.date_created, reverse=True)
+        return versions[0]
 
     @property
     def current_version(self):
@@ -653,9 +657,7 @@ class Maintainer(CreatedModifiedMixin, models.Model):
 
 class Preview(CreatedModifiedMixin, RecordDeletionMixin, models.Model):
     extension = models.ForeignKey(Extension, on_delete=models.CASCADE)
-    file = models.ForeignKey(
-        'files.File', related_name='extension_preview', on_delete=models.CASCADE
-    )
+    file = models.OneToOneField('files.File', on_delete=models.CASCADE)
     caption = models.CharField(max_length=255, default='', null=False, blank=True)
     position = models.IntegerField(default=0)
 
@@ -3,6 +3,18 @@
 <a
   href="https://www.blender.org/download/releases/{{ version.blender_version_min|version_without_patch|replace:".,-" }}/"
   title="{{ version.blender_version_min }}">Blender {{ version.blender_version_min|version_without_patch }}</a>
+{% if is_editable %}
+  —
+  <input name="blender_version_max" class="form-control-sm"
+    value="{{version.blender_version_max|default_if_none:''}}"
+    placeholder="{% trans 'maximum Blender version' %}"
+    pattern="^([0-9]+\.[0-9]+\.[0-9]+)?$"
+    title="{% trans 'Blender version, e.g. 4.1.0' %}"
+  />
+  {% for error in form.errors.blender_version_max %}
+    <div class="error">{{ error }}</div>
+  {% endfor %}
+{% else %}
 {% if version.blender_version_max %}
 {% if version.blender_version_max|version_without_patch != version.blender_version_min|version_without_patch %}
 —
@@ -13,3 +25,4 @@
 {% else %}
 {% trans 'and newer' %}
 {% endif %}
+{% endif %}
@@ -1,23 +1,25 @@
 {% load common filters %}
-{% with latest=extension.latest_version %}
-<div class="cards-item">
-  <div class="cards-item-content">
-    <a href="{{ extension.get_absolute_url }}">
-      <div class="cards-item-thumbnail">
-        <img alt="{{ extension.name }}" src="{{ extension.previews.listed.first.source.url }}" title="{{ extension.name }}">
-      </div>
+{% with latest=extension.latest_version thumbnail_360p_url=extension.get_previews.0.thumbnail_360p_url %}
+<div class="ext-card {% if blur %}is-background-blur{% endif %}">
+  {% if blur %}
+  <div class="ext-card-thumbnail-blur" style="background-image: url({{ thumbnail_360p_url }});"></div>
+  {% endif %}
+  <a class="ext-card-thumbnail" href="{{ extension.get_absolute_url }}">
+    <div class="ext-card-thumbnail-img" style="background-image: url({{ thumbnail_360p_url }});" title="{{ extension.name }}"></div>
   </a>
-  <h3 class="cards-item-title">
+  <div class="ext-card-body">
+  <h3 class="ext-card-title">
     <a href="{{ extension.get_absolute_url }}">{{ extension.name }}</a>
   </h3>
-  <div class="cards-item-excerpt">
   <p>
     {{ latest.tagline }}
   </p>
-  </div>
-  <div class="cards-item-extra">
-    <ul>
-      <li>
+  <ul class="ext-list-details">
+    <li class="ext-card-author">
       {% if extension.team %}
         {% with team=extension.team %}
           <a href="{{ team.get_absolute_url }}" title="{{ team.name }}">{{ team.name }}</a>
@@ -28,10 +30,10 @@
       </li>
     </ul>
 
-    <ul class="cards-item-extra-rating-stars">
+    <ul class="ext-list-details mt-1">
       {% if extension.average_score %}
       <li>
-        <a class="align-items-center d-flex" href="{{ extension.get_ratings_url }}">
+        <a href="{{ extension.get_ratings_url }}">
           {% include "ratings/components/average.html" with score=extension.average_score %}
           ({{ extension.text_ratings_count|int_compact }})
         </a>
@@ -54,7 +54,7 @@
   <div class="dl-row">
     <div class="dl-col">
       <dt>{% trans 'Tagline' %}</dt>
-      <dd title="{{ latest.tagline }}">{{ latest.tagline }}</dd>
+      <dd title="{{ version.tagline }}">{{ version.tagline }}</dd>
     </div>
   </div>
 
@@ -63,20 +63,20 @@
       <dt>{% trans 'Version' %}</dt>
       <dd>
         <a href="{{ extension.get_versions_url }}">
-          {{ latest.version }}
+          {{ version.version }}
         </a>
       </dd>
     </div>
     <div class="dl-col">
       <dt>{% trans 'Size' %}</dt>
-      <dd>{{ latest.file.size_bytes|filesizeformat }}</dd>
+      <dd>{{ version.file.size_bytes|filesizeformat }}</dd>
     </div>
   </div>
 
   <div class="dl-row">
     <div class="dl-col">
       <dt>{% trans 'Compatibility' %}</dt>
-      <dd>{% include "extensions/components/blender_version.html" with version=latest %}</dd>
+      <dd>{% include "extensions/components/blender_version.html" with version=version is_editable=is_editable form=form %}</dd>
     </div>
   </div>
 
@@ -91,8 +91,8 @@
 
   <div class="dl-row">
     <div class="dl-col">
-      <dt>License{{ latest.licenses.count|pluralize }}</dt>
-      {% for license in latest.licenses.all %}
+      <dt>License{{ version.licenses.count|pluralize }}</dt>
+      {% for license in version.licenses.all %}
       <dd>
         {% include "common/components/external_link.html" with url=license.url title=license %}
       </dd>
@@ -102,14 +102,14 @@
 
   <div class="dl-row">
     <div class="dl-col">
-      {% include "extensions/components/detail_card_version_permissions.html" with version=latest %}
+      {% include "extensions/components/detail_card_version_permissions.html" with version=version %}
     </div>
   </div>
 
   <div class="dl-row">
     <dd>
-      {% if latest.tags.count %}
-      {% include "extensions/components/tags.html" with small=True version=latest %}
+      {% if version.tags.count %}
+      {% include "extensions/components/tags.html" with small=True version=version %}
       {% else %}
       No tags.
       {% endif %}
@@ -3,19 +3,17 @@
 {% if previews %}
 <div class="galleria-items{% if previews.count > 5 %} is-many{% endif %}{% if previews.count == 1 %} is-single{% endif %}" id="galleria-items">
   {% for preview in previews %}
+  {% with thumbnail_1080p_url=preview.thumbnail_1080p_url %}
   <a
     class="galleria-item js-galleria-item-preview galleria-item-type-{{ preview.content_type|slugify|slice:5 }}{% if forloop.first %} is-active{% endif %}"
-    href="{{ preview.source.url }}"
+    href="{{ thumbnail_1080p_url }}"
     {% if 'video' in preview.content_type %}data-galleria-video-url="{{ preview.source.url }}"{% endif %}
     data-galleria-content-type="{{ preview.content_type }}"
     data-galleria-index="{{ forloop.counter }}">
 
-    {% if 'video' in preview.content_type and preview.thumbnail %}
-    <img src="{{ preview.thumbnail.url }}" alt="{{ preview.extension_preview.first.caption }}">
-    {% else %}
-    <img src="{{ preview.source.url }}" alt="{{ preview.extension_preview.first.caption }}">
-    {% endif %}
+    <img src="{{ thumbnail_1080p_url }}" alt="{{ preview.preview.caption }}">
   </a>
+  {% endwith %}
   {% endfor %}
 </div>
 {% else %}
@@ -78,7 +78,7 @@
 <div class="is-sticky py-3">
   <div class="row">
     <div class="col">
-      {% include "extensions/components/extension_edit_detail_card.html" with extension=form.instance.extension latest=form.instance is_initial=True %}
+      {% include "extensions/components/extension_edit_detail_card.html" with extension=form.instance.extension version=form.instance is_initial=True %}
 
       <section class="card p-3 mt-3">
         <div class="btn-col">
@@ -1,5 +1,5 @@
 {% extends "common/base.html" %}
-{% load i18n %}
+{% load cache i18n %}
 
 {% block page_title %}Extensions{% endblock page_title %}
 
@@ -32,6 +32,7 @@
 {% endblock hero %}
 
 {% block content %}
+{% cache 60 home %}
 <section class="mt-3">
   <div class="d-flex">
     <h2>
|
|||||||
</a>
|
</a>
|
||||||
</div>
|
</div>
|
||||||
</section>
|
</section>
|
||||||
|
{% endcache %}
|
||||||
{% endblock content %}
|
{% endblock content %}
|
||||||
|
@@ -4,7 +4,7 @@
 {% block page_title %}{{ extension.name }}{% endblock page_title %}
 
 {% block content %}
-{% with latest=extension.latest_version author=extension.latest_version.file.user form=form|add_form_classes %}
+{% with author=extension.latest_version.file.user form=form|add_form_classes %}
 <div class="row">
   <div class="col-md-8">
     <h2>{{ extension.get_type_display }} {% trans 'details' %}</h2>
@@ -93,7 +93,7 @@
 <div class="is-sticky py-3">
   <div class="row mb-3">
     <div class="col">
-      {% include "extensions/components/extension_edit_detail_card.html" with extension=extension latest=latest %}
+      {% include "extensions/components/extension_edit_detail_card.html" with extension=extension version=extension.latest_version %}
 
       <section class="card p-3 mt-3">
         <div class="btn-col">
|
@ -44,8 +44,7 @@
|
|||||||
<div class="is-sticky">
|
<div class="is-sticky">
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<div class="col">
|
<div class="col">
|
||||||
{% include "extensions/components/extension_edit_detail_card.html" with extension=form.instance.extension latest=form.instance %}
|
{% include "extensions/components/extension_edit_detail_card.html" with extension=form.instance.extension version=form.instance is_editable=True form=form %}
|
||||||
|
|
||||||
<section class="card p-3 mt-3">
|
<section class="card p-3 mt-3">
|
||||||
<div class="btn-col">
|
<div class="btn-col">
|
||||||
<button type="submit" class="btn btn-primary">
|
<button type="submit" class="btn btn-primary">
|
||||||
|
@@ -1,7 +1,8 @@
+from pathlib import Path
 import json
 
 from django.contrib.admin.models import LogEntry, DELETION
-from django.test import TestCase  # , TransactionTestCase
+from django.test import TestCase, override_settings
 
 from common.tests.factories.extensions import create_approved_version, create_version
 from common.tests.factories.files import FileFactory
@@ -10,7 +11,11 @@ import extensions.models
 import files.models
 import reviewers.models
 
+TEST_MEDIA_DIR = Path(__file__).resolve().parent / 'media'
+
 
+# Media file are physically deleted when files records are deleted, hence the override
+@override_settings(MEDIA_ROOT=TEST_MEDIA_DIR)
 class DeleteTest(TestCase):
     fixtures = ['dev', 'licenses']
 
@@ -54,7 +59,7 @@ class DeleteTest(TestCase):
                 file_validation,
                 extension,
                 approval_activity,
-                preview_file.extension_preview.first(),
+                preview_file.preview,
                 version,
             ],
         )
@@ -74,7 +74,7 @@ class UpdateTest(TestCase):
         self.assertEqual(File.objects.filter(type=File.TYPES.IMAGE).count(), 1)
         self.assertEqual(extension.previews.count(), 1)
         file1 = extension.previews.all()[0]
-        self.assertEqual(file1.extension_preview.first().caption, 'First Preview Caption Text')
+        self.assertEqual(file1.preview.caption, 'First Preview Caption Text')
         self.assertEqual(
             file1.original_hash,
             'sha256:643e15eb6c4831173bbcf71b8c85efc70cf3437321bf2559b39aa5e9acfd5340',
@@ -123,8 +123,8 @@ class UpdateTest(TestCase):
         self.assertEqual(extension.previews.count(), 2)
         file1 = extension.previews.all()[0]
         file2 = extension.previews.all()[1]
-        self.assertEqual(file1.extension_preview.first().caption, 'First Preview Caption Text')
-        self.assertEqual(file2.extension_preview.first().caption, 'Second Preview Caption Text')
+        self.assertEqual(file1.preview.caption, 'First Preview Caption Text')
+        self.assertEqual(file2.preview.caption, 'Second Preview Caption Text')
         self.assertEqual(
             file1.original_hash,
             'sha256:643e15eb6c4831173bbcf71b8c85efc70cf3437321bf2559b39aa5e9acfd5340',
@@ -1,3 +1,5 @@
+from datetime import timedelta
+
 from django.test import TestCase
 from django.urls import reverse
 
@@ -80,6 +82,64 @@ class PublicViewsTest(_BaseTestCase):
         self.assertTemplateUsed(response, 'extensions/home.html')
 
 
+class ApiViewsTest(_BaseTestCase):
+    def test_blender_version_filter(self):
+        create_approved_version(blender_version_min='4.0.1')
+        create_approved_version(blender_version_min='4.1.1')
+        create_approved_version(blender_version_min='4.2.1')
+        url = reverse('extensions:api')
+
+        json = self.client.get(
+            url + '?blender_version=4.1.1',
+            HTTP_ACCEPT='application/json',
+        ).json()
+        self.assertEqual(len(json['data']), 2)
+
+        json2 = self.client.get(
+            url + '?blender_version=3.0.1',
+            HTTP_ACCEPT='application/json',
+        ).json()
+        self.assertEqual(len(json2['data']), 0)
+
+        json3 = self.client.get(
+            url + '?blender_version=4.3.1',
+            HTTP_ACCEPT='application/json',
+        ).json()
+        self.assertEqual(len(json3['data']), 3)
+
+    def test_blender_version_filter_latest_not_max_version(self):
+        version = create_approved_version(blender_version_min='4.0.1')
+        version.date_created
+        extension = version.extension
+        create_approved_version(
+            blender_version_min='4.2.1',
+            extension=extension,
+            date_created=version.date_created + timedelta(days=1),
+            version='2.0.0',
+        )
+        create_approved_version(
+            blender_version_min='3.0.0',
+            extension=extension,
+            date_created=version.date_created + timedelta(days=2),
+            version='1.0.1',
+        )
+        create_approved_version(
+            blender_version_min='4.2.1',
+            extension=extension,
+            date_created=version.date_created + timedelta(days=3),
+            version='2.0.1',
+        )
+        url = reverse('extensions:api')
+
+        json = self.client.get(
+            url + '?blender_version=4.1.1',
+            HTTP_ACCEPT='application/json',
+        ).json()
+        self.assertEqual(len(json['data']), 1)
+        # we are expecting the latest matching, not the maximum version
+        self.assertEqual(json['data'][0]['version'], '1.0.1')
+
+
 class ExtensionDetailViewTest(_BaseTestCase):
     def test_cannot_view_unlisted_extension_anonymously(self):
         extension = _create_extension()
|
|||||||
self.client.force_login(random_user)
|
self.client.force_login(random_user)
|
||||||
response = self.client.get(url)
|
response = self.client.get(url)
|
||||||
self.assertEqual(response.status_code, 403)
|
self.assertEqual(response.status_code, 403)
|
||||||
|
|
||||||
|
def test_blender_max_version(self):
|
||||||
|
extension = _create_extension()
|
||||||
|
extension_owner = extension.latest_version.file.user
|
||||||
|
extension.authors.add(extension_owner)
|
||||||
|
self.client.force_login(extension_owner)
|
||||||
|
url = reverse(
|
||||||
|
'extensions:version-update',
|
||||||
|
kwargs={
|
||||||
|
'type_slug': extension.type_slug,
|
||||||
|
'slug': extension.slug,
|
||||||
|
'pk': extension.latest_version.pk,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
version = extension.latest_version
|
||||||
|
|
||||||
|
response = self.client.post(
|
||||||
|
url,
|
||||||
|
{'release_notes': 'text', 'blender_version_max': 'invalid'},
|
||||||
|
)
|
||||||
|
# error page, no redirect
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
version.refresh_from_db()
|
||||||
|
self.assertIsNone(version.blender_version_max)
|
||||||
|
|
||||||
|
response2 = self.client.post(
|
||||||
|
url,
|
||||||
|
{'release_notes': 'text', 'blender_version_max': '4.2.0'},
|
||||||
|
)
|
||||||
|
# success, redirect
|
||||||
|
self.assertEqual(response2.status_code, 302)
|
||||||
|
version.refresh_from_db()
|
||||||
|
self.assertEqual(version.blender_version_max, '4.2.0')
|
||||||
|
@@ -42,38 +42,51 @@ class ListedExtensionsSerializer(serializers.ModelSerializer):
             self.fail('invalid_version')
 
     def to_representation(self, instance):
-        blender_version_min = instance.latest_version.blender_version_min
-        blender_version_max = instance.latest_version.blender_version_max
-        # TODO: get the latest valid version
-        # For now we skip the extension if the latest version is not in a valid range.
-        if self.blender_version and not is_in_version_range(
-            self.blender_version, blender_version_min, blender_version_max
-        ):
-            return {}
+        matching_version = None
+        # avoid triggering additional db queries, reuse the prefetched queryset
+        versions = [
+            v
+            for v in instance.versions.all()
+            if v.file and v.file.status in instance.valid_file_statuses
+        ]
+        if not versions:
+            return None
+        versions = sorted(versions, key=lambda v: v.date_created, reverse=True)
+        if self.blender_version:
+            for v in versions:
+                if is_in_version_range(
+                    self.blender_version,
+                    v.blender_version_min,
+                    v.blender_version_max,
+                ):
+                    matching_version = v
+                    break
+        else:
+            # same as latest_version, but without triggering a new queryset
+            matching_version = versions[0]
+
+        if not matching_version:
+            return None
 
         data = {
             'id': instance.extension_id,
-            'schema_version': instance.latest_version.schema_version,
+            'schema_version': matching_version.schema_version,
             'name': instance.name,
-            'version': instance.latest_version.version,
-            'tagline': instance.latest_version.tagline,
-            'archive_hash': instance.latest_version.file.original_hash,
-            'archive_size': instance.latest_version.file.size_bytes,
-            'archive_url': self.request.build_absolute_uri(instance.latest_version.download_url),
+            'version': matching_version.version,
+            'tagline': matching_version.tagline,
+            'archive_hash': matching_version.file.original_hash,
+            'archive_size': matching_version.file.size_bytes,
+            'archive_url': self.request.build_absolute_uri(matching_version.download_url),
             'type': EXTENSION_TYPE_SLUGS_SINGULAR.get(instance.type),
-            'blender_version_min': instance.latest_version.blender_version_min,
-            'blender_version_max': instance.latest_version.blender_version_max,
+            'blender_version_min': matching_version.blender_version_min,
+            'blender_version_max': matching_version.blender_version_max,
             'website': self.request.build_absolute_uri(instance.get_absolute_url()),
-            'maintainer': str(instance.authors.first()),
-            'license': [
-                license_iter.slug for license_iter in instance.latest_version.licenses.all()
-            ],
-            'permissions': [
-                permission.slug for permission in instance.latest_version.permissions.all()
-            ],
+            # avoid triggering additional db queries, reuse the prefetched queryset
+            'maintainer': str(instance.authors.all()[0]),
+            'license': [license_iter.slug for license_iter in matching_version.licenses.all()],
+            'permissions': [permission.slug for permission in matching_version.permissions.all()],
             # TODO: handle copyright
-            'tags': [str(tag) for tag in instance.latest_version.tags.all()],
+            'tags': [str(tag) for tag in matching_version.tags.all()],
         }
 
         return clean_json_dictionary_from_optional_fields(data)
@@ -93,10 +106,18 @@ class ExtensionsAPIView(APIView):
     )
     def get(self, request):
         blender_version = request.GET.get('blender_version')
+        qs = Extension.objects.listed.prefetch_related(
+            'authors',
+            'versions',
+            'versions__file',
+            'versions__licenses',
+            'versions__permissions',
+            'versions__tags',
+        ).all()
         serializer = self.serializer_class(
-            Extension.objects.listed, blender_version=blender_version, request=request, many=True
+            qs, blender_version=blender_version, request=request, many=True
         )
-        data = serializer.data
+        data = [e for e in serializer.data if e is not None]
         return Response(
             {
                 # TODO implement extension blocking by moderators
@@ -327,7 +327,7 @@ class UpdateVersionView(LoginRequiredMixin, UserPassesTestMixin, UpdateView):
 
     template_name = 'extensions/new_version_finalise.html'
     model = Version
-    fields = ['release_notes']
+    fields = ['blender_version_max', 'release_notes']
 
     def get_success_url(self):
         return reverse(
@@ -42,7 +42,19 @@ class HomeView(ListedExtensionsView):
 
     def get_context_data(self, **kwargs):
         context = super().get_context_data(**kwargs)
-        q = super().get_queryset()
+        q = (
+            super()
+            .get_queryset()
+            .prefetch_related(
+                'authors',
+                'preview_set',
+                'preview_set__file',
+                'ratings',
+                'versions',
+                'versions__file',
+                'versions__tags',
+            )
+        )
         context['addons'] = q.filter(type=EXTENSION_TYPE_CHOICES.BPY).order_by('-average_score')[:8]
         context['themes'] = q.filter(type=EXTENSION_TYPE_CHOICES.THEME).order_by('-average_score')[
             :8
@@ -1,17 +1,29 @@
+import logging
+
+from django.conf import settings
 from django.contrib import admin
+from django.template.loader import render_to_string
 import background_task.admin
 import background_task.models
 
 from .models import File, FileValidation
 import files.signals
 
+logger = logging.getLogger(__name__)
+
 
-def scan_selected_files(self, request, queryset):
+def schedule_scan(self, request, queryset):
     """Scan selected files."""
     for instance in queryset:
         files.signals.schedule_scan(instance)
 
 
+def make_thumbnails(self, request, queryset):
+    """Make thumbnails for selected files."""
+    for instance in queryset.filter(type__in=(File.TYPES.IMAGE, File.TYPES.VIDEO)):
+        files.tasks.make_thumbnails.task_function(file_id=instance.pk)
+
+
 class FileValidationInlineAdmin(admin.StackedInline):
     model = FileValidation
     readonly_fields = ('date_created', 'date_modified', 'is_ok', 'results')
@@ -27,6 +39,28 @@ class FileValidationInlineAdmin(admin.StackedInline):
 
 @admin.register(File)
 class FileAdmin(admin.ModelAdmin):
+    class Media:
+        css = {'all': ('files/admin/file.css',)}
+
+    def thumbnails(self, obj):
+        if not obj or not (obj.is_image or obj.is_video):
+            return ''
+        try:
+            context = {'file': obj, 'MEDIA_URL': settings.MEDIA_URL}
+            return render_to_string('files/admin/thumbnails.html', context)
+        except Exception:
+            # Make sure any exception happening here is always logged
+            # (e.g. admin eats exceptions in ModelAdmin properties, making it hard to debug)
+            logger.exception('Failed to render thumbnails')
+            raise
+
+    def get_form(self, request, obj=None, **kwargs):
+        """Override metadata help text depending on file type."""
+        if obj and (obj.is_image or obj.is_video):
+            help_text = 'Additional information about the file, e.g. existing thumbnails.'
+            kwargs.update({'help_texts': {'metadata': help_text}})
+        return super().get_form(request, obj, **kwargs)
+
     view_on_site = False
     save_on_top = True
 
@@ -48,6 +82,9 @@ class FileAdmin(admin.ModelAdmin):
         'date_approved',
         'date_status_changed',
         'size_bytes',
+        'thumbnails',
+        'thumbnail',
+        'type',
         'user',
         'original_hash',
         'original_name',
@@ -59,6 +96,9 @@ class FileAdmin(admin.ModelAdmin):
         '^version__extension__name',
         'extensions__slug',
         'extensions__name',
+        'original_name',
+        'hash',
+        'source',
     )
 
     fieldsets = (
@@ -67,9 +107,8 @@ class FileAdmin(admin.ModelAdmin):
         {
             'fields': (
                 'id',
-                ('source', 'thumbnail'),
-                ('original_name', 'content_type'),
-                'type',
+                ('source', 'thumbnails', 'thumbnail'),
+                ('type', 'content_type', 'original_name'),
                 'status',
             )
         },
@@ -99,7 +138,7 @@ class FileAdmin(admin.ModelAdmin):
     )
 
     inlines = [FileValidationInlineAdmin]
-    actions = [scan_selected_files]
+    actions = [schedule_scan, make_thumbnails]
 
     def is_ok(self, obj):
         return obj.validation.is_ok if hasattr(obj, 'validation') else None
@@ -7,3 +7,10 @@ class FilesConfig(AppConfig):
 
     def ready(self):
         import files.signals  # noqa: F401
+
+        # Ubuntu 22.04 and earlier don't have WebP in `/etc/mime.types`,
+        # which makes .webp invalid from standpoint of file upload forms.
+        # FIXME: remove once the application is running on the next Ubuntu 24.04 LTS
+        import mimetypes
+
+        mimetypes.add_type('image/webp', '.webp', strict=True)
files/migrations/0008_alter_file_thumbnail.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+# Generated by Django 4.2.11 on 2024-04-23 10:31
+
+from django.db import migrations, models
+import files.models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('files', '0007_alter_file_status'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='file',
+            name='thumbnail',
+            field=models.ImageField(blank=True, editable=False, help_text='Thumbnail generated from uploaded image or video source file', max_length=256, null=True, upload_to=files.models.thumbnail_upload_to),
+        ),
+    ]
@@ -6,11 +6,8 @@ from django.contrib.auth import get_user_model
 from django.db import models
 
 from common.model_mixins import CreatedModifiedMixin, TrackChangesMixin
-from files.utils import get_sha256, guess_mimetype_from_ext
-from constants.base import (
-    FILE_STATUS_CHOICES,
-    FILE_TYPE_CHOICES,
-)
+from files.utils import get_sha256, guess_mimetype_from_ext, get_thumbnail_upload_to
+from constants.base import FILE_STATUS_CHOICES, FILE_TYPE_CHOICES
 import utils
 
 User = get_user_model()
@@ -41,15 +38,11 @@ def file_upload_to(instance, filename):
 
 
 def thumbnail_upload_to(instance, filename):
-    prefix = 'thumbnails/'
-    _hash = instance.hash.split(':')[-1]
-    extension = Path(filename).suffix
-    path = Path(prefix, _hash[:2], _hash).with_suffix(extension)
-    return path
+    return get_thumbnail_upload_to(instance.hash)
 
 
 class File(CreatedModifiedMixin, TrackChangesMixin, models.Model):
-    track_changes_to_fields = {'status', 'size_bytes', 'hash'}
+    track_changes_to_fields = {'status', 'size_bytes', 'hash', 'thumbnail', 'metadata'}
 
     TYPES = FILE_TYPE_CHOICES
     STATUSES = FILE_STATUS_CHOICES
@@ -63,7 +56,8 @@ class File(CreatedModifiedMixin, TrackChangesMixin, models.Model):
         null=True,
         blank=True,
         max_length=256,
-        help_text='Image thumbnail in case file is a video',
+        help_text='Thumbnail generated from uploaded image or video source file',
+        editable=False,
     )
     content_type = models.CharField(max_length=256, null=True, blank=True)
     type = models.PositiveSmallIntegerField(
@@ -203,6 +197,30 @@ class File(CreatedModifiedMixin, TrackChangesMixin, models.Model):
     def get_submit_url(self) -> str:
         return self.extension.get_draft_url()
 
+    def get_thumbnail_of_size(self, size_key: str) -> str:
+        """Return absolute path portion of the URL of a thumbnail of this file.
+
+        Fall back to the source file, if no thumbnail is stored.
+        Log absence of the thumbnail file instead of exploding somewhere in the templates.
+        """
+        # We don't (yet?) have thumbnails for anything other than images and videos.
+        assert self.is_image or self.is_video, f'File pk={self.pk} is neither image nor video'
+
+        try:
+            path = self.metadata['thumbnails'][size_key]['path']
+            return self.thumbnail.storage.url(path)
+        except (KeyError, TypeError):
+            log.exception(f'File pk={self.pk} is missing thumbnail "{size_key}": {self.metadata}')
+            return self.source.url
+
+    @property
+    def thumbnail_1080p_url(self) -> str:
+        return self.get_thumbnail_of_size('1080p')
+
+    @property
+    def thumbnail_360p_url(self) -> str:
+        return self.get_thumbnail_of_size('360p')
+
 
 class FileValidation(CreatedModifiedMixin, TrackChangesMixin, models.Model):
     track_changes_to_fields = {'is_ok', 'results'}
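
Note: `get_thumbnail_of_size()` relies on `File.metadata['thumbnails']` being filled in by the `make_thumbnails` task further down in this diff, and on `thumbnail.storage` to resolve a stored path to a URL. An illustrative sketch of that metadata shape (the concrete paths are made up; the keys follow this method and the admin thumbnails template below):

    # Illustrative only:
    metadata = {
        'thumbnails': {
            '1080p': {'path': 'thumbnails/fo/foobar.png', 'size': [1920, 1080]},
            '360p': {'path': 'thumbnails/fo/foobar.360p.png', 'size': [640, 360]},
        }
    }
    # file.thumbnail_360p_url -> storage URL for the 360p path,
    # or file.source.url as fallback when the key is missing.
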
@@ -1,10 +1,12 @@
 import logging
 
-from django.db.models.signals import pre_save, post_save, pre_delete
+from django.conf import settings
+from django.db.models.signals import pre_save, post_save, pre_delete, post_delete
 from django.dispatch import receiver
 
 import files.models
 import files.tasks
+import files.utils
 
 logger = logging.getLogger(__name__)
 
@@ -35,7 +37,55 @@ def _scan_new_file(
     schedule_scan(instance)
 
 
+def schedule_thumbnails(file: files.models.File) -> None:
+    """Schedule thumbnail generation for a given file."""
+    if not file.is_image and not file.is_video:
+        return
+    args = {'pk': file.pk, 'type': file.get_type_display()}
+    logger.info('Scheduling thumbnail generation for file pk=%(pk)s type=%(type)s', args)
+    verbose_name = f'make thumbnails for "{file.source.name}"'
+    files.tasks.make_thumbnails(file_id=file.pk, creator=file, verbose_name=verbose_name)
+
+
+def _schedule_thumbnails_when_created(
+    sender: object, instance: files.models.File, created: bool, **kwargs: object
+) -> None:
+    if not created:
+        return
+
+    schedule_thumbnails(instance)
+
+
+def _schedule_thumbnails_when_validated(
+    sender: object, instance: files.models.FileValidation, created: bool, **kwargs: object
+) -> None:
+    if not created:
+        return
+
+    if not instance.is_ok:
+        return
+
+    # Generate thumbnails if initial scan found no issues
+    schedule_thumbnails(instance.file)
+
+
+if settings.REQUIRE_FILE_VALIDATION:
+    # Only schedule thumbnails when file is validated
+    post_save.connect(_schedule_thumbnails_when_validated, sender=files.models.FileValidation)
+else:
+    # Schedule thumbnails when a new file is created
+    post_save.connect(_schedule_thumbnails_when_created, sender=files.models.File)
+
+
 @receiver(pre_delete, sender=files.models.File)
 @receiver(pre_delete, sender=files.models.FileValidation)
 def _log_deletion(sender: object, instance: files.models.File, **kwargs: object) -> None:
     instance.record_deletion()
+
+
+@receiver(post_delete, sender=files.models.File)
+def delete_orphaned_files(sender: object, instance: files.models.File, **kwargs: object) -> None:
+    """Delete source and thumbnail files from storage when File record is deleted."""
+    files.utils.delete_file_in_storage(instance.source.name)
+    files.utils.delete_file_in_storage(instance.thumbnail.name)
+    files.utils.delete_thumbnails(instance.metadata)
files/static/files/admin/file.css (new file, 11 lines)
@@ -0,0 +1,11 @@
+.file-thumbnail {
+  display: inline-block;
+  border: grey solid 1px;
+  margin-left: 0.5rem;
+}
+.file-thumbnail-size {
+  position: absolute;
+  background: rgba(255, 255, 255, 0.5);
+  padding-right: 0.5rem;
+  padding-left: 0.5rem;
+}
@@ -27,3 +27,45 @@ def clamdscan(file_id: int):
     file_validation.results = scan_result
     file_validation.is_ok = is_ok
     file_validation.save(update_fields={'results', 'is_ok', 'date_modified'})
+
+
+@background(schedule={'action': TaskSchedule.RESCHEDULE_EXISTING})
+def make_thumbnails(file_id: int) -> None:
+    """Generate thumbnails for a given file, store them in thumbnail and metadata columns."""
+    file = files.models.File.objects.get(pk=file_id)
+    args = {'pk': file_id, 'type': file.get_type_display()}
+
+    if not file.is_image and not file.is_video:
+        logger.error('File pk=%(pk)s of type "%(type)s" is neither an image nor a video', args)
+        return
+    if settings.REQUIRE_FILE_VALIDATION and not file.validation.is_ok:
+        logger.error("File pk={pk} is flagged, won't make thumbnails".format(**args))
+        return
+
+    # For an image, source of the thumbnails is the original image
+    source_path = file.source.path
+    thumbnail_field = file.thumbnail
+    unchanged_thumbnail = thumbnail_field.name
+
+    if file.is_video:
+        frame_path = files.utils.get_thumbnail_upload_to(file.hash)
+        # For a video, source of the thumbnails is a frame extracted with ffpeg
+        files.utils.extract_frame(source_path, frame_path)
+        thumbnail_field.name = frame_path
+        source_path = frame_path
+
+    thumbnails = files.utils.make_thumbnails(source_path, file.hash)
+
+    if not thumbnail_field.name:
+        thumbnail_field.name = thumbnails['1080p']['path']
+
+    update_fields = set()
+    if thumbnail_field.name != unchanged_thumbnail:
+        update_fields.add('thumbnail')
+    if file.metadata.get('thumbnails') != thumbnails:
+        file.metadata.update({'thumbnails': thumbnails})
+        update_fields.add('metadata')
+    if update_fields:
+        args['update_fields'] = update_fields
+        logger.info('Made thumbnails for file pk=%(pk)s, updating %(update_fields)s', args)
+        file.save(update_fields=update_fields)
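
Note: `make_thumbnails` is a `background_task` task, so it is invoked two ways in this diff: calling the task proxy queues a background job (as `schedule_thumbnails()` does in `files/signals.py`), while `.task_function(...)` runs the body synchronously (as the admin action and the tests below do). A short sketch:

    # Queue for the background worker (signals path):
    make_thumbnails(file_id=file.pk, creator=file, verbose_name=f'make thumbnails for "{file.source.name}"')

    # Run immediately, in-process (admin action and tests):
    make_thumbnails.task_function(file_id=file.pk)
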
8
files/templates/files/admin/thumbnails.html
Normal file
8
files/templates/files/admin/thumbnails.html
Normal file
@ -0,0 +1,8 @@
+<div class="file-thumbnails">
+  {% for size_key, thumb in file.metadata.thumbnails.items %}
+  <div class="file-thumbnail">
+    <span class="file-thumbnail-size">{{ thumb.size.0 }}x{{ thumb.size.1 }}px</span>
+    <img height="{% widthratio thumb.size.1 10 1 %}" src="{{ MEDIA_URL }}{{ thumb.path }}" title="{{ thumb.path }}">
+  </div>
+  {% endfor %}
+</div>
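The stylesheet and template above appear to back an admin-side preview of generated thumbnails. A minimal sketch of how such a template is commonly rendered from a read-only ModelAdmin field follows; the class, method, and Media wiring here are illustrative assumptions, not lines from this diff.

from django.conf import settings
from django.contrib import admin
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe

import files.models


class FileAdmin(admin.ModelAdmin):  # hypothetical sketch; the actual admin changes live elsewhere in this PR
    readonly_fields = ('thumbnails',)

    class Media:
        css = {'all': ('files/admin/file.css',)}

    def thumbnails(self, obj: 'files.models.File') -> str:
        # Render files/admin/thumbnails.html for this File's metadata.
        context = {'file': obj, 'MEDIA_URL': settings.MEDIA_URL}
        return mark_safe(render_to_string('files/admin/thumbnails.html', context))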
@@ -42,9 +42,11 @@ class FileTest(TestCase):
                 'new_state': {'status': 'Approved'},
                 'object': '<File: test.zip (Approved)>',
                 'old_state': {
-                    'status': 2,
                     'hash': 'foobar',
+                    'metadata': {},
                     'size_bytes': 7149,
+                    'status': 2,
+                    'thumbnail': '',
                 },
             }
         },
112 files/tests/test_tasks.py Normal file
@@ -0,0 +1,112 @@
+from pathlib import Path
+from unittest.mock import patch
+import logging
+
+from django.test import TestCase, override_settings
+
+from common.tests.factories.files import FileFactory
+from files.tasks import make_thumbnails
+import files.models
+
+TEST_MEDIA_DIR = Path(__file__).resolve().parent / 'media'
+
+
+@override_settings(MEDIA_ROOT=TEST_MEDIA_DIR, REQUIRE_FILE_VALIDATION=True)
+class TasksTest(TestCase):
+    def test_make_thumbnails_fails_when_no_validation(self):
+        file = FileFactory(original_hash='foobar', source='file/original_image_source.jpg')
+
+        with self.assertRaises(files.models.File.validation.RelatedObjectDoesNotExist):
+            make_thumbnails.task_function(file_id=file.pk)
+
+    @patch('files.utils.make_thumbnails')
+    def test_make_thumbnails_fails_when_validation_not_ok(self, mock_make_thumbnails):
+        file = FileFactory(original_hash='foobar', source='file/original_image_source.jpg')
+        files.models.FileValidation.objects.create(file=file, is_ok=False, results={})
+
+        with self.assertLogs(level=logging.ERROR) as logs:
+            make_thumbnails.task_function(file_id=file.pk)
+
+        self.maxDiff = None
+        self.assertEqual(
+            logs.output[0], f"ERROR:files.tasks:File pk={file.pk} is flagged, won't make thumbnails"
+        )
+
+        mock_make_thumbnails.assert_not_called()
+
+    @patch('files.utils.make_thumbnails')
+    def test_make_thumbnails_fails_when_not_image_or_video(self, mock_make_thumbnails):
+        file = FileFactory(
+            original_hash='foobar', source='file/source.zip', type=files.models.File.TYPES.THEME
+        )
+
+        with self.assertLogs(level=logging.ERROR) as logs:
+            make_thumbnails.task_function(file_id=file.pk)
+
+        self.maxDiff = None
+        self.assertEqual(
+            logs.output[0],
+            f'ERROR:files.tasks:File pk={file.pk} of type "Theme" is neither an image nor a video',
+        )
+
+        mock_make_thumbnails.assert_not_called()
+
+    @patch('files.utils.resize_image')
+    @patch('files.utils.Image')
+    def test_make_thumbnails_for_image(self, mock_image, mock_resize_image):
+        file = FileFactory(original_hash='foobar', source='file/original_image_source.jpg')
+        files.models.FileValidation.objects.create(file=file, is_ok=True, results={})
+        self.assertIsNone(file.thumbnail.name)
+        self.assertEqual(file.metadata, {})
+
+        make_thumbnails.task_function(file_id=file.pk)
+
+        mock_image.open.assert_called_once_with(
+            str(TEST_MEDIA_DIR / 'file' / 'original_image_source.jpg')
+        )
+        mock_image.open.return_value.close.assert_called_once()
+
+        file.refresh_from_db()
+        self.assertEqual(file.thumbnail.name, 'thumbnails/fo/foobar_1920x1080.png')
+        self.assertEqual(
+            file.metadata,
+            {
+                'thumbnails': {
+                    '1080p': {'path': 'thumbnails/fo/foobar_1920x1080.png', 'size': [1920, 1080]},
+                    '360p': {'path': 'thumbnails/fo/foobar_640x360.png', 'size': [640, 360]},
+                },
+            },
+        )
+
+    @patch('files.utils.resize_image')
+    @patch('files.utils.Image')
+    @patch('files.utils.FFmpeg')
+    def test_make_thumbnails_for_video(self, mock_ffmpeg, mock_image, mock_resize_image):
+        file = FileFactory(
+            original_hash='deadbeef', source='file/path.mp4', type=files.models.File.TYPES.VIDEO
+        )
+        files.models.FileValidation.objects.create(file=file, is_ok=True, results={})
+        self.assertIsNone(file.thumbnail.name)
+        self.assertEqual(file.metadata, {})
+
+        make_thumbnails.task_function(file_id=file.pk)
+
+        mock_ffmpeg.assert_called_once_with()
+        mock_image.open.assert_called_once_with(
+            str(TEST_MEDIA_DIR / 'thumbnails' / 'de' / 'deadbeef.png')
+        )
+        mock_image.open.return_value.close.assert_called_once()
+
+        file.refresh_from_db()
+        # Check that the extracted frame is stored instead of the large thumbnail
+        self.assertEqual(file.thumbnail.name, 'thumbnails/de/deadbeef.png')
+        # Check that File metadata and thumbnail fields were updated
+        self.assertEqual(
+            file.metadata,
+            {
+                'thumbnails': {
+                    '1080p': {'path': 'thumbnails/de/deadbeef_1920x1080.png', 'size': [1920, 1080]},
+                    '360p': {'path': 'thumbnails/de/deadbeef_640x360.png', 'size': [640, 360]},
+                },
+            },
+        )
@@ -1,6 +1,20 @@
+from pathlib import Path
+from unittest.mock import patch, ANY
+import tempfile
+
 from django.test import TestCase
 
-from files.utils import find_path_by_name, find_exact_path, filter_paths_by_ext
+from files.utils import (
+    extract_frame,
+    filter_paths_by_ext,
+    find_exact_path,
+    find_path_by_name,
+    get_thumbnail_upload_to,
+    make_thumbnails,
+)
+
+# Reusing test files from the extensions app
+TEST_FILES_DIR = Path(__file__).resolve().parent.parent.parent / 'extensions' / 'tests' / 'files'
 
 
 class UtilsTest(TestCase):
@@ -98,3 +112,49 @@ class UtilsTest(TestCase):
         ]
         paths = filter_paths_by_ext(name_list, '.md')
         self.assertEqual(list(paths), [])
+
+    def test_get_thumbnail_upload_to(self):
+        for file_hash, kwargs, expected in (
+            ('foobar', {}, 'thumbnails/fo/foobar.png'),
+            ('deadbeef', {'width': None, 'height': None}, 'thumbnails/de/deadbeef.png'),
+            ('deadbeef', {'width': 640, 'height': 360}, 'thumbnails/de/deadbeef_640x360.png'),
+        ):
+            with self.subTest(file_hash=file_hash, kwargs=kwargs):
+                self.assertEqual(get_thumbnail_upload_to(file_hash, **kwargs), expected)
+
+    @patch('files.utils.resize_image')
+    def test_make_thumbnails(self, mock_resize_image):
+        self.assertEqual(
+            {
+                '1080p': {'path': 'thumbnails/fo/foobar_1920x1080.png', 'size': [1920, 1080]},
+                '360p': {'path': 'thumbnails/fo/foobar_640x360.png', 'size': [640, 360]},
+            },
+            make_thumbnails(TEST_FILES_DIR / 'test_preview_image_0001.png', 'foobar'),
+        )
+
+        self.assertEqual(len(mock_resize_image.mock_calls), 2)
+        for expected_size in ([1920, 1080], [640, 360]):
+            with self.subTest(expected_size=expected_size):
+                mock_resize_image.assert_any_call(
+                    ANY,
+                    expected_size,
+                    ANY,
+                    output_format='PNG',
+                    quality=83,
+                    optimize=True,
+                    progressive=True,
+                )
+
+    @patch('files.utils.FFmpeg')
+    def test_extract_frame(self, mock_ffmpeg):
+        with tempfile.TemporaryDirectory() as output_dir:
+            extract_frame('path/to/source/video.mp4', output_dir + '/frame.png')
+            mock_ffmpeg.return_value.option.return_value.input.return_value.output.assert_any_call(
+                output_dir + '/frame.png', {'ss': '00:00:00.01', 'frames:v': 1, 'update': 'true'}
+            )
+
+        self.assertEqual(len(mock_ffmpeg.mock_calls), 5)
+        mock_ffmpeg.assert_any_call()
+        mock_ffmpeg.return_value.option.return_value.input.assert_any_call(
+            'path/to/source/video.mp4'
+        )
124 files/utils.py
@@ -1,18 +1,26 @@
 from pathlib import Path
+import datetime
 import hashlib
 import io
 import logging
 import mimetypes
 import os
 import os.path
+import tempfile
 import toml
 import typing
 import zipfile
 
+from PIL import Image
+from django.conf import settings
+from django.core.files.storage import default_storage
+from ffmpeg import FFmpeg, FFmpegFileNotFound, FFmpegInvalidCommand, FFmpegError
 from lxml import etree
 import clamd
 import magic
 
+from constants.base import THUMBNAIL_FORMAT, THUMBNAIL_SIZES, THUMBNAIL_QUALITY
+
 logger = logging.getLogger(__name__)
 MODULE_DIR = Path(__file__).resolve().parent
 THEME_SCHEMA = []
@@ -172,3 +180,119 @@ def run_clamdscan(abs_path: str) -> tuple:
         result = clamd_socket.instream(f)['stream']
     logger.info('File at path=%s scanned: %s', abs_path, result)
     return result
+
+
+def delete_file_in_storage(file_name: str) -> None:
+    """Delete a file from the default storage (on disk or otherwise)."""
+    if not file_name:
+        return
+
+    if not default_storage.exists(file_name):
+        logger.warning("%s doesn't exist in storage, nothing to delete", file_name)
+    else:
+        logger.info('Deleting %s from storage', file_name)
+        default_storage.delete(file_name)
+
+
+def delete_thumbnails(file_metadata: dict) -> None:
+    """Read thumbnail paths from given metadata and delete them from storage."""
+    thumbnails = file_metadata.get('thumbnails', {})
+    for _, thumb in thumbnails.items():
+        path = thumb.get('path', '')
+        if not path:
+            continue
+        delete_file_in_storage(path)
+
+
+def get_thumbnail_upload_to(file_hash: str, width: int = None, height: int = None) -> str:
+    """Return a full media path of a thumbnail.
+
+    Optionally, append thumbnail dimensions to the file name.
+    """
+    prefix = 'thumbnails/'
+    _hash = file_hash.split(':')[-1]
+    thumbnail_ext = THUMBNAIL_FORMAT.lower()
+    if thumbnail_ext == 'jpeg':
+        thumbnail_ext = 'jpg'
+    suffix = f'.{thumbnail_ext}'
+    size_suffix = f'_{width}x{height}' if width and height else ''
+    path = Path(prefix, _hash[:2], f'{_hash}{size_suffix}').with_suffix(suffix)
+    return str(path)
+
+
+def resize_image(image: Image, size: tuple, output, output_format: str = 'PNG', **output_params):
+    """Resize a given image to a given size and write it into the output file."""
+    start_t = datetime.datetime.now()
+
+    source_image = image.convert('RGBA' if output_format == 'PNG' else 'RGB')
+    source_image.thumbnail(size, Image.LANCZOS)
+    source_image.save(output, output_format, **output_params)
+
+    end_t = datetime.datetime.now()
+    args = {'source': image, 'size': size, 'time': (end_t - start_t).microseconds / 1000}
+    logger.info('%(source)s to %(size)s done in %(time)sms', args)
+
+
+def make_thumbnails(
+    source_path: str, file_hash: str, output_format: str = THUMBNAIL_FORMAT
+) -> dict:
+    """Generate thumbnail files for given file and a predefined list of dimensions.
+
+    Resulting thumbnail paths are derived from the given file hash and thumbnail sizes.
+    Return a dict of size keys to output paths of generated thumbnail images.
+    """
+    start_t = datetime.datetime.now()
+    thumbnails = {}
+    abs_path = os.path.join(settings.MEDIA_ROOT, source_path)
+    image = Image.open(abs_path)
+    for size_key, size in THUMBNAIL_SIZES.items():
+        w, h = size
+        output_path = get_thumbnail_upload_to(file_hash, width=w, height=h)
+        with tempfile.TemporaryFile() as f:
+            logger.info('Resizing %s to %s (%s)', abs_path, size, output_format)
+            resize_image(
+                image,
+                size,
+                f,
+                output_format=output_format,
+                quality=THUMBNAIL_QUALITY,
+                optimize=True,
+                progressive=True,
+            )
+            logger.info('Saving a thumbnail to %s', output_path)
+            # Overwrite files instead of allowing storage to generate a deduplicating suffix
+            if default_storage.exists(output_path):
+                logger.warning('%s exists, overwriting', output_path)
+                default_storage.delete(output_path)
+            default_storage.save(output_path, f)
+        thumbnails[size_key] = {'size': size, 'path': output_path}
+    image.close()
+
+    end_t = datetime.datetime.now()
+    args = {'source': source_path, 'time': (end_t - start_t).microseconds / 1000}
+    logger.info('%(source)s done in %(time)sms', args)
+    return thumbnails
+
+
+def extract_frame(source_path: str, output_path: str, at_time: str = '00:00:00.01'):
+    """Extract a single frame of a video at a given path, write it to the given output path."""
+    try:
+        start_t = datetime.datetime.now()
+        abs_path = os.path.join(settings.MEDIA_ROOT, output_path)
+        ffmpeg = (
+            FFmpeg()
+            .option('y')
+            .input(source_path)
+            .output(abs_path, {'ss': at_time, 'frames:v': 1, 'update': 'true'})
+        )
+        output_dir = os.path.dirname(abs_path)
+        if not os.path.isdir(output_dir):
+            os.mkdir(output_dir)
+        ffmpeg.execute()
+
+        end_t = datetime.datetime.now()
+        args = {'source': source_path, 'time': (end_t - start_t).microseconds / 1000}
+        logger.info('%(source)s done in %(time)sms', args)
+    except (FFmpegError, FFmpegFileNotFound, FFmpegInvalidCommand) as e:
+        logger.exception(f'Failed to extract a frame: {e.message}, {" ".join(ffmpeg.arguments)}')
+        raise
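The helpers above import THUMBNAIL_FORMAT, THUMBNAIL_SIZES and THUMBNAIL_QUALITY from constants.base, which is not visible in this part of the diff. Judging by the tests in this changeset (PNG output, quality 83, 1920x1080 and 640x360 variants), the constants presumably look roughly like the following sketch.

# Inferred from the tests in this changeset; the authoritative values live in constants/base.py.
THUMBNAIL_FORMAT = 'PNG'
THUMBNAIL_QUALITY = 83
THUMBNAIL_SIZES = {
    # size key -> [width, height]
    '1080p': [1920, 1080],
    '360p': [640, 360],
}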
@@ -11,6 +11,7 @@
     with_items:
       - clamav-daemon
       - clamav-unofficial-sigs
+      - ffmpeg
       - git
      - libpq-dev
       - nginx-full
@@ -48,18 +49,7 @@
     tags:
       - dotenv
 
-  - name: Copying ASGI config files
-    ansible.builtin.template:
-      src: "{{ item.src }}"
-      dest: "{{ item.dest }}"
-      mode: 0644
-    loop:
-      - { src: templates/asgi/asgi.service, dest: "/etc/systemd/system/{{ service_name }}.service" }
-    notify:
-      - restart service
-    tags:
-      - asgi
-      - gunicorn
+  - import_tasks: tasks/configure_uwsgi.yaml
 
   - import_tasks: tasks/deploy.yaml
 
22 playbooks/tasks/configure_uwsgi.yaml Normal file
@@ -0,0 +1,22 @@
+---
+- name: Ensure /etc/uwsgi directory
+  ansible.builtin.file:
+    path: /etc/uwsgi
+    owner: root
+    group: root
+    state: directory
+    mode: '0755'
+  tags:
+    - uwsgi
+- name: Copying uWSGI config files
+  ansible.builtin.template:
+    src: "{{ item.src }}"
+    dest: "{{ item.dest }}"
+    mode: 0644
+  loop:
+    - { src: templates/uwsgi/uwsgi.ini, dest: "/etc/uwsgi/{{ service_name }}.ini" }
+    - { src: templates/uwsgi/uwsgi.service, dest: "/etc/systemd/system/{{ service_name }}.service" }
+  notify:
+    - restart service
+  tags:
+    - uwsgi
@@ -1,24 +0,0 @@
-[Unit]
-Description={{ project_name }} {{ env|capitalize }}
-After=syslog.target network.target
-
-[Service]
-User={{ user }}
-Group={{ group }}
-EnvironmentFile={{ env_file }}
-ExecStart={{ dir.source }}/.venv/bin/gunicorn {{ asgi_module }} -b 127.0.0.1:{{ port }} --max-requests {{ max_requests }} --max-requests-jitter {{ max_requests_jitter }} --workers {{ workers }} -k uvicorn.workers.UvicornWorker
-ExecReload=/bin/kill -s HUP $MAINPID
-Restart=always
-KillMode=mixed
-Type=notify
-SyslogIdentifier={{ service_name }}
-NotifyAccess=all
-WorkingDirectory={{ dir.source }}
-
-PrivateTmp=true
-ProtectHome=true
-ProtectSystem=full
-CapabilityBoundingSet=~CAP_SYS_ADMIN
-
-[Install]
-WantedBy=multi-user.target
34 playbooks/templates/uwsgi/uwsgi.ini Normal file
@@ -0,0 +1,34 @@
+[uwsgi]
+uid = {{ user }}
+gid = {{ group }}
+pidfile = {{ uwsgi_pid }}
+env = DJANGO_SETTINGS_MODULE={{ django_settings_module }}
+env = LANG=en_US.UTF-8
+
+# Django-related settings
+# the base directory (full path)
+chdir = {{ dir.source }}
+# Django's wsgi file
+module = {{ uwsgi_module }}
+# the virtualenv (full path)
+virtualenv = {{ dir.source }}/.venv/
+
+buffer-size = 32768
+max-requests = 5000
+
+# process-related settings
+master = true
+# maximum number of worker processes
+processes = 4
+# listen on HTTP port, use Keep-Alive
+http11-socket = 127.0.0.1:{{ port }}
+http-keepalive = 1
+
+# clear environment on exit
+vacuum = true
+# silence "OSError: write error" generated by timing out clients
+disable-write-exception = true
+# running behind a proxy_pass, X-FORWARDED-FOR is the "real" IP address that should be logged
+log-x-forwarded-for = true
+# disable request logging: web server in front of uWSGI does this job better
+disable-logging = true
23 playbooks/templates/uwsgi/uwsgi.service Normal file
@@ -0,0 +1,23 @@
+[Unit]
+Description={{ project_name }} {{ env|capitalize }} service.
+After=syslog.target
+
+[Service]
+User={{ user }}
+Group={{ group }}
+EnvironmentFile={{ env_file }}
+ExecStart={{ dir.source }}/.venv/bin/uwsgi --ini /etc/uwsgi/{{ service_name }}.ini
+Restart=always
+KillSignal=SIGQUIT
+Type=notify
+SyslogIdentifier={{ service_name }}
+NotifyAccess=all
+WorkingDirectory={{ dir.source }}
+
+PrivateTmp=true
+ProtectHome=true
+ProtectSystem=full
+CapabilityBoundingSet=~CAP_SYS_ADMIN
+
+[Install]
+WantedBy=multi-user.target
@@ -3,9 +3,8 @@ project_name: Blender Extensions
 project_slug: blender-extensions
 service_name: "{{ project_slug }}-{{ env }}"
 background_service_name: '{{ service_name }}-background.service'
-
-asgi_module: blender_extensions.asgi:application
 django_settings_module: blender_extensions.settings
+uwsgi_module: blender_extensions.wsgi:application
 max_requests: 1000
 max_requests_jitter: 50
 port: 8200
@@ -21,6 +20,7 @@ dir:
   errors: "/var/www/{{ service_name }}/html/errors"
 
 env_file: "{{ dir.source }}/.env"
+uwsgi_pid: "{{ dir.source }}/{{ service_name }}.pid"
 
 nginx:
   user: www-data
@@ -40,6 +40,7 @@ mistune==2.0.4
 multidict==6.0.2
 oauthlib==3.2.0
 Pillow==9.2.0
+python-ffmpeg==2.0.12
 python-magic==0.4.27
 requests==2.28.1
 requests-oauthlib==1.3.1
@@ -49,6 +50,5 @@ six==1.16.0
 sqlparse==0.4.2
 toml==0.10.2
 urllib3==1.26.11
-uvicorn==0.18.2
 webencodings==0.5.1
 yarl==1.7.2
@@ -1,3 +1,3 @@
 -r requirements.txt
 psycopg2==2.9.3
-gunicorn==20.1.0
+uwsgi==2.0.23
@@ -76,12 +76,14 @@
   <h3>Previews Pending Approval</h3>
   <div class="row">
     {% for preview in pending_previews %}
+      {% with thumbnail_1080p_url=preview.file.thumbnail_1080p_url %}
       <div class="col-md-3">
         <a href="{{ preview.file.source.url }}" class="d-block mb-2" title="{{ preview.caption }}" target="_blank">
-          <img class="img-fluid rounded" src="{{ preview.file.source.url }}" alt="{{ preview.caption }}">
+          <img class="img-fluid rounded" src="{{ thumbnail_1080p_url }}" alt="{{ preview.caption }}">
         </a>
         {% include "common/components/status.html" with object=preview.file class="d-block" %}
       </div>
+      {% endwith %}
     {% endfor %}
   </div>
 </section>
@@ -7,6 +7,7 @@ from django.contrib.admin.utils import NestedObjects
 from django.contrib.auth.models import AbstractUser
 from django.db import models, DEFAULT_DB_ALIAS, transaction
 from django.templatetags.static import static
+from django.utils.dateparse import parse_datetime
 
 from common.model_mixins import TrackChangesMixin
 from files.utils import get_sha256_from_value
@@ -89,7 +90,7 @@ class User(TrackChangesMixin, AbstractUser):
             date_deletion_requested,
         )
         self.is_active = False
-        self.date_deletion_requested = date_deletion_requested
+        self.date_deletion_requested = parse_datetime(date_deletion_requested)
         self.save(update_fields=['is_active', 'date_deletion_requested'])
 
     @transaction.atomic
@@ -6,6 +6,7 @@ from django.contrib.auth import get_user_model
 from django.contrib.auth.models import Group
 from django.db.models.signals import m2m_changed, pre_save
 from django.dispatch import receiver
+from django.utils.dateparse import parse_datetime
 
 from blender_id_oauth_client import signals as bid_signals
 
@@ -36,7 +37,7 @@ def update_user(
     Copy 'full_name' from the received 'oauth_info' and attempt to copy avatar from Blender ID.
     """
     instance.full_name = oauth_info.get('full_name') or ''
-    instance.confirmed_email_at = oauth_info.get('confirmed_email_at')
+    instance.confirmed_email_at = parse_datetime(oauth_info.get('confirmed_email_at') or '')
     instance.save()
 
     bid.copy_avatar_from_blender_id(user=instance)
@@ -11,6 +11,7 @@ from django.core.exceptions import ObjectDoesNotExist
 from django.db.utils import IntegrityError
 from django.http import HttpResponse, HttpResponseBadRequest
 from django.http.request import HttpRequest
+from django.utils.dateparse import parse_datetime
 from django.views.decorators.csrf import csrf_exempt
 from django.views.decorators.http import require_POST
 
@@ -107,7 +108,7 @@ def handle_user_modified(payload: Dict[Any, Any]) -> None:
         update_fields.add('full_name')
 
     if 'confirmed_email_at' in payload:
-        user.confirmed_email_at = payload['confirmed_email_at']
+        user.confirmed_email_at = parse_datetime(payload.get('confirmed_email_at') or '')
         update_fields.add('confirmed_email_at')
 
     if update_fields:
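The parse_datetime() changes in the hunks above convert incoming ISO 8601 strings into datetime objects before saving, instead of assigning raw strings to DateTimeFields. A small illustration of the behaviour relied on here, and of why the `or ''` guard matters (parse_datetime returns None for an empty string rather than raising):

from django.utils.dateparse import parse_datetime

assert parse_datetime('') is None  # absent value ends up stored as NULL
confirmed = parse_datetime('2022-08-01T12:30:00+00:00')  # ISO 8601 with timezone offset
assert confirmed is not None and confirmed.year == 2022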