Notification emails #80
@@ -7,7 +7,7 @@ from common.tests.factories.files import FileFactory
from common.tests.factories.teams import TeamFactory
from files.models import File
from constants.version_permissions import VERSION_PERMISSION_FILE, VERSION_PERMISSION_NETWORK
from constants.licenses import LICENSE_GPL2
from constants.licenses import LICENSE_GPL2, LICENSE_GPL3
from extensions.models import Extension, Tag

FILE_SOURCES = {
@@ -54,6 +54,7 @@ to setup the `addon preferences`.

...
'''
LICENSES = (LICENSE_GPL2.id, LICENSE_GPL3.id)


class Command(BaseCommand):
@@ -100,7 +101,7 @@ class Command(BaseCommand):
# Create a few publicly listed extensions
for i in range(10):
extension__type = random.choice(Extension.TYPES)[0]
create_approved_version(
version = create_approved_version(
file__status=File.STATUSES.APPROVED,
# extension__status=Extension.STATUSES.APPROVED,
extension__type=extension__type,
@@ -116,16 +117,20 @@ class Command(BaseCommand):
)
],
)
for i in range(random.randint(1, len(LICENSES))):
version.licenses.add(LICENSES[i])

# Create a few unlisted extension versions
for i in range(5):
extension__type = random.choice(Extension.TYPES)[0]
create_version(
version = create_version(
file__status=random.choice(
(File.STATUSES.DISABLED, File.STATUSES.DISABLED_BY_AUTHOR)
),
tags=random.sample(tags[extension__type], k=1),
)
for i in range(random.randint(1, len(LICENSES))):
version.licenses.add(LICENSES[i])

example_version.extension.average_score = 5.0
example_version.extension.save(update_fields={'average_score'})
@@ -1,7 +1,6 @@
from urllib.parse import urljoin, urlparse
import json
import logging
import os

from django.conf import settings
from django.contrib.sites.shortcuts import get_current_site
@@ -30,8 +29,8 @@ def absolutify(url: str, request=None) -> str:
return url

proto = 'http' if settings.DEBUG else 'https'

return urljoin(f'{proto}://', get_current_site(request).domain, url)
domain = get_current_site(request).domain
return urljoin(f'{proto}://{domain}', url)

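For context on the `absolutify()` fix above: `urljoin()` only joins a base and a relative URL, so in the old call the third positional argument landed in the `allow_fragments` slot and the path was silently dropped. A minimal sketch of the fixed two-argument form (the domain is invented for illustration):

```
from urllib.parse import urljoin

base = 'https://extensions.blender.org'   # assumed site domain, illustrative only
print(urljoin(base, '/add-ons/'))         # -> https://extensions.blender.org/add-ons/
```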
@register.simple_tag(takes_context=True)
@@ -41,31 +40,6 @@ def absolute_url(context, path: str) -> str:
return absolutify(path, request=request)


# A (temporary?) copy of this is in services/utils.py. See bug 1055654.
def user_media_path(what):
"""Make it possible to override storage paths in settings.

By default, all storage paths are in the MEDIA_ROOT.

This is backwards compatible.

"""
default = os.path.join(settings.MEDIA_ROOT, what)
key = f'{what.upper()}_PATH'
return getattr(settings, key, default)


# A (temporary?) copy of this is in services/utils.py. See bug 1055654.
def user_media_url(what):
"""
Generate default media url, and make possible to override it from
settings.
"""
default = f'{settings.MEDIA_URL}{what}/'
key = '{}_URL'.format(what.upper().replace('-', '_'))
return getattr(settings, key, default)


class PaginationRenderer:
def __init__(self, pager):
self.pager = pager
@@ -58,6 +58,7 @@ EXTENSION_TYPE_PLURAL = {
EXTENSION_TYPE_CHOICES.THEME: _('Themes'),
}
EXTENSION_SLUGS_PATH = '|'.join(EXTENSION_TYPE_SLUGS.values())
EXTENSION_SLUG_TYPES = {v: k for k, v in EXTENSION_TYPE_SLUGS_SINGULAR.items()}

ALLOWED_EXTENSION_MIMETYPES = ('application/zip', )
# FIXME: this controls the initial widget rendered server-side, and server-side validation
@@ -135,6 +135,7 @@ class VersionAdmin(admin.ModelAdmin):
'tagline',
'date_created',
'date_modified',
'date_deleted',
'average_score',
'download_count',
)
@@ -146,7 +147,7 @@ class VersionAdmin(admin.ModelAdmin):
'fields': (
'id',
'tagline',
('date_created', 'date_modified'),
('date_created', 'date_modified', 'date_deleted'),
'extension',
'version',
'blender_version_min',
@@ -142,6 +142,7 @@ class Extension(
'description',
'support',
'website',
'date_deleted',
}
TYPES = EXTENSION_TYPE_CHOICES
STATUSES = EXTENSION_STATUS_CHOICES
@@ -189,7 +190,8 @@ class Extension(
ordering = ['-average_score', '-date_created', 'name']

def __str__(self):
return f'{self.get_type_display()} "{self.name}"'
label_deleted = f'{self.date_deleted and " (DELETED ❌)" or ""}'
return f'{self.get_type_display()} "{self.name}"{label_deleted}'

@property
def type_slug(self) -> str:
@@ -580,7 +582,8 @@ class Version(CreatedModifiedMixin, RatingMixin, TrackChangesMixin, SoftDeleteMi
self.tags.add(tag)

def __str__(self) -> str:
return f'{self.extension} v{self.version}'
label_deleted = f'{self.date_deleted and " (DELETED ❌)" or ""}'
return f'{self.extension} v{self.version}{label_deleted}'

def is_listed(self):
# To be public, a version must not be deleted, must belong to a public
@@ -7,7 +7,6 @@ from django.dispatch import receiver

from constants.activity import Flag
import extensions.models
import extensions.tasks
import files.models

@@ -5,6 +5,12 @@
{% block page_title %}{{ extension.name }}{% endblock page_title %}

{% block content %}
{% if extension.latest_version %}
{% with latest=extension.latest_version %}
{% include "files/components/scan_details.html" with file=latest.file %}
{% endwith %}
{% endif %}

{% has_maintainer extension as is_maintainer %}
{% with latest=extension.latest_version %}
BIN extensions/tests/files/addon-without-dir.zip (new file, binary not shown)
BIN extensions/tests/files/invalid-addon-dir-no-init.zip (new file, binary not shown)
BIN extensions/tests/files/invalid-addon-no-init.zip (new file, binary not shown)
BIN extensions/tests/files/invalid-manifest-path.zip (new file, binary not shown)
BIN extensions/tests/files/invalid-manifest-toml.zip (new file, binary not shown)
BIN extensions/tests/files/invalid-no-manifest.zip (new file, binary not shown)
BIN extensions/tests/files/invalid-theme-multiple-xmls.zip (new file, binary not shown)
@@ -74,6 +74,14 @@ class CreateFileTest(TestCase):

version = combined_meta_data.get("version", "0.1.0")
extension_id = combined_meta_data.get("id", "foobar").strip()
type_slug = combined_meta_data['type']
init_path = None

if type_slug == 'add-on':
# Add the required __init__.py file
init_path = os.path.join(self.temp_directory, '__init__.py')
with open(init_path, 'w') as init_file:
init_file.write('')

with open(manifest_path, "w") as manifest_file:
toml.dump(combined_meta_data, manifest_file)
@@ -81,6 +89,10 @@ class CreateFileTest(TestCase):
with zipfile.ZipFile(output_path, "w") as my_zip:
arcname = f"{extension_id}-{version}/{os.path.basename(manifest_path)}"
my_zip.write(manifest_path, arcname=arcname)
if init_path:
# Write the __init__.py file too
arcname = f"{extension_id}-{version}/{os.path.basename(init_path)}"
my_zip.write(init_path, arcname=arcname)

os.remove(manifest_path)
return output_path
@@ -259,7 +271,7 @@ class ValidateManifestTest(CreateFileTest):
self.client.force_login(user)

file_data = {
"id": "<b>id-with-hyphens</b>",
"id": "id-with-hyphens",
}

bad_file = self._create_file_from_data("theme.zip", file_data, self.user)
@@ -271,7 +283,29 @@ class ValidateManifestTest(CreateFileTest):
self.assertEqual(response.status_code, 200)
error = response.context['form'].errors.get('source')
self.assertEqual(len(error), 1)
self.assertIn('"<b>id-with-hyphens</b>"', error[0])
self.assertIn('"id-with-hyphens"', error[0])

def test_name_left_as_is(self):
user = UserFactory()
self.client.force_login(user)

file_data = {
# If we ever need to restrict content of Extension's name,
# it should be done at the manifest validation step.
"name": "Name. - With Extra spaces and other characters Ж",
}

extension_file = self._create_file_from_data("theme.zip", file_data, self.user)
with open(extension_file, 'rb') as fp:
response = self.client.post(
self._get_submit_url(), {'source': fp, 'agreed_with_terms': True}
)

self.assertEqual(response.status_code, 302)
file = File.objects.first()
extension = file.extension
self.assertEqual(extension.slug, 'name-with-extra-spaces-and-other-characters')
self.assertEqual(extension.name, 'Name. - With Extra spaces and other characters Ж')


class ValidateManifestFields(TestCase):
@@ -47,6 +47,7 @@ class ExtensionTest(TestCase):
'name': 'Extension name',
'status': 1,
'support': 'https://example.com/',
'date_deleted': None,
},
}
},
@@ -29,6 +29,7 @@ EXPECTED_EXTENSION_DATA = {
'size_bytes': 53959,
'tags': ['Sequencer'],
'version_str': '0.1.0',
'slug': 'edit-breakdown',
},
'blender_gis-2.2.8.zip': {
'metadata': {
@@ -43,6 +44,7 @@ EXPECTED_EXTENSION_DATA = {
'size_bytes': 434471,
'tags': ['3D View'],
'version_str': '2.2.8',
'slug': 'blendergis',
},
'amaranth-1.0.8.zip': {
'metadata': {
@@ -57,8 +59,30 @@ EXPECTED_EXTENSION_DATA = {
'size_bytes': 72865,
'tags': [],
'version_str': '1.0.8',
'slug': 'amaranth',
},
}
EXPECTED_VALIDATION_ERRORS = {
'empty.txt': {'source': ['Only .zip files are accepted.']},
'empty.zip': {'source': ['The submitted file is empty.']},
'invalid-archive.zip': {'source': ['Only .zip files are accepted.']},
'invalid-manifest-path.zip': {
'source': [
'The manifest file should be at the top level of the archive, or one level deep.',
],
},
'invalid-addon-no-init.zip': {
'source': ['An add-on should have an __init__.py file.'],
},
'invalid-addon-dir-no-init.zip': {
'source': ['An add-on should have an __init__.py file.'],
},
'invalid-no-manifest.zip': {
'source': ['The manifest file is missing.'],
},
'invalid-manifest-toml.zip': {'source': ['Could not parse the manifest file.']},
'invalid-theme-multiple-xmls.zip': {'source': ['A theme should have exactly one XML file.']},
}


class SubmitFileTest(TestCase):
@@ -75,6 +99,7 @@ class SubmitFileTest(TestCase):
blender_version_min: str,
size_bytes: int,
file_hash: str,
slug: str,
**other_metadata,
):
self.assertEqual(File.objects.count(), 0)
@@ -88,6 +113,9 @@ class SubmitFileTest(TestCase):
self.assertEqual(File.objects.count(), 1)
file = File.objects.first()
self.assertEqual(response['Location'], file.get_submit_url())
extension = file.extension
self.assertEqual(extension.slug, slug)
self.assertEqual(extension.name, name)
self.assertEqual(file.original_name, file_name)
self.assertEqual(file.size_bytes, size_bytes)
self.assertEqual(file.original_hash, file_hash)
@@ -116,46 +144,28 @@ class SubmitFileTest(TestCase):
{'agreed_with_terms': ['This field is required.']},
)

def test_validation_errors_invalid_extension(self):
def test_validation_errors(self):
self.assertEqual(Extension.objects.count(), 0)
user = UserFactory()
self.client.force_login(user)

with open(TEST_FILES_DIR / 'empty.txt', 'rb') as fp:
response = self.client.post(self.url, {'source': fp, 'agreed_with_terms': True})
for test_archive, expected_errors in EXPECTED_VALIDATION_ERRORS.items():
with self.subTest(test_archive=test_archive):
with open(TEST_FILES_DIR / test_archive, 'rb') as fp:
response = self.client.post(self.url, {'source': fp, 'agreed_with_terms': True})

self.assertEqual(response.status_code, 200)
self.assertDictEqual(
response.context['form'].errors,
{'source': ['Only .zip files are accepted.']},
)
self.assertEqual(response.status_code, 200)
self.assertDictEqual(response.context['form'].errors, expected_errors)

def test_validation_errors_empty_file(self):
def test_addon_without_top_level_directory(self):
self.assertEqual(Extension.objects.count(), 0)
user = UserFactory()
self.client.force_login(user)

with open(TEST_FILES_DIR / 'empty.zip', 'rb') as fp:
with open(TEST_FILES_DIR / 'addon-without-dir.zip', 'rb') as fp:
response = self.client.post(self.url, {'source': fp, 'agreed_with_terms': True})

self.assertEqual(response.status_code, 200)
self.assertDictEqual(
response.context['form'].errors,
{'source': ['The submitted file is empty.']},
)

def test_validation_errors_not_actually_a_zip(self):
self.assertEqual(Extension.objects.count(), 0)
user = UserFactory()
self.client.force_login(user)

with open(TEST_FILES_DIR / 'not_a.zip', 'rb') as fp:
response = self.client.post(self.url, {'source': fp, 'agreed_with_terms': True})

self.assertDictEqual(
response.context['form'].errors,
{'source': ['Only .zip files are accepted.']},
)
self.assertEqual(response.status_code, 302)

def test_theme_file(self):
self.assertEqual(File.objects.count(), 0)
@@ -52,8 +52,10 @@ class PublicViewsTest(_BaseTestCase):
response = self.client.get(url, HTTP_ACCEPT=HTTP_ACCEPT)
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
self.assertEqual(len(response.json()), 3)
for _, v in response.json().items():
json = response.json()
self.assertEqual(len(json['data']), 3)
for v in json['data']:
self.assertIn('id', v)
self.assertIn('name', v)
self.assertIn('tagline', v)
self.assertIn('version', v)
@@ -191,7 +193,7 @@ class ListedExtensionsTest(_BaseTestCase):
self.assertEqual(response['Content-Type'], 'application/json')

# Basic sanity check to make sure we are getting the result of listed
listed_count = len(response.json())
listed_count = len(response.json()['data'])
self.assertEqual(Extension.objects.listed.count(), listed_count)
return listed_count
@@ -18,7 +18,6 @@ urlpatterns = [
path('api/v1/extensions/', api.ExtensionsAPIView.as_view(), name='api'),
# Public pages
path('', public.HomeView.as_view(), name='home'),
path('', api.ExtensionsAPIView.as_view(), name='home-api'),
path('search/', public.SearchView.as_view(), name='search'),
path('author/<int:user_id>/', public.SearchView.as_view(), name='by-author'),
path('search/', public.SearchView.as_view(), name='search'),
@@ -53,6 +53,7 @@ class ListedExtensionsSerializer(serializers.ModelSerializer):
return {}

data = {
'id': instance.extension_id,
'schema_version': instance.latest_version.schema_version,
'name': instance.name,
'version': instance.latest_version.version,
@@ -75,16 +76,12 @@ class ListedExtensionsSerializer(serializers.ModelSerializer):
'tags': [str(tag) for tag in instance.latest_version.tags.all()],
}

return {instance.extension_id: clean_json_dictionary_from_optional_fields(data)}
return clean_json_dictionary_from_optional_fields(data)


class ExtensionsAPIView(APIView):
serializer_class = ListedExtensionsSerializer

@staticmethod
def _convert_list_to_dict(data):
return {k: v for d in data for k, v in d.items()}

@extend_schema(
parameters=[
OpenApiParameter(
@@ -99,5 +96,12 @@ class ExtensionsAPIView(APIView):
serializer = self.serializer_class(
Extension.objects.listed, blender_version=blender_version, request=request, many=True
)
data_as_dict = self._convert_list_to_dict(serializer.data)
return Response(data_as_dict)
data = serializer.data
return Response(
{
# TODO implement extension blocking by moderators
'blocklist': [],
'data': data,
'version': 'v1',
}
)
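To make the API payload change concrete: the endpoint used to return one mapping keyed by extension id, and now returns a wrapper with `blocklist`, `data` and `version` keys, where `data` is a list of flat per-extension dictionaries. A sketch of the new shape (ids and field values invented for illustration):

```
# Illustrative response body only -- values are made up.
response_body = {
    'blocklist': [],  # to be populated once moderator blocking is implemented
    'data': [
        {'id': 'amaranth', 'name': 'Amaranth', 'version': '1.0.8', 'tags': []},
        # ...one dictionary per listed extension, no longer keyed by extension id
    ],
    'version': 'v1',
}
```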
@@ -1,6 +1,28 @@
from django.contrib import admin
import background_task.admin
import background_task.models

from .models import File
from .models import File, FileValidation
import files.signals


def scan_selected_files(self, request, queryset):
"""Scan selected files."""
for instance in queryset:
files.signals.schedule_scan(instance)


class FileValidationInlineAdmin(admin.StackedInline):
model = FileValidation
readonly_fields = ('date_created', 'date_modified', 'is_ok', 'results')
extra = 0

def _nope(self, request, obj):
return False

has_add_permission = _nope
has_change_permission = _nope
has_delete_permission = _nope


@admin.register(File)
@@ -9,13 +31,14 @@ class FileAdmin(admin.ModelAdmin):
save_on_top = True

list_filter = (
'validation__is_ok',
'type',
'status',
'date_status_changed',
'date_approved',
'date_deleted',
)
list_display = ('original_name', 'extension', 'user', 'date_created', 'type', 'status')
list_display = ('original_name', 'extension', 'user', 'date_created', 'type', 'status', 'is_ok')

list_select_related = ('version__extension', 'user')

@@ -77,3 +100,56 @@ class FileAdmin(admin.ModelAdmin):
},
),
)

inlines = [FileValidationInlineAdmin]
actions = [scan_selected_files]

def is_ok(self, obj):
return obj.validation.is_ok if hasattr(obj, 'validation') else None

is_ok.boolean = True


try:
admin.site.unregister(background_task.models.Task)
admin.site.unregister(background_task.models.CompletedTask)
except admin.sites.NotRegistered:
pass


class TaskMixin:
"""Modify a few properties of background tasks displayed in admin."""

def no_errors(self, obj):
"""Replace background_task's "has_error".

Make Django's red/green boolean icons less confusing
in the context of "there's an error during task run".
"""
return not bool(obj.last_error)

no_errors.boolean = True


@admin.register(background_task.models.Task)
@admin.register(background_task.models.CompletedTask)
class TaskAdmin(background_task.admin.TaskAdmin, TaskMixin):
date_hierarchy = 'run_at'
list_display = [
'run_at',
'task_name',
'task_params',
'attempts',
'no_errors',
'locked_by',
'locked_by_pid_running',
]
list_filter = (
'task_name',
'run_at',
'failed_at',
'locked_at',
'attempts',
'creator_content_type',
)
search_fields = ['task_name', 'task_params', 'last_error', 'verbose_name']
@@ -12,10 +12,7 @@ from .validators import (
FileMIMETypeValidator,
ManifestValidator,
)
from constants.base import (
EXTENSION_TYPE_SLUGS_SINGULAR,
ALLOWED_EXTENSION_MIMETYPES,
)
from constants.base import EXTENSION_SLUG_TYPES, ALLOWED_EXTENSION_MIMETYPES
import files.models
import files.utils as utils

@@ -28,6 +25,20 @@ logger = logging.getLogger(__name__)
class FileForm(forms.ModelForm):
msg_only_zip_files = _('Only .zip files are accepted.')

# Mimicking how django.forms.fields.Field handles validation error messages.
# TODO: maybe this should be a custom SourceFileField with all these validators and messages
error_messages = {
'invalid_manifest_path': _(
'The manifest file should be at the top level of the archive, or one level deep.'
),
# TODO: surface TOML parsing errors?
'invalid_manifest_toml': _('Could not parse the manifest file.'),
'invalid_missing_init': _('An add-on should have an __init__.py file.'),
'missing_or_multiple_theme_xml': _('A theme should have exactly one XML file.'),
'invalid_zip_archive': msg_only_zip_files,
'missing_manifest_toml': _('The manifest file is missing.'),
}

class Meta:
model = files.models.File
fields = ('source', 'type', 'metadata', 'agreed_with_terms', 'user')
@@ -38,7 +49,7 @@ class FileForm(forms.ModelForm):
validators=[
FileMIMETypeValidator(
allowed_mimetypes=ALLOWED_EXTENSION_MIMETYPES,
message=msg_only_zip_files,
message=error_messages['invalid_zip_archive'],
),
],
widget=forms.ClearableFileInput(
@@ -128,22 +139,19 @@ class FileForm(forms.ModelForm):

errors = []
if not zipfile.is_zipfile(file_path):
errors.append('File is not .zip')
raise forms.ValidationError(self.error_messages['invalid_zip_archive'])

manifest = utils.read_manifest_from_zip(file_path)
manifest, error_codes = utils.read_manifest_from_zip(file_path)
for code in error_codes:
errors.append(forms.ValidationError(self.error_messages[code]))
if errors:
self.add_error('source', errors)

if manifest is None:
errors.append('A valid manifest file could not be found')
else:
if manifest:
ManifestValidator(manifest)
ExtensionIDManifestValidator(manifest, self.extension)

extension_types = {v: k for k, v in EXTENSION_TYPE_SLUGS_SINGULAR.items()}
if errors:
raise forms.ValidationError({'source': errors}, code='invalid')

self.cleaned_data['metadata'] = manifest
# TODO: Error handling
self.cleaned_data['type'] = extension_types[manifest['type']]
self.cleaned_data['metadata'] = manifest
self.cleaned_data['type'] = EXTENSION_SLUG_TYPES[manifest['type']]

return self.cleaned_data
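The contract introduced above is that `files.utils.read_manifest_from_zip()` returns a `(manifest, error_codes)` tuple and the form turns each machine-readable code into a translated message via `error_messages`. A minimal sketch of that flow, with an invented path and calling code that is illustrative rather than taken from the diff:

```
# Illustrative only: how the (manifest, error_codes) contract is consumed.
manifest, error_codes = utils.read_manifest_from_zip('/tmp/some-addon.zip')  # hypothetical path

if error_codes:
    # Each code maps to a user-facing message attached to the 'source' field.
    messages = [FileForm.error_messages[code] for code in error_codes]
    # e.g. ['An add-on should have an __init__.py file.']
elif manifest:
    print(manifest['type'])  # 'add-on' or 'theme', used to fill cleaned_data['type']
```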
@@ -0,0 +1,40 @@
# Generated by Django 4.2.11 on 2024-04-12 09:05

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('files', '0004_alter_file_status'),
]

operations = [
migrations.RenameField(
model_name='filevalidation',
old_name='validation',
new_name='results',
),
migrations.AlterField(
model_name='filevalidation',
name='results',
field=models.JSONField(),
),
migrations.RemoveField(
model_name='filevalidation',
name='errors',
),
migrations.RemoveField(
model_name='filevalidation',
name='notices',
),
migrations.RemoveField(
model_name='filevalidation',
name='warnings',
),
migrations.RenameField(
model_name='filevalidation',
old_name='is_valid',
new_name='is_ok',
),
]
@@ -1,7 +1,6 @@
from pathlib import Path
from typing import Dict, Any
import logging
import re

from django.contrib.auth import get_user_model
from django.db import models
@@ -104,7 +103,8 @@ class File(CreatedModifiedMixin, TrackChangesMixin, SoftDeleteMixin, models.Mode
objects = FileManager()

def __str__(self) -> str:
return f'{self.original_name} ({self.get_status_display()})'
label_deleted = f'{self.date_deleted and " (DELETED ❌)" or ""}'
return f'{self.original_name} ({self.get_status_display()}){label_deleted}'

@property
def has_been_validated(self):
@@ -177,11 +177,7 @@ class File(CreatedModifiedMixin, TrackChangesMixin, SoftDeleteMixin, models.Mode
data = self.metadata

extension_id = data.get('id')
original_name = data.get('name', self.original_name)
name_as_path = Path(original_name)
for suffix in name_as_path.suffixes:
original_name = original_name.replace(suffix, '')
name = re.sub(r'[-_ ]+', ' ', original_name)
name = data.get('name', self.original_name)
return {
'name': name,
'slug': utils.slugify(name),
@@ -210,11 +206,8 @@ class File(CreatedModifiedMixin, TrackChangesMixin, SoftDeleteMixin, models.Mode


class FileValidation(CreatedModifiedMixin, TrackChangesMixin, models.Model):
track_changes_to_fields = {'is_valid', 'errors', 'warnings', 'notices', 'validation'}
track_changes_to_fields = {'is_ok', 'results'}

file = models.OneToOneField(File, related_name='validation', on_delete=models.CASCADE)
is_valid = models.BooleanField(default=False)
errors = models.IntegerField(default=0)
warnings = models.IntegerField(default=0)
notices = models.IntegerField(default=0)
validation = models.TextField()
is_ok = models.BooleanField(default=False)
results = models.JSONField()
@@ -1,7 +1,12 @@
from django.db.models.signals import pre_save
import logging

from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver

import files.models
import files.tasks

logger = logging.getLogger(__name__)


@receiver(pre_save, sender=files.models.File)
@@ -9,3 +14,19 @@ def _record_changes(sender: object, instance: files.models.File, **kwargs: objec
was_changed, old_state = instance.pre_save_record()

instance.record_status_change(was_changed, old_state, **kwargs)


def schedule_scan(file: files.models.File) -> None:
"""Schedule a scan of a given file."""
logger.info('Scheduling a scan for file pk=%s', file.pk)
files.tasks.clamdscan(file_id=file.pk, creator=file, verbose_name=file.source.name)


@receiver(post_save, sender=files.models.File)
def _scan_new_file(
sender: object, instance: files.models.File, created: bool, **kwargs: object
) -> None:
if not created:
return

schedule_scan(instance)
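With django-background-tasks, calling the decorated `clamdscan(...)` above only enqueues a `Task` row; a worker started with `manage.py process_tasks` (the systemd unit added further down in this diff) picks it up later, and the tests call the wrapped function directly. A minimal sketch, assuming the library's documented behaviour and an invented `some_file` object:

```
# Illustrative only: scheduling vs. running a background task.
import files.tasks

# Enqueues a Task row; `creator` and `verbose_name` are scheduling metadata.
files.tasks.clamdscan(file_id=42, creator=some_file, verbose_name='source.zip')

# A worker started with `python manage.py process_tasks` executes it later;
# tests bypass the queue and run the wrapped function synchronously:
files.tasks.clamdscan.task_function(file_id=42)
```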
29 files/tasks.py Normal file
@@ -0,0 +1,29 @@
import logging
import os.path

from background_task import background
from background_task.tasks import TaskSchedule
from django.conf import settings

import files.models
import files.utils

logger = logging.getLogger(__name__)


@background(schedule={'action': TaskSchedule.RESCHEDULE_EXISTING})
def clamdscan(file_id: int):
"""Run a scan of a given file and save its output as a FileValidation record."""
file = files.models.File.objects.get(pk=file_id)
abs_path = os.path.join(settings.MEDIA_ROOT, file.source.path)
scan_status, scan_found = files.utils.run_clamdscan(abs_path)
logger.info('File pk=%s scanned: %s', file.pk, (scan_status, scan_found))
scan_result = {'clamdscan': [scan_status, scan_found]}
is_ok = scan_status == 'OK'
file_validation, is_new = files.models.FileValidation.objects.get_or_create(
file=file, defaults={'results': scan_result, 'is_ok': is_ok}
)
if not is_new:
file_validation.results = scan_result
file_validation.is_ok = is_ok
file_validation.save(update_fields={'results', 'is_ok', 'date_modified'})
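For reference, `run_clamdscan()` (added in `files/utils.py` further down) wraps python-clamd's `instream()` call, which reports a `(status, signature)` pair per stream; the task above only branches on the status. A small sketch of the two shapes the task distinguishes (the signature name is the one asserted in this diff's tests):

```
# Illustrative only: the values clamdscan() expects back from run_clamdscan().
clean_result = ('OK', None)                           # is_ok becomes True
infected_result = ('FOUND', 'Win.Test.EICAR_HDB-1')   # is_ok becomes False, details kept in results
```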
21 files/templates/files/components/scan_details.html Normal file
@@ -0,0 +1,21 @@
{% load common i18n %}
{# FIXME: we might want to rephrase is_moderator in terms of Django's (group) permissions #}
{% if perms.files.view_file or request.user.is_moderator %}
{% with file_validation=file.validation %}
{% if file_validation and not file_validation.is_ok %}
<section>
<div class="card pb-3 pt-4 px-4 mb-3 ext-detail-download-danger">
<h3>⚠ {% trans "Suspicious upload" %}</h3>
{% blocktrans asvar alert_text %}Scan of the {{ file }} indicates malicious content.{% endblocktrans %}
<h4>
{{ alert_text }}
{% if perms.files.view_file %}{# Moderators don't necessarily have access to the admin #}
{% url 'admin:files_file_change' file.pk as admin_file_url %}
<a href="{{ admin_file_url }}" target="_blank">{% trans "See details" %}</a>
{% endif %}
</h4>
</div>
</section>
{% endif %}
{% endwith %}
{% endif %}
10 files/templates/files/components/scan_details_flag.html Normal file
@@ -0,0 +1,10 @@
{% load common i18n %}
{# FIXME: we might want to rephrase is_moderator in terms of Django's (group) permissions #}
{% if perms.files.view_file or request.user.is_moderator %}
{% with file_validation=file.validation %}
{% if file_validation and not file_validation.is_ok %}
{% blocktrans asvar alert_text %}Scan of the {{ file }} indicates malicious content.{% endblocktrans %}
<b class="text-danger pt-2" title="{{ alert_text }}">⚠</b>
{% endif %}
{% endwith %}
{% endif %}
107 files/tests/test_signals.py Normal file
@@ -0,0 +1,107 @@
import os
import shutil
import tempfile
import unittest

from background_task.models import Task
from django.conf import settings
from django.test import TestCase, override_settings

from common.tests.factories.files import FileFactory
import files.models
import files.tasks


@unittest.skipUnless(shutil.which('clamd'), 'requires clamd')
@override_settings(MEDIA_ROOT='/tmp/')
class FileScanTest(TestCase):
def setUp(self):
super().setUp()
self.temp_directory = tempfile.mkdtemp(prefix=settings.MEDIA_ROOT)

def tearDown(self):
super().tearDown()
shutil.rmtree(self.temp_directory)

def test_scan_flags_found_invalid(self):
test_file_path = os.path.join(self.temp_directory, 'test_file.zip')
test_content = (
b'X5O!P%@AP[4\PZX54(P^)7CC)7}$EICAR-STANDARD-ANTIVIRUS-TEST-FILE!$H+H*'  # noqa: W605
)
with open(test_file_path, 'wb+') as test_file:
test_file.write(test_content)

file = FileFactory(source=test_file_path)
self.assertFalse(hasattr(file, 'validation'))

# A background task should have been created
task = Task.objects.created_by(creator=file).first()
self.assertIsNotNone(task)
self.assertEqual(task.task_name, 'files.tasks.clamdscan')
self.assertEqual(task.task_params, f'[[], {{"file_id": {file.pk}}}]')

# Actually run the task as if by background runner
task_args, task_kwargs = task.params()
files.tasks.clamdscan.task_function(*task_args, **task_kwargs)

file.refresh_from_db()
self.assertFalse(file.validation.is_ok)
result = file.validation.results['clamdscan']
self.assertEqual(result, ['FOUND', 'Win.Test.EICAR_HDB-1'])

def test_scan_flags_found_invalid_updates_existing_validation(self):
test_file_path = os.path.join(self.temp_directory, 'test_file.zip')
test_content = (
b'X5O!P%@AP[4\PZX54(P^)7CC)7}$EICAR-STANDARD-ANTIVIRUS-TEST-FILE!$H+H*'  # noqa: W605
)
with open(test_file_path, 'wb+') as test_file:
test_file.write(test_content)

file = FileFactory(source=test_file_path)
# Make sure validation record exists before scanner runs
existing_validation = files.models.FileValidation(file=file, results={})
existing_validation.save()
self.assertTrue(hasattr(file, 'validation'))
old_date_modified = existing_validation.date_modified

# A background task should have been created
task = Task.objects.created_by(creator=file).first()
self.assertIsNotNone(task)
self.assertEqual(task.task_name, 'files.tasks.clamdscan')
self.assertEqual(task.task_params, f'[[], {{"file_id": {file.pk}}}]')

# Actually run the task as if by background runner
task_args, task_kwargs = task.params()
files.tasks.clamdscan.task_function(*task_args, **task_kwargs)

self.assertFalse(file.validation.is_ok)
file.validation.refresh_from_db()
result = file.validation.results['clamdscan']
self.assertEqual(result, ['FOUND', 'Win.Test.EICAR_HDB-1'])
self.assertEqual(existing_validation.pk, file.validation.pk)

existing_validation.refresh_from_db()
self.assertGreater(existing_validation.date_modified, old_date_modified)

def test_scan_flags_nothing_found_valid(self):
test_file_path = os.path.join(self.temp_directory, 'test_file.zip')
with open(test_file_path, 'wb+') as test_file:
test_file.write(b'some file')

file = FileFactory(source=test_file_path)
self.assertFalse(hasattr(file, 'validation'))

# A background task should have been created
task = Task.objects.created_by(creator=file).first()
self.assertIsNotNone(task)
self.assertEqual(task.task_name, 'files.tasks.clamdscan')
self.assertEqual(task.task_params, f'[[], {{"file_id": {file.pk}}}]')

# Actually run the task as if by background runner
task_args, task_kwargs = task.params()
files.tasks.clamdscan.task_function(*task_args, **task_kwargs)

file.refresh_from_db()
self.assertTrue(file.validation.is_ok)
result = file.validation.results['clamdscan']
self.assertEqual(result, ['OK', None])
@@ -1,6 +1,6 @@
from django.test import TestCase

from files.utils import find_file_inside_zip_list
from files.utils import find_path_by_name, find_exact_path, filter_paths_by_ext


class UtilsTest(TestCase):
@@ -10,7 +10,7 @@ class UtilsTest(TestCase):
name_list = [
"blender_manifest.toml",
]
manifest_file = find_file_inside_zip_list(self.manifest, name_list)
manifest_file = find_path_by_name(name_list, self.manifest)
self.assertEqual(manifest_file, "blender_manifest.toml")

def test_find_manifest_nested(self):
@@ -23,21 +23,21 @@ class UtilsTest(TestCase):
"foobar-1.0.3/manifest.toml",
"foobar-1.0.3/manifest.json",
]
manifest_file = find_file_inside_zip_list(self.manifest, name_list)
manifest_file = find_path_by_name(name_list, self.manifest)
self.assertEqual(manifest_file, "foobar-1.0.3/blender_manifest.toml")

def test_find_manifest_no_zipped_folder(self):
name_list = [
"foobar-1.0.3/blender_manifest.toml",
]
manifest_file = find_file_inside_zip_list(self.manifest, name_list)
manifest_file = find_path_by_name(name_list, self.manifest)
self.assertEqual(manifest_file, "foobar-1.0.3/blender_manifest.toml")

def test_find_manifest_no_manifest(self):
name_list = [
"foobar-1.0.3/",
]
manifest_file = find_file_inside_zip_list(self.manifest, name_list)
manifest_file = find_path_by_name(name_list, self.manifest)
self.assertEqual(manifest_file, None)

def test_find_manifest_with_space(self):
@@ -47,5 +47,54 @@ class UtilsTest(TestCase):
"foobar-1.0.3/blender_manifest.toml.txt",
"blender_manifest.toml/my_files.py",
]
manifest_file = find_file_inside_zip_list(self.manifest, name_list)
manifest_file = find_path_by_name(name_list, self.manifest)
self.assertEqual(manifest_file, None)

def test_find_exact_path_found(self):
name_list = [
'foobar-1.0.3/theme.xml',
'foobar-1.0.3/theme1.xml',
'foobar-1.0.3/theme2.txt',
'foobar-1.0.3/__init__.py',
'foobar-1.0.3/foobar/__init__.py',
'foobar-1.0.3/foobar-1.0.3/__init__.py',
'blender_manifest.toml',
]
path = find_exact_path(name_list, 'foobar-1.0.3/__init__.py')
self.assertEqual(path, 'foobar-1.0.3/__init__.py')

def test_find_exact_path_nothing_found(self):
name_list = [
'foobar-1.0.3/theme.xml',
'foobar-1.0.3/theme1.xml',
'foobar-1.0.3/theme2.txt',
'foobar-1.0.3/foobar/__init__.py',
'foobar-1.0.3/foobar-1.0.3/__init__.py',
'blender_manifest.toml',
]
path = find_exact_path(name_list, 'foobar-1.0.3/__init__.py')
self.assertIsNone(path)

def test_filter_paths_by_ext_found(self):
name_list = [
'foobar-1.0.3/theme.xml',
'foobar-1.0.3/theme1.xml',
'foobar-1.0.3/theme2.txt',
'foobar-1.0.3/__init__.py',
'foobar-1.0.3/foobar-1.0.3/__init__.py',
'blender_manifest.toml',
]
paths = filter_paths_by_ext(name_list, '.xml')
self.assertEqual(list(paths), ['foobar-1.0.3/theme.xml', 'foobar-1.0.3/theme1.xml'])

def test_filter_paths_by_ext_nothing_found(self):
name_list = [
'foobar-1.0.3/theme.xml',
'foobar-1.0.3/theme1.md.xml',
'foobar-1.0.3/theme2.txt',
'foobar-1.0.3/__init__.py',
'foobar-1.0.3/foobar-1.0.3/__init__.py',
'blender_manifest.toml',
]
paths = filter_paths_by_ext(name_list, '.md')
self.assertEqual(list(paths), [])
@@ -4,10 +4,13 @@ import io
import logging
import mimetypes
import os
import os.path
import toml
import typing
import zipfile

from lxml import etree
import clamd
import magic

logger = logging.getLogger(__name__)
@@ -48,41 +51,104 @@ def get_sha256_from_value(value: str):
return hash_.hexdigest()


def find_file_inside_zip_list(file_to_read: str, name_list: list) -> str:
"""Return the first occurance of file_to_read insize a zip name_list"""
for file_path in name_list:
def find_path_by_name(paths: typing.List[str], name: str) -> typing.Optional[str]:
"""Return the first occurrence of file name in a given list of paths."""
for file_path in paths:
# Remove leading/trailing whitespace from file path
file_path_stripped = file_path.strip()
# Check if the basename of the stripped path is equal to the target file name
if os.path.basename(file_path_stripped) == file_to_read:
if os.path.basename(file_path_stripped) == name:
return file_path_stripped
return None


def find_exact_path(paths: typing.List[str], exact_path: str) -> typing.Optional[str]:
"""Return a first path equal to a given one if it exists in a given list of paths."""
matching_paths = (path for path in paths if path == exact_path)
return next(matching_paths, None)


def filter_paths_by_ext(paths: typing.List[str], ext: str) -> typing.Iterable[str]:
"""Generate a list of paths having a given extension from a given list of paths."""
for file_path in paths:
# Get file path's extension
_, file_path_ext = os.path.splitext(file_path)
# Check if this file's extension matches the extension we are looking for
if file_path_ext.lower() == ext.lower():
yield file_path


def read_manifest_from_zip(archive_path):
file_to_read = 'blender_manifest.toml'
"""Read and validate extension's manifest file and contents of the archive.

In any extension archive, a valid `blender_manifest.toml` file is expected
to be found at the top level of the archive, or inside a single nested directory.
Additionally, depending on the extension type defined in the manifest,
the archive is expected to have a particular file structure:

* for themes, a single XML file is expected next to the manifest;

* for add-ons, the following structure is expected:

```
some-addon.zip
└─ an-optional-dir
├─ blender_manifest.toml
├─ __init__.py
└─ (...)
```
"""
manifest_name = 'blender_manifest.toml'
error_codes = []
try:
with zipfile.ZipFile(archive_path) as myzip:
manifest_filepath = find_file_inside_zip_list(file_to_read, myzip.namelist())
bad_file = myzip.testzip()
if bad_file is not None:
logger.error('Bad file in ZIP')
error_codes.append('invalid_zip_archive')
return None, error_codes

file_list = myzip.namelist()
manifest_filepath = find_path_by_name(file_list, manifest_name)

if manifest_filepath is None:
logger.info(f"File '{file_to_read}' not found in the archive.")
return None
logger.info(f"File '{manifest_name}' not found in the archive.")
error_codes.append('missing_manifest_toml')
return None, error_codes

# Manifest file is expected to be no deeper than one directory down
if os.path.dirname(os.path.dirname(manifest_filepath)) != '':
error_codes.append('invalid_manifest_path')
return None, error_codes

# Extract the file content
with myzip.open(manifest_filepath) as file_content:
# TODO: handle TOML loading error
toml_content = toml.loads(file_content.read().decode())
return toml_content

# If manifest was parsed successfully, do additional type-specific validation
type_slug = toml_content['type']
if type_slug == 'theme':
theme_xmls = filter_paths_by_ext(file_list, '.xml')
if len(list(theme_xmls)) != 1:
error_codes.append('missing_or_multiple_theme_xml')
elif type_slug == 'add-on':
# __init__.py is expected to be next to the manifest
expected_init_path = os.path.join(os.path.dirname(manifest_filepath), '__init__.py')
init_filepath = find_exact_path(file_list, expected_init_path)
if not init_filepath:
error_codes.append('invalid_missing_init')

return toml_content, error_codes

except toml.decoder.TomlDecodeError as e:
# TODO: This error should be propagated to the user
logger.error(f"Manifest Error: {e.msg}")
error_codes.append('invalid_manifest_toml')

except Exception as e:
logger.error(f"Error extracting from archive: {e}")
error_codes.append('invalid_zip_archive')

return None
return None, error_codes


def guess_mimetype_from_ext(file_name: str) -> str:
@@ -97,3 +163,12 @@ def guess_mimetype_from_content(file_obj) -> str:
# This file might be read again by validation or other utilities
file_obj.seek(0)
return mimetype_from_bytes


def run_clamdscan(abs_path: str) -> tuple:
logger.info('Scanning file at path=%s', abs_path)
clamd_socket = clamd.ClamdUnixSocket()
with open(abs_path, 'rb') as f:
result = clamd_socket.instream(f)['stream']
logger.info('File at path=%s scanned: %s', abs_path, result)
return result
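As a quick illustration of how the three path helpers above cooperate on an archive's name list (the paths below are invented for illustration):

```
# Illustrative only -- a made-up namelist for an add-on archive.
file_list = ['my-addon-1.0.0/blender_manifest.toml', 'my-addon-1.0.0/__init__.py']

manifest = find_path_by_name(file_list, 'blender_manifest.toml')    # 'my-addon-1.0.0/blender_manifest.toml'
init_ok = find_exact_path(file_list, 'my-addon-1.0.0/__init__.py')  # found next to the manifest
xmls = list(filter_paths_by_ext(file_list, '.xml'))                 # [] -- only checked for themes
```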
@@ -5,6 +5,7 @@
ansible.builtin.systemd: name={{ item }} daemon_reload=yes state=restarted enabled=yes
with_items:
- "{{ service_name }}"
- "{{ service_name }}-background"
tags:
- always

@@ -9,6 +9,8 @@
- name: Installing required packages
ansible.builtin.apt: name={{ item }} state=present
with_items:
- clamav-daemon
- clamav-unofficial-sigs
- git
- libpq-dev
- nginx-full
@@ -7,6 +7,12 @@
with_fileglob:
- ../templates/other-services/*.service

- name: Enabling clamav-daemon
ansible.builtin.systemd:
name: clamav-daemon
state: started
enabled: true

- name: Enabling systemd services
ansible.builtin.systemd:
name: "{{ service_name }}-{{ item }}"
@@ -11,8 +11,6 @@ ExecReload=/bin/kill -s HUP $MAINPID
Restart=always
KillMode=mixed
Type=notify
StandardError=syslog
StandardOutput=syslog
SyslogIdentifier={{ service_name }}
NotifyAccess=all
WorkingDirectory={{ dir.source }}
@@ -4,6 +4,4 @@ Description=restart {{ background_service_name }} task handler
[Service]
Type=oneshot
ExecStart=/bin/systemctl restart {{ background_service_name }}
StandardError=syslog
StandardOutput=syslog
SyslogIdentifier={{ service_name }}
@@ -7,11 +7,9 @@ User={{ user }}
Group={{ group }}
EnvironmentFile={{ env_file }}
ExecStart={{ dir.source }}/.venv/bin/python {{ dir.source }}/manage.py process_tasks
ExecStop=kill -s SIGTSTP $MAINPID
Restart=always
KillSignal=SIGQUIT
Type=idle
StandardError=syslog
StandardOutput=syslog
SyslogIdentifier={{ service_name }}
NotifyAccess=all
WorkingDirectory={{ dir.source }}
@@ -7,8 +7,6 @@ User={{ user }}
Group={{ group }}
EnvironmentFile={{ env_file }}
ExecStart={{ dir.source }}/.venv/bin/python {{ dir.source }}/manage.py clearsessions
StandardError=syslog
StandardOutput=syslog
SyslogIdentifier={{ service_name }}
NotifyAccess=all
WorkingDirectory={{ dir.source }}
@@ -7,8 +7,6 @@ User={{ user }}
Group={{ group }}
EnvironmentFile={{ env_file }}
ExecStart={{ dir.source }}/.venv/bin/python {{ dir.source }}/manage.py queue_deletion_request
StandardError=syslog
StandardOutput=syslog
SyslogIdentifier={{ service_name }}
NotifyAccess=all
WorkingDirectory={{ dir.source }}
@@ -7,8 +7,6 @@ User={{ user }}
Group={{ group }}
EnvironmentFile={{ env_file }}
ExecStart={{ dir.source }}/.venv/bin/python {{ dir.source }}/manage.py write_stats
StandardError=syslog
StandardOutput=syslog
SyslogIdentifier={{ service_name }}
NotifyAccess=all
WorkingDirectory={{ dir.source }}
@@ -7,6 +7,7 @@ backports.zoneinfo==0.2.1;python_version<"3.9"
bleach==5.0.1
blender-id-oauth-client @ git+https://projects.blender.org/infrastructure/blender-id-oauth-client.git@cca32643e5118f050b504d803c9ae79dc3fdf350
certifi==2022.6.15
clamd==1.0.2
charset-normalizer==2.1.0
click==8.1.3
colorhash==1.0.4
@@ -22,6 +22,7 @@
<span>{{ extension.review_activity.all.last.date_created|naturaltime_compact }}</span>
</a>
{% endif %}
{% include "files/components/scan_details_flag.html" with file=extension.latest_version.file %}
</td>
<td>
<a href="{{ extension.get_review_url }}" class="text-decoration-none">
@@ -17,6 +17,14 @@ class CommentsViewTest(TestCase):
self.assertEqual(r.status_code, 200)
self.assertEqual(len(r.context['object_list']), 1)

# Deleted extensions don't show up in the approval queue
self.assertIsNone(self.default_version.extension.date_deleted)
self.default_version.extension.delete()
self.assertIsNotNone(self.default_version.extension.date_deleted)
r = self.client.get(reverse('reviewers:approval-queue'))
self.assertEqual(r.status_code, 200)
self.assertEqual(len(r.context['object_list']), 0)

# Page is visible for every extension and does not require authentication
def test_visibility(self):
r = self.client.get(
@@ -19,7 +19,7 @@ class ApprovalQueueView(ListView):

def get_queryset(self):
return (
Extension.objects.all()
Extension.objects.exclude_deleted
.exclude(status=Extension.STATUSES.APPROVED)
.order_by('-date_created')
)
26 utils.py
@@ -4,7 +4,6 @@ import itertools
import logging
import re
import time
import unicodedata

from urllib.parse import (
parse_qsl,
@@ -20,6 +19,8 @@ from django.http import HttpRequest
from django.http.response import HttpResponseRedirectBase
from django.utils.encoding import force_bytes, force_str
from django.utils.http import _urlparse
import django.utils.text


User = get_user_model()
log = logging.getLogger(__name__)
@@ -60,25 +61,12 @@ def utc_millesecs_from_epoch(for_datetime=None):
return int(seconds * 1000)


# Extra characters outside of alphanumerics that we'll allow.
SLUG_OK = '-_~'
def slugify(s: str):
"""Convert a given string to a URL slug.


def slugify(s, ok=SLUG_OK, lower=True, spaces=False, delimiter='-'):
# L and N signify letter/number.
# http://www.unicode.org/reports/tr44/tr44-4.html#GC_Values_Table
rv = []

for c in force_str(s):
cat = unicodedata.category(c)[0]
if cat in 'LN' or c in ok:
rv.append(c)
if cat == 'Z':  # space
rv.append(' ')
new = ''.join(rv).strip()
if not spaces:
new = re.sub(r'[-\s]+', delimiter, new)
return new.lower() if lower else new
Do it the same way Django does it, but replace underscores with dashes first.
"""
return django.utils.text.slugify(s.replace('_', '-'))

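A quick before/after for the simplified `slugify()` above, with invented input values: `django.utils.text.slugify` keeps underscores, which is why the dash replacement happens first.

```
from django.utils.text import slugify as django_slugify

django_slugify('My_Addon name')                      # -> 'my_addon-name'
django_slugify('My_Addon name'.replace('_', '-'))    # -> 'my-addon-name'  (what utils.slugify() now returns)
```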
def urlparams(url_, hash=None, **query):