Merge branch 'master' into storefront-nuxt

This commit is contained in:
Egor Pavlovich Gorbunov 2026-03-02 00:33:39 +03:00
commit 2c4e66832f
31 changed files with 840 additions and 101 deletions

View file

@ -5,8 +5,7 @@ LABEL authors="fureunoir"
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
LANG=C.UTF-8 \
DEBIAN_FRONTEND=noninteractive \
PATH="/root/.local/bin:$PATH"
DEBIAN_FRONTEND=noninteractive
WORKDIR /app
@ -33,18 +32,16 @@ RUN set -eux; \
rm -rf /var/lib/apt/lists/*; \
pip install --upgrade pip
RUN curl -LsSf https://astral.sh/uv/install.sh | sh
ENV PATH="/root/.local/bin:/root/.cargo/bin:$PATH"
RUN curl -LsSf https://astral.sh/uv/install.sh | UV_INSTALL_DIR=/usr/local/bin sh
RUN uv venv /opt/schon-python
ENV VIRTUAL_ENV=/opt/schon-python
ENV UV_PROJECT_ENVIRONMENT=/opt/schon-python
ENV PATH="/opt/schon-python/bin:/root/.local/bin:/root/.cargo/bin:$PATH"
ENV PATH="/opt/schon-python/bin:/usr/local/bin:$PATH"
COPY pyproject.toml pyproject.toml
COPY uv.lock uv.lock
RUN set -eux; \
RUN uv venv /opt/schon-python && \
uv sync --extra worker --extra openai --locked
COPY ./scripts/Docker/app-entrypoint.sh /usr/local/bin/app-entrypoint.sh
@ -52,4 +49,11 @@ RUN chmod +x /usr/local/bin/app-entrypoint.sh
COPY . .
RUN groupadd --system --gid 1000 schon && \
useradd --system --uid 1000 --gid schon --shell /bin/bash --create-home schon && \
mkdir -p /app/static /app/media && \
chown -R schon:schon /app /opt/schon-python
USER schon
ENTRYPOINT ["/usr/bin/bash", "app-entrypoint.sh"]

View file

@ -5,8 +5,7 @@ LABEL authors="fureunoir"
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
LANG=C.UTF-8 \
DEBIAN_FRONTEND=noninteractive \
PATH="/root/.local/bin:$PATH"
DEBIAN_FRONTEND=noninteractive
WORKDIR /app
@ -33,17 +32,16 @@ RUN set -eux; \
rm -rf /var/lib/apt/lists/*; \
pip install --upgrade pip
RUN curl -LsSf https://astral.sh/uv/install.sh | sh
ENV PATH="/root/.local/bin:/root/.cargo/bin:$PATH"
RUN uv venv /opt/schon-python
RUN curl -LsSf https://astral.sh/uv/install.sh | UV_INSTALL_DIR=/usr/local/bin sh
ENV VIRTUAL_ENV=/opt/schon-python
ENV UV_PROJECT_ENVIRONMENT=/opt/schon-python
ENV PATH="/opt/schon-python/bin:/root/.local/bin:/root/.cargo/bin:$PATH"
ENV PATH="/opt/schon-python/bin:/usr/local/bin:$PATH"
COPY pyproject.toml pyproject.toml
COPY uv.lock uv.lock
RUN set -eux; \
RUN uv venv /opt/schon-python && \
uv sync --extra worker --extra openai --locked
COPY ./scripts/Docker/beat-entrypoint.sh /usr/local/bin/beat-entrypoint.sh
@ -51,4 +49,11 @@ RUN chmod +x /usr/local/bin/beat-entrypoint.sh
COPY . .
RUN groupadd --system --gid 1000 schon && \
useradd --system --uid 1000 --gid schon --shell /bin/bash --create-home schon && \
mkdir -p /app/media && \
chown -R schon:schon /app /opt/schon-python
USER schon
ENTRYPOINT ["/usr/bin/bash", "beat-entrypoint.sh"]

View file

@ -5,8 +5,7 @@ LABEL authors="fureunoir"
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
LANG=C.UTF-8 \
DEBIAN_FRONTEND=noninteractive \
PATH="/root/.local/bin:$PATH"
DEBIAN_FRONTEND=noninteractive
WORKDIR /app
@ -33,17 +32,16 @@ RUN set -eux; \
rm -rf /var/lib/apt/lists/*; \
pip install --upgrade pip
RUN curl -LsSf https://astral.sh/uv/install.sh | sh
ENV PATH="/root/.local/bin:/root/.cargo/bin:$PATH"
RUN uv venv /opt/schon-python
RUN curl -LsSf https://astral.sh/uv/install.sh | UV_INSTALL_DIR=/usr/local/bin sh
ENV VIRTUAL_ENV=/opt/schon-python
ENV UV_PROJECT_ENVIRONMENT=/opt/schon-python
ENV PATH="/opt/schon-python/bin:/root/.local/bin:/root/.cargo/bin:$PATH"
ENV PATH="/opt/schon-python/bin:/usr/local/bin:$PATH"
COPY pyproject.toml pyproject.toml
COPY uv.lock uv.lock
RUN set -eux; \
RUN uv venv /opt/schon-python && \
uv sync --extra worker --extra openai --locked
COPY ./scripts/Docker/stock-updater-entrypoint.sh /usr/local/bin/stock-updater-entrypoint.sh
@ -51,4 +49,11 @@ RUN chmod +x /usr/local/bin/stock-updater-entrypoint.sh
COPY . .
RUN groupadd --system --gid 1000 schon && \
useradd --system --uid 1000 --gid schon --shell /bin/bash --create-home schon && \
mkdir -p /app/media && \
chown -R schon:schon /app /opt/schon-python
USER schon
ENTRYPOINT ["/usr/bin/bash", "stock-updater-entrypoint.sh"]

View file

@ -5,8 +5,7 @@ LABEL authors="fureunoir"
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
LANG=C.UTF-8 \
DEBIAN_FRONTEND=noninteractive \
PATH="/root/.local/bin:$PATH"
DEBIAN_FRONTEND=noninteractive
WORKDIR /app
@ -33,18 +32,16 @@ RUN set -eux; \
rm -rf /var/lib/apt/lists/*; \
pip install --upgrade pip
RUN curl -LsSf https://astral.sh/uv/install.sh | sh
ENV PATH="/root/.local/bin:/root/.cargo/bin:$PATH"
RUN curl -LsSf https://astral.sh/uv/install.sh | UV_INSTALL_DIR=/usr/local/bin sh
RUN uv venv /opt/schon-python
ENV VIRTUAL_ENV=/opt/schon-python
ENV UV_PROJECT_ENVIRONMENT=/opt/schon-python
ENV PATH="/opt/schon-python/bin:/root/.local/bin:/root/.cargo/bin:$PATH"
ENV PATH="/opt/schon-python/bin:/usr/local/bin:$PATH"
COPY pyproject.toml pyproject.toml
COPY uv.lock uv.lock
RUN set -eux; \
RUN uv venv /opt/schon-python && \
uv sync --extra worker --extra openai --locked
COPY ./scripts/Docker/worker-entrypoint.sh /usr/local/bin/worker-entrypoint.sh
@ -52,4 +49,11 @@ RUN chmod +x /usr/local/bin/worker-entrypoint.sh
COPY . .
RUN groupadd --system --gid 1000 schon && \
useradd --system --uid 1000 --gid schon --shell /bin/bash --create-home schon && \
mkdir -p /app/media && \
chown -R schon:schon /app /opt/schon-python
USER schon
ENTRYPOINT ["/usr/bin/bash", "worker-entrypoint.sh"]

View file

@ -13,6 +13,8 @@ services:
restart: always
volumes:
- .:/app
- static-data:/app/static
- media-data:/app/media
ports:
- "8000:8000"
env_file:
@ -131,6 +133,7 @@ services:
restart: always
volumes:
- .:/app
- media-data:/app/media
env_file:
- .env
environment:
@ -159,6 +162,7 @@ services:
restart: always
volumes:
- .:/app
- media-data:/app/media
env_file:
- .env
environment:
@ -187,6 +191,7 @@ services:
restart: always
volumes:
- .:/app
- media-data:/app/media
env_file:
- .env
environment:
@ -245,3 +250,5 @@ volumes:
redis-data:
es-data:
prometheus-data:
static-data:
media-data:

View file

@ -1046,16 +1046,10 @@ class AddressAdmin(DjangoQLSearchMixin, FieldsetsMixin, GISModelAdmin):
"country",
"user",
)
list_filter = (
"country",
"region",
)
search_fields = (
"street",
"city",
"postal_code",
"user__email",
)
# country and region are encrypted — DB-level filtering is not possible
list_filter = ()
# street, city, postal_code are encrypted — DB-level search is not possible
search_fields = ("user__email",)
readonly_fields = (
"uuid",
"modified",

View file

@ -750,6 +750,20 @@ PRODUCT_SCHEMA = {
**BASE_ERRORS,
},
),
"exact_list": extend_schema(
tags=[
"products",
],
summary=_("retrieve exact products by identifier"),
description=_(
"retrieve a list of products by identifier type (uuid, slug, or sku). "
"Send a POST request with `identificator_type` and `identificators` (list of values)."
),
responses={
status.HTTP_200_OK: ProductSimpleSerializer(many=True),
**BASE_ERRORS,
},
),
"seo_meta": extend_schema(
tags=[
"products",

View file

@ -15,10 +15,11 @@ from engine.core.graphene.object_types import (
BulkProductInput,
FeedbackType,
OrderType,
ProductType,
SearchResultsType,
WishlistType,
)
from engine.core.models import Address, Order, OrderProduct, Wishlist
from engine.core.models import Address, Order, OrderProduct, Product, Wishlist
from engine.core.utils import format_attributes, is_url_safe
from engine.core.utils.caching import web_cache
from engine.core.utils.emailing import contact_us_email
@ -574,6 +575,31 @@ class BuyProduct(Mutation):
)
class RetrieveExactProducts(Mutation):
    """Mutation that bulk-fetches products by uuid, slug, or sku."""

    class Meta:
        description = _("retrieve exact products by identificator")

    class Arguments:
        identificator_type = String(required=True)
        identificators = List(String, required=True)

    products = List(ProductType, required=True)

    def mutate(self, info, identificator_type: str, identificators: list[str]):
        # Map the requested identifier kind onto the ORM lookup; anything
        # else is a client error.
        # NOTE(review): `identificators` length is unbounded here — confirm
        # upstream rate limiting is sufficient against oversized payloads.
        lookups = {"uuid": "uuid__in", "slug": "slug__in", "sku": "sku__in"}
        lookup = lookups.get(identificator_type)
        if lookup is None:
            raise BadRequest(
                _("identificator_type must be one of: uuid, slug, sku")
            )
        matched = Product.objects.filter(**{lookup: identificators})
        return RetrieveExactProducts(products=matched)  # ty: ignore[unknown-argument]
# noinspection PyUnusedLocal,PyTypeChecker
class FeedbackProductAction(Mutation):
class Meta:

View file

@ -99,11 +99,13 @@ class Command(BaseCommand):
def staff_user(self):
user, _ = User.objects.get_or_create(
email=f"staff@{DEMO_EMAIL_DOMAIN}",
first_name="Alice",
last_name="Schon",
is_staff=True,
is_active=True,
is_verified=True,
defaults={
"first_name": "Alice",
"last_name": "Schon",
"is_staff": True,
"is_active": True,
"is_verified": True,
},
)
if _:
user.set_password("Staff!Demo888")
@ -116,12 +118,14 @@ class Command(BaseCommand):
def super_user(self):
user, _ = User.objects.get_or_create(
email=f"super@{DEMO_EMAIL_DOMAIN}",
first_name="Bob",
last_name="Schon",
is_superuser=True,
is_staff=True,
is_active=True,
is_verified=True,
defaults={
"first_name": "Bob",
"last_name": "Schon",
"is_superuser": True,
"is_staff": True,
"is_active": True,
"is_verified": True,
},
)
if _:
user.set_password("Super!Demo888")

View file

@ -0,0 +1,186 @@
import base64
import encrypted_fields.fields
from cryptography.fernet import Fernet, MultiFernet
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
from django.db import migrations, models
def _make_fernet(settings):
    """Derive the Fernet primitive used to encrypt existing rows.

    One 32-byte urlsafe-base64 key is derived per (secret, salt) pair via
    PBKDF2-HMAC-SHA256 at 100_000 iterations, covering SECRET_KEY plus any
    SECRET_KEY_FALLBACKS; multiple keys are wrapped in a MultiFernet so
    data encrypted under an older key stays readable.

    NOTE(review): assumed to mirror django-fernet-encrypted-fields' key
    derivation so the model layer can decrypt these values — confirm
    against the library before changing parameters.
    """
    salts = (
        settings.SALT_KEY
        if isinstance(settings.SALT_KEY, list)
        else [settings.SALT_KEY]
    )
    secrets = [settings.SECRET_KEY, *getattr(settings, "SECRET_KEY_FALLBACKS", [])]
    derived = []
    for secret in secrets:
        for salt in salts:
            kdf = PBKDF2HMAC(
                algorithm=hashes.SHA256(),
                length=32,
                salt=salt.encode("utf-8"),
                iterations=100_000,
                backend=default_backend(),
            )
            derived.append(
                base64.urlsafe_b64encode(kdf.derive(secret.encode("utf-8")))
            )
    if len(derived) > 1:
        return MultiFernet([Fernet(key) for key in derived])
    return Fernet(derived[0])
def encrypt_address_fields(apps, schema_editor):
    """One-off data step: Fernet-encrypt existing plaintext Address PII.

    Uses raw SQL instead of the ORM because by the time this runs the model
    fields are already declared as Encrypted* — loading rows through the
    model would attempt to decrypt values that are still plaintext.

    NOTE(review): not idempotent — a second run double-encrypts values.
    NOTE(review): fetchall() materialises the whole core_address table in
    memory; confirm this is acceptable for large deployments.
    """
    import json

    from django.conf import settings

    f = _make_fernet(settings)

    def enc(value):
        # Scalar -> Fernet token; non-string values are stringified first.
        if value is None:
            return None
        if not isinstance(value, str):
            value = str(value)
        return f.encrypt(value.encode("utf-8")).decode("utf-8")

    def enc_json(value):
        # JSON column -> Fernet token of the serialised JSON string.
        # value may be a dict (from JSONB) or already a string.
        if value is None:
            return None
        if not isinstance(value, str):
            value = json.dumps(value, default=str)
        return f.encrypt(value.encode("utf-8")).decode("utf-8")

    with schema_editor.connection.cursor() as cursor:
        cursor.execute(
            "SELECT uuid, address_line, street, district, city, region, "
            "postal_code, country, raw_data, api_response FROM core_address"
        )
        rows = cursor.fetchall()
        for (
            row_id,
            address_line,
            street,
            district,
            city,
            region,
            postal_code,
            country,
            raw_data,
            api_response,
        ) in rows:
            # Unpacking order must match the SELECT column order above.
            cursor.execute(
                "UPDATE core_address SET "
                "address_line=%s, street=%s, district=%s, city=%s, region=%s, "
                "postal_code=%s, country=%s, raw_data=%s, api_response=%s "
                "WHERE uuid=%s",
                [
                    enc(address_line),
                    enc(street),
                    enc(district),
                    enc(city),
                    enc(region),
                    enc(postal_code),
                    enc(country),
                    enc_json(raw_data),
                    enc_json(api_response),
                    row_id,
                ],
            )
class Migration(migrations.Migration):
    """Encrypt Address PII columns at rest.

    Order matters: the schema is switched to Encrypted*/TEXT first
    (AlterField does not transform stored data), then the RunPython step
    Fernet-encrypts the pre-existing plaintext rows via raw SQL. The
    reverse of the data step is a no-op, so migrating backwards leaves
    values encrypted.
    """

    dependencies = [
        ("core", "0056_pastedimage"),
    ]

    operations = [
        # Encrypt text fields
        migrations.AlterField(
            model_name="address",
            name="address_line",
            field=encrypted_fields.fields.EncryptedTextField(
                blank=True,
                null=True,
                help_text="address line for the customer",
                verbose_name="address line",
            ),
        ),
        migrations.AlterField(
            model_name="address",
            name="street",
            field=encrypted_fields.fields.EncryptedCharField(
                max_length=255,
                null=True,
                verbose_name="street",
            ),
        ),
        migrations.AlterField(
            model_name="address",
            name="district",
            field=encrypted_fields.fields.EncryptedCharField(
                max_length=255,
                null=True,
                verbose_name="district",
            ),
        ),
        migrations.AlterField(
            model_name="address",
            name="city",
            field=encrypted_fields.fields.EncryptedCharField(
                max_length=100,
                null=True,
                verbose_name="city",
            ),
        ),
        migrations.AlterField(
            model_name="address",
            name="region",
            field=encrypted_fields.fields.EncryptedCharField(
                max_length=100,
                null=True,
                verbose_name="region",
            ),
        ),
        migrations.AlterField(
            model_name="address",
            name="postal_code",
            field=encrypted_fields.fields.EncryptedCharField(
                max_length=20,
                null=True,
                verbose_name="postal code",
            ),
        ),
        migrations.AlterField(
            model_name="address",
            name="country",
            field=encrypted_fields.fields.EncryptedCharField(
                max_length=40,
                null=True,
                verbose_name="country",
            ),
        ),
        # JSON fields: JSONB → TEXT (encrypted JSON string)
        migrations.AlterField(
            model_name="address",
            name="raw_data",
            field=models.TextField(
                blank=True,
                null=True,
                help_text="full JSON response from geocoder for this address",
            ),
        ),
        migrations.AlterField(
            model_name="address",
            name="api_response",
            field=models.TextField(
                blank=True,
                null=True,
                help_text="stored JSON response from the geocoding service",
            ),
        ),
        # Re-encrypt all existing plaintext values
        migrations.RunPython(encrypt_address_fields, migrations.RunPython.noop),
    ]

View file

@ -47,6 +47,7 @@ from django.utils.functional import cached_property
from django.utils.http import urlsafe_base64_encode
from django.utils.translation import gettext_lazy as _
from django_extensions.db.fields import AutoSlugField
from encrypted_fields.fields import EncryptedCharField, EncryptedTextField
from mptt.fields import TreeForeignKey
from mptt.models import MPTTModel
@ -72,6 +73,7 @@ from engine.core.utils.lists import FAILED_STATUSES
from engine.core.utils.markdown import strip_markdown
from engine.core.validators import validate_category_image_dimensions
from engine.payments.models import Transaction
from schon.fields import EncryptedJSONTextField
from schon.utils.misc import create_object
if TYPE_CHECKING:
@ -1133,18 +1135,18 @@ class Address(NiceModel):
is_publicly_visible = False
address_line = TextField(
address_line = EncryptedTextField(
blank=True,
null=True,
help_text=_("address line for the customer"),
verbose_name=_("address line"),
)
street = CharField(_("street"), max_length=255, null=True)
district = CharField(_("district"), max_length=255, null=True)
city = CharField(_("city"), max_length=100, null=True)
region = CharField(_("region"), max_length=100, null=True)
postal_code = CharField(_("postal code"), max_length=20, null=True)
country = CharField(_("country"), max_length=40, null=True)
street = EncryptedCharField(_("street"), max_length=255, null=True)
district = EncryptedCharField(_("district"), max_length=255, null=True)
city = EncryptedCharField(_("city"), max_length=100, null=True)
region = EncryptedCharField(_("region"), max_length=100, null=True)
postal_code = EncryptedCharField(_("postal code"), max_length=20, null=True)
country = EncryptedCharField(_("country"), max_length=40, null=True)
location: PointField = PointField(
geography=True,
@ -1154,13 +1156,13 @@ class Address(NiceModel):
help_text=_("geolocation point: (longitude, latitude)"),
)
raw_data = JSONField(
raw_data = EncryptedJSONTextField(
blank=True,
null=True,
help_text=_("full JSON response from geocoder for this address"),
)
api_response = JSONField(
api_response = EncryptedJSONTextField(
blank=True,
null=True,
help_text=_("stored JSON response from the geocoding service"),

View file

@ -107,10 +107,10 @@ class CategoryDetailSerializer(ModelSerializer):
return list(serializer.data)
return []
def get_min_price(self, obj: Category):
def get_min_price(self, obj: Category) -> float:
return obj.min_price
def get_max_price(self, obj: Category):
def get_max_price(self, obj: Category) -> float:
return obj.max_price

View file

@ -4,6 +4,7 @@ import os
import traceback
from contextlib import suppress
from datetime import date, timedelta
from os import getenv
import requests
from constance import config
@ -35,6 +36,7 @@ from django_ratelimit.decorators import ratelimit
from drf_spectacular.utils import extend_schema_view
from drf_spectacular.views import SpectacularAPIView
from graphene_file_upload.django import FileUploadGraphQLView
from graphql.validation import NoSchemaIntrospectionCustomRule
from rest_framework import status
from rest_framework.parsers import MultiPartParser
from rest_framework.permissions import AllowAny, IsAdminUser
@ -85,6 +87,7 @@ from engine.core.utils.commerce import (
from engine.core.utils.emailing import contact_us_email
from engine.core.utils.languages import get_flag_by_language
from engine.payments.serializers import TransactionProcessSerializer
from schon.graphql_validators import QueryDepthLimitRule
from schon.utils.renderers import camelize
logger = logging.getLogger(__name__)
@ -121,7 +124,14 @@ sitemap_detail.__doc__ = _( # ty:ignore[invalid-assignment]
)
_graphql_validation_rules = [QueryDepthLimitRule]
# NoSchemaIntrospectionCustomRule *rejects* introspection queries, so it must
# be installed when introspection is NOT explicitly enabled. The previous
# condition was inverted: setting GRAPHQL_INTROSPECTION=1 used to block
# introspection while the default left the schema fully introspectable.
if getenv("GRAPHQL_INTROSPECTION", "").lower() not in ("1", "true", "yes"):
    _graphql_validation_rules.append(NoSchemaIntrospectionCustomRule)


class CustomGraphQLView(FileUploadGraphQLView):
    """GraphQL endpoint with depth limiting and opt-in schema introspection."""

    validation_rules = tuple(_graphql_validation_rules)

    def get_context(self, request):
        # Resolvers in this project expect the raw request as the context.
        return request

View file

@ -530,6 +530,34 @@ class ProductViewSet(SchonViewSet):
self.check_object_permissions(self.request, obj)
return obj
@action(detail=False, methods=("POST",), url_path="retrieve-exact")
@method_decorator(ratelimit(key="ip", rate="8/s" if not settings.DEBUG else "44/s"))
def exact_list(self, request: Request, *args, **kwargs) -> Response:
    """Bulk-fetch products by uuid, slug, or sku (POST body driven).

    Expects `identificator_type` and a list `identificators`; responds
    400 on missing input or an unknown identifier type.
    NOTE(review): the identifier list length is unbounded — confirm the
    ratelimit above is enough to bound response size.
    """
    identificator_type = request.data.get("identificator_type")
    identificators = request.data.get("identificators", [])
    if not identificator_type or not identificators:
        return Response(
            {"detail": _("identificator_type and identificators are required")},
            status=status.HTTP_400_BAD_REQUEST,
        )
    # Dispatch table instead of match/case: one lookup per identifier kind.
    lookup_by_type = {"uuid": "uuid__in", "slug": "slug__in", "sku": "sku__in"}
    lookup = lookup_by_type.get(identificator_type)
    if lookup is None:
        return Response(
            {"detail": _("identificator_type must be one of: uuid, slug, sku")},
            status=status.HTTP_400_BAD_REQUEST,
        )
    qs = self.get_queryset().filter(**{lookup: identificators})
    serializer = ProductSimpleSerializer(qs, many=True)
    return Response(serializer.data)
# noinspection PyUnusedLocal
@action(detail=True, methods=("GET",), url_path="feedbacks")
@method_decorator(ratelimit(key="ip", rate="2/s" if not settings.DEBUG else "44/s"))
@ -1193,7 +1221,7 @@ class AddressViewSet(SchonViewSet):
filterset_class = AddressFilter
queryset = Address.objects.all()
serializer_class = AddressSerializer
additional = {"create": "ALLOW", "retrieve": "ALLOW"}
additional = {"create": "ALLOW"}
def get_serializer_class(self):
if self.action == "create":
@ -1211,15 +1239,6 @@ class AddressViewSet(SchonViewSet):
return Address.objects.none()
def retrieve(self, request: Request, *args, **kwargs) -> Response:
try:
address = Address.objects.get(uuid=str(kwargs.get("pk")))
return Response(
status=status.HTTP_200_OK, data=self.get_serializer(address).data
)
except Address.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
def create(self, request: Request, *args, **kwargs) -> Response:
create_serializer = AddressCreateSerializer(
data=request.data, context={"request": request}

View file

@ -110,7 +110,8 @@ class UserAdmin(ActivationActionsMixin, BaseUserAdmin, ModelAdmin):
),
)
list_display = ("email", "phone_number", "is_verified", "is_active", "is_staff")
search_fields = ("email", "phone_number")
# phone_number is encrypted — DB-level search is not possible for it
search_fields = ("email",)
list_filter = (
"is_verified",
"is_active",

View file

@ -0,0 +1,136 @@
import base64
import encrypted_fields.fields
from cryptography.fernet import Fernet, MultiFernet
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
from django.db import migrations, models
def _make_fernet(settings):
    """Derive the Fernet key(s) for re-encrypting existing user rows.

    One key per (secret, salt) pair: PBKDF2-HMAC-SHA256, 32 bytes,
    100_000 iterations, urlsafe-base64 encoded. Multiple keys (fallback
    secrets and/or list-valued SALT_KEY) are wrapped in a MultiFernet.

    NOTE(review): duplicated in the Address encryption migration and
    presumably mirrors django-fernet-encrypted-fields' derivation so the
    model layer can decrypt these values — confirm before changing.
    """
    keys = []
    salt_keys = (
        settings.SALT_KEY
        if isinstance(settings.SALT_KEY, list)
        else [settings.SALT_KEY]
    )
    for secret_key in [settings.SECRET_KEY] + list(
        getattr(settings, "SECRET_KEY_FALLBACKS", [])
    ):
        for salt_key in salt_keys:
            kdf = PBKDF2HMAC(
                algorithm=hashes.SHA256(),
                length=32,
                salt=salt_key.encode("utf-8"),
                iterations=100_000,
                backend=default_backend(),
            )
            keys.append(
                base64.urlsafe_b64encode(kdf.derive(secret_key.encode("utf-8")))
            )
    return MultiFernet([Fernet(k) for k in keys]) if len(keys) > 1 else Fernet(keys[0])
def encrypt_user_pii(apps, schema_editor):
    """One-off data step: Fernet-encrypt existing plaintext user PII.

    Uses raw SQL rather than the ORM because the model fields are already
    declared as Encrypted* when this runs — loading rows through the model
    would attempt to decrypt values that are still plaintext.

    NOTE(review): not idempotent — a second run double-encrypts values.
    """
    import json

    from django.conf import settings

    f = _make_fernet(settings)

    def enc(value):
        # Scalar -> Fernet token; non-string values are stringified first.
        if value is None:
            return None
        if not isinstance(value, str):
            value = str(value)
        return f.encrypt(value.encode("utf-8")).decode("utf-8")

    def enc_json(value):
        if value is None:
            return None
        # value may be a Python dict (from JSONB) or a JSON string (after TEXT cast)
        if not isinstance(value, str):
            value = json.dumps(value, default=str)
        return f.encrypt(value.encode("utf-8")).decode("utf-8")

    with schema_editor.connection.cursor() as cursor:
        cursor.execute(
            "SELECT uuid, phone_number, first_name, last_name, attributes "
            "FROM vibes_auth_user"
        )
        # Whole table materialised in memory; fine for modest user counts.
        rows = cursor.fetchall()
        for row_id, phone, first, last, attrs in rows:
            cursor.execute(
                "UPDATE vibes_auth_user "
                "SET phone_number=%s, first_name=%s, last_name=%s, attributes=%s "
                "WHERE uuid=%s",
                [enc(phone), enc(first), enc(last), enc_json(attrs), row_id],
            )
class Migration(migrations.Migration):
    """Encrypt user PII columns and add the activation-token timestamp.

    Schema changes run first (AlterField to Encrypted*/TEXT does not touch
    stored data), then a RunPython pass Fernet-encrypts the pre-existing
    plaintext rows via raw SQL. The data step's reverse is a no-op, so a
    backward migration leaves values encrypted.
    """

    dependencies = [
        (
            "vibes_auth",
            "0009_delete_emailimage_remove_emailtemplate_html_content_and_more",
        ),
    ]

    operations = [
        # Add activation token timestamp
        migrations.AddField(
            model_name="user",
            name="activation_token_created",
            field=models.DateTimeField(
                blank=True,
                null=True,
                verbose_name="activation token created",
            ),
        ),
        # Encrypt phone_number (also drops unique constraint)
        migrations.AlterField(
            model_name="user",
            name="phone_number",
            field=encrypted_fields.fields.EncryptedCharField(
                blank=True,
                max_length=20,
                null=True,
                verbose_name="phone_number",
            ),
        ),
        # Encrypt first_name
        migrations.AlterField(
            model_name="user",
            name="first_name",
            field=encrypted_fields.fields.EncryptedCharField(
                blank=True,
                max_length=150,
                null=True,
                verbose_name="first_name",
            ),
        ),
        # Encrypt last_name
        migrations.AlterField(
            model_name="user",
            name="last_name",
            field=encrypted_fields.fields.EncryptedCharField(
                blank=True,
                max_length=150,
                null=True,
                verbose_name="last_name",
            ),
        ),
        # Encrypt attributes (JSONB → TEXT with JSON serialisation)
        migrations.AlterField(
            model_name="user",
            name="attributes",
            field=models.TextField(
                blank=True,
                null=True,
                verbose_name="attributes",
            ),
        ),
        # Re-encrypt existing plaintext values using raw SQL
        migrations.RunPython(encrypt_user_pii, migrations.RunPython.noop),
    ]

View file

@ -23,6 +23,7 @@ from django.templatetags.static import static
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from encrypted_fields.fields import EncryptedCharField
from rest_framework_simplejwt.token_blacklist.models import (
BlacklistedToken as BaseBlacklistedToken,
)
@ -35,6 +36,7 @@ from engine.payments.models import Balance
from engine.vibes_auth.choices import SenderType, ThreadStatus
from engine.vibes_auth.managers import UserManager
from engine.vibes_auth.validators import validate_phone_number
from schon.fields import EncryptedJSONTextField
class User(AbstractUser, NiceModel):
@ -51,10 +53,9 @@ class User(AbstractUser, NiceModel):
return "users/" + str(self.uuid) + "/" + args[0]
email = EmailField(_("email"), unique=True, help_text=_("user email address"))
phone_number = CharField(
phone_number = EncryptedCharField(
_("phone_number"),
max_length=20,
unique=True,
blank=True,
null=True,
help_text=_("user phone number"),
@ -63,8 +64,12 @@ class User(AbstractUser, NiceModel):
],
)
username: None = None
first_name = CharField(_("first_name"), max_length=150, blank=True, null=True)
last_name = CharField(_("last_name"), max_length=150, blank=True, null=True)
first_name = EncryptedCharField(
_("first_name"), max_length=150, blank=True, null=True
)
last_name = EncryptedCharField(
_("last_name"), max_length=150, blank=True, null=True
)
avatar = ImageField(
null=True,
verbose_name=_("avatar"),
@ -90,6 +95,11 @@ class User(AbstractUser, NiceModel):
)
activation_token = UUIDField(default=uuid4, verbose_name=_("activation token"))
activation_token_created = DateTimeField(
null=True,
blank=True,
verbose_name=_("activation token created"),
)
unsubscribe_token = UUIDField(
default=uuid4,
verbose_name=_("unsubscribe token"),
@ -102,7 +112,7 @@ class User(AbstractUser, NiceModel):
blank=False,
max_length=7,
)
attributes = JSONField(
attributes = EncryptedJSONTextField(
verbose_name=_("attributes"), default=dict, blank=True, null=True
)
@ -135,8 +145,25 @@ class User(AbstractUser, NiceModel):
def recently_viewed(self):
return cache.get(f"user_{self.uuid}_rv", [])
def check_token(self, token):
return str(token) == str(self.activation_token)
def save(self, *args, **kwargs):
    """Persist the user, stamping activation_token_created on first insert.

    The stamp is applied only when the instance is being added and no
    timestamp exists yet, so explicitly assigned values are preserved.
    """
    if self._state.adding and self.activation_token_created is None:
        self.activation_token_created = timezone.now()
    super().save(*args, **kwargs)
def refresh_activation_token(self) -> None:
    """Generate a fresh activation token and update its timestamp.

    Mutates the instance only — callers must save() to persist.
    """
    self.activation_token = uuid4()
    self.activation_token_created = timezone.now()
def check_token(self, token) -> bool:
    """True when ``token`` matches the activation token and is still valid.

    Tokens expire 24 hours after ``activation_token_created``; when no
    creation timestamp is recorded, a matching token never expires.
    """
    from datetime import timedelta

    if str(token) != str(self.activation_token):
        return False
    issued_at = self.activation_token_created
    if not issued_at:
        return True
    return timezone.now() <= issued_at + timedelta(hours=24)
def __str__(self):
return self.email

View file

@ -26,7 +26,10 @@ from engine.vibes_auth.serializers import (
MergeRecentlyViewedSerializer,
UserSerializer,
)
from engine.vibes_auth.utils.emailing import send_reset_password_email_task
from engine.vibes_auth.utils.emailing import (
send_reset_password_email_task,
send_verification_email_task,
)
logger = logging.getLogger(__name__)
@ -130,6 +133,23 @@ class UserViewSet(
ratelimit(key="ip", rate="5/h" if not settings.DEBUG else "888/h")
)
def create(self, request: Request, *args, **kwargs) -> Response:
email = request.data.get("email")
if email:
with suppress(User.DoesNotExist):
pending = User.objects.get(
email=email, is_active=False, is_verified=False
)
pending.refresh_activation_token()
pending.save()
send_verification_email_task.delay(user_pk=str(pending.uuid))
return Response(
{
"detail": _(
"Account already registered but not yet activated. A new activation email has been sent."
)
},
status=status.HTTP_200_OK,
)
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.save()

View file

@ -21,13 +21,14 @@ dependencies = [
"django-dbbackup==5.2.0",
"django-elasticsearch-dsl==8.2",
"django-extensions==4.1",
"django-fernet-encrypted-fields==0.3.1",
"django-filter==25.2",
"django-health-check==4.0.6",
"django-health-check==4.1.0",
"django-import-export[all]==4.4.0",
"django-json-widget==2.1.1",
"django-model-utils==5.0.0",
"django-md-field==0.1.0",
"django-modeltranslation==0.19.19",
"django-modeltranslation==0.20.2",
"django-mptt==0.18.0",
"django-redis==6.0.0",
"django-ratelimit==4.1.0",
@ -47,7 +48,7 @@ dependencies = [
"drf-spectacular-websocket==1.3.1",
"drf-orjson-renderer==1.8.0",
"elasticsearch-dsl==8.18.0",
"filelock==3.24.3",
"filelock==3.25.0",
"filetype==1.2.0",
"graphene-django==3.2.3",
"graphene-file-upload==1.3.0",

43
schon/fields.py Normal file
View file

@ -0,0 +1,43 @@
import json
from encrypted_fields.fields import EncryptedTextField
class EncryptedJSONTextField(EncryptedTextField):
    """
    Stores a JSON-serializable value as Fernet-encrypted TEXT.

    Unlike EncryptedJSONField (which uses JSONB and breaks with psycopg3's
    automatic JSONB→dict conversion), this field stores the JSON as a plain
    TEXT column, encrypting the full serialised string. The column type in
    the DB is text, not jsonb.
    """

    def get_internal_type(self) -> str:
        # Report a plain TEXT column to Django (never JSONB).
        return "TextField"

    def get_prep_value(self, value):
        # Serialise non-string values to JSON before the base class encrypts.
        if value is not None and not isinstance(value, str):
            value = json.dumps(value, default=str)
        return super().get_prep_value(value)

    def from_db_value(self, value, expression, connection):
        # Base class decrypts first; then parse the JSON back into Python.
        value = super().from_db_value(value, expression, connection)
        if value is None:
            return None
        if isinstance(value, str):
            try:
                return json.loads(value)
            except (ValueError, TypeError):
                # Non-JSON payloads fall through and return the raw string.
                pass
        return value

    def to_python(self, value):
        # Containers are already deserialised; strings are parsed as JSON,
        # falling back to the raw string if parsing fails.
        if isinstance(value, (dict, list)):
            return value
        if isinstance(value, str):
            try:
                return json.loads(value)
            except (ValueError, TypeError):
                pass
        return value

View file

@ -0,0 +1,56 @@
from graphql import GraphQLError
from graphql.language.ast import (
FieldNode,
FragmentDefinitionNode,
FragmentSpreadNode,
InlineFragmentNode,
OperationDefinitionNode,
)
from graphql.validation import ValidationRule
MAX_QUERY_DEPTH = 8
def _max_field_depth(node, fragments, depth=0, visited=frozenset()):
    """Return the maximum field nesting depth reachable from ``node``.

    ``fragments`` maps fragment name -> FragmentDefinitionNode so spreads
    can be followed. ``visited`` carries the fragment names already
    expanded on the current path: without this guard, a document with
    cyclic fragment spreads would recurse without bound and crash with a
    RecursionError — a DoS vector in the very rule meant to prevent DoS.
    The new parameter defaults so existing callers are unaffected.
    """
    if not hasattr(node, "selection_set") or not node.selection_set:
        return depth
    return max(
        (
            _selection_depth(sel, fragments, depth, visited)
            for sel in node.selection_set.selections
        ),
        default=depth,
    )


def _selection_depth(node, fragments, depth, visited=frozenset()):
    """Depth contribution of a single selection node.

    Fields add one level; inline fragments and fragment spreads are
    transparent (they add no depth themselves). A fragment already on the
    current expansion path is not re-expanded (cycle guard).
    """
    if isinstance(node, FieldNode):
        return _max_field_depth(node, fragments, depth + 1, visited)
    if isinstance(node, InlineFragmentNode):
        return _max_field_depth(node, fragments, depth, visited)
    if isinstance(node, FragmentSpreadNode):
        name = node.name.value
        if name in visited:
            # Fragment cycle: stop expanding instead of recursing forever.
            return depth
        fragment = fragments.get(name)
        if fragment:
            return _max_field_depth(fragment, fragments, depth, visited | {name})
    return depth
class QueryDepthLimitRule(ValidationRule):
    """Prevents DoS via deeply nested GraphQL queries (max depth: 8)."""

    def enter_document(self, node, *_args):
        # Index fragment definitions by name so spreads can be resolved
        # while measuring depth.
        fragments = {
            defn.name.value: defn
            for defn in node.definitions
            if isinstance(defn, FragmentDefinitionNode)
        }
        # Check every operation (query/mutation/subscription) in the
        # document and report each one that exceeds the limit.
        for defn in node.definitions:
            if not isinstance(defn, OperationDefinitionNode):
                continue
            depth = _max_field_depth(defn, fragments)
            if depth > MAX_QUERY_DEPTH:
                self.report_error(
                    GraphQLError(
                        f"Query depth limit exceeded: max {MAX_QUERY_DEPTH}, got {depth}."
                    )
                )

View file

@ -20,6 +20,7 @@ BASE_DIR: Path = Path(__file__).resolve().parent.parent.parent
INITIALIZED: bool = (BASE_DIR / ".initialized").exists()
SECRET_KEY: str = getenv("SECRET_KEY", "SUPER_SECRET_KEY")
SALT_KEY: str = getenv("SALT_KEY", "schon-default-salt-key-change-in-production")
DEBUG: bool = bool(int(getenv("DEBUG", "1")))
DEBUG_DATABASE: bool = bool(int(getenv("DEBUG_DATABASE", "0")))
DEBUG_CELERY: bool = bool(int(getenv("DEBUG_DATABASE", "0")))
@ -422,8 +423,24 @@ if getenv("SENTRY_DSN"):
if isinstance(data, dict):
# noinspection PyShadowingNames
cleaned: dict[str, Any] = {}
_SENSITIVE_KEYS = {
"password",
"confirm_password",
"phone_number",
"phone",
"email",
"street",
"postal_code",
"postal",
"passport",
"secret",
"token",
"address",
"first_name",
"last_name",
}
for key, value in data.items():
if key.lower() in ("password", "confirm_password"):
if key.lower() in _SENSITIVE_KEYS:
cleaned[key] = "[FILTERED]"
else:
cleaned[key] = scrub_sensitive(value)

View file

@ -16,6 +16,14 @@ CONSTANCE_ADDITIONAL_FIELDS = {
"widget": "engine.core.widgets.JSONTableWidget",
},
],
"password": [
"django.forms.CharField",
{
"required": False,
"widget": "django.forms.PasswordInput",
"widget_attrs": {"render_value": True},
},
],
}
CONSTANCE_CONFIG = OrderedDict(
@ -67,7 +75,11 @@ CONSTANCE_CONFIG = OrderedDict(
),
(
"EMAIL_HOST_PASSWORD",
(getenv("EMAIL_HOST_PASSWORD", "SUPERsecretPASSWORD"), _("SMTP password")),
(
getenv("EMAIL_HOST_PASSWORD", "SUPERsecretPASSWORD"),
_("SMTP password"),
"password",
),
),
("EMAIL_FROM", (getenv("EMAIL_FROM", "Schon"), _("Mail from option"))),
### Features Options ###

View file

@ -26,6 +26,14 @@ REST_FRAMEWORK: dict[str, Any] = {
"DEFAULT_PARSER_CLASSES": ("schon.utils.parsers.CamelCaseParser",),
"DEFAULT_SCHEMA_CLASS": "drf_spectacular.generators.AutoSchema",
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.AllowAny",),
"DEFAULT_THROTTLE_CLASSES": [
"rest_framework.throttling.AnonRateThrottle",
"rest_framework.throttling.UserRateThrottle",
],
"DEFAULT_THROTTLE_RATES": {
"anon": "60/minute",
"user": "600/minute",
},
"JSON_UNDERSCOREIZE": {
"no_underscore_before_number": False,
},

View file

@ -53,6 +53,7 @@ SCHON_LANGUAGE_CODE=$(prompt_default SCHON_LANGUAGE_CODE "en-gb")
SECRET_KEY=$(prompt_autogen SECRET_KEY 32)
JWT_SIGNING_KEY=$(prompt_autogen JWT_SIGNING_KEY 64)
SALT_KEY=$(prompt_autogen SALT_KEY 32)
ALLOWED_HOSTS=$(prompt_default ALLOWED_HOSTS "schon.wiseless.xyz api.schon.wiseless.xyz")
CSRF_TRUSTED_ORIGINS=$(prompt_default CSRF_TRUSTED_ORIGINS "https://schon.wiseless.xyz https://api.schon.wiseless.xyz https://www.schon.wiseless.xyz")
@ -101,6 +102,7 @@ SCHON_LANGUAGE_CODE="${SCHON_LANGUAGE_CODE}"
SECRET_KEY="${SECRET_KEY}"
JWT_SIGNING_KEY="${JWT_SIGNING_KEY}"
SALT_KEY="${SALT_KEY}"
ALLOWED_HOSTS="${ALLOWED_HOSTS}"
CSRF_TRUSTED_ORIGINS="${CSRF_TRUSTED_ORIGINS}"

View file

@ -0,0 +1,48 @@
#!/usr/bin/env bash
# =============================================================================
# migrate-media.sh
# Migrates user-uploaded media files from host bind-mount (eVibes / early Schon)
# into the new Docker-managed named volume (media-data).
#
# Run this ONCE after upgrading from an eVibes or pre-volume Schon instance.
# =============================================================================
set -euo pipefail

# Directory the legacy deployment bind-mounted for media uploads.
SRC_MEDIA="$(pwd)/media"

echo "Schon — media migration from host bind-mount → named volume"
echo ""

# Nothing to migrate when the legacy directory is absent.
if [ ! -d "$SRC_MEDIA" ]; then
  echo "No ./media directory found on host. Nothing to migrate."
  exit 0
fi

# Count regular files only; tr strips the padding some wc builds emit.
NUM_FILES="$(find "$SRC_MEDIA" -type f | wc -l | tr -d ' ')"
if [ "$NUM_FILES" -eq 0 ]; then
  echo "Host ./media directory is empty. Nothing to migrate."
  exit 0
fi

echo "Found $NUM_FILES file(s) in $SRC_MEDIA"
echo ""
echo "This will copy them into the Docker named volume 'media-data'."

# Explicit opt-in; anything but y/Y aborts without touching the volume.
read -rp "Continue? [y/N] " answer
case "$answer" in
  y|Y) ;;
  *)
    echo "Migration cancelled."
    exit 0
    ;;
esac

echo ""
echo "Copying files..."

# Mount the host directory read-only into a throwaway app container and let
# cp -a preserve timestamps/permissions while writing into the named volume.
docker compose run --rm \
  -v "$SRC_MEDIA":/old_media:ro \
  app bash -c '
    cp -a /old_media/. /app/media/
    COUNT=$(find /app/media -type f | wc -l)
    echo "Migration complete: $COUNT file(s) now in media volume."
  '

echo ""
echo "Done. You can safely ignore the host ./media directory — it is no longer used."
echo "To remove it: rm -rf ./media"

View file

@ -13,6 +13,10 @@ log_success "Services were shut down successfully!"
# Remove volumes
log_step "Removing volumes..."
# Force-remove every named volume; each failure is non-fatal and only warns,
# since a volume may already be gone. Fixed: the prometheus/es lines used the
# "rm" alias while the rest used "remove" — normalized to one canonical form
# ("rm" and "remove" are aliases of the same Docker CLI command).
docker volume remove -f schon_postgres-data || log_warning "Failed to remove postgres-data volume"
docker volume remove -f schon_redis-data || log_warning "Failed to remove redis-data volume"
docker volume remove -f schon_static-data || log_warning "Failed to remove static-data volume"
docker volume remove -f schon_media-data || log_warning "Failed to remove media-data volume"
docker volume remove -f schon_prometheus-data || log_warning "Failed to remove prometheus-data volume"
docker volume remove -f schon_es-data || log_warning "Failed to remove es-data volume"
log_success "Volumes were removed successfully!"

View file

@ -59,6 +59,7 @@ $SCHON_LANGUAGE_CODE = Prompt-Default 'SCHON_LANGUAGE_CODE' 'en-gb'
$SECRET_KEY = Prompt-AutoGen 'SECRET_KEY' 32
$JWT_SIGNING_KEY = Prompt-AutoGen 'JWT_SIGNING_KEY' 64
$SALT_KEY = Prompt-AutoGen 'SALT_KEY' 32
$ALLOWED_HOSTS = Prompt-Default 'ALLOWED_HOSTS' 'schon.wiseless.xyz api.schon.wiseless.xyz'
$CSRF_TRUSTED_ORIGINS = Prompt-Default 'CSRF_TRUSTED_ORIGINS' 'https://schon.wiseless.xyz https://api.schon.wiseless.xyz https://www.schon.wiseless.xyz'
@ -108,6 +109,7 @@ $lines = @(
""
"SECRET_KEY=""$SECRET_KEY"""
"JWT_SIGNING_KEY=""$JWT_SIGNING_KEY"""
"SALT_KEY=""$SALT_KEY"""
""
"ALLOWED_HOSTS=""$ALLOWED_HOSTS"""
"CSRF_TRUSTED_ORIGINS=""$CSRF_TRUSTED_ORIGINS"""

View file

@ -0,0 +1,51 @@
# =============================================================================
# migrate-media.ps1
# Migrates user-uploaded media files from host bind-mount (eVibes / early Schon)
# into the new Docker-managed named volume (media-data).
#
# Run this ONCE after upgrading from an eVibes or pre-volume Schon instance.
# =============================================================================
Set-StrictMode -Version Latest
$ErrorActionPreference = 'Stop'

$HostMedia = Join-Path (Get-Location) "media"

Write-Host "Schon - media migration from host bind-mount to named volume" -ForegroundColor Cyan
Write-Host ""

if (-not (Test-Path $HostMedia)) {
    Write-Host "No .\media directory found on host. Nothing to migrate." -ForegroundColor Yellow
    exit 0
}

# @() guarantees an array so .Count is safe: under StrictMode Latest in
# Windows PowerShell 5.1 the intrinsic Count on a scalar result throws.
$FileCount = @(Get-ChildItem -Path $HostMedia -Recurse -File).Count
if ($FileCount -eq 0) {
    Write-Host "Host .\media directory is empty. Nothing to migrate." -ForegroundColor Yellow
    exit 0
}

Write-Host "Found $FileCount file(s) in $HostMedia" -ForegroundColor White
Write-Host ""
Write-Host "This will copy them into the Docker named volume 'media-data'." -ForegroundColor White

# -ne is case-insensitive in PowerShell, so the "Y" check is belt-and-braces.
$confirm = Read-Host "Continue? [y/N]"
if ($confirm -ne "y" -and $confirm -ne "Y") {
    Write-Host "Migration cancelled." -ForegroundColor Yellow
    exit 0
}

Write-Host ""
Write-Host "Copying files..." -ForegroundColor White

# Convert the Windows path (C:\proj\media) to the Docker-style form
# (/c/proj/media). Fixed: the previous scriptblock substitution with -replace
# requires PowerShell 6+ and crashed on stock Windows PowerShell 5.1; the
# -match / $Matches form below works on 5.1 and later alike.
$HostMediaUnix = $HostMedia -replace '\\', '/'
if ($HostMediaUnix -match '^([A-Za-z]):') {
    $HostMediaUnix = '/' + $Matches[1].ToLower() + $HostMediaUnix.Substring(2)
}

# Mount the host directory read-only into a throwaway app container; cp -a
# preserves timestamps/permissions while writing into the named volume.
docker compose run --rm `
    -v "${HostMediaUnix}:/old_media:ro" `
    app bash -c @"
cp -a /old_media/. /app/media/
COUNT=`$(find /app/media -type f | wc -l)
echo "Migration complete: `$COUNT file(s) now in media volume."
"@

Write-Host ""
Write-Host "Done. You can safely ignore the host .\media directory - it is no longer used." -ForegroundColor Green
Write-Host "To remove it: Remove-Item -Recurse -Force .\media" -ForegroundColor Gray

View file

@ -21,6 +21,22 @@ Write-Success "Services were shut down successfully!"
# Remove volumes
Write-Step "Removing volumes..."
docker volume remove -f schon_postgres-data
if ($LASTEXITCODE -ne 0) {
Write-Warning-Custom "Failed to remove postgres-data volume"
}
docker volume remove -f schon_redis-data
if ($LASTEXITCODE -ne 0) {
Write-Warning-Custom "Failed to remove redis-data volume"
}
docker volume remove -f schon_static-data
if ($LASTEXITCODE -ne 0) {
Write-Warning-Custom "Failed to remove static-data volume"
}
docker volume remove -f schon_media-data
if ($LASTEXITCODE -ne 0) {
Write-Warning-Custom "Failed to remove media-data volume"
}
docker volume remove -f schon_prometheus-data
if ($LASTEXITCODE -ne 0) {
Write-Warning-Custom "Failed to remove prometheus-data volume"

39
uv.lock
View file

@ -866,6 +866,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/64/96/d967ca440d6a8e3861120f51985d8e5aec79b9a8bdda16041206adfe7adc/django_extensions-4.1-py3-none-any.whl", hash = "sha256:0699a7af28f2523bf8db309a80278519362cd4b6e1fd0a8cd4bf063e1e023336", size = 232980, upload-time = "2025-04-11T01:15:37.701Z" },
]
[[package]]
name = "django-fernet-encrypted-fields"
version = "0.3.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cryptography" },
{ name = "django" },
]
sdist = { url = "https://files.pythonhosted.org/packages/1a/aa/529af3888215b8a660fc3897d6d63eaf1de9aa0699c633ca0ec483d4361c/django_fernet_encrypted_fields-0.3.1.tar.gz", hash = "sha256:5ed328c7f9cc7f2d452bb2e125f3ea2bea3563a259fa943e5a1c626175889a71", size = 5265, upload-time = "2025-11-10T08:39:57.398Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/52/7f/4e0b7ed8413fa58e7a77017342e8ab0e977d41cfc376ab9180ae75f216ec/django_fernet_encrypted_fields-0.3.1-py3-none-any.whl", hash = "sha256:3bd2abab02556dc6e15a58a61161ee6c5cdf45a50a8a52d9e035009eb54c6442", size = 5484, upload-time = "2025-11-10T08:39:55.866Z" },
]
[[package]]
name = "django-filter"
version = "25.2"
@ -880,15 +893,15 @@ wheels = [
[[package]]
name = "django-health-check"
version = "4.0.6"
version = "4.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django" },
{ name = "dnspython" },
]
sdist = { url = "https://files.pythonhosted.org/packages/92/fe/718725c58fd177cff0cfb8abe3010f2cad582713f2bc52eaf7120b750dec/django_health_check-4.0.6.tar.gz", hash = "sha256:03837041ba8a235e810e16218f2ef3feb372c4af72776fa3676c16435c72171c", size = 20763, upload-time = "2026-02-23T17:11:40.625Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f8/e6/37aefb657a522c410844d7fe617c997036ef570157905518c2bf03abfb8d/django_health_check-4.1.0.tar.gz", hash = "sha256:7c9ea01edad40b3ea1d9b5780fcec3fdc1e263f9d0c8503af3b043e6a4b38964", size = 21124, upload-time = "2026-03-01T16:03:10.728Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3a/44/2fa6ec47c1c18159c094f7d00397a208b6311e8b26d603dd22ba6e79b99d/django_health_check-4.0.6-py3-none-any.whl", hash = "sha256:efba106bc4f92b1b084f3af751e9eeb0b5c1af77d0af212e432ede2ba8f1e94f", size = 25813, upload-time = "2026-02-23T17:11:39.419Z" },
{ url = "https://files.pythonhosted.org/packages/a2/01/a2a8e5250c0bf352660208bf62dd6bdc67d3c52c16a412da9c382b0bfd12/django_health_check-4.1.0-py3-none-any.whl", hash = "sha256:a823ec81aa108909855159fa1fdb84d443d0c25057de4cc98bfe230ae52d0751", size = 26173, upload-time = "2026-03-01T16:03:09.03Z" },
]
[[package]]
@ -957,15 +970,15 @@ wheels = [
[[package]]
name = "django-modeltranslation"
version = "0.19.19"
version = "0.20.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c1/af/3c34fca94fccae681a15734bc6557cf9be958d1d063ddbb233580b894054/django_modeltranslation-0.19.19.tar.gz", hash = "sha256:26dd8454f19540a2eb05e303608a2d89dd80aacb75ab95f8ea272cf4324d2644", size = 77750, upload-time = "2025-12-15T10:25:38.112Z" }
sdist = { url = "https://files.pythonhosted.org/packages/62/aa/3492ab6250e0c87e26f2f4fd522e4c9b214da871bfe4089a13888a802c6e/django_modeltranslation-0.20.2.tar.gz", hash = "sha256:d687bdcae9305dcdc8d644b75f1d687f355073e82049f4b9c4ba0feb347a0980", size = 81589, upload-time = "2026-03-01T15:59:07.262Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/2a/fdf265e91e37ee363b2c45f3d2a01752a3b38ab082a3ec0b0677105bd367/django_modeltranslation-0.19.19-py3-none-any.whl", hash = "sha256:55ac2ce47486b9e8ca18b155f7705170a53b7e1346bf7bf89304e99787486e8f", size = 93441, upload-time = "2025-12-15T10:25:35.942Z" },
{ url = "https://files.pythonhosted.org/packages/79/46/6c1edcab4233baa02cc6cfaf43e01ccc0c28757f9450171e54d790512e21/django_modeltranslation-0.20.2-py3-none-any.whl", hash = "sha256:964385c3cf3a584f564e995f2e9ff0e961d0297ef04ff51821ac529e12064290", size = 98257, upload-time = "2026-03-01T15:59:06.09Z" },
]
[[package]]
@ -1333,11 +1346,11 @@ wheels = [
[[package]]
name = "filelock"
version = "3.24.3"
version = "3.25.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/73/92/a8e2479937ff39185d20dd6a851c1a63e55849e447a55e798cc2e1f49c65/filelock-3.24.3.tar.gz", hash = "sha256:011a5644dc937c22699943ebbfc46e969cdde3e171470a6e40b9533e5a72affa", size = 37935, upload-time = "2026-02-19T00:48:20.543Z" }
sdist = { url = "https://files.pythonhosted.org/packages/77/18/a1fd2231c679dcb9726204645721b12498aeac28e1ad0601038f94b42556/filelock-3.25.0.tar.gz", hash = "sha256:8f00faf3abf9dc730a1ffe9c354ae5c04e079ab7d3a683b7c32da5dd05f26af3", size = 40158, upload-time = "2026-03-01T15:08:45.916Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9c/0f/5d0c71a1aefeb08efff26272149e07ab922b64f46c63363756224bd6872e/filelock-3.24.3-py3-none-any.whl", hash = "sha256:426e9a4660391f7f8a810d71b0555bce9008b0a1cc342ab1f6947d37639e002d", size = 24331, upload-time = "2026-02-19T00:48:18.465Z" },
{ url = "https://files.pythonhosted.org/packages/f9/0b/de6f54d4a8bedfe8645c41497f3c18d749f0bd3218170c667bf4b81d0cdd/filelock-3.25.0-py3-none-any.whl", hash = "sha256:5ccf8069f7948f494968fc0713c10e5c182a9c9d9eef3a636307a20c2490f047", size = 26427, upload-time = "2026-03-01T15:08:44.593Z" },
]
[[package]]
@ -3352,6 +3365,7 @@ dependencies = [
{ name = "django-debug-toolbar" },
{ name = "django-elasticsearch-dsl" },
{ name = "django-extensions" },
{ name = "django-fernet-encrypted-fields" },
{ name = "django-filter" },
{ name = "django-health-check" },
{ name = "django-import-export", extra = ["all"] },
@ -3454,13 +3468,14 @@ requires-dist = [
{ name = "django-debug-toolbar", specifier = "==6.2.0" },
{ name = "django-elasticsearch-dsl", specifier = "==8.2" },
{ name = "django-extensions", specifier = "==4.1" },
{ name = "django-fernet-encrypted-fields", specifier = "==0.3.1" },
{ name = "django-filter", specifier = "==25.2" },
{ name = "django-health-check", specifier = "==4.0.6" },
{ name = "django-health-check", specifier = "==4.1.0" },
{ name = "django-import-export", extras = ["all"], specifier = "==4.4.0" },
{ name = "django-json-widget", specifier = "==2.1.1" },
{ name = "django-md-field", specifier = "==0.1.0" },
{ name = "django-model-utils", specifier = "==5.0.0" },
{ name = "django-modeltranslation", specifier = "==0.19.19" },
{ name = "django-modeltranslation", specifier = "==0.20.2" },
{ name = "django-mptt", specifier = "==0.18.0" },
{ name = "django-ratelimit", specifier = "==4.1.0" },
{ name = "django-redis", specifier = "==6.0.0" },
@ -3481,7 +3496,7 @@ requires-dist = [
{ name = "drf-spectacular", specifier = "==0.29.0" },
{ name = "drf-spectacular-websocket", specifier = "==1.3.1" },
{ name = "elasticsearch-dsl", specifier = "==8.18.0" },
{ name = "filelock", specifier = "==3.24.3" },
{ name = "filelock", specifier = "==3.25.0" },
{ name = "filetype", specifier = "==1.2.0" },
{ name = "graphene-django", specifier = "==3.2.3" },
{ name = "graphene-file-upload", specifier = "==1.3.0" },