diff --git a/core/elasticsearch/__init__.py b/core/elasticsearch/__init__.py
index d64dd82f..2e5158e6 100644
--- a/core/elasticsearch/__init__.py
+++ b/core/elasticsearch/__init__.py
@@ -1,11 +1,14 @@
 from django.conf import settings
 from django.http import Http404
 from django.utils.text import slugify
 from django.utils.translation import gettext_lazy as _
 from django_elasticsearch_dsl import fields
 from django_elasticsearch_dsl.registries import registry
 from elasticsearch import NotFoundError
 from elasticsearch.dsl import Q, Search
+from rest_framework.request import Request
+
+from core.models import Brand, Category, Product
 
 SMART_FIELDS = [
     "name^4",
@@ -29,7 +32,7 @@ SMART_FIELDS = [
 ]
 
 
-def process_query(query: str = ""):
+def process_query(query: str = "", request: Request | None = None):
     """
     Perform a lenient, typo‑tolerant, multi‑index search.
 
@@ -41,7 +44,6 @@ def process_query(query: str = ""):
     query = query.strip()
 
     try:
-        # Build the boolean query
         q = Q(
             "bool",
             should=[
@@ -62,27 +64,54 @@ def process_query(query: str = ""):
             minimum_should_match=1,
         )
 
-        # Execute search across multiple indices
-        search = Search(index=["products", "categories", "brands", "posts"]).query(q).extra(size=100)
+        search = (
+            Search(index=["products", "categories", "brands", "posts"])
+            .query(q)
+            .extra(size=100)
+        )
         response = search.execute()
 
-        # Collect results, guard against None values
         results: dict = {"products": [], "categories": [], "brands": [], "posts": []}
         for hit in response.hits:
            obj_uuid = getattr(hit, "uuid", None) or hit.meta.id
-            obj_name = getattr(hit, "name", None) or getattr(hit, "title", None) or "N/A"
-            # Safely generate a slug
-            obj_slug = getattr(hit, "slug", None) or slugify(f"{obj_uuid}{obj_name}")
+            obj_name = (
+                getattr(hit, "name", None) or getattr(hit, "title", None) or "N/A"
+            )
+            raw_slug = getattr(hit, "slug", None)
+            if raw_slug:
+                obj_slug = raw_slug
+            elif hit.meta.index in ("brands", "categories"):
+                obj_slug = slugify(obj_name)
+            else:
+                # Fallback so obj_slug is never unbound for products/posts hits.
+                obj_slug = slugify(f"{obj_uuid}{obj_name}")
+
+            image_url = None
             idx = hit.meta.index
-            if idx in results:
-                results[idx].append(
-                    {
-                        "uuid": str(obj_uuid),
-                        "name": obj_name,
-                        "slug": obj_slug,
-                    }
-                )
+            if idx == "products" and request:
+                # filter().first() instead of get_object_or_404: a stale index
+                # entry must not 404 the whole search response.
+                prod = Product.objects.filter(uuid=obj_uuid).first()
+                first = prod.images.order_by("priority").first() if prod else None
+                if first and first.image:
+                    image_url = request.build_absolute_uri(first.image.url)
+            elif idx == "brands" and request:
+                brand = Brand.objects.filter(uuid=obj_uuid).first()
+                if brand and brand.small_logo:
+                    image_url = request.build_absolute_uri(brand.small_logo.url)
+            elif idx == "categories" and request:
+                cat = Category.objects.filter(uuid=obj_uuid).first()
+                if cat and cat.image:
+                    image_url = request.build_absolute_uri(cat.image.url)
+
+            results[idx].append(
+                {
+                    "uuid": str(obj_uuid),
+                    "name": obj_name,
+                    "slug": obj_slug,
+                    "image": image_url,
+                }
+            )
+
+        return results
     except NotFoundError:
         raise Http404
@@ -129,13 +158,29 @@ COMMON_ANALYSIS = {
     "filter": {
         "edge_ngram_filter": {"type": "edge_ngram", "min_gram": 1, "max_gram": 20},
         "ngram_filter": {"type": "ngram", "min_gram": 2, "max_gram": 20},
-        "double_metaphone": {"type": "phonetic", "encoder": "double_metaphone", "replace": False},
+        "double_metaphone": {
+            "type": "phonetic",
+            "encoder": "double_metaphone",
+            "replace": False,
+        },
     },
     "analyzer": {
-        "autocomplete": {"tokenizer": "standard", "filter": ["lowercase", "asciifolding", "edge_ngram_filter"]},
-        "autocomplete_search": {"tokenizer": "standard", "filter": ["lowercase", "asciifolding"]},
-        "name_ngram": {"tokenizer": "standard", "filter": ["lowercase", "asciifolding", "ngram_filter"]},
-        "name_phonetic": {"tokenizer": "standard", "filter": ["lowercase", "asciifolding", "double_metaphone"]},
+        "autocomplete": {
+            "tokenizer": "standard",
+            "filter": ["lowercase", "asciifolding", "edge_ngram_filter"],
+        },
+        "autocomplete_search": {
+            "tokenizer": "standard",
"filter": ["lowercase", "asciifolding"], + }, + "name_ngram": { + "tokenizer": "standard", + "filter": ["lowercase", "asciifolding", "ngram_filter"], + }, + "name_phonetic": { + "tokenizer": "standard", + "filter": ["lowercase", "asciifolding", "double_metaphone"], + }, "query_lc": {"tokenizer": "standard", "filter": ["lowercase", "asciifolding"]}, }, } @@ -158,7 +203,9 @@ def _add_multilang_fields(cls): copy_to="name", fields={ "raw": fields.KeywordField(ignore_above=256), - "ngram": fields.TextField(analyzer="name_ngram", search_analyzer="query_lc"), + "ngram": fields.TextField( + analyzer="name_ngram", search_analyzer="query_lc" + ), "phonetic": fields.TextField(analyzer="name_phonetic"), }, ), @@ -181,7 +228,9 @@ def _add_multilang_fields(cls): copy_to="description", fields={ "raw": fields.KeywordField(ignore_above=256), - "ngram": fields.TextField(analyzer="name_ngram", search_analyzer="query_lc"), + "ngram": fields.TextField( + analyzer="name_ngram", search_analyzer="query_lc" + ), "phonetic": fields.TextField(analyzer="name_phonetic"), }, ), diff --git a/core/elasticsearch/documents.py b/core/elasticsearch/documents.py index 165da75e..4a2c895e 100644 --- a/core/elasticsearch/documents.py +++ b/core/elasticsearch/documents.py @@ -11,9 +11,13 @@ class _BaseDoc(ActiveOnlyMixin, Document): analyzer="standard", fields={ "raw": fields.KeywordField(ignore_above=256), - "ngram": fields.TextField(analyzer="name_ngram", search_analyzer="query_lc"), + "ngram": fields.TextField( + analyzer="name_ngram", search_analyzer="query_lc" + ), "phonetic": fields.TextField(analyzer="name_phonetic"), - "auto": fields.TextField(analyzer="autocomplete", search_analyzer="autocomplete_search"), + "auto": fields.TextField( + analyzer="autocomplete", search_analyzer="autocomplete_search" + ), }, ) description = fields.TextField( @@ -21,11 +25,16 @@ class _BaseDoc(ActiveOnlyMixin, Document): analyzer="standard", fields={ "raw": fields.KeywordField(ignore_above=256), - "ngram": 
fields.TextField(analyzer="name_ngram", search_analyzer="query_lc"), + "ngram": fields.TextField( + analyzer="name_ngram", search_analyzer="query_lc" + ), "phonetic": fields.TextField(analyzer="name_phonetic"), - "auto": fields.TextField(analyzer="autocomplete", search_analyzer="autocomplete_search"), + "auto": fields.TextField( + analyzer="autocomplete", search_analyzer="autocomplete_search" + ), }, ) + slug = fields.KeywordField(attr="slug", index=False) class Index: settings = { @@ -76,7 +85,9 @@ class BrandDocument(ActiveOnlyMixin, Document): analyzer="standard", fields={ "raw": fields.KeywordField(ignore_above=256), - "ngram": fields.TextField(analyzer="name_ngram", search_analyzer="query_lc"), + "ngram": fields.TextField( + analyzer="name_ngram", search_analyzer="query_lc" + ), "phonetic": fields.TextField(analyzer="name_phonetic"), }, ) diff --git a/core/management/commands/populate_slugs.py b/core/management/commands/populate_slugs.py deleted file mode 100644 index 082c0b6a..00000000 --- a/core/management/commands/populate_slugs.py +++ /dev/null @@ -1,32 +0,0 @@ -import logging - -from django.core.management.base import BaseCommand -from django.db import transaction - -from core.models import Product - -logger = logging.getLogger(__name__) - - -class Command(BaseCommand): - help = "Populate slug field for all Product instances" - - def handle(self, *args, **options): - qs = Product.objects.filter(slug__isnull=True) - total = qs.count() - self.stdout.write(f"Starting slug population for {total} products") - - for idx, product in enumerate(qs.iterator(), start=1): - try: - product.slug = None - with transaction.atomic(): - product.save(update_fields=["slug"]) - - self.stdout.write( - self.style.SUCCESS(f"[{idx}/{total}] (Product ID: {product.pk}) slug set to '{product.slug}'") - ) - except Exception as e: - logger.exception(f"Product {product.pk}: slug population failed") - self.stderr.write(self.style.ERROR(f"[{idx}/{total}] (Product ID: {product.pk}) ERROR: 
{e}"))
-
-        self.stdout.write(self.style.SUCCESS("Slug population complete."))
diff --git a/core/management/commands/rebuild_slugs.py b/core/management/commands/rebuild_slugs.py
new file mode 100644
index 00000000..4aacc752
--- /dev/null
+++ b/core/management/commands/rebuild_slugs.py
@@ -0,0 +1,52 @@
+from django.core.management.base import BaseCommand
+from django.db import transaction
+from django.utils.crypto import get_random_string
+
+from core.models import Brand, Category, Product
+
+
+class Command(BaseCommand):
+    help = "Rebuild slug field for all slugified instances"
+
+    def reset_em(self, queryset):
+        total = queryset.count()
+        self.stdout.write(
+            f"Starting slug rebuilding for {total} {queryset.model._meta.verbose_name_plural}"
+        )
+        for idx, instance in enumerate(queryset.iterator(), start=1):
+            try:
+                with transaction.atomic():
+                    # De-duplicate names before re-slugging; a single atomic
+                    # save persists both the rename (if any) and the new slug.
+                    if (
+                        queryset.filter(name=instance.name)
+                        .exclude(pk=instance.pk)
+                        .exists()
+                    ):
+                        suffix = get_random_string(length=3, allowed_chars="0123456789")
+                        instance.name = f"{instance.name} - {suffix}"
+                    instance.slug = None
+                    instance.save()
+
+                self.stdout.write(
+                    self.style.SUCCESS(
+                        f"[{idx}/{total}] ({queryset.model._meta.verbose_name_plural} UUID:"
+                        f" {instance.pk}) slug set to '{instance.slug}'"
+                    )
+                )
+            except Exception as e:
+                self.stderr.write(
+                    self.style.ERROR(
+                        f"[{idx}/{total}] ({queryset.model._meta.verbose_name_plural}: {instance.pk}) ERROR: {e}"
+                    )
+                )
+
+    def handle(self, *args, **options):
+        for queryset in [
+            Brand.objects.all(),
+            Category.objects.all(),
+            Product.objects.all(),
+        ]:
+            self.reset_em(queryset)
+
+        self.stdout.write(self.style.SUCCESS("Slug rebuild complete."))
diff --git a/core/migrations/0026_brand_slug_alter_category_slug_alter_product_slug.py b/core/migrations/0026_brand_slug_alter_category_slug_alter_product_slug.py
new file mode 100644
index 00000000..3a0a57b3
--- /dev/null
+++ 
b/core/migrations/0026_brand_slug_alter_category_slug_alter_product_slug.py @@ -0,0 +1,50 @@ +# Generated by Django 5.2 on 2025-06-18 19:21 + +import django_extensions.db.fields +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0025_alter_product_category"), + ] + + operations = [ + migrations.AddField( + model_name="brand", + name="slug", + field=django_extensions.db.fields.AutoSlugField( + allow_unicode=True, + blank=True, + editable=False, + null=True, + populate_from=("name",), + unique=True, + ), + ), + migrations.AlterField( + model_name="category", + name="slug", + field=django_extensions.db.fields.AutoSlugField( + allow_unicode=True, + blank=True, + editable=False, + null=True, + populate_from=("name",), + unique=True, + ), + ), + migrations.AlterField( + model_name="product", + name="slug", + field=django_extensions.db.fields.AutoSlugField( + allow_unicode=True, + blank=True, + editable=False, + null=True, + populate_from=("category__slug", "brand__slug", "name", "uuid"), + unique=True, + ), + ), + ] diff --git a/core/models.py b/core/models.py index 78db2031..4fcc74f5 100644 --- a/core/models.py +++ b/core/models.py @@ -214,7 +214,7 @@ class Category(ExportModelOperationsMixin("category"), NiceModel, MPTTModel): ) slug: str = AutoSlugField( # type: ignore - populate_from=("uuid", "name"), + populate_from=("name",), allow_unicode=True, unique=True, editable=False, @@ -281,6 +281,13 @@ class Brand(ExportModelOperationsMixin("brand"), NiceModel): help_text=_("optional categories that this brand is associated with"), verbose_name=_("associated categories"), ) + slug: str = AutoSlugField( # type: ignore + populate_from=("name",), + allow_unicode=True, + unique=True, + editable=False, + null=True, + ) def __str__(self): return self.name @@ -341,7 +348,7 @@ class Product(ExportModelOperationsMixin("product"), NiceModel): verbose_name=_("part number"), ) slug: str | None = AutoSlugField( # type: ignore 
- populate_from=("uuid", "category__name", "name"), + populate_from=("category__slug", "brand__slug", "name", "uuid"), allow_unicode=True, unique=True, editable=False, diff --git a/core/views.py b/core/views.py index 0a102bf1..82ad4d9f 100644 --- a/core/views.py +++ b/core/views.py @@ -91,7 +91,12 @@ class SupportedLanguagesView(APIView): permission_classes = [ AllowAny, ] - renderer_classes = [CamelCaseJSONRenderer, MultiPartRenderer, XMLRenderer, YAMLRenderer] + renderer_classes = [ + CamelCaseJSONRenderer, + MultiPartRenderer, + XMLRenderer, + YAMLRenderer, + ] def get(self, request): return Response( @@ -116,10 +121,17 @@ class WebsiteParametersView(APIView): permission_classes = [ AllowAny, ] - renderer_classes = [CamelCaseJSONRenderer, MultiPartRenderer, XMLRenderer, YAMLRenderer] + renderer_classes = [ + CamelCaseJSONRenderer, + MultiPartRenderer, + XMLRenderer, + YAMLRenderer, + ] def get(self, request): - return Response(data=camelize(get_project_parameters()), status=status.HTTP_200_OK) + return Response( + data=camelize(get_project_parameters()), status=status.HTTP_200_OK + ) @extend_schema_view(**CACHE_SCHEMA) @@ -128,7 +140,12 @@ class CacheOperatorView(APIView): permission_classes = [ AllowAny, ] - renderer_classes = [CamelCaseJSONRenderer, MultiPartRenderer, XMLRenderer, YAMLRenderer] + renderer_classes = [ + CamelCaseJSONRenderer, + MultiPartRenderer, + XMLRenderer, + YAMLRenderer, + ] def post(self, request, *args, **kwargs): return Response( @@ -145,7 +162,12 @@ class CacheOperatorView(APIView): @extend_schema_view(**CONTACT_US_SCHEMA) class ContactUsView(APIView): serializer_class = ContactUsSerializer - renderer_classes = [CamelCaseJSONRenderer, MultiPartRenderer, XMLRenderer, YAMLRenderer] + renderer_classes = [ + CamelCaseJSONRenderer, + MultiPartRenderer, + XMLRenderer, + YAMLRenderer, + ] @ratelimit(key="ip", rate="2/h") def post(self, request, *args, **kwargs): @@ -161,7 +183,12 @@ class RequestCursedURLView(APIView): permission_classes = [ 
AllowAny, ] - renderer_classes = [CamelCaseJSONRenderer, MultiPartRenderer, XMLRenderer, YAMLRenderer] + renderer_classes = [ + CamelCaseJSONRenderer, + MultiPartRenderer, + XMLRenderer, + YAMLRenderer, + ] @ratelimit(key="ip", rate="10/h") def post(self, request, *args, **kwargs): @@ -174,7 +201,9 @@ class RequestCursedURLView(APIView): try: data = cache.get(url, None) if not data: - response = requests.get(url, headers={"content-type": "application/json"}) + response = requests.get( + url, headers={"content-type": "application/json"} + ) response.raise_for_status() data = camelize(response.json()) cache.set(url, data, 86400) @@ -196,10 +225,23 @@ class GlobalSearchView(APIView): It returns a response grouping matched items by index. """ - renderer_classes = [CamelCaseJSONRenderer, MultiPartRenderer, XMLRenderer, YAMLRenderer] + renderer_classes = [ + CamelCaseJSONRenderer, + MultiPartRenderer, + XMLRenderer, + YAMLRenderer, + ] def get(self, request, *args, **kwargs): - return Response(camelize({"results": process_query(request.GET.get("q", "").strip())})) + return Response( + camelize( + { + "results": process_query( + query=request.GET.get("q", "").strip(), request=request + ) + } + ) + ) @extend_schema_view(**BUY_AS_BUSINESS_SCHEMA) @@ -209,19 +251,29 @@ class BuyAsBusinessView(APIView): serializer = BuyAsBusinessOrderSerializer(data=request.data) serializer.is_valid(raise_exception=True) order = Order.objects.create(status="MOMENTAL") - products = [product.get("product_uuid") for product in serializer.validated_data.get("products")] + products = [ + product.get("product_uuid") + for product in serializer.validated_data.get("products") + ] transaction = order.buy_without_registration( products=products, promocode_uuid=serializer.validated_data.get("promocode_uuid"), customer_name=serializer.validated_data.get("customer_name"), customer_email=serializer.validated_data.get("customer_email"), customer_phone=serializer.validated_data.get("customer_phone"), - 
customer_billing_address=serializer.validated_data.get("customer_billing_address_uuid"), - customer_shipping_address=serializer.validated_data.get("customer_shipping_address_uuid"), + customer_billing_address=serializer.validated_data.get( + "customer_billing_address_uuid" + ), + customer_shipping_address=serializer.validated_data.get( + "customer_shipping_address_uuid" + ), payment_method=serializer.validated_data.get("payment_method"), is_business=True, ) - return Response(status=status.HTTP_202_ACCEPTED, data=TransactionProcessSerializer(transaction).data) + return Response( + status=status.HTTP_202_ACCEPTED, + data=TransactionProcessSerializer(transaction).data, + ) def download_digital_asset_view(request, *args, **kwargs): @@ -235,7 +287,9 @@ def download_digital_asset_view(request, *args, **kwargs): download.num_downloads += 1 download.save() - file_path = download.order_product.product.stocks.first().digital_asset.file.path + file_path = ( + download.order_product.product.stocks.first().digital_asset.file.path + ) content_type, encoding = mimetypes.guess_type(file_path) if not content_type: @@ -255,7 +309,10 @@ def download_digital_asset_view(request, *args, **kwargs): except Exception as e: capture_exception(e) - return JsonResponse({"error": "An error occurred while trying to download the digital asset"}, status=500) + return JsonResponse( + {"error": "An error occurred while trying to download the digital asset"}, + status=500, + ) def favicon_view(request, *args, **kwargs):