diff --git a/README.md b/README.md
index b3ccd15a4f674ee1cee585c21f7bd8b18edf8979..b3af6a1048c84a20e06325171fe6df34ea1b6fd4 100644
--- a/README.md
+++ b/README.md
@@ -59,4 +59,11 @@ Compiled application files will be located in `frontend/dist/`.
 
 
 * Login: Superleksykograf
 * Hasło: valier111
+
+
+## Manual database migration
+
+    export LOADING_THE_SLOWOSIEC_ONTOLOGY_HIERARCHY_DISABLED=true
+    python manage.py makemigrations <module-name>   # e.g. python manage.py makemigrations users
+    python manage.py migrate <module-name>          # e.g. python manage.py migrate users
diff --git a/entries/views.py b/entries/views.py
index c74883da870a19558801bc9580b4f3a74f8951f0..659a18d63b5cd409e758ee90bc8ebd33f228be09 100644
--- a/entries/views.py
+++ b/entries/views.py
@@ -423,7 +423,7 @@ def get_entries(request):
         assert(not errors_dict)
 
     linked_ids = set()
-    if request.session['show_linked_entries']:
+    if request.session['show_linked_entries'] and not with_lexical_units:
         entries_linked = Entry.objects.filter(pk__in=(
             Entry.objects
             .filter(subentries__schema_hooks__argument_connections__schema_connections__subentry__entry__in=entries)
@@ -454,12 +454,7 @@ def get_entries(request):
         )
 
     if with_lexical_units:
-        if exclude_status is not None:
-            entries = entries.filter(lexical_units__frames__status__iexact=exclude_status)
-            entries = entries.filter(lexical_units__frames__isnull=False)
-        if has_unified_frame == 'true':
-            entries = entries.filter(lexical_units__frames__slowal_frame_2_unified_frame__isnull=False)
-        frameQueryset = Frame.objects.select_related("slowal_frame_2_unified_frame").prefetch_related(Prefetch("assignments", to_attr="_assignments"));
+        frameQueryset = Frame.objects.prefetch_related(Prefetch("assignments", to_attr="_assignments"))
         entries = entries.prefetch_related(
             Prefetch(
                 "lexical_units",
@@ -472,6 +467,11 @@ def get_entries(request):
                 )
             )
         )
+        if exclude_status is not None:
+            entries = entries.filter(lexical_units__frames__status__iexact=exclude_status)
+            entries = entries.filter(lexical_units__frames__isnull=False)
+        if has_unified_frame == 'true':
+            entries = entries.filter(lexical_units__frames__slowal_frame_2_unified_frame__isnull=False)
 
     filtered = entries.count()
 
@@ -481,7 +481,7 @@ def get_entries(request):
     def iter_lexical_units(e):
         for lu in e.lexical_units.all():
             lu._frame = lu._frames[0] if lu._frames and len(lu._frames) > 0 else None
-            if lu._frame is None:
+            if lu._frame is None or not hasattr(lu._frame, 'slowal_frame_2_unified_frame'):
                 continue
             else:
                 yield lu
@@ -517,7 +517,6 @@ def get_entries(request):
                 }
                 for e in entries[first_index:last_index]
             ],
         }
-        return JsonResponse(result)
 
     return JsonResponse({})