"""
|
|
AI Engine UI views - Server-rendered templates
|
|
"""
|
|
from django.contrib import messages
|
|
from django.contrib.auth.decorators import login_required
|
|
from django.core.paginator import Paginator
|
|
from django.db.models import Count, Q
|
|
from django.shortcuts import get_object_or_404, redirect, render
|
|
from django.utils import timezone
|
|
from django.views.decorators.http import require_http_methods
|
|
|
|
from .forms import AnalyzeTextForm, SentimentFilterForm
|
|
from .models import SentimentResult
|
|
from .services import AIEngineService
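
# Service-layer interface assumed by the views in this module (a summary of
# the calls made below, not a definition; the actual implementation lives in
# .services):
#   AIEngineService.get_sentiment_stats(queryset)       -> stats passed to templates
#   AIEngineService.sentiment.analyze_text(text, ...)   -> ad-hoc analysis result
#   AIEngineService.sentiment.analyze_and_save(text, content_object, language)
#                                                        -> new SentimentResult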


@login_required
def sentiment_list(request):
    """
    Sentiment results list view with filters and pagination.

    Features:
    - Server-side pagination
    - Advanced filters (sentiment, language, confidence, etc.)
    - Search by text
    - Statistics dashboard
    """
    # Base queryset
    queryset = SentimentResult.objects.select_related('content_type').all()

    # Apply filters from request
    sentiment_filter = request.GET.get('sentiment')
    if sentiment_filter:
        queryset = queryset.filter(sentiment=sentiment_filter)

    language_filter = request.GET.get('language')
    if language_filter:
        queryset = queryset.filter(language=language_filter)

    ai_service_filter = request.GET.get('ai_service')
    if ai_service_filter:
        queryset = queryset.filter(ai_service=ai_service_filter)

    min_confidence = request.GET.get('min_confidence')
    if min_confidence:
        try:
            queryset = queryset.filter(confidence__gte=float(min_confidence))
        except ValueError:
            pass

    # Search
    search_query = request.GET.get('search')
    if search_query:
        queryset = queryset.filter(text__icontains=search_query)

    # Date range filters
    date_from = request.GET.get('date_from')
    if date_from:
        queryset = queryset.filter(created_at__gte=date_from)

    date_to = request.GET.get('date_to')
    if date_to:
        queryset = queryset.filter(created_at__lte=date_to)

    # Ordering: restrict to fields this module already exposes so an arbitrary
    # user-supplied value cannot raise a FieldError
    allowed_order_fields = {
        'created_at', '-created_at',
        'confidence', '-confidence',
        'sentiment', '-sentiment',
        'language', '-language',
        'ai_service', '-ai_service',
    }
    order_by = request.GET.get('order_by', '-created_at')
    if order_by not in allowed_order_fields:
        order_by = '-created_at'
    queryset = queryset.order_by(order_by)

    # Pagination (fall back to the default size on non-numeric input)
    try:
        page_size = int(request.GET.get('page_size', 25))
    except (TypeError, ValueError):
        page_size = 25
    paginator = Paginator(queryset, page_size)
    page_number = request.GET.get('page', 1)
    page_obj = paginator.get_page(page_number)

    # Statistics
    stats = AIEngineService.get_sentiment_stats(queryset)

    # Filter form
    filter_form = SentimentFilterForm(request.GET)

    context = {
        'page_obj': page_obj,
        'results': page_obj.object_list,
        'stats': stats,
        'filter_form': filter_form,
        'filters': request.GET,
    }

    return render(request, 'ai_engine/sentiment_list.html', context)
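
# Illustrative request for this view (the path is hypothetical; the query
# parameters are exactly the GET keys read above):
#   /sentiments/?sentiment=positive&language=ar&min_confidence=0.8
#       &order_by=-confidence&search=delivery&page=2&page_size=50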


@login_required
def sentiment_detail(request, pk):
    """
    Sentiment result detail view.

    Features:
    - Full sentiment analysis details
    - Keywords, entities, emotions
    - Link to related object
    """
    result = get_object_or_404(
        SentimentResult.objects.select_related('content_type'),
        pk=pk
    )

    # Get related object if it exists
    related_object = None
    try:
        related_object = result.content_object
    except Exception:
        pass

    context = {
        'result': result,
        'related_object': related_object,
    }

    return render(request, 'ai_engine/sentiment_detail.html', context)


@login_required
@require_http_methods(["GET", "POST"])
def analyze_text_view(request):
    """
    Manual text analysis view.

    Allows users to manually analyze text for sentiment.
    """
    result = None

    if request.method == 'POST':
        form = AnalyzeTextForm(request.POST)
        if form.is_valid():
            try:
                # Perform analysis
                analysis = AIEngineService.sentiment.analyze_text(
                    text=form.cleaned_data['text'],
                    language=form.cleaned_data.get('language') or None,
                    extract_keywords=form.cleaned_data.get('extract_keywords', True),
                    extract_entities=form.cleaned_data.get('extract_entities', True),
                    detect_emotions=form.cleaned_data.get('detect_emotions', True),
                )

                result = analysis
                messages.success(request, "Text analyzed successfully!")

            except Exception as e:
                messages.error(request, f"Error analyzing text: {str(e)}")
        else:
            messages.error(request, "Please correct the errors below.")
    else:
        form = AnalyzeTextForm()

    context = {
        'form': form,
        'result': result,
    }

    return render(request, 'ai_engine/analyze_text.html', context)
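
# analyze_text_view reads these cleaned_data keys from AnalyzeTextForm:
# 'text', 'language' (optional), 'extract_keywords', 'extract_entities',
# 'detect_emotions'. A minimal compatible form sketch, assuming simple field
# types (the real form is defined in .forms):
#
#     class AnalyzeTextForm(forms.Form):
#         text = forms.CharField(widget=forms.Textarea)
#         language = forms.CharField(required=False)
#         extract_keywords = forms.BooleanField(required=False, initial=True)
#         extract_entities = forms.BooleanField(required=False, initial=True)
#         detect_emotions = forms.BooleanField(required=False, initial=True)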


@login_required
def sentiment_dashboard(request):
    """
    Sentiment analytics dashboard.

    Features:
    - Overall sentiment statistics
    - Sentiment trends over time
    - Top keywords
    - Language distribution
    - Service performance
    """
    # Get date range from request (default: last 30 days)
    date_from = request.GET.get('date_from')
    date_to = request.GET.get('date_to')

    if not date_from:
        date_from = timezone.now() - timedelta(days=30)
    if not date_to:
        date_to = timezone.now()

    # Base queryset
    queryset = SentimentResult.objects.filter(
        created_at__gte=date_from,
        created_at__lte=date_to
    )

    # Overall statistics
    overall_stats = AIEngineService.get_sentiment_stats(queryset)

    # Language distribution
    language_stats = queryset.values('language').annotate(
        count=Count('id')
    ).order_by('-count')

    # Sentiment by language
    sentiment_by_language = {}
    for lang in ['en', 'ar']:
        lang_queryset = queryset.filter(language=lang)
        sentiment_by_language[lang] = AIEngineService.get_sentiment_stats(lang_queryset)

    # AI service distribution
    service_stats = queryset.values('ai_service').annotate(
        count=Count('id')
    ).order_by('-count')

    # Recent results
    recent_results = queryset.select_related('content_type').order_by('-created_at')[:10]

    # Top keywords (aggregate from all results; guard against missing keyword lists)
    all_keywords = []
    for result in queryset:
        all_keywords.extend(result.keywords or [])

    # Count keyword frequency
    keyword_counts = Counter(all_keywords)
    top_keywords = keyword_counts.most_common(20)

    # Sentiment trend (by day)
    sentiment_trend = queryset.annotate(
        date=TruncDate('created_at')
    ).values('date', 'sentiment').annotate(
        count=Count('id')
    ).order_by('date')

    # Organize trend data as {date: {sentiment: count}}
    trend_data = {}
    for item in sentiment_trend:
        date_str = item['date'].strftime('%Y-%m-%d')
        if date_str not in trend_data:
            trend_data[date_str] = {'positive': 0, 'neutral': 0, 'negative': 0}
        trend_data[date_str][item['sentiment']] = item['count']

    context = {
        'overall_stats': overall_stats,
        'language_stats': language_stats,
        'sentiment_by_language': sentiment_by_language,
        'service_stats': service_stats,
        'recent_results': recent_results,
        'top_keywords': top_keywords,
        'trend_data': trend_data,
        'date_from': date_from,
        'date_to': date_to,
    }

    return render(request, 'ai_engine/sentiment_dashboard.html', context)
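
# Shape of trend_data handed to the template (illustrative values):
#   {'2024-01-15': {'positive': 12, 'neutral': 4, 'negative': 3}, ...}
# Dates appear only if at least one result exists that day; sentiments with no
# results on a listed day keep their 0 default.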


@login_required
@require_http_methods(["POST"])
def reanalyze_sentiment(request, pk):
    """
    Re-analyze sentiment for a specific result.

    This can be useful when the AI model is updated.
    """
    result = get_object_or_404(SentimentResult, pk=pk)

    try:
        # Get the related object
        related_object = result.content_object

        if related_object:
            # Re-analyze
            new_result = AIEngineService.sentiment.analyze_and_save(
                text=result.text,
                content_object=related_object,
                language=result.language
            )

            messages.success(request, "Sentiment re-analyzed successfully!")
            return redirect('ai_engine:sentiment_detail', pk=new_result.id)
        else:
            messages.error(request, "Related object not found.")

    except Exception as e:
        messages.error(request, f"Error re-analyzing sentiment: {str(e)}")

    return redirect('ai_engine:sentiment_detail', pk=pk)
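
# A minimal urls.py sketch compatible with these views. Only the
# 'sentiment_detail' name is actually reversed in this module; the other
# route names and all paths are illustrative assumptions:
#
#     app_name = 'ai_engine'
#     urlpatterns = [
#         path('sentiments/', views.sentiment_list, name='sentiment_list'),
#         path('sentiments/<int:pk>/', views.sentiment_detail, name='sentiment_detail'),
#         path('sentiments/<int:pk>/reanalyze/', views.reanalyze_sentiment, name='reanalyze_sentiment'),
#         path('analyze/', views.analyze_text_view, name='analyze_text'),
#         path('dashboard/', views.sentiment_dashboard, name='sentiment_dashboard'),
#     ]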