"""
Celery tasks for AI-powered comment analysis.
Coordinates between SocialComment model and OpenRouter service.
"""
import logging
from datetime import timedelta
from typing import Optional

from celery import shared_task
from django.conf import settings
from django.db.models import Q
from django.utils import timezone

from apps.social.models import SocialComment
from apps.social.services.ai_service import OpenRouterService

logger = logging.getLogger(__name__)


@shared_task(bind=True, max_retries=3)
def analyze_pending_comments_task(
self,
limit: Optional[int] = None,
platform_type: Optional[str] = None,
hours_ago: Optional[int] = None
):
"""
    Analyze comments that have no AI analysis yet, one comment at a time.
Args:
limit: Maximum number of comments to analyze
platform_type: Filter by platform type (LI, GO, FB, IG, TT, X, YT)
hours_ago: Only analyze comments added in the last N hours
"""
service = OpenRouterService()
if not service.is_configured():
logger.error("OpenRouter service not configured")
return {
'success': False,
'error': 'OpenRouter service not configured',
'analyzed': 0,
'failed': 0
}
# Build queryset for unanalyzed comments
queryset = SocialComment.objects.filter(
Q(ai_analysis__isnull=True) | Q(ai_analysis={})
).order_by('-created_at')
if platform_type:
queryset = queryset.filter(platform_type=platform_type)
if hours_ago:
cutoff_time = timezone.now() - timedelta(hours=hours_ago)
queryset = queryset.filter(added_at__gte=cutoff_time)
if limit:
queryset = queryset[:limit]
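    # Materialize the queryset so len() and iteration hit the database once.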
comments = list(queryset)
if not comments:
logger.info("No pending comments to analyze")
return {
'success': True,
'analyzed': 0,
'failed': 0,
'message': 'No pending comments to analyze'
}
logger.info(f"Found {len(comments)} pending comments to analyze")
analyzed_count = 0
failed_count = 0
# Analyze each comment individually
    for index, comment in enumerate(comments, start=1):
        logger.info(f"Analyzing comment {comment.id} ({index}/{len(comments)})")
# Trigger analysis for this comment
result = service.analyze_comment(str(comment.id), comment.text)
if result.get('success'):
analysis = result.get('analysis', {})
# Build bilingual analysis structure
ai_analysis = {
'sentiment': analysis.get('sentiment', {}),
'summaries': analysis.get('summaries', {}),
'keywords': analysis.get('keywords', {}),
'topics': analysis.get('topics', {}),
'entities': analysis.get('entities', []),
'emotions': analysis.get('emotions', {}),
'metadata': {
**result.get('metadata', {}),
'analyzed_at': timezone.now().isoformat()
}
}
# Update comment with bilingual analysis
comment.ai_analysis = ai_analysis
comment.save()
analyzed_count += 1
logger.debug(f"Updated comment {comment.id} with bilingual analysis")
else:
error = result.get('error', 'Unknown error')
logger.error(f"Analysis failed for comment {comment.id}: {error}")
failed_count += 1
logger.info(
f"Analysis complete: {analyzed_count} analyzed, "
f"{failed_count} failed"
)
return {
'success': True,
'analyzed': analyzed_count,
'failed': failed_count,
'total': len(comments)
}
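

# Example (illustrative only; nothing below is executed by this module):
# enqueueing the batch task for LinkedIn ('LI') comments added in the
# last 24 hours:
#
#     analyze_pending_comments_task.delay(limit=50, platform_type='LI', hours_ago=24)
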
@shared_task
def analyze_comment_task(comment_id: int):
"""
Analyze a single comment.
Args:
comment_id: ID of the comment to analyze
"""
service = OpenRouterService()
if not service.is_configured():
logger.error("OpenRouter service not configured")
return {
'success': False,
'error': 'OpenRouter service not configured'
}
try:
comment = SocialComment.objects.get(id=comment_id)
except SocialComment.DoesNotExist:
logger.error(f"Comment {comment_id} not found")
return {
'success': False,
'error': f'Comment {comment_id} not found'
}
logger.info(f"Analyzing comment {comment_id}")
# Analyze single comment
result = service.analyze_comment(str(comment.id), comment.text)
if result.get('success'):
analysis = result.get('analysis', {})
# Build bilingual analysis structure
ai_analysis = {
'sentiment': analysis.get('sentiment', {}),
'summaries': analysis.get('summaries', {}),
'keywords': analysis.get('keywords', {}),
'topics': analysis.get('topics', {}),
'entities': analysis.get('entities', []),
'emotions': analysis.get('emotions', {}),
'metadata': {
**result.get('metadata', {}),
'analyzed_at': timezone.now().isoformat()
}
}
# Update comment with bilingual analysis
comment.ai_analysis = ai_analysis
comment.save()
sentiment_en = ai_analysis.get('sentiment', {}).get('classification', {}).get('en')
        confidence_val = ai_analysis.get('sentiment', {}).get('confidence') or 0
logger.info(f"Comment {comment_id} analyzed successfully: {sentiment_en}")
return {
'success': True,
'comment_id': comment_id,
'sentiment': sentiment_en,
'confidence': float(confidence_val)
}
else:
error = result.get('error', 'Unknown error')
logger.error(f"Analysis failed for comment {comment_id}: {error}")
return {
'success': False,
'error': error
}
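

# Example (a sketch; the signal wiring below is an assumption about the
# surrounding project and is not set up by this module):
#
#     from django.db.models.signals import post_save
#     from django.dispatch import receiver
#
#     @receiver(post_save, sender=SocialComment)
#     def queue_comment_analysis(sender, instance, created, **kwargs):
#         if created:
#             analyze_comment_task.delay(instance.id)
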
@shared_task
def reanalyze_comment_task(comment_id: int):
"""
    Re-analyze a specific comment, overwriting any existing analysis.
Args:
comment_id: ID of the comment to re-analyze
"""
logger.info(f"Re-analyzing comment {comment_id}")
return analyze_comment_task(comment_id)


@shared_task
def daily_unanalyzed_comments_task():
"""
    Daily task that analyzes comments still missing an AI analysis, one at a time.
    This is a backup mechanism to catch comments missed by the regular tasks.
    Runs once per day and processes up to DAILY_ANALYSIS_LIMIT comments (default
    100) whose ai_analysis is empty.
"""
logger.info("=" * 80)
logger.info("STARTING DAILY UNANALYZED COMMENTS CHECK")
logger.info("=" * 80)
service = OpenRouterService()
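    # Cap how many comments a single daily run will analyze; defaults to 100
    # and can be overridden via settings.DAILY_ANALYSIS_LIMIT.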
max_analyze = getattr(settings, 'DAILY_ANALYSIS_LIMIT', 100)
if not service.is_configured():
logger.error("OpenRouter service not configured, skipping daily analysis")
return {
'success': False,
'error': 'OpenRouter service not configured'
}
    # Get unanalyzed comments; Q is already imported at module level
queryset = SocialComment.objects.filter(
Q(ai_analysis__isnull=True) | Q(ai_analysis={})
).order_by('-created_at')[:max_analyze]
comments = list(queryset)
if not comments:
logger.info("No unanalyzed comments found")
return {
'success': True,
'analyzed': 0,
'failed': 0,
'message': 'No unanalyzed comments found'
}
logger.info(f"Found {len(comments)} unanalyzed comments")
analyzed_count = 0
failed_count = 0
# Analyze each comment individually
    for index, comment in enumerate(comments, start=1):
        logger.info(f"Daily analysis: Analyzing comment {comment.id} ({index}/{len(comments)})")
# Trigger analysis for this comment
result = service.analyze_comment(str(comment.id), comment.text)
if result.get('success'):
analysis = result.get('analysis', {})
# Build bilingual analysis structure
ai_analysis = {
'sentiment': analysis.get('sentiment', {}),
'summaries': analysis.get('summaries', {}),
'keywords': analysis.get('keywords', {}),
'topics': analysis.get('topics', {}),
'entities': analysis.get('entities', []),
'emotions': analysis.get('emotions', {}),
'metadata': {
**result.get('metadata', {}),
'analyzed_at': timezone.now().isoformat(),
'analysis_type': 'daily_check'
}
}
# Update comment with bilingual analysis
comment.ai_analysis = ai_analysis
comment.save()
analyzed_count += 1
logger.debug(f"Updated comment {comment.id} with daily analysis")
else:
error = result.get('error', 'Unknown error')
logger.error(f"Daily analysis failed for comment {comment.id}: {error}")
failed_count += 1
logger.info(
f"Daily analysis complete: {analyzed_count} analyzed, "
f"{failed_count} failed"
)
return {
'success': True,
'analyzed': analyzed_count,
'failed': failed_count,
'total': len(comments)
}
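

# A minimal sketch of scheduling the periodic tasks with Celery beat. The
# module path 'apps.social.tasks' and the schedule values are assumptions;
# adjust them to the project's actual layout:
#
#     from celery.schedules import crontab
#
#     CELERY_BEAT_SCHEDULE = {
#         'analyze-pending-comments-hourly': {
#             'task': 'apps.social.tasks.analyze_pending_comments_task',
#             'schedule': crontab(minute=0),
#             'kwargs': {'limit': 50, 'hours_ago': 1},
#         },
#         'daily-unanalyzed-comments': {
#             'task': 'apps.social.tasks.daily_unanalyzed_comments_task',
#             'schedule': crontab(hour=3, minute=0),
#         },
#     }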