# youtube/tasks.py

import logging

from celery import shared_task
from django.utils import timezone
from django.utils.dateparse import parse_datetime

from apps.social.models import SocialAccount, SocialContent, SocialComment
from apps.social.services.youtube import YouTubeService, YouTubeAPIError

logger = logging.getLogger(__name__)

def parse_youtube_timestamp(ts):
    """Parse an ISO 8601 timestamp from the YouTube API, falling back to now()."""
    if not ts:
        return timezone.now()
    try:
        dt = parse_datetime(ts)
        return dt if dt else timezone.now()
    except (ValueError, TypeError):
        return timezone.now()

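
# Example (illustrative value, not from the API): the YouTube Data API returns
# RFC 3339 UTC timestamps, so parse_youtube_timestamp("2024-05-01T12:34:56Z")
# yields a timezone-aware datetime; missing or malformed input falls back to
# timezone.now().
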
@shared_task
def poll_new_comments_task():
    """
    FAST POLLING (Delta Sync).

    Scans every stored video on each active YouTube account, inserts any
    comments not yet in the database, and advances the video's
    last_comment_sync_at marker.
    """
    accounts = SocialAccount.objects.filter(platform_type='YT', is_active=True)

    for account in accounts:
        try:
            logger.info(f"Polling comments for {account.name}")

            # Iterate through videos to check for new comments
            videos = SocialContent.objects.filter(platform_type='YT', account=account)

            if not videos.exists():
                logger.info(f"No videos found in DB for {account.name}. Skipping poll.")
                continue

            logger.info(f"Scanning {videos.count()} videos for new comments...")

            for video in videos:
                try:
                    # Fetch comment threads for this video
                    comments = YouTubeService.fetch_video_comments(account, video.content_id)

                    if not comments:
                        continue

                    logger.info(f"Checking video {video.content_id} for new comments...")

                    new_comments_count = 0
                    latest_comment_time = video.last_comment_sync_at

                    for item in comments:
                        top = item['snippet']['topLevelComment']
                        c_id = top['id']

                        # Skip comments that already exist in the database
                        if SocialComment.objects.filter(platform_type='YT', comment_id=c_id).exists():
                            logger.debug(f"Skipping existing comment {c_id}")
                            continue

                        # Parse the publication time
                        pub_str = top['snippet']['publishedAt']
                        c_time = parse_youtube_timestamp(pub_str)

                        # Save the new comment
                        SocialComment.objects.create(
                            platform_type='YT',
                            comment_id=c_id,
                            account=account,
                            content=video,
                            author_name=top['snippet'].get('authorDisplayName', 'User'),
                            author_id=top['snippet'].get('authorChannelId', {}).get('value'),
                            text=top['snippet'].get('textDisplay', ''),
                            created_at=c_time,
                            comment_data=item,
                        )

                        new_comments_count += 1

                        # Track the latest comment time; last_comment_sync_at
                        # is None for a video that has never been synced.
                        if latest_comment_time is None or c_time > latest_comment_time:
                            latest_comment_time = c_time

                    # Update the video's last sync timestamp if we found new comments
                    if new_comments_count > 0:
                        logger.info(f"Found {new_comments_count} new comments for video {video.content_id}")
                        video.last_comment_sync_at = latest_comment_time
                        video.save(update_fields=['last_comment_sync_at'])
                    else:
                        logger.debug(f"No new comments for video {video.content_id}")

                except YouTubeAPIError as e:
                    logger.error(f"YouTube API error for video {video.content_id}: {e}")
                except Exception as e:
                    logger.exception(f"Error syncing video {video.content_id}: {e}")

            account.last_synced_at = timezone.now()
            account.save(update_fields=['last_synced_at'])

        except Exception as e:
            logger.exception(f"Polling Error for account {account.name}: {e}")

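
# A minimal scheduling sketch, assuming the poll runs on a fixed cadence via
# Celery beat. The dotted task path and the 5-minute interval are assumptions,
# not part of this module; adjust both to match your Celery configuration.
#
#   # settings.py
#   from celery.schedules import crontab
#
#   CELERY_BEAT_SCHEDULE = {
#       'poll-youtube-comments': {
#           'task': 'youtube.tasks.poll_new_comments_task',  # assumed dotted path
#           'schedule': crontab(minute='*/5'),               # assumed cadence
#       },
#   }
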
@shared_task
def deep_historical_backfill_task(account_id):
    """
    DEEP SYNC.

    Fetches ALL videos and ALL comments for one account.
    Used for the "First Run" after an account is connected.
    """
    try:
        account = SocialAccount.objects.get(id=account_id, platform_type='YT')
    except SocialAccount.DoesNotExist:
        logger.error(f"Backfill Error: no YouTube account with id {account_id}")
        return

    try:
        logger.info(f"Starting Deep Backfill for {account.name}")

        # 1. Get all videos for the account
        videos_data = YouTubeService.fetch_user_videos(account)

        for vid_data in videos_data:
            vid_id = vid_data['id']
            snippet = vid_data.get('snippet', {})
            video, created = SocialContent.objects.get_or_create(
                platform_type='YT',
                content_id=vid_id,
                defaults={
                    'account': account,
                    'title': snippet.get('title', ''),
                    'text': snippet.get('description', ''),
                    'created_at': parse_youtube_timestamp(snippet.get('publishedAt')),
                    'content_data': vid_data,
                },
            )

            # 2. Get all comments for this video
            comments = YouTubeService.fetch_video_comments(account, vid_id)
            latest_comment_time = video.last_comment_sync_at

            for c_data in comments:
                top = c_data['snippet']['topLevelComment']
                c_time = parse_youtube_timestamp(top['snippet'].get('publishedAt'))

                SocialComment.objects.update_or_create(
                    platform_type='YT',
                    comment_id=c_data['id'],
                    defaults={
                        'account': account,
                        'content': video,
                        'author_name': top['snippet'].get('authorDisplayName', 'User'),
                        'author_id': top['snippet'].get('authorChannelId', {}).get('value'),
                        'text': top['snippet'].get('textDisplay', ''),
                        'created_at': c_time,
                        'comment_data': c_data,
                    },
                )

                # Track the newest comment time; last_comment_sync_at is None
                # for a video that has never been synced.
                if latest_comment_time is None or c_time > latest_comment_time:
                    latest_comment_time = c_time

            # Save the sync marker once per video instead of once per comment
            if latest_comment_time != video.last_comment_sync_at:
                video.last_comment_sync_at = latest_comment_time
                video.save(update_fields=['last_comment_sync_at'])

    except YouTubeAPIError as e:
        logger.error(f"Backfill API Error for {account.name}: {e}")
    except Exception as e:
        logger.exception(f"Backfill Error: {e}")
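
# A minimal usage sketch, assuming the deep sync should run once when a new
# YouTube account is connected. The calling site is hypothetical; hook it into
# whatever code path creates the SocialAccount in your project.
#
#   account = SocialAccount.objects.get(id=some_id, platform_type='YT')
#   deep_historical_backfill_task.delay(account.id)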