# (file metadata: 302 lines, 12 KiB, Python)
# social/tasks/meta.py
|
|
from celery import shared_task
|
|
from django.utils import timezone
|
|
from django.utils.dateparse import parse_datetime
|
|
from apps.social.models import SocialAccount, SocialContent, SocialComment
|
|
from apps.social.services.meta import MetaService, MetaAPIError
|
|
import logging
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
def parse_meta_timestamp(ts):
    """Parse an ISO-8601 timestamp string returned by the Meta Graph API.

    Always returns an aware datetime: falls back to ``timezone.now()`` when
    *ts* is missing, not a string, or cannot be parsed, so callers never have
    to null-check the result before storing it on a model.
    """
    if not ts:
        return timezone.now()
    try:
        dt = parse_datetime(ts)
        # parse_datetime returns None for strings that are not ISO-8601 shaped.
        return dt if dt else timezone.now()
    except (ValueError, TypeError):
        # ValueError: ISO-8601 shaped but invalid (e.g. month 13).
        # TypeError: ts is not a string-like object.
        return timezone.now()
|
|
|
|
|
|
# ============================================================================
|
|
# TASK 1: HISTORICAL BACKFILL (Deep Sync)
|
|
# ============================================================================
|
|
# Use this when connecting a NEW account to get ALL past history.
|
|
# ============================================================================
|
|
|
|
@shared_task(bind=True)
def meta_historical_backfill_task(self, account_id):
    """
    Deep Sync: Fetches ALL Posts and ALL Comments.
    Used for "First Run" or "Full Resync".

    For each discovered page/IG entity: fetch every post (no timestamp
    filter), upsert it as SocialContent, then fetch every comment for the
    post and create any SocialComment not already in the database. The
    per-post ``last_comment_sync_at`` bookmark is advanced to the newest
    comment seen, so the delta-sync task (Task 2) can resume from there.

    Retries the whole task (60s countdown) on any unexpected error.
    """
    try:
        account = SocialAccount.objects.get(id=account_id)
        if account.platform_type not in ['META', 'FB', 'IG']:
            return

        if account.is_token_expired():
            # Dead token: deactivate so Task 2's polling skips this account.
            account.is_active = False
            account.save()
            return

        logger.info(f"Starting DEEP HISTORICAL BACKFILL for {account.name}...")

        # 1. DISCOVERY: pages and linked IG business accounts.
        entities = MetaService.discover_pages_and_ig(account.access_token)

        # 2. FALLBACK: no business entities -> sync the user profile itself.
        if not entities:
            logger.info("No linked business accounts found. Backfilling User Profile.")
            entities = [{
                'platform': 'FB',
                'native_id': 'me',
                'name': account.name,
                'access_token': account.access_token,
                'is_permanent': False
            }]

        # 3. SYNC ENTITIES (Fetch ALL Posts, then ALL Comments)
        total_posts_synced = 0
        total_comments_synced = 0

        for entity in entities:
            platform = entity['platform']
            entity_id = entity['native_id']
            entity_token = entity['access_token']

            logger.info(f"Backfilling entity: {entity['name']} ({platform})")

            # Fetch ALL posts (no timestamp filter — this is the deep sync).
            posts_data = MetaService.fetch_posts(entity_id, entity_token, platform)
            logger.info(f" -> Found {len(posts_data)} posts")

            for post in posts_data:
                post_id = post['id']

                # FB uses 'created_time', IG uses 'timestamp'
                post_timestamp = post.get('timestamp') if platform == 'IG' else post.get('created_time')

                # Upsert content; safe to re-run (keyed on platform + post id).
                content_obj, created = SocialContent.objects.update_or_create(
                    platform_type='META',
                    content_id=post_id,
                    defaults={
                        'account': account,
                        'source_platform': platform,
                        'text': post.get('message') or post.get('caption', ''),
                        'created_at': parse_meta_timestamp(post_timestamp),
                        'content_data': post
                    }
                )
                if created:
                    total_posts_synced += 1

                # Stash the entity (page) token so reply actions can use it
                # later without re-running discovery.
                if entity_id != 'me':
                    if 'access_token' not in content_obj.content_data:
                        content_obj.content_data['access_token'] = entity_token
                        content_obj.save()

                # Fetch ALL comments for this post (since_timestamp=None
                # ensures we get full history).
                comments = MetaService.fetch_comments_for_post(post_id, entity_token, since_timestamp=None)

                # Track the newest comment time so we write the bookmark
                # once per post instead of saving on every comment
                # (mirrors the delta-sync task's bookmark handling).
                latest_comment_time = content_obj.last_comment_sync_at

                for c in comments:
                    c_id = c['id']

                    # CRITICAL: skip comments we already have.
                    if SocialComment.objects.filter(comment_id=c_id).exists():
                        continue

                    # Parse once; reused for both the row and the bookmark.
                    c_time = parse_meta_timestamp(c.get('created_time'))
                    author = c.get('from', {})
                    author_name = author.get('name') or author.get('username') or 'User'
                    comment_text = c.get('message') or c.get('text') or ''

                    SocialComment.objects.create(
                        account=account,
                        content=content_obj,
                        platform_type='META',
                        source_platform=platform,
                        comment_id=c_id,
                        author_name=author_name,
                        author_id=author.get('id') if isinstance(author, dict) else '',
                        text=comment_text,
                        created_at=c_time,
                        comment_data=c,
                        like_count=c.get('like_count', 0)
                    )
                    total_comments_synced += 1

                    if latest_comment_time is None or c_time > latest_comment_time:
                        latest_comment_time = c_time

                # Single bookmark write per post.
                if latest_comment_time != content_obj.last_comment_sync_at:
                    content_obj.last_comment_sync_at = latest_comment_time
                    content_obj.save()

        account.last_synced_at = timezone.now()
        account.save()
        logger.info(f"Deep Backfill Complete: {account.name}. Posts: {total_posts_synced}, Comments: {total_comments_synced}")

    except Exception as e:
        logger.error(f"Backfill Failed for {account_id}: {e}", exc_info=True)
        raise self.retry(exc=e, countdown=60)
|
|
|
|
|
|
# ============================================================================
|
|
# TASK 2: POLL NEW COMMENTS (Delta Sync)
|
|
# ============================================================================
|
|
# Runs automatically via Celery Beat.
|
|
# Iterates existing posts to find NEW comments only.
|
|
# Mirrors your YouTube poll_new_comments_task logic.
|
|
# ============================================================================
|
|
|
|
@shared_task
def meta_poll_new_comments_task():
    """
    FAST POLLING (Delta Sync).
    Runs automatically via Celery Beat.
    Fetches all new comments that don't exist in database.

    Iterates every SocialContent row we already know about (no post
    discovery), fetches comments since each post's bookmark, creates the
    ones not yet stored, and advances ``last_comment_sync_at`` once per
    post. Per-post and per-account failures are logged and do not abort
    the rest of the run.
    """
    accounts = SocialAccount.objects.filter(platform_type__in=['META'], is_active=True)

    for account in accounts:
        try:
            logger.info(f"Polling comments for {account.name}")

            # 1. Iterate through existing posts in database (Like YouTube)
            # We only check posts we already know about.
            videos = SocialContent.objects.filter(platform_type='META', account=account)

            for video in videos:
                try:
                    # 2. Fetch comments using timestamp filter.
                    # May be None if the post has never been comment-synced.
                    since_ts = video.last_comment_sync_at
                    # Prefer the stored page token (set during backfill);
                    # fall back to the user token.
                    token_to_use = video.content_data.get('access_token', account.access_token)

                    comments = MetaService.fetch_comments_for_post(video.content_id, token_to_use, since_ts)

                    if not comments:
                        continue

                    new_comments_count = 0
                    latest_comment_time = video.last_comment_sync_at

                    # 3. Check for duplicates using ID
                    for c in comments:
                        c_id = c['id']

                        if SocialComment.objects.filter(comment_id=c_id).exists():
                            # Skip existing comments
                            continue

                        # Parse Data
                        c_time = parse_meta_timestamp(c.get('created_time'))
                        author = c.get('from', {})
                        author_name = author.get('name') or author.get('username') or 'User'
                        comment_text = c.get('message') or c.get('text') or ''

                        # Save new comment
                        SocialComment.objects.create(
                            account=account,
                            content=video,
                            platform_type='META',
                            source_platform=video.source_platform,  # Inherit from content
                            comment_id=c_id,
                            author_name=author_name,
                            author_id=author.get('id') if isinstance(author, dict) else '',
                            text=comment_text,
                            created_at=c_time,
                            comment_data=c,
                            like_count=c.get('like_count', 0)
                        )

                        new_comments_count += 1

                        # Track latest comment time for bookmarking.
                        # BUGFIX: latest_comment_time can be None on a
                        # never-synced post; comparing datetime > None
                        # raised TypeError and aborted this post's loop.
                        if latest_comment_time is None or c_time > latest_comment_time:
                            latest_comment_time = c_time

                    # 4. Update Content Bookmark
                    if new_comments_count > 0:
                        logger.info(f"Found {new_comments_count} new comments for post {video.content_id}")
                        video.last_comment_sync_at = latest_comment_time
                        video.save()

                except Exception as e:
                    logger.error(f"Error syncing post {video.content_id}: {e}")

            account.last_synced_at = timezone.now()
            account.save()

        except Exception as e:
            logger.error(f"Polling Error for account {account.name}: {e}")
|
|
|
|
|
|
# ============================================================================
|
|
# TASK 3: WEBHOOK (Realtime)
|
|
# ============================================================================
|
|
|
|
@shared_task(bind=True)
def process_webhook_comment_task(self, comment_id, page_native_id, account_id):
    """
    WEBHOOK TASK
    Handles real-time updates.

    Resolves the page-scoped token for *page_native_id*, fetches the single
    comment from the Graph API, ensures a SocialContent stub exists for its
    parent post, and upserts the comment (update_or_create makes webhook
    redeliveries safe).
    """
    try:
        # 1. Get Account
        account = SocialAccount.objects.get(id=account_id)

        # 2. Find specific Page Token (fall back to the user token).
        entities = MetaService.discover_pages_and_ig(account.access_token)

        page_token = account.access_token
        for entity in entities:
            if entity['native_id'] == page_native_id and entity['platform'] == 'FB':
                page_token = entity['access_token']
                break

        # 3. Fetch Comment
        data = MetaService.fetch_single_comment(comment_id, page_token)

        # 'post' may be a dict ({'id': ...}) or a bare id string.
        post_obj = data.get('post')
        post_id = post_obj.get('id') if isinstance(post_obj, dict) else post_obj

        if not post_id:
            # Without a parent post id we cannot attach the comment to
            # content; bail out rather than create a broken content row.
            logger.error(f"Webhook comment {comment_id} has no parent post id; skipping")
            return

        # Guard against payloads with "from": null (data.get('from', {})
        # would still yield None in that case).
        author = data.get('from') or {}

        # 4. Determine Source.
        # NOTE(review): heuristic — IG-scoped user ids are longer than FB
        # page-scoped ids; confirm against current Graph API id formats.
        source_platform = 'FB'
        if isinstance(author, dict) and isinstance(author.get('id'), str) and len(str(author.get('id'))) > 15:
            source_platform = 'IG'

        # 5. Get or Create Content (stub if we have never seen the post).
        content, _ = SocialContent.objects.get_or_create(
            platform_type='META',
            content_id=post_id,
            defaults={
                'account': account,
                'text': '',
                'created_at': timezone.now(),
                'content_data': {},
                'source_platform': source_platform
            }
        )

        # 6. Save Comment (update_or_create allows safe re-runs)
        comment_text = data.get('message') or data.get('text') or ''
        author_name = author.get('name') or author.get('username') or 'User'

        SocialComment.objects.update_or_create(
            platform_type='META',
            comment_id=data['id'],
            defaults={
                'account': account,
                'content': content,
                'source_platform': source_platform,
                'author_name': author_name,
                # Consistent with the sync tasks: empty string, not None.
                'author_id': author.get('id') or '',
                'text': comment_text,
                'created_at': parse_meta_timestamp(data.get('created_time')),
                'synced_via_webhook': True,
                'comment_data': data
            }
        )
    except SocialAccount.DoesNotExist:
        logger.error(f"Account {account_id} not found for Webhook")
    except Exception as e:
        logger.error(f"Webhook Task Failed: {e}", exc_info=True)