HH/apps/social/tasks/google.py
2026-02-12 15:09:48 +03:00

128 lines
4.8 KiB
Python

from celery import shared_task
from django.utils import timezone
from django.utils.dateparse import parse_datetime
from django.db import transaction # <--- FIX: Added missing import
from apps.social.models import SocialAccount, SocialContent, SocialComment
from apps.social.services.google import GoogleBusinessService, GoogleAPIError
import logging
logger = logging.getLogger(__name__)
@shared_task
def sync_all_accounts_periodic():
    """Celery Beat entry point: fan out one sync job per active Google account.

    Both historical and delta sync are handled downstream --
    ``sync_single_account`` decides per location based on its
    ``last_comment_sync_at`` watermark, and it also stamps the account's
    ``last_synced_at`` once the sync actually completes (doing that here
    would mark accounts "synced" before the queued task has run).
    """
    logger.info("Starting Periodic Sync for Google Business...")
    # Only ids are needed to enqueue tasks; skip loading full model rows.
    account_ids = SocialAccount.objects.filter(
        platform_type='GO', is_active=True
    ).values_list('id', flat=True)
    for account_id in account_ids:
        try:
            # .delay() enqueues asynchronously on the Celery broker.
            sync_single_account.delay(account_id)
        except Exception:
            # A broker hiccup for one account must not abort the fan-out.
            logger.exception("Failed to trigger sync for account %s", account_id)
@shared_task
def sync_single_account(account_id):
    """Background job: sync all locations and reviews for one Google account.

    For each location, performs a delta fetch keyed on the location's
    ``last_comment_sync_at`` watermark. Brand-new locations (or ones that
    never synced) get a watermark back-dated 5 years to force a historical
    backfill on the first run.

    Args:
        account_id: primary key of a ``SocialAccount`` with platform 'GO'.

    Raises:
        Exception: any sync error is logged and re-raised so Celery records
            the task as failed (and can retry if configured).
    """
    try:
        account = SocialAccount.objects.get(id=account_id, platform_type='GO')
        locations_data = GoogleBusinessService.fetch_locations(account)
        for loc_data in locations_data:
            # Google's location resource 'name' is its stable identifier.
            location, created = SocialContent.objects.get_or_create(
                platform_type='GO',
                content_id=loc_data['name'],
                defaults={
                    'account': account,
                    'title': loc_data.get('title', 'Unknown Location'),
                    'text': "",
                    'content_data': loc_data,
                },
            )
            if created or location.last_comment_sync_at is None:
                # No watermark yet: back-date 5 years to trigger a full
                # historical pull instead of a delta.
                location.last_comment_sync_at = (
                    timezone.now() - timezone.timedelta(days=365 * 5)
                )
                location.save()
            reviews_data = GoogleBusinessService.fetch_reviews_delta(account, location)
            if not reviews_data:
                continue
            # Advance the watermark to the newest review updateTime seen;
            # commit reviews + watermark atomically so a partial batch
            # cannot move the watermark past unsaved reviews.
            latest_time = location.last_comment_sync_at
            with transaction.atomic():
                for r_data in reviews_data:
                    review_time = _save_review(account, location, r_data)
                    if review_time and review_time > latest_time:
                        latest_time = review_time
                location.last_comment_sync_at = latest_time
                location.save()
        # Stamp the account only after every location synced successfully.
        account.last_synced_at = timezone.now()
        account.save()
    except SocialAccount.DoesNotExist:
        logger.error("Account %s not found", account_id)
    except Exception:
        logger.exception("Error syncing account %s", account_id)
        raise
def _parse_gbp_time(value):
    """Parse an RFC 3339 timestamp string into a timezone-aware datetime.

    Returns None when the value is missing, unparseable, or not a string --
    the caller supplies its own fallback. Only ValueError/TypeError are
    caught (the failures ``parse_datetime`` actually raises); a bare
    ``except`` here would also swallow KeyboardInterrupt/SystemExit.
    """
    if not value:
        return None
    try:
        dt = parse_datetime(value)
        if dt is None:
            return None
        return dt if dt.tzinfo else timezone.make_aware(dt)
    except (ValueError, TypeError):
        return None


def _save_review(account, location, r_data):
    """Save/update a single Google review as a ``SocialComment`` row.

    Args:
        account: owning ``SocialAccount``.
        location: ``SocialContent`` row representing the business location.
        r_data: raw review dict from the Google Business API.

    Returns:
        The review's ``updateTime`` as an aware datetime, or None when it is
        absent/unparseable. The caller uses this to advance the location's
        sync watermark.
    """
    review_id = r_data.get('name')
    created_at = _parse_gbp_time(r_data.get('createTime')) or timezone.now()
    update_time = _parse_gbp_time(r_data.get('updateTime'))
    # Google returns star ratings as enum words; unknown/missing maps to 0.
    star_map = {'ONE': 1, 'TWO': 2, 'THREE': 3, 'FOUR': 4, 'FIVE': 5}
    rating = star_map.get(r_data.get('starRating', 'ZERO'), 0)
    # The API exposes at most one owner reply per review.
    reply_count = 1 if r_data.get('reply') else 0
    reviewer_name = r_data.get('reviewer', {}).get('displayName', 'Anonymous')
    # Keep the full raw payload, enriched with normalized fields.
    comment_data = r_data.copy()
    comment_data['star_rating'] = rating
    comment_data['reviewer_name'] = reviewer_name
    SocialComment.objects.update_or_create(
        platform_type='GO',
        comment_id=review_id,
        defaults={
            'account': account,
            'content': location,
            'author_name': reviewer_name,
            # NOTE(review): author_id mirrors displayName (no default) -- the
            # API payload used here has no stable reviewer id; confirm this
            # duplication is intended.
            'author_id': r_data.get('reviewer', {}).get('displayName'),
            'text': r_data.get('comment', ''),
            'created_at': created_at,
            'rating': rating,
            'reply_count': reply_count,
            'comment_data': comment_data,
        },
    )
    return update_time