added HaikalBot logic and Tours
parent cba8a39d1b
commit c212a65185

6  .idea/sqldialects.xml  (generated, new file)
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="SqlDialectMappings">
    <file url="file://$PROJECT_DIR$/haikalbot/management/commands/generate_support_yaml.py" dialect="GenericSQL" />
  </component>
</project>

@@ -28,6 +28,7 @@ urlpatterns += i18n_patterns(
    path('appointment/', include('appointment.urls')),
    path('plans/', include('plans.urls')),
    path("schema/", Schema.as_view()),
    path('tours/', include('tours.urls')),
    # path('', include(tf_urls)),
)

0  database.sqlite  (new file)

2950  haikal_kb.yaml  (new file)
File diff suppressed because it is too large.

@@ -20,7 +20,7 @@ from sqlalchemy.orm import relationship

logger = logging.getLogger(__name__)


# Configuration settings
LLM_MODEL = getattr(settings, 'MODEL_ANALYZER_LLM_MODEL', 'qwen:7b-chat')
LLM_MODEL = getattr(settings, 'MODEL_ANALYZER_LLM_MODEL', 'qwen3:8b')
LLM_TEMPERATURE = getattr(settings, 'MODEL_ANALYZER_LLM_TEMPERATURE', 0.3)
LLM_MAX_TOKENS = getattr(settings, 'MODEL_ANALYZER_LLM_MAX_TOKENS', 2048)
CACHE_TIMEOUT = getattr(settings, 'MODEL_ANALYZER_CACHE_TIMEOUT', 3600)

801  haikalbot/haikal_agent.py  (new file)
@@ -0,0 +1,801 @@
import asyncio
import sqlite3
import json
import re
import logging
from typing import List, Dict, Any, Optional, Union
from dataclasses import dataclass, asdict
from enum import Enum
import os
from functools import reduce
import operator

# Pydantic and AI imports
from pydantic import BaseModel, Field
from pydantic_ai import Agent, RunContext
from pydantic_ai.models.openai import OpenAIModel
from pydantic_ai.providers.openai import OpenAIProvider

# Optional Django imports (if available)
try:
    from django.apps import apps
    from django.db import models, connection
    from django.db.models import QuerySet, Q, F, Sum, Avg, Count, Max, Min
    from django.core.exceptions import FieldDoesNotExist
    from django.conf import settings

    DJANGO_AVAILABLE = True
except ImportError:
    DJANGO_AVAILABLE = False

# Optional database drivers
try:
    import psycopg2

    POSTGRESQL_AVAILABLE = True
except ImportError:
    POSTGRESQL_AVAILABLE = False

try:
    import pymysql

    MYSQL_AVAILABLE = True
except ImportError:
    MYSQL_AVAILABLE = False

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


# Configuration
class DatabaseConfig:
    LLM_MODEL = settings.MODEL_ANALYZER_LLM_MODEL
    LLM_BASE_URL = "http://localhost:11434/v1"
    LLM_TEMPERATURE = 0.3
    MAX_RESULTS = 1000
    SUPPORTED_CHART_TYPES = ["bar", "line", "pie", "doughnut", "radar", "scatter"]


class DatabaseType(Enum):
    SQLITE = "sqlite"
    POSTGRESQL = "postgresql"
    MYSQL = "mysql"


@dataclass
class DatabaseConnection:
    db_type: DatabaseType
    connection_string: str
    database_name: Optional[str] = None
    host: Optional[str] = None
    port: Optional[int] = None
    user: Optional[str] = None
    password: Optional[str] = None
    schema_info: Optional[Dict] = None


@dataclass
class QueryResult:
    status: str
    data: Union[List[Dict], Dict]
    metadata: Dict[str, Any]
    chart_data: Optional[Dict] = None
    language: str = "en"
    error: Optional[str] = None

    def to_dict(self):
        """Convert to dictionary for JSON serialization."""
        return asdict(self)


class DatabaseSchema(BaseModel):
    tables: Dict[str, List[Dict[str, Any]]] = Field(
        description="Database schema with table names as keys and column info as values"
    )
    relationships: Optional[List[Dict[str, Any]]] = Field(
        default=None,
        description="Foreign key relationships between tables"
    )


class InsightRequest(BaseModel):
    prompt: str = Field(description="Natural language query from user")
    database_path: Optional[str] = Field(default=None, description="Path to database file (for SQLite)")
    chart_type: Optional[str] = Field(default=None, description="Preferred chart type")
    limit: Optional[int] = Field(default=1000, description="Maximum number of results")
    language: Optional[str] = Field(default="auto", description="Response language preference")
    use_django: Optional[bool] = Field(default=True, description="Use Django database if available")


class DatabaseInsightSystem:
    def __init__(self, config: DatabaseConfig = None):
        self.config = config or DatabaseConfig()
        self.model = OpenAIModel(
            model_name=self.config.LLM_MODEL,
            provider=OpenAIProvider(base_url=self.config.LLM_BASE_URL)
        )
        self.db_connection = None
        self._setup_agents()

    def _setup_agents(self):
        """Initialize the AI agents for schema analysis and query generation."""

        # Query generation and execution agent
        self.query_agent = Agent(
            self.model,
            deps_type=DatabaseSchema,
            output_type=str,
            system_prompt="""You are an intelligent database query generator and analyst.
            Given a natural language prompt and database schema, you must:

            1. ANALYZE the user's request in English or Arabic
            2. IDENTIFY relevant tables and columns from the schema
            3. GENERATE appropriate SQL query or analysis approach
            4. DETERMINE if aggregation, grouping, or joins are needed
            5. SUGGEST appropriate visualization type
            6. EXECUTE the query and provide insights

            Response format should be JSON:
            {
                "analysis": "Brief analysis of the request",
                "query_type": "select|aggregate|join|complex",
                "sql_query": "Generated SQL query",
                "chart_suggestion": "bar|line|pie|etc",
                "expected_fields": ["field1", "field2"],
                "language": "en|ar"
            }

            Handle both English and Arabic prompts. For Arabic text, respond in Arabic.
            Focus on providing actionable insights, not just raw data."""
        )

    def _get_django_database_config(self) -> Optional[DatabaseConnection]:
        """Extract database configuration from Django settings."""
        if not DJANGO_AVAILABLE:
            return None

        try:
            # Get default database configuration
            db_config = settings.DATABASES.get('default', {})
            if not db_config:
                logger.warning("No default database configuration found in Django settings")
                return None

            engine = db_config.get('ENGINE', '')
            db_name = db_config.get('NAME', '')
            host = db_config.get('HOST', 'localhost')
            port = db_config.get('PORT', None)
            user = db_config.get('USER', '')
            password = db_config.get('PASSWORD', '')

            # Determine database type from engine
            if 'sqlite' in engine.lower():
                db_type = DatabaseType.SQLITE
                connection_string = db_name  # For SQLite, NAME is the file path
            elif 'postgresql' in engine.lower():
                db_type = DatabaseType.POSTGRESQL
                port = port or 5432
                connection_string = f"postgresql://{user}:{password}@{host}:{port}/{db_name}"
            elif 'mysql' in engine.lower():
                db_type = DatabaseType.MYSQL
                port = port or 3306
                connection_string = f"mysql://{user}:{password}@{host}:{port}/{db_name}"
            else:
                logger.warning(f"Unsupported database engine: {engine}")
                return None

            return DatabaseConnection(
                db_type=db_type,
                connection_string=connection_string,
                database_name=db_name,
                host=host,
                port=port,
                user=user,
                password=password
            )

        except Exception as e:
            logger.error(f"Failed to get Django database config: {e}")
            return None

    def analyze_database_schema_sync(self, request: InsightRequest) -> DatabaseSchema:
        """Synchronous wrapper for schema analysis."""
        return asyncio.run(self.analyze_database_schema(request))

    async def analyze_database_schema(self, request: InsightRequest) -> DatabaseSchema:
        """Extract and analyze database schema."""
        try:
            # Try Django first if available and requested
            if request.use_django and DJANGO_AVAILABLE:
                django_config = self._get_django_database_config()
                if django_config:
                    self.db_connection = django_config
                    return await self._analyze_django_schema()

            # Fall back to a direct database connection
            if request.database_path:
                # Assume SQLite for a direct file path
                self.db_connection = DatabaseConnection(
                    db_type=DatabaseType.SQLITE,
                    connection_string=request.database_path
                )
                return await self._analyze_sqlite_schema(request.database_path)

            raise ValueError("No database configuration available")

        except Exception as e:
            logger.error(f"Schema analysis failed: {e}")
            raise

    async def _analyze_sqlite_schema(self, db_path: str) -> DatabaseSchema:
        """Analyze SQLite database schema."""
        try:
            conn = sqlite3.connect(db_path)
            cursor = conn.cursor()

            # Get table names
            cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
            tables = [row[0] for row in cursor.fetchall()]

            schema_data = {}
            relationships = []

            for table in tables:
                # Get column information
                cursor.execute(f"PRAGMA table_info({table})")
                columns = []
                for col in cursor.fetchall():
                    columns.append({
                        "name": col[1],
                        "type": col[2],
                        "notnull": bool(col[3]),
                        "default_value": col[4],
                        "primary_key": bool(col[5])
                    })
                schema_data[table] = columns

                # Get foreign key relationships
                cursor.execute(f"PRAGMA foreign_key_list({table})")
                for fk in cursor.fetchall():
                    relationships.append({
                        "from_table": table,
                        "from_column": fk[3],
                        "to_table": fk[2],
                        "to_column": fk[4]
                    })

            conn.close()
            return DatabaseSchema(tables=schema_data, relationships=relationships)

        except Exception as e:
            logger.error(f"SQLite schema analysis failed: {e}")
            raise

    async def _analyze_django_schema(self) -> DatabaseSchema:
        """Analyze Django models schema."""
        if not DJANGO_AVAILABLE:
            raise ImportError("Django is not available")

        schema_data = {}
        relationships = []

        for model in apps.get_models():
            table_name = model._meta.db_table
            columns = []

            for field in model._meta.get_fields():
                if not field.is_relation:
                    columns.append({
                        "name": field.name,
                        "type": field.get_internal_type(),
                        "notnull": not getattr(field, 'null', True),
                        "primary_key": getattr(field, 'primary_key', False)
                    })
                else:
                    # Handle relationships
                    if hasattr(field, 'related_model') and field.related_model:
                        relationships.append({
                            "from_table": table_name,
                            "from_column": field.name,
                            "to_table": field.related_model._meta.db_table,
                            "relationship_type": field.get_internal_type()
                        })

            schema_data[table_name] = columns

        return DatabaseSchema(tables=schema_data, relationships=relationships)

    async def _analyze_postgresql_schema(self, connection_string: str) -> DatabaseSchema:
        """Analyze PostgreSQL database schema."""
        if not POSTGRESQL_AVAILABLE:
            raise ImportError("psycopg2 is not available")

        try:
            import psycopg2
            from psycopg2.extras import RealDictCursor

            conn = psycopg2.connect(connection_string)
            cursor = conn.cursor(cursor_factory=RealDictCursor)

            # Get table names
            cursor.execute("""
                SELECT table_name
                FROM information_schema.tables
                WHERE table_schema = 'public'
            """)
            tables = [row['table_name'] for row in cursor.fetchall()]

            schema_data = {}
            relationships = []

            for table in tables:
                # Get column information
                cursor.execute("""
                    SELECT column_name, data_type, is_nullable, column_default
                    FROM information_schema.columns
                    WHERE table_name = %s
                    ORDER BY ordinal_position
                """, (table,))

                columns = []
                for col in cursor.fetchall():
                    columns.append({
                        "name": col['column_name'],
                        "type": col['data_type'],
                        "notnull": col['is_nullable'] == 'NO',
                        "default_value": col['column_default'],
                        "primary_key": False  # Will be updated below
                    })

                # Get primary key information
                cursor.execute("""
                    SELECT column_name
                    FROM information_schema.key_column_usage
                    WHERE table_name = %s
                    AND constraint_name LIKE '%_pkey'
                """, (table,))

                pk_columns = [row['column_name'] for row in cursor.fetchall()]
                for col in columns:
                    if col['name'] in pk_columns:
                        col['primary_key'] = True

                schema_data[table] = columns

                # Get foreign key relationships
                cursor.execute("""
                    SELECT kcu.column_name,
                           ccu.table_name AS foreign_table_name,
                           ccu.column_name AS foreign_column_name
                    FROM information_schema.table_constraints AS tc
                    JOIN information_schema.key_column_usage AS kcu
                        ON tc.constraint_name = kcu.constraint_name
                    JOIN information_schema.constraint_column_usage AS ccu
                        ON ccu.constraint_name = tc.constraint_name
                    WHERE tc.constraint_type = 'FOREIGN KEY'
                        AND tc.table_name = %s
                """, (table,))

                for fk in cursor.fetchall():
                    relationships.append({
                        "from_table": table,
                        "from_column": fk['column_name'],
                        "to_table": fk['foreign_table_name'],
                        "to_column": fk['foreign_column_name']
                    })

            conn.close()
            return DatabaseSchema(tables=schema_data, relationships=relationships)

        except Exception as e:
            logger.error(f"PostgreSQL schema analysis failed: {e}")
            raise

    async def _analyze_mysql_schema(self, connection_string: str) -> DatabaseSchema:
        """Analyze MySQL database schema."""
        if not MYSQL_AVAILABLE:
            raise ImportError("pymysql is not available")

        try:
            import pymysql

            # Parse connection string to get connection parameters
            # Format: mysql://user:password@host:port/database
            import urllib.parse
            parsed = urllib.parse.urlparse(connection_string)

            conn = pymysql.connect(
                host=parsed.hostname,
                port=parsed.port or 3306,
                user=parsed.username,
                password=parsed.password,
                database=parsed.path[1:],  # Remove leading slash
                cursorclass=pymysql.cursors.DictCursor
            )

            cursor = conn.cursor()

            # Get table names
            cursor.execute("SHOW TABLES")
            tables = [list(row.values())[0] for row in cursor.fetchall()]

            schema_data = {}
            relationships = []

            for table in tables:
                # Get column information
                cursor.execute(f"DESCRIBE {table}")
                columns = []
                for col in cursor.fetchall():
                    columns.append({
                        "name": col['Field'],
                        "type": col['Type'],
                        "notnull": col['Null'] == 'NO',
                        "default_value": col['Default'],
                        "primary_key": col['Key'] == 'PRI'
                    })

                schema_data[table] = columns

                # Get foreign key relationships
                cursor.execute(f"""
                    SELECT
                        COLUMN_NAME,
                        REFERENCED_TABLE_NAME,
                        REFERENCED_COLUMN_NAME
                    FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
                    WHERE TABLE_NAME = '{table}'
                    AND REFERENCED_TABLE_NAME IS NOT NULL
                """)

                for fk in cursor.fetchall():
                    relationships.append({
                        "from_table": table,
                        "from_column": fk['COLUMN_NAME'],
                        "to_table": fk['REFERENCED_TABLE_NAME'],
                        "to_column": fk['REFERENCED_COLUMN_NAME']
                    })

            conn.close()
            return DatabaseSchema(tables=schema_data, relationships=relationships)

        except Exception as e:
            logger.error(f"MySQL schema analysis failed: {e}")
            raise

    def _detect_language(self, text: str) -> str:
        """Detect if text is Arabic or English."""
        arabic_chars = re.findall(r'[\u0600-\u06FF]', text)
        return "ar" if len(arabic_chars) > len(text) * 0.3 else "en"

    def _execute_query_sync(self, query: str) -> List[Dict]:
        """Synchronous wrapper for query execution."""
        return asyncio.run(self._execute_query(query))

    async def _execute_query(self, query: str) -> List[Dict]:
        """Execute query based on the current database connection."""
        if not self.db_connection:
            raise ValueError("No database connection established")

        if self.db_connection.db_type == DatabaseType.SQLITE:
            return await self._execute_sqlite_query(self.db_connection.connection_string, query)
        # elif self.db_connection.db_type == DatabaseType.DJANGO and DJANGO_AVAILABLE:
        #     return await self._execute_django_query(query)
        elif self.db_connection.db_type == DatabaseType.POSTGRESQL:
            return await self._execute_postgresql_query(self.db_connection.connection_string, query)
        elif self.db_connection.db_type == DatabaseType.MYSQL:
            return await self._execute_mysql_query(self.db_connection.connection_string, query)
        else:
            raise ValueError(f"Unsupported database type: {self.db_connection.db_type}")

    async def _execute_sqlite_query(self, db_path: str, query: str) -> List[Dict]:
        """Execute SQL query on SQLite database."""
        try:
            conn = sqlite3.connect(db_path)
            cursor = conn.cursor()
            cursor.execute(query)

            # Get column names
            columns = [description[0] for description in cursor.description]

            # Fetch results and convert to dictionaries
            results = cursor.fetchall()
            data = [dict(zip(columns, row)) for row in results]

            conn.close()
            return data

        except Exception as e:
            logger.error(f"SQLite query execution failed: {e}")
            raise

    async def _execute_django_query(self, query: str) -> List[Dict]:
        """Execute raw SQL query using Django's database connection."""
        try:
            from django.db import connection

            with connection.cursor() as cursor:
                cursor.execute(query)
                columns = [col[0] for col in cursor.description]
                results = cursor.fetchall()
                data = [dict(zip(columns, row)) for row in results]

            return data

        except Exception as e:
            logger.error(f"Django query execution failed: {e}")
            raise

    async def _execute_postgresql_query(self, connection_string: str, query: str) -> List[Dict]:
        """Execute SQL query on PostgreSQL database."""
        try:
            import psycopg2
            from psycopg2.extras import RealDictCursor

            conn = psycopg2.connect(connection_string)
            cursor = conn.cursor(cursor_factory=RealDictCursor)
            cursor.execute(query)

            results = cursor.fetchall()
            data = [dict(row) for row in results]

            conn.close()
            return data

        except Exception as e:
            logger.error(f"PostgreSQL query execution failed: {e}")
            raise

    async def _execute_mysql_query(self, connection_string: str, query: str) -> List[Dict]:
        """Execute SQL query on MySQL database."""
        try:
            import pymysql
            import urllib.parse

            parsed = urllib.parse.urlparse(connection_string)

            conn = pymysql.connect(
                host=parsed.hostname,
                port=parsed.port or 3306,
                user=parsed.username,
                password=parsed.password,
                database=parsed.path[1:],
                cursorclass=pymysql.cursors.DictCursor
            )

            cursor = conn.cursor()
            cursor.execute(query)
            results = cursor.fetchall()

            conn.close()
            return results

        except Exception as e:
            logger.error(f"MySQL query execution failed: {e}")
            raise

    def _prepare_chart_data(self, data: List[Dict], chart_type: str, fields: List[str]) -> Optional[Dict]:
        """Prepare data for chart visualization."""
        if not data or not fields:
            return None

        chart_type = chart_type.lower()
        if chart_type not in self.config.SUPPORTED_CHART_TYPES:
            chart_type = "bar"

        try:
            # Extract labels and values
            labels = []
            datasets = []

            if len(fields) >= 1:
                labels = [str(item.get(fields[0], "")) for item in data]

            if chart_type in ["pie", "doughnut"]:
                # Single dataset for pie charts
                values = []
                for item in data:
                    if len(fields) > 1:
                        try:
                            value = float(item.get(fields[1], 0) or 0)
                        except (ValueError, TypeError):
                            value = 1
                        values.append(value)
                    else:
                        values.append(1)

                return {
                    "type": chart_type,
                    "labels": labels,
                    "data": values,
                    "backgroundColor": [
                        f"rgba({50 + i * 30}, {100 + i * 25}, {200 + i * 20}, 0.7)"
                        for i in range(len(values))
                    ]
                }
            else:
                # Multiple datasets for other chart types
                for i, field in enumerate(fields[1:], 1):
                    try:
                        dataset_values = []
                        for item in data:
                            try:
                                value = float(item.get(field, 0) or 0)
                            except (ValueError, TypeError):
                                value = 0
                            dataset_values.append(value)

                        datasets.append({
                            "label": field,
                            "data": dataset_values,
                            "backgroundColor": f"rgba({50 + i * 40}, {100 + i * 30}, 235, 0.6)",
                            "borderColor": f"rgba({50 + i * 40}, {100 + i * 30}, 235, 1.0)",
                            "borderWidth": 2
                        })
                    except Exception as e:
                        logger.warning(f"Error processing field {field}: {e}")

                return {
                    "type": chart_type,
                    "labels": labels,
                    "datasets": datasets
                }

        except Exception as e:
            logger.error(f"Chart preparation failed: {e}")
            return None

    def get_insights_sync(self, request: InsightRequest) -> Dict[str, Any]:
        """Synchronous wrapper for get_insights - for Django views."""
        try:
            result = asyncio.run(self.get_insights(request))
            return result.to_dict()
        except Exception as e:
            logger.error(f"Synchronous insight generation failed: {e}")
            return {
                "status": "error",
                "data": [],
                "metadata": {},
                "error": str(e),
                "language": "en"
            }

    async def get_insights(self, request: InsightRequest) -> QueryResult:
        """Main method to get database insights from a natural language prompt."""
        try:
            # Detect language
            language = self._detect_language(request.prompt) if request.language == "auto" else request.language

            # Analyze database schema
            schema = await self.analyze_database_schema(request)

            # Generate query plan using AI
            query_response = await self.query_agent.run(
                f"User prompt: {request.prompt}\nLanguage: {language}",
                database_schema=schema
            )

            # Parse AI response
            try:
                query_plan = json.loads(query_response.output)
            except json.JSONDecodeError:
                # Fallback: extract SQL from the response
                sql_match = re.search(r'SELECT.*?;', query_response.output, re.IGNORECASE | re.DOTALL)
                if sql_match:
                    query_plan = {
                        "sql_query": sql_match.group(0),
                        "chart_suggestion": "bar",
                        "expected_fields": [],
                        "language": language
                    }
                else:
                    raise ValueError("Could not parse AI response")

            # Execute query
            sql_query = query_plan.get("sql_query", "")
            if not sql_query:
                raise ValueError("No SQL query generated")

            data = await self._execute_query(sql_query)

            # Prepare chart data
            chart_data = None
            chart_type = request.chart_type or query_plan.get("chart_suggestion", "bar")
            expected_fields = query_plan.get("expected_fields", [])

            if data and expected_fields:
                chart_data = self._prepare_chart_data(data, chart_type, expected_fields)
            elif data:
                # Use the first few fields if no specific fields were suggested
                available_fields = list(data[0].keys()) if data else []
                chart_data = self._prepare_chart_data(data, chart_type, available_fields[:3])

            # Prepare result
            return QueryResult(
                status="success",
                data=data[:request.limit] if data else [],
                metadata={
                    "total_count": len(data) if data else 0,
                    "query": sql_query,
                    "analysis": query_plan.get("analysis", ""),
                    "fields": expected_fields or (list(data[0].keys()) if data else []),
                    "database_type": self.db_connection.db_type.value if self.db_connection else "unknown"
                },
                chart_data=chart_data,
                language=language
            )

        except Exception as e:
            logger.error(f"Insight generation failed: {e}")
            return QueryResult(
                status="error",
                data=[],
                metadata={},
                error=str(e),
                language=language if 'language' in locals() else "en"
            )

    # # Static method for Django view compatibility
    # @staticmethod
    # def get_insights(django_request, prompt: str, **kwargs) -> Dict[str, Any]:
    #     """
    #     Static method compatible with your Django view.
    #     This method signature matches what your view is calling.
    #
    #     Args:
    #         django_request: Django HttpRequest object (not used but kept for compatibility)
    #         prompt: Natural language query string
    #         **kwargs: Additional parameters
    #
    #     Returns:
    #         Dictionary with query results
    #     """
    #     try:
    #         # Create system instance
    #         system = DatabaseInsightSystem()
    #
    #         # Extract language from Django request if available
    #         language = "auto"
    #         if hasattr(django_request, 'LANGUAGE_CODE'):
    #             language = django_request.LANGUAGE_CODE
    #
    #         # Create insight request
    #         insight_request = InsightRequest(
    #             prompt=prompt,
    #             language=language,
    #             use_django=True,
    #             **kwargs
    #         )
    #
    #         # Get insights synchronously
    #         return system.get_insights_sync(insight_request)
    #
    #     except Exception as e:
    #         logger.error(f"Static get_insights failed: {e}")
    #         return {
    #             "status": "error",
    #             "data": [],
    #             "metadata": {},
    #             "error": str(e),
    #             "language": language if 'language' in locals() else "en"
    #         }


# Convenience function for Django views (alternative approach)
def analyze_prompt_sync(prompt: str, **kwargs) -> Dict[str, Any]:
    """
    Synchronous function to analyze a prompt and return insights.
    Designed for use from Django views.

    Args:
        prompt: Natural language query
        **kwargs: Additional parameters for InsightRequest

    Returns:
        Dictionary with query results
    """
    system = DatabaseInsightSystem()
    request = InsightRequest(prompt=prompt, **kwargs)
    return system.get_insights_sync(request)

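For context, analyze_prompt_sync is the entry point this file documents for Django views; below is a minimal, hypothetical sketch of wiring it into a view (the view name and URL routing are assumptions, not part of this commit):

# Hypothetical usage sketch -- not part of the commit.
# Assumes haikalbot/haikal_agent.py is importable as haikalbot.haikal_agent.
from django.http import JsonResponse
from django.views.decorators.http import require_POST

from haikalbot.haikal_agent import analyze_prompt_sync


@require_POST
def insights_view(request):
    prompt = request.POST.get("prompt", "")
    if not prompt:
        return JsonResponse({"status": "error", "error": "Prompt is required."}, status=400)
    # Builds an InsightRequest and runs the async pipeline synchronously.
    result = analyze_prompt_sync(prompt, language="auto", use_django=True)
    return JsonResponse(result, status=200 if result.get("status") == "success" else 500)
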
File diff suppressed because it is too large.

@@ -5,9 +5,11 @@ import importlib
import yaml
import os
from django.conf import settings
from django.template.loaders.app_directories import get_app_template_dirs


class Command(BaseCommand):
    help = "Generate YAML support knowledge base from Django views and models"
    help = "Generate YAML support knowledge base from Django views, models, and templates"

    def handle(self, *args, **kwargs):
        output_file = "haikal_kb.yaml"
@@ -18,6 +20,8 @@ class Command(BaseCommand):
                "generated_from": "Django",
            },
            "features": {},
            "user_workflows": {},  # New section for step-by-step instructions
            "templates": {},
            "glossary": {}
        }

@@ -41,6 +45,49 @@ class Command(BaseCommand):
                all_models.append((model._meta.app_label, model.__name__, extract_doc(model)))
            return all_models

        def get_all_templates():
            template_dirs = get_app_template_dirs('templates')
            templates = []

            for template_dir in template_dirs:
                app_name = os.path.basename(os.path.dirname(os.path.dirname(template_dir)))
                for root, dirs, files in os.walk(template_dir):
                    for file in files:
                        if file.endswith(('.html', '.htm', '.txt')):
                            rel_path = os.path.relpath(os.path.join(root, file), template_dir)
                            with open(os.path.join(root, file), 'r', encoding='utf-8', errors='ignore') as f:
                                try:
                                    content = f.read()
                                    # Extract template comment documentation if it exists
                                    doc = ""
                                    if '{# DOC:' in content and '#}' in content:
                                        doc_parts = content.split('{# DOC:')
                                        for part in doc_parts[1:]:
                                            if '#}' in part:
                                                doc += part.split('#}')[0].strip() + "\n"
                                except Exception as e:
                                    self.stdout.write(self.style.WARNING(f"Error reading {rel_path}: {e}"))
                                    continue

                            templates.append((app_name, rel_path, doc.strip()))
            return templates

        # Look for workflow documentation files
        def get_workflow_docs():
            workflows = {}
            workflow_dir = os.path.join(settings.BASE_DIR, 'docs', 'workflows')
            if os.path.exists(workflow_dir):
                for file in os.listdir(workflow_dir):
                    if file.endswith('.yaml') or file.endswith('.yml'):
                        try:
                            with open(os.path.join(workflow_dir, file), 'r') as f:
                                workflow_data = yaml.safe_load(f)
                                for workflow_name, workflow_info in workflow_data.items():
                                    workflows[workflow_name] = workflow_info
                        except Exception as e:
                            self.stdout.write(self.style.WARNING(f"Error reading workflow file {file}: {e}"))
            return workflows

        # Extract views
        for app, mod in get_all_views_modules():
            for name, obj in inspect.getmembers(mod, inspect.isfunction):
@@ -52,6 +99,25 @@ class Command(BaseCommand):
                    "type": "view_function"
                }

                # Look for @workflow decorator or WORKFLOW tag in docstring
                if hasattr(obj, 'workflow_steps') or 'WORKFLOW:' in doc:
                    workflow_name = name.replace('_', ' ').title()
                    steps = []

                    if hasattr(obj, 'workflow_steps'):
                        steps = obj.workflow_steps
                    elif 'WORKFLOW:' in doc:
                        workflow_section = doc.split('WORKFLOW:')[1].strip()
                        steps_text = workflow_section.split('\n')
                        steps = [step.strip() for step in steps_text if step.strip()]

                    if steps:
                        kb["user_workflows"][workflow_name] = {
                            "description": f"How to {name.replace('_', ' ')}",
                            "steps": steps,
                            "source": f"{app}.views.{name}"
                        }

        # Extract models
        for app, name, doc in get_all_model_classes():
            if doc:
@@ -61,6 +127,52 @@ class Command(BaseCommand):
                    "type": "model_class"
                }

        # Extract templates
        for app, template_path, doc in get_all_templates():
            template_id = f"{app}:{template_path}"
            if doc:  # Only include templates with documentation
                kb["templates"][template_id] = {
                    "description": doc,
                    "path": template_path,
                    "app": app
                }

        # Add workflow documentation
        kb["user_workflows"].update(get_workflow_docs())

        # Add manual workflow examples if no workflows were found
        if not kb["user_workflows"]:
            kb["user_workflows"] = {
                "Add New Car": {
                    "description": "How to add a new car to the inventory",
                    "steps": [
                        "Navigate to the Inventory section by clicking 'Inventory' in the main menu",
                        "Click the 'Add Car' button in the top right corner",
                        "Enter the VIN number or scan it using the barcode scanner",
                        "Select the car make from the dropdown menu",
                        "Select the car series from the available options",
                        "Select the trim level for the car",
                        "Fill in additional details like color, mileage, and price",
                        "Click 'Save' to add the car to inventory, or 'Save & Add Another' to continue adding cars"
                    ],
                    "source": "manual_documentation"
                },
                "Create New Invoice": {
                    "description": "How to create a new invoice",
                    "steps": [
                        "Navigate to the Finance section by clicking 'Finance' in the main menu",
                        "Click the 'Invoices' tab",
                        "Click the 'Create New Invoice' button",
                        "Select a customer from the dropdown or click 'Add New Customer'",
                        "Select the car(s) to include in the invoice",
                        "Add any additional services or parts by clicking 'Add Item'",
                        "Set the payment terms and due date",
                        "Click 'Save Draft' to save without finalizing, or 'Finalize Invoice' to complete"
                    ],
                    "source": "manual_documentation"
                }
            }

        with open(output_file, "w", encoding="utf-8") as f:
            yaml.dump(kb, f, allow_unicode=True, sort_keys=False)

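The WORKFLOW: parsing above expects numbered steps inside a view docstring; a hedged illustration of the docstring shape it would pick up (the view function itself is hypothetical, not from this commit):

# Hypothetical view docstring that the WORKFLOW: parser above would capture.
def add_new_car(request):
    """Add a car to the inventory.

    WORKFLOW:
    1. Navigate to the Inventory section
    2. Click the 'Add Car' button
    3. Enter the VIN and select make, series, and trim
    4. Click 'Save'
    """
    ...
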
50  haikalbot/migrations/0001_initial.py  (new file)
@@ -0,0 +1,50 @@
# Generated by Django 5.2.1 on 2025-06-12 16:25

import django.db.models.deletion
import django.utils.timezone
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('inventory', '__first__'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='AnalysisCache',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('prompt_hash', models.CharField(db_index=True, max_length=64)),
                ('dealer_id', models.IntegerField(blank=True, db_index=True, null=True)),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('expires_at', models.DateTimeField(db_index=True)),
                ('result', models.JSONField()),
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'Analysis caches',
                'indexes': [models.Index(fields=['prompt_hash', 'dealer_id'], name='haikalbot_a_prompt__b98e1e_idx'), models.Index(fields=['expires_at'], name='haikalbot_a_expires_e790cd_idx')],
            },
        ),
        migrations.CreateModel(
            name='ChatLog',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user_message', models.TextField()),
                ('chatbot_response', models.TextField()),
                ('timestamp', models.DateTimeField(auto_now_add=True, db_index=True)),
                ('dealer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='chatlogs', to='inventory.dealer')),
            ],
            options={
                'ordering': ['-timestamp'],
                'indexes': [models.Index(fields=['dealer', 'timestamp'], name='haikalbot_c_dealer__6f8d63_idx')],
            },
        ),
    ]

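For readability, the models implied by this initial migration would look roughly like the following; this is reconstructed from the migration fields for reference, since the actual haikalbot/models.py is not shown in this commit, and the "inventory.Dealer" model name is an assumption based on the 'inventory.dealer' reference:

# Reconstructed from 0001_initial.py; field options mirror the migration.
from django.conf import settings
from django.db import models
from django.utils import timezone


class AnalysisCache(models.Model):
    prompt_hash = models.CharField(max_length=64, db_index=True)
    dealer_id = models.IntegerField(blank=True, null=True, db_index=True)
    user = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True, on_delete=models.CASCADE)
    created_at = models.DateTimeField(default=timezone.now)
    updated_at = models.DateTimeField(auto_now=True)
    expires_at = models.DateTimeField(db_index=True)
    result = models.JSONField()

    class Meta:
        verbose_name_plural = "Analysis caches"
        indexes = [
            models.Index(fields=["prompt_hash", "dealer_id"]),
            models.Index(fields=["expires_at"]),
        ]


class ChatLog(models.Model):
    dealer = models.ForeignKey("inventory.Dealer", related_name="chatlogs", on_delete=models.CASCADE)
    user_message = models.TextField()
    chatbot_response = models.TextField()
    timestamp = models.DateTimeField(auto_now_add=True, db_index=True)

    class Meta:
        ordering = ["-timestamp"]
        indexes = [models.Index(fields=["dealer", "timestamp"])]
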
Binary file not shown.

@@ -1,19 +0,0 @@
from langchain.document_loaders import TextLoader
from langchain.indexes import VectorstoreIndexCreator
from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQA

# Load YAML doc
loader = TextLoader("haikal_kb.yaml")
index = VectorstoreIndexCreator().from_loaders([loader])

# Setup QA chain
qa = RetrievalQA.from_chain_type(
    llm=ChatOpenAI(model="gpt-3.5-turbo", temperature=0),
    retriever=index.vectorstore.as_retriever()
)

# Ask a question
query = "How do I add a new invoice?"
response = qa.run(query)
print("Answer:", response)

77  haikalbot/utils/ask_haikalbot.py  (new file)
@@ -0,0 +1,77 @@
from langchain_community.document_loaders import TextLoader
from langchain.indexes import VectorstoreIndexCreator
from langchain_community.llms import Ollama
from langchain.chains import RetrievalQA
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain.prompts import PromptTemplate
# from django.conf import settings


# Load YAML doc
loader = TextLoader("haikal_kb.yaml")

# Create embeddings model
embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")

# Create an instance of VectorstoreIndexCreator with the embeddings
index_creator = VectorstoreIndexCreator(embedding=embeddings)

# Then call the from_loaders method on the instance
index = index_creator.from_loaders([loader])

# Create LLM instance
llm = Ollama(model="qwen3:8b", temperature=0.3)

# Define a custom prompt template for instructional responses
template = """
You are Haikal, an assistant for the car inventory management system.
Your goal is to provide clear step-by-step instructions for users to complete tasks.

Use the following pieces of context to answer the question at the end.
If you don't know the answer, just say you don't know. Don't try to make up an answer.

Context:
{context}

Question: {question}

Provide a clear step-by-step guide with numbered instructions. Include:
1. Where to click in the interface
2. What to enter or select
3. Any buttons to press to complete the action
4. Any alternatives or shortcuts if available

Helpful Step-by-Step Instructions:"""

PROMPT = PromptTemplate(
    template=template,
    input_variables=["context", "question"]
)

# Setup QA chain
qa = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=index.vectorstore.as_retriever(),
    return_source_documents=True,
    chain_type_kwargs={"prompt": PROMPT}
)

# Function to run a query
def ask_haikal(query):
    response = qa.invoke({"query": query})
    print("\n" + "="*50)
    print(f"Question: {query}")
    print("="*50)
    print("\nAnswer:")
    print(response["result"])
    print("\nSources:")
    for doc in response["source_documents"]:
        print(f"- {doc.metadata.get('source', 'Unknown source')}")
    print("="*50)
    return response["result"]

# # Example query
# if __name__ == "__main__":
#     query = "How do I add a new car to the inventory? answer in Arabic"
#     ask_haikal(query)

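Usage mirrors the commented example at the bottom of this file (and the standalone run_haikal_qa.py added later in this commit); note that importing the module builds the vector index at import time:

# Example invocation; building the index downloads the embedding model on first run.
from haikalbot.utils.ask_haikalbot import ask_haikal

answer = ask_haikal("How do I create a new invoice?")
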
@@ -4,12 +4,12 @@ from django.shortcuts import render
from django.utils.translation import gettext as _
from django.views import View
import logging

from .ai_agent import analyze_prompt
# from .haikal_agent import DatabaseInsightSystem, analyze_prompt_sync
from .utils.export import export_to_excel, export_to_csv

logger = logging.getLogger(__name__)

# analyze_prompt_ai = DatabaseInsightSystem

class HaikalBot(LoginRequiredMixin, View):
    def get(self, request, *args, **kwargs):
@@ -33,11 +33,9 @@ class HaikalBot(LoginRequiredMixin, View):
        if not prompt:
            error_msg = _("Prompt is required.") if language != "ar" else "الاستعلام مطلوب."
            return JsonResponse({"status": "error", "error": error_msg}, status=400)

        try:
            result = analyze_prompt(prompt)

            # Handle export requests if data is available
            if export and result.get("status") == "success" and result.get("data"):
                try:
                    if export == "excel":

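A hedged sketch of exercising this view with Django's test client; the URL path and credentials are placeholders, not taken from this commit:

# Hypothetical smoke test for the HaikalBot view; the URL path is an assumption.
from django.test import Client

client = Client()
client.login(username="demo", password="demo")  # the view uses LoginRequiredMixin
response = client.post("/haikalbot/", {"prompt": "How many cars are in stock?"})
print(response.json())
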
@@ -6,8 +6,10 @@ alabaster==1.0.0
albucore==0.0.24
albumentations==2.0.7
annotated-types==0.7.0
anthropic==0.52.2
anyio==4.9.0
arabic-reshaper==3.0.0
argcomplete==3.6.2
arrow==1.3.0
asgiref==3.8.1
astor==0.8.1
@@ -19,13 +21,17 @@ beautifulsoup4==4.13.4
bleach==6.2.0
blessed==1.21.0
blinker==1.9.0
boto3==1.38.29
botocore==1.38.29
Brotli==1.1.0
cachetools==5.5.2
cattrs==24.1.3
certifi==2025.4.26
cffi==1.17.1
chardet==5.2.0
charset-normalizer==3.4.2
click==8.2.1
cohere==5.15.0
colorama==0.4.6
commonmark==0.9.1
contourpy==1.3.2
@@ -55,6 +61,7 @@ django-cors-headers==4.7.0
django-countries==7.6.1
django-crispy-forms==2.4
django-debug-toolbar==5.2.0
django-easy-audit==1.3.7
django-extensions==4.1
django-filter==25.1
django-formtools==2.5.1
@@ -94,7 +101,11 @@ docutils==0.21.2
easy-thumbnails==2.10
emoji==2.14.1
et_xmlfile==2.0.0
eval_type_backport==0.2.2
executing==2.2.0
Faker==37.3.0
fasta2a==0.2.14
fastavro==1.11.1
filelock==3.18.0
fire==0.7.0
fonttools==4.58.0
@@ -102,28 +113,37 @@ fpdf==1.7.2
fpdf2==2.8.3
frozenlist==1.6.0
fsspec==2025.5.1
google-auth==2.40.2
google-genai==1.18.0
googleapis-common-protos==1.70.0
gprof2dot==2025.4.14
graphqlclient==0.2.4
greenlet==3.2.2
griffe==1.7.3
groq==0.26.0
h11==0.16.0
h2==4.2.0
hf-xet==1.1.3
hpack==4.1.0
hstspreload==2025.1.1
httpcore==1.0.9
httpx==0.28.1
httpx-sse==0.4.0
huggingface-hub==0.32.4
hyperframe==6.1.0
icalendar==6.3.1
idna==3.10
imageio==2.37.0
imagesize==1.4.1
imgaug==0.4.0
importlib_metadata==8.7.0
iso4217==1.12.20240625
isodate==0.7.2
isort==6.0.1
itsdangerous==2.2.0
Jinja2==3.1.6
jiter==0.10.0
jmespath==1.0.1
joblib==1.5.1
jsonpatch==1.33
jsonpointer==3.0.0
@@ -132,12 +152,15 @@ kiwisolver==1.4.8
langchain==0.3.25
langchain-community==0.3.24
langchain-core==0.3.61
langchain-ollama==0.3.3
langchain-text-splitters==0.3.8
langsmith==0.3.42
lazy_loader==0.4
ledger==1.0.1
libretranslatepy==2.1.4
lmdb==1.6.2
logfire==3.18.0
logfire-api==3.17.0
luhnchecker==0.0.12
lxml==5.4.0
Markdown==3.8
@@ -146,7 +169,9 @@ MarkupSafe==3.0.2
marshmallow==3.26.1
matplotlib==3.10.3
mccabe==0.7.0
mcp==1.9.2
mdurl==0.1.2
mistralai==1.8.1
MouseInfo==0.1.3
mpmath==1.3.0
multidict==6.4.4
@@ -158,11 +183,19 @@ num2words==0.5.14
numpy==2.2.6
oauthlib==3.2.2
ofxtools==0.9.5
ollama==0.4.8
openai==1.82.0
opencv-contrib-python==4.11.0.86
opencv-python==4.11.0.86
opencv-python-headless==4.11.0.86
openpyxl==3.1.5
opentelemetry-api==1.34.0
opentelemetry-exporter-otlp-proto-common==1.34.0
opentelemetry-exporter-otlp-proto-http==1.34.0
opentelemetry-instrumentation==0.55b0
opentelemetry-proto==1.34.0
opentelemetry-sdk==1.34.0
opentelemetry-semantic-conventions==0.55b0
opt_einsum==3.4.0
orjson==3.10.18
outcome==1.3.0.post0
@@ -174,18 +207,25 @@ phonenumbers==8.13.42
pillow==10.4.0
platformdirs==4.3.8
prometheus_client==0.22.0
prompt_toolkit==3.0.51
propcache==0.3.1
protobuf==6.31.0
protobuf==5.29.5
psycopg==3.2.9
psycopg-binary==3.2.9
psycopg-c==3.2.9
psycopg2-binary==2.9.10
py-moneyed==3.0
pyasn1==0.6.1
pyasn1_modules==0.4.2
PyAutoGUI==0.9.54
pyclipper==1.3.0.post6
pycodestyle==2.13.0
pycparser==2.22
pydantic==2.11.5
pydantic-ai==0.2.14
pydantic-ai-slim==0.2.14
pydantic-evals==0.2.14
pydantic-graph==0.2.14
pydantic-settings==2.9.1
pydantic_core==2.33.2
pydotplus==2.0.2
@@ -212,6 +252,7 @@ python-bidi==0.6.6
python-dateutil==2.9.0.post0
python-docx==1.1.2
python-dotenv==1.1.0
python-multipart==0.0.20
python-openid==2.2.5
python-slugify==8.0.4
python-stdnum==2.1
@@ -234,12 +275,16 @@ requests-oauthlib==2.0.0
requests-toolbelt==1.0.0
rfc3986==2.0.0
rich==14.0.0
rsa==4.9.1
rubicon-objc==0.5.0
s3transfer==0.13.0
sacremoses==0.1.1
safetensors==0.5.3
scikit-image==0.25.2
scikit-learn==1.6.1
scipy==1.15.3
selenium==4.33.0
sentence-transformers==4.1.0
sentencepiece==0.2.0
shapely==2.1.1
simsimd==6.2.1
@@ -251,7 +296,9 @@ sortedcontainers==2.4.0
soupsieve==2.7
SQLAlchemy==2.0.41
sqlparse==0.5.3
sse-starlette==2.3.6
stanza==1.10.1
starlette==0.47.0
stringzilla==3.12.5
suds==1.2.0
swapper==1.3.0
@@ -264,14 +311,17 @@ threadpoolctl==3.6.0
tifffile==2025.5.24
tinycss2==1.4.0
tinyhtml5==2.0.0
tokenizers==0.21.1
tomli==2.2.1
tomlkit==0.13.2
torch==2.7.0
tqdm==4.67.1
transformers==4.52.4
trio==0.30.0
trio-websocket==0.12.2
twilio==9.6.1
types-python-dateutil==2.9.0.20250516
types-requests==2.32.0.20250602
typing-inspect==0.9.0
typing-inspection==0.4.1
typing_extensions==4.13.2
@@ -279,6 +329,7 @@ tzdata==2025.2
Unidecode==1.4.0
upgrade-requirements==1.7.0
urllib3==2.4.0
uvicorn==0.34.3
vin==0.6.2
vininfo==1.8.0
vishap==0.1.5
@@ -287,10 +338,13 @@ wcwidth==0.2.13
weasyprint==65.1
webencodings==0.5.1
websocket-client==1.8.0
websockets==15.0.1
Werkzeug==3.1.3
wikipedia==1.4.0
wrapt==1.17.2
wsproto==1.2.0
xmlsec==1.3.15
yarl==1.20.0
zipp==3.22.0
zopfli==0.2.3.post1
zstandard==0.23.0

77  run_haikal_qa.py  (new file)
@@ -0,0 +1,77 @@
from langchain_community.document_loaders import TextLoader
from langchain.indexes import VectorstoreIndexCreator
from langchain_community.llms import Ollama
from langchain.chains import RetrievalQA
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain.prompts import PromptTemplate
# from django.conf import settings


# Load YAML doc
loader = TextLoader("haikal_kb.yaml")

# Create embeddings model
embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")

# Create an instance of VectorstoreIndexCreator with the embeddings
index_creator = VectorstoreIndexCreator(embedding=embeddings)

# Then call the from_loaders method on the instance
index = index_creator.from_loaders([loader])

# Create LLM instance
llm = Ollama(model="qwen3:8b", temperature=0.3)

# Define a custom prompt template for instructional responses
template = """
You are Haikal, an assistant for the car inventory management system.
Your goal is to provide clear step-by-step instructions for users to complete tasks.

Use the following pieces of context to answer the question at the end.
If you don't know the answer, just say you don't know. Don't try to make up an answer.

Context:
{context}

Question: {question}

Provide a clear step-by-step guide with numbered instructions. Include:
1. Where to click in the interface
2. What to enter or select
3. Any buttons to press to complete the action
4. Any alternatives or shortcuts if available

Helpful Step-by-Step Instructions:"""

PROMPT = PromptTemplate(
    template=template,
    input_variables=["context", "question"]
)

# Setup QA chain
qa = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=index.vectorstore.as_retriever(),
    return_source_documents=True,
    chain_type_kwargs={"prompt": PROMPT}
)

# Function to run a query
def ask_haikal(query):
    response = qa.invoke({"query": query})
    print("\n" + "="*50)
    print(f"Question: {query}")
    print("="*50)
    print("\nAnswer:")
    print(response["result"])
    print("\nSources:")
    for doc in response["source_documents"]:
        print(f"- {doc.metadata.get('source', 'Unknown source')}")
    print("="*50)
    return response["result"]

# Example query
if __name__ == "__main__":
    query = "How do I add a new car to the inventory? answer in Arabic"
    ask_haikal(query)

159
sql_agent.py
Normal file
159
sql_agent.py
Normal file
@ -0,0 +1,159 @@
|
||||
import asyncio
|
||||
import sqlite3
|
||||
import json
|
||||
from typing import List, Dict
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from pydantic_ai import Agent, RunContext
|
||||
from pydantic_ai.models.openai import OpenAIModel
|
||||
from pydantic_ai.providers.openai import OpenAIProvider
|
||||
import os
|
||||
|
||||
import logfire
|
||||
|
||||
logfire.configure(send_to_logfire='if-token-present')
|
||||
logfire.instrument_pydantic_ai()
|
||||
|
||||
# Define the OpenAI model (replace with your actual model if needed)
|
||||
model = OpenAIModel(
|
||||
model_name="qwen2.5:14b", # Or your preferred model
|
||||
provider=OpenAIProvider(base_url='http://localhost:11434/v1') # Or your provider
|
||||
)
|
||||
|
||||
|
||||
class DatabaseSchema(BaseModel):
|
||||
tables: Dict[str, List[Dict[str, str]]] = Field(
|
||||
description="A dictionary where keys are table names and values are lists of column dictionaries (name, type)")
|
||||
|
||||
|
||||
# Agent to get the database schema
|
||||
schema_agent = Agent(
|
||||
model,
|
||||
deps_type=str,
|
||||
output_type=str,
|
||||
system_prompt="""You are a helpful assistant that extracts the schema of a SQLite database.
|
||||
When the user provides a database path, use the <tool>get_database_schema</tool> to retrieve the schema.
|
||||
Your ONLY response should be the raw JSON string representing the database schema. Do not include any other text.
|
||||
The JSON should be a dictionary where keys are table names, and values are lists of column dictionaries.
|
||||
Each column dictionary should include 'name', 'type', 'notnull', 'dflt_value', and 'pk' keys.
|
||||
If there is an error, return a JSON string containing an "error" key with a list of error messages."""
|
||||
)
|
||||
|
||||
|
||||
@schema_agent.tool
|
||||
async def get_database_schema(ctx: RunContext[str], db_path: str) -> str:
|
||||
"""Retrieves the schema of the SQLite database and returns it as a JSON string."""
|
||||
print(f"Database path: {db_path}")
|
||||
try:
|
||||
conn = sqlite3.connect(db_path)
|
||||
cursor = conn.cursor()
|
||||
|
||||
cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
|
||||
tables = [row[0] for row in cursor.fetchall()]
|
||||
print(tables)
|
||||
|
||||
schema = {}
|
||||
for table in tables:
|
||||
cursor.execute(f"PRAGMA table_info({table})")
|
||||
columns = [
|
||||
{
|
||||
"name": col[1],
|
||||
"type": col[2],
|
||||
"notnull": col[3],
|
||||
"dflt_value": col[4],
|
||||
"pk": col[5],
|
||||
}
|
||||
for col in cursor.fetchall()
|
||||
]
|
||||
schema[table] = columns
|
||||
print(schema)
|
||||
conn.close()
|
||||
return json.dumps(schema)
|
||||
except sqlite3.Error as e:
|
||||
error_json = json.dumps({"error": [str(e)]})
|
||||
return error_json
|
||||
except Exception as e:
|
||||
error_json = json.dumps({"error": [str(e)]})
|
||||
return error_json
|
||||
|
||||
|
||||
# Agent to generate and execute SQL queries
|
||||
sql_agent = Agent(
|
||||
model,
|
||||
deps_type=DatabaseSchema,
|
||||
output_type=str,
|
||||
system_prompt="""You are a highly precise SQL query generator for a SQLite database.
|
||||
You are given the EXACT database schema, which is a dictionary where keys are table names and values are lists of column dictionaries (with 'name' and 'type').
|
||||
Your ABSOLUTE priority is to generate SQL queries that ONLY use the table and column names exactly as they appear in this schema to answer the user's question.
|
||||
|
||||
Follow these strict steps:
|
||||
1. **Analyze User Question:** Understand the user's request.
|
||||
2. **Match Schema EXACTLY:** Identify the specific table(s) and column(s) in the provided schema whose names EXACTLY match the entities and information requested in the user's question.
|
||||
3. **Generate STRICT SQL:** Construct a valid SQL query that selects the identified column(s) from the identified table(s). You MUST use the exact names from the schema. Do not use aliases or make any assumptions about naming conventions. Aim for the simplest possible query.
|
||||
4. **Execute Query:** Use the <tool>execute_sql_query</tool> to run your generated SQL.
|
||||
5. **Return interactive Answer as if you are a sports person:** Provide a direct and simple answer to the user's question based on the query results.
|
||||
6. **No Results:** If the query returns empty list, respond with: 'No matching entries found.'
|
||||
7. **Error Handling:** If there's any error in generating or executing the SQL, return a JSON string with an "error" key and a list of error messages.
|
||||
|
||||
|
||||
"""
|
||||
)
|
||||
# Example:
|
||||
# Schema: {'Country': [{'name': 'id', 'type': 'INTEGER'}, {'name': 'name', 'type': 'TEXT'}]}
|
||||
# User Question: "What are the country names?"
|
||||
# Generated SQL: SELECT name FROM Country;
|
||||
# Expected Answer: The countries are Belgium, England, France, ...
|
||||
|
||||
@sql_agent.tool
|
||||
async def execute_sql_query(ctx: RunContext[DatabaseSchema], query: str) -> str:
|
||||
"""Executes the SQL query and returns a simple string answer."""
|
||||
db_path = os.path.join(os.getcwd(), 'db.sqlite3')
|
||||
print(query)
|
||||
try:
|
||||
conn = sqlite3.connect(db_path)
|
||||
cursor = conn.cursor()
|
||||
cursor.execute(query)
|
||||
results = cursor.fetchall()
|
||||
columns = [description[0] for description in cursor.description]
|
||||
rows = [dict(zip(columns, row)) for row in results]
|
||||
conn.close()
|
||||
print(rows)
|
||||
return rows
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
|
||||
async def main():
    db_path = os.path.join(os.getcwd(), 'db.sqlite3')
    print(f"Database path: {db_path}")
    user_question = "how many cars do we have in the inventory"

    # 1. Get the database schema
    schema_result = await schema_agent.run(db_path)
    print("Schema Agent Response:", schema_result)
    print("Schema Agent Output:", schema_result.output)

    if "error" in schema_result.output:
        print(f"Error getting schema: {schema_result.output}")
        return

    try:
        schema_data = json.loads(schema_result.output)
        database_schema = DatabaseSchema(tables=schema_data)
        print("Parsed Database Schema:", database_schema)

        # 2. Use the schema to answer the user question.
        # Pass the schema as the agent's dependencies (matches deps_type=DatabaseSchema).
        sql_response = await sql_agent.run(user_question, deps=database_schema)
        print("SQL Agent Response:", sql_response)
        print("SQL Agent Output:", sql_response.output)

        if "error" in sql_response.output:
            print(f"Error executing SQL: {sql_response.output}")

    except json.JSONDecodeError:
        print(f"Error: Could not parse schema agent response as JSON: {schema_result.output}")


if __name__ == "__main__":
    asyncio.run(main())
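
For callers that are not already inside an event loop (a management command or the Django shell, for example), a minimal sketch of the same two-agent flow using pydantic_ai's synchronous run_sync wrapper; the helper name is illustrative and not part of this module:

# Sketch only: synchronous variant of main(), assuming the same
# schema_agent / sql_agent / DatabaseSchema objects defined above.
def answer_question_sync(user_question: str) -> str:
    db_path = os.path.join(os.getcwd(), 'db.sqlite3')
    schema_result = schema_agent.run_sync(db_path)
    schema = DatabaseSchema(tables=json.loads(schema_result.output))
    return sql_agent.run_sync(user_question, deps=schema).output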
61
static/js/tours/add-new-car_tour.json
Normal file
@ -0,0 +1,61 @@
{
  "name": "Add New Car",
  "description": "How to add a new car to the inventory",
  "steps": [
    {
      "title": "Step 1",
      "intro": "Navigate to the Inventory section by clicking 'Inventory' in the main menu",
      "position": "bottom",
      "element": "#inventory-nav",
      "click": "#inventory-nav"
    },
    {
      "title": "Step 2",
      "intro": "Click the 'Inventory' button in the top right corner",
      "position": "bottom",
      "element": ".parent-wrapper label-1"
    },
    {
      "title": "Step 3",
      "intro": "Click the 'Add Car' button in the top right corner",
      "position": "bottom",
      "element": "#btn-add-car"
    },
    {
      "title": "Step 4",
      "intro": "Enter the VIN number or scan it using the barcode scanner",
      "position": "bottom",
      "element": "#nv-inventory"
    },
    {
      "title": "Step 5",
      "intro": "Select the car make from the dropdown menu",
      "position": "bottom",
      "element": "#make-select, select[name='make'], .make-field"
    },
    {
      "title": "Step 6",
      "intro": "Select the car series from the available options",
      "position": "bottom",
      "element": "#series-select, select[name='series'], .series-field"
    },
    {
      "title": "Step 7",
      "intro": "Select the trim level for the car",
      "position": "bottom",
      "element": "#trim-select, select[name='trim'], .trim-field"
    },
    {
      "title": "Step 8",
      "intro": "Fill in additional details like color, mileage, and price",
      "position": "bottom",
      "element": "#price-input, input[name='price'], .price-field"
    },
    {
      "title": "Step 9",
      "intro": "Click 'Save' to add the car to inventory, or 'Save & Add Another' to continue adding cars",
      "position": "bottom",
      "element": "#inventory-menu, .inventory-nav, nav .inventory"
    }
  ]
}
52
static/js/tours/create-new-invoice_tour.json
Normal file
@ -0,0 +1,52 @@
{
  "name": "Create New Invoice",
  "description": "How to create a new invoice",
  "steps": [
    {
      "title": "Step 1",
      "intro": "Navigate to the Finance section by clicking 'Finance' in the main menu",
      "position": "bottom",
      "element": "#finance-menu, .finance-nav, nav .finance"
    },
    {
      "title": "Step 2",
      "intro": "Click the 'Invoices' tab",
      "position": "bottom",
      "element": "#invoice-section, .invoice-tab, #create-invoice"
    },
    {
      "title": "Step 3",
      "intro": "Click the 'Create New Invoice' button",
      "position": "bottom",
      "element": "#invoice-section, .invoice-tab, #create-invoice"
    },
    {
      "title": "Step 4",
      "intro": "Select a customer from the dropdown or click 'Add New Customer'",
      "position": "bottom",
      "element": "#customer-select, select[name='customer'], .customer-field"
    },
    {
      "title": "Step 5",
      "intro": "Select the car(s) to include in the invoice",
      "position": "bottom",
      "element": "#invoice-section, .invoice-tab, #create-invoice"
    },
    {
      "title": "Step 6",
      "intro": "Add any additional services or parts by clicking 'Add Item'",
      "position": "bottom"
    },
    {
      "title": "Step 7",
      "intro": "Set the payment terms and due date",
      "position": "bottom"
    },
    {
      "title": "Step 8",
      "intro": "Click 'Save Draft' to save without finalizing, or 'Finalize Invoice' to complete",
      "position": "bottom",
      "element": "button[type='submit'], .btn-save, #save-button"
    }
  ]
}
163
static/js/tours/help-button.js
Normal file
@ -0,0 +1,163 @@
/**
 * Help Button Component
 * Provides context-aware help based on the current page
 */
class HelpButton {
    constructor(options = {}) {
        this.options = Object.assign({
            position: 'bottom-right',
            icon: 'question-circle',
            text: 'Help',
            autoDetect: true
        }, options);

        this.pageToTourMap = {
            '/inventory/': 'inventory_overview',
            '/inventory/add/': 'add_new_car',
            '/inventory/edit/': 'edit_car',
            '/finance/invoices/': 'manage_invoices',
            '/finance/invoices/create/': 'create_new_invoice',
            '/customers/': 'manage_customers',
            '/customers/add/': 'add_new_customer'
        };

        this.render();
        this.attachEvents();
    }

    render() {
        // Create the help button
        const button = document.createElement('div');
        button.className = `help-button ${this.options.position}`;
        button.innerHTML = `
            <button class="btn btn-phoenix-primary" id="context-help-btn"
                    data-bs-toggle="tooltip" title="Get help for this page">
                <i class="bi bi-${this.options.icon}"></i>
                HELP
            </button>
        `;

        // Add styles
        const style = document.createElement('style');
        style.textContent = `
            .help-button {
                position: fixed;
                z-index: 1000;
            }
            .help-button.bottom-right {
                bottom: 20px;
                right: 20px;
            }
            .help-button.bottom-left {
                bottom: 20px;
                left: 20px;
            }
            .help-button.top-right {
                top: 20px;
                right: 20px;
            }
            .help-button.top-left {
                top: 20px;
                left: 20px;
            }
        `;

        document.head.appendChild(style);
        document.body.appendChild(button);
    }

    attachEvents() {
        const helpButton = document.getElementById('context-help-btn');
        if (!helpButton) return;

        helpButton.addEventListener('click', () => {
            this.showContextHelp();
        });

        // Initialize tooltip
        new bootstrap.Tooltip(helpButton);
    }

    showContextHelp() {
        // Detect current page and show appropriate tour
        if (this.options.autoDetect) {
            const currentPath = window.location.pathname;
            let tourSlug = null;

            // Find the best match for the current path
            for (const [path, slug] of Object.entries(this.pageToTourMap)) {
                if (currentPath.includes(path)) {
                    tourSlug = slug;
                    break;
                }
            }

            if (tourSlug) {
                window.tourManager.loadTour(tourSlug);
                return;
            }
        }

        // If no specific tour found or autoDetect is off, show help menu
        this.showHelpMenu();
    }

    showHelpMenu() {
        // Create a modal with available help options
        const modal = document.createElement('div');
        modal.className = 'modal fade';
        modal.id = 'helpModal';
        modal.setAttribute('tabindex', '-1');

        modal.innerHTML = `
            <div class="modal-dialog">
                <div class="modal-content">
                    <div class="modal-header">
                        <h5 class="modal-title">Help & Guides</h5>
                        <button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
                    </div>
                    <div class="modal-body">
                        <div class="list-group">
                            <a href="#" class="list-group-item list-group-item-action"
                               onclick="window.tourManager.loadTour('add_new_car');">
                                <i class="bi bi-plus-circle me-2"></i> How to Add a New Car
                            </a>
                            <a href="#" class="list-group-item list-group-item-action"
                               onclick="window.tourManager.loadTour('create_new_invoice');">
                                <i class="bi bi-file-text me-2"></i> How to Create an Invoice
                            </a>
                            <a href="#" class="list-group-item list-group-item-action"
                               onclick="window.tourManager.loadTour('manage_customers');">
                                <i class="bi bi-people me-2"></i> How to Manage Customers
                            </a>
                            <div class="dropdown-divider"></div>
                            <a href="/tours/" class="list-group-item list-group-item-action">
                                <i class="bi bi-collection me-2"></i> View All Guides
                            </a>
                            <a href="/support/" class="list-group-item list-group-item-action">
                                <i class="bi bi-headset me-2"></i> Contact Support
                            </a>
                        </div>
                    </div>
                </div>
            </div>
        `;

        document.body.appendChild(modal);

        const modalInstance = new bootstrap.Modal(modal);
        modalInstance.show();

        // Remove modal from DOM after it's hidden
        modal.addEventListener('hidden.bs.modal', () => {
            modal.remove();
        });
    }
}

// Initialize help button on all pages
document.addEventListener('DOMContentLoaded', () => {
    window.helpButton = new HelpButton();
});

// export { HelpButton };
134
static/js/tours/tour-manager.js
Normal file
@ -0,0 +1,134 @@
/**
 * Tour Manager for Car Inventory System
 * Uses IntroJS to provide guided tours of the application
 */

// Read a cookie value by name (used for Django's CSRF token).
function getCsrfToken(name) {
    let cookieValue = null;
    if (document.cookie && document.cookie !== "") {
        const cookies = document.cookie.split(";");
        for (let cookie of cookies) {
            cookie = cookie.trim();
            if (cookie.substring(0, name.length + 1) === name + "=") {
                cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
                break;
            }
        }
    }
    return cookieValue;
}

class TourManager {
    constructor() {
        this.introJs = introJs();
        this.currentTour = null;
        this.tourData = null;
        this.tourSlug = null;

        // Configure IntroJS defaults
        this.introJs.setOptions({
            showStepNumbers: true,
            showBullets: true,
            showProgress: true,
            scrollToElement: true,
            disableInteraction: false,
            doneLabel: 'Finish',
            nextLabel: 'Next →',
            prevLabel: '← Back',
            exitOnEsc: true,
            exitOnOverlayClick: false
        });

        // Set up event listeners
        this.introJs.oncomplete(() => this.onTourComplete());
        this.introJs.onexit(() => this.onTourExit());
    }

    /**
     * Load and start a tour by its slug
     * @param {string} slug - The tour slug
     */
    async loadTour(slug) {
        try {
            this.tourSlug = slug;
            const response = await fetch(`/tours/data/${slug}/`);
            if (!response.ok) {
                throw new Error('Failed to load tour data');
            }

            const data = await response.json();
            this.tourData = data.tour;

            // If user already completed this tour, ask if they want to repeat
            if (data.completed && !confirm('You have already completed this guide. Would you like to view it again?')) {
                return;
            }

            this.startTour();
        } catch (error) {
            console.error('Error loading tour:', error);
            alert('Failed to load the interactive guide. Please try again later.');
        }
    }

    /**
     * Start the currently loaded tour
     */
    startTour() {
        if (!this.tourData) {
            console.error('No tour data loaded');
            return;
        }

        this.introJs.setOptions({
            steps: this.tourData.steps
        });

        this.introJs.start();
    }

    /**
     * Handle tour completion
     */
    onTourComplete() {
        if (!this.tourSlug) return;

        // Mark the tour as completed on the server
        fetch(`/tours/complete/${this.tourSlug}/`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'X-CSRFToken': getCsrfToken('csrftoken')
            }
        }).catch(error => {
            console.error('Error marking tour as completed:', error);
        });

        // Show success message
        alert('Congratulations! You have completed the guide.');
    }

    /**
     * Handle tour exit (without completion)
     */
    onTourExit() {
        console.log('Tour exited');
    }
}

// Initialize the tour manager
window.tourManager = new TourManager();

// Function to start a tour from a link
function startTour(slug) {
    window.tourManager.loadTour(slug);
    return false; // Prevent default link action
}

// Export for use in other modules
// export { startTour };
2811
static/js/tours/ui_element_map.json
Normal file
File diff suppressed because it is too large
@ -81,7 +81,8 @@
</div>
<div class="col-md-6">
<div class="d-flex align-items-center">
<i class="fas fa-dollar-sign me-2 text-primary"></i>

<span class="icon-saudi_riyal text-primary"></span>
<strong class="me-2">{% trans 'Service price' %}:</strong> {{ appointment.get_appointment_amount_to_pay_text }}
</div>
</div>

@ -40,6 +40,7 @@
<link href="{% static 'vendors/flatpickr/flatpickr.min.css' %}" rel="stylesheet">
<link href="{% static 'css/custom.css' %}" rel="stylesheet">
<link rel="stylesheet" href="https://unicons.iconscout.com/release/v4.0.8/css/line.css">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/intro.js/7.2.0/introjs.css" integrity="sha512-4OzqLjfh1aJa7M33b5+h0CSx0Q3i9Qaxlrr1T/Z+Vz+9zs5A7GM3T3MFKXoreghi3iDOSbkPMXiMBhFO7UBW/g==" crossorigin="anonymous" referrerpolicy="no-referrer" />
{% if LANGUAGE_CODE == 'ar' %}
<link href="{% static 'css/theme-rtl.min.css' %}" type="text/css" rel="stylesheet" id="style-rtl">
<link href="{% static 'css/user-rtl.min.css' %}" type="text/css" rel="stylesheet" id="user-style-rtl">
@ -76,6 +77,7 @@
{% endblock period_navigation %}

{% block content %}

{% endblock content%}
{% block body %}
{% endblock body%}
@ -96,6 +98,11 @@
<script src="{% static 'vendors/anchorjs/anchor.min.js' %}"></script>
<script src="{% static 'vendors/is/is.min.js' %}"></script>
<script src="{% static 'vendors/fontawesome/all.min.js' %}"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/intro.js/7.2.0/intro.js" integrity="sha512-f26fxKZJiF0AjutUaQHNJ5KnXSisqyUQ3oyfaoen2apB1wLa5ccW3lmtaRe2jdP5kh4LF2gAHP9xQbx7wYhU5w==" crossorigin="anonymous" referrerpolicy="no-referrer"></script>
<!-- Tour Manager -->
<script src="{% static 'js/tours/tour-manager.js' %}"></script>

<script src="{% static 'js/tours/help-button.js' %}"></script>
<script src="{% static 'vendors/lodash/lodash.min.js' %}"></script>
<script src="{% static 'vendors/list.js/list.min.js' %}"></script>
<script src="{% static 'vendors/feather-icons/feather.min.js' %}"></script>
@ -115,6 +122,7 @@
<script src="{% static 'vendors/flatpickr/flatpickr.min.js' %}"></script>

<script>

{% if entity_slug %}
let entitySlug = "{{ view.kwargs.entity_slug }}"
{% endif %}

@ -58,13 +58,13 @@
</th>
<th class="align-middle white-space-nowrap text-uppercase" scope="col" style="width: 10%;">
<div class="d-inline-flex flex-center">
<div class="d-flex align-items-center bg-info-subtle rounded me-2"><span class="text-info-dark" data-feather="database"></span></div>
<div class="d-flex align-items-center bg-warning-subtle rounded me-2"><span class="text-warning-dark" data-feather="zap"></span></div>
<span>{{ _("Action")|capfirst }}</span>
</div>
</th>
<th class="align-middle white-space-nowrap text-uppercase" scope="col" style="width: 10%;">
<div class="d-inline-flex flex-center">
<div class="d-flex align-items-center bg-info-subtle rounded me-2"><span class="text-info-dark" data-feather="database"></span></div>
<div class="d-flex align-items-center bg-success-subtle rounded me-2"><span class="text-success-dark" data-feather="user-check"></span></div>
<span>{{ _("Assigned To")|capfirst }}</span>
</div>
</th>

@ -8,7 +8,7 @@
<h2 class="mb-5">{{ _("Opportunities") }}</h2>
<div class="d-xl-flex justify-content-between">
<div class="mb-3">
<a class="btn btn-primary me-4" href="{% url 'opportunity_create' %}"><span class="fas fa-plus me-2"></span>{{ _("Add Opportunity") }}</a>
<a class="btn btn-primary me-4" href="{% url 'opportunity_create' slug%}"><span class="fas fa-plus me-2"></span>{{ _("Add Opportunity") }}</a>
</div>
</div>
</div>
@ -24,7 +24,7 @@
<div class="d-flex gap-3">
<button class="btn p-0" type="button" data-bs-toggle="dropdown" data-boundary="window" aria-haspopup="true" aria-expanded="false" data-bs-reference="parent"><span class="fas fa-ellipsis-h fs-10"></span></button>
<ul class="dropdown-menu dropdown-menu-end">
<li><a class="dropdown-item" href="{% url 'update_opportunity' pk=opportunity.pk %}">{{ _("Edit") }}</a></li>
<li><a class="dropdown-item" href="{% url 'update_opportunity' opportunity.pk %}">{{ _("Edit") }}</a></li>
<li><button class="dropdown-item text-danger" data-bs-toggle="modal" data-bs-target="#deleteModal">{% trans "Delete" %}</button></li>
</ul>
</div>
@ -74,7 +74,7 @@
</tr>
<tr>
<td class="py-1">
<div class="d-flex align-items-center"><span class="me-2 text-body-tertiary" data-feather="dollar-sign"></span>
<div class="d-flex align-items-center"><span class="icon-saudi_riyal me-2 text-body-tertiary"></span>
<p class="fw-semibold fs-9 mb-0 text-body-tertiary">{{ _("Expected Revenue")}}</p>
</div>
</td>

@ -11,7 +11,7 @@
<p class="navbar-vertical-label">Apps</p>
<hr class="navbar-vertical-line" />
<div class="nav-item-wrapper">
<a class="nav-link dropdown-indicator label-1" href="#nv-inventory" role="button" data-bs-toggle="collapse" aria-expanded="false" aria-controls="nv-inventory">
<a id="inventory-nav" class="nav-link dropdown-indicator label-1 inventory-nav" href="#nv-inventory" role="button" data-bs-toggle="collapse" aria-expanded="false" aria-controls="nv-inventory">
<div class="d-flex align-items-center">
<div class="dropdown-indicator-icon-wrapper"><span class="fas fa-caret-right dropdown-indicator-icon"></span></div>
<span class="nav-link-icon"><span class="fas fa-warehouse"></span></span><span class="nav-link-text">{% trans "Inventory"|capfirst %}</span>
@ -22,7 +22,7 @@
<li class="collapsed-nav-item-title d-none">{% trans "Inventory"|capfirst %}</li>
{% if perms.inventory.add_car %}
<li class="nav-item">
<a class="nav-link" href="{% url 'car_add' %}">
<a id="btn-add-car" class="nav-link btn-add-car" href="{% url 'car_add' %}">
<div class="d-flex align-items-center">
<span class="nav-link-icon"><span class="fas fa-plus-circle"></span></span><span class="nav-link-text">{% trans "add car"|capfirst %}</span>
</div>

@ -89,7 +89,7 @@
<a href="{% url 'payment_create' invoice.pk %}" class="btn btn-phoenix-success"><span class="d-none d-sm-inline-block"><i class="fa-solid fa-money-bill"></i> {% trans 'Record Payment' %}</span></a>
{% endif %}
{% if not invoice.is_paid %}
<button {% if invoice.is_review or invoice.amount_paid|to_int < invoice.amount_due|to_int %}disabled{% endif %} id="mark_invoice_as_paid" class="btn btn-phoenix-secondary" data-bs-toggle="modal" data-bs-target="#mark_as_paid_Modal"><span class="d-none d-sm-inline-block"><i class="fa-solid fa-money-check-dollar"></i> {% trans 'Mark as Paid' %}</span></button>
<button {% if invoice.is_review or invoice.amount_paid|to_int < invoice.amount_due|to_int %}disabled{% endif %} id="mark_invoice_as_paid" class="btn btn-phoenix-secondary" data-bs-toggle="modal" data-bs-target="#mark_as_paid_Modal"><span class="d-none d-sm-inline-block"><span class="icon-saudi_riyal"></span> {% trans 'Mark as Paid' %}</span></button>
{% endif %}
<a href="{% url 'invoice_preview' invoice.pk %}" class="btn btn-phoenix-primary"><span class="d-none d-sm-inline-block"><i class="fa-regular fa-eye"></i> {% trans 'Preview' %}</span></a>
</div>

@ -6,7 +6,7 @@
{% block content %}
<div class="row mt-4">
<div class="d-flex justify-content-between mb-2">
<h3 class="mb-3"><i class="fa-solid fa-hand-holding-dollar"></i> {% trans "Payments" %}</h3>
<h3 class="mb-3"><span class="icon-saudi_riyal"></span> {% trans "Payments" %}</h3>
{% comment %} <a href="{% url 'payment_create' %}" class="btn btn-sm btn-phoenix-success ">{% trans "Add Payment" %}</a> {% endcomment %}

</div>

31
templates/tours/start_tour.html
Normal file
@ -0,0 +1,31 @@
{% extends "base.html" %}

{% block title %}{{ tour.name }} - Interactive Guide{% endblock %}

{% block content %}
<div class="container my-4">
    <h1>{{ tour.name }}</h1>
    <p class="lead">{{ tour.description }}</p>

    <div class="card mb-4">
        <div class="card-body">
            <h5 class="card-title">Ready to Start</h5>
            <p>This interactive guide will walk you through each step of the process.</p>
            <button class="btn btn-primary" onclick="window.tourManager.loadTour('{{ tour.slug }}')">
                Start Guide Now
            </button>
        </div>
    </div>
</div>
{% endblock %}

{% block extra_js %}
<script>
    // Auto-start the tour after a short delay
    document.addEventListener('DOMContentLoaded', () => {
        setTimeout(() => {
            window.tourManager.loadTour('{{ tour.slug }}');
        }, 1000);
    });
</script>
{% endblock %}
32
templates/tours/tour_list.html
Normal file
@ -0,0 +1,32 @@
{% extends "base.html" %}

{% block title %}Interactive Guides{% endblock %}

{% block content %}
<div class="container my-4">
    <h1>Interactive Guides</h1>
    <p class="lead">Learn how to use the car inventory system with these interactive step-by-step guides.</p>

    <div class="row mt-4">
        {% for tour in tours %}
        <div class="col-md-4 mb-4">
            <div class="card h-100">
                <div class="card-body">
                    <h5 class="card-title">{{ tour.name }}</h5>
                    <p class="card-text">{{ tour.description }}</p>
                </div>
                <div class="card-footer">
                    <a href="{% url 'start_tour' tour.slug %}" class="btn btn-primary">Start Guide</a>
                </div>
            </div>
        </div>
        {% empty %}
        <div class="col-12">
            <div class="alert alert-info">
                No interactive guides available at this time.
            </div>
        </div>
        {% endfor %}
    </div>
</div>
{% endblock %}
0
tours/__init__.py
Normal file
6
tours/admin.py
Normal file
@ -0,0 +1,6 @@
from django.contrib import admin
from . import models

# Register your models here.
admin.site.register(models.Tour)
admin.site.register(models.TourCompletion)
6
tours/apps.py
Normal file
@ -0,0 +1,6 @@
from django.apps import AppConfig


class ToursConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'tours'
0
tours/management/__init__.py
Normal file
0
tours/management/commands/__init__.py
Normal file
84
tours/management/commands/generate_tours.py
Normal file
@ -0,0 +1,84 @@
from django.core.management.base import BaseCommand
import yaml
import os
import json
from django.conf import settings
from django.utils.text import slugify


class Command(BaseCommand):
    help = "Generate IntroJS tour definitions from workflow documentation"

    def handle(self, *args, **kwargs):
        input_file = "haikal_kb.yaml"
        output_dir = os.path.join(settings.BASE_DIR, 'static', 'js', 'tours')

        # Create output directory if it doesn't exist
        os.makedirs(output_dir, exist_ok=True)

        try:
            with open(input_file, 'r', encoding='utf-8') as f:
                kb = yaml.safe_load(f)
        except Exception as e:
            self.stdout.write(self.style.ERROR(f"Error reading knowledge base file: {e}"))
            return

        workflows = kb.get("user_workflows", {})

        tours_created = 0

        # Map of common UI elements to their likely selectors
        element_selectors = {
            "inventory": "#inventory-menu, .inventory-nav, nav .inventory",
            "add car": "#add-car-button, .btn-add-car, button:contains('Add Car')",
            "save": "button[type='submit'], .btn-save, #save-button",
            "cancel": ".btn-cancel, #cancel-button, button:contains('Cancel')",
            "vin": "#vin-input, input[name='vin'], .vin-field",
            "make": "#make-select, select[name='make'], .make-field",
            "model": "#model-select, select[name='model'], .model-field",
            "series": "#series-select, select[name='series'], .series-field",
            "trim": "#trim-select, select[name='trim'], .trim-field",
            "price": "#price-input, input[name='price'], .price-field",
            "color": "#color-select, select[name='color'], .color-field",
            "invoice": "#invoice-section, .invoice-tab, #create-invoice",
            "customer": "#customer-select, select[name='customer'], .customer-field",
            "finance": "#finance-menu, .finance-nav, nav .finance",
        }

        for workflow_name, workflow in workflows.items():
            steps = workflow.get("steps", [])
            if not steps:
                continue

            tour_steps = []

            for i, step in enumerate(steps):
                # Try to identify UI element from step description
                element = None
                for key, selector in element_selectors.items():
                    if key.lower() in step.lower():
                        element = selector
                        break

                tour_step = {
                    "title": f"Step {i + 1}",
                    "intro": step,
                    "position": "bottom"
                }

                if element:
                    tour_step["element"] = element

                tour_steps.append(tour_step)

            # Save the tour definition as JSON, using the same slug-based file name
            # that import_tours records on the Tour model (e.g. add-new-car_tour.json).
            tour_filename = f"{slugify(workflow_name)}_tour.json"
            with open(os.path.join(output_dir, tour_filename), 'w', encoding='utf-8') as f:
                json.dump({
                    "name": workflow_name,
                    "description": workflow.get("description", ""),
                    "steps": tour_steps
                }, f, indent=2)

            tours_created += 1

        self.stdout.write(self.style.SUCCESS(f"✅ Created {tours_created} IntroJS tour definitions in {output_dir}"))
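
The command only relies on a small slice of haikal_kb.yaml. After yaml.safe_load it expects roughly the structure sketched below; the workflow name and step text are illustrative (taken from the generated add-new-car tour above), not the real knowledge-base contents:

# Illustrative shape of the parsed knowledge base, not the actual file:
kb = {
    "user_workflows": {
        "Add New Car": {
            "description": "How to add a new car to the inventory",
            "steps": [
                "Navigate to the Inventory section by clicking 'Inventory' in the main menu",
                "Click the 'Add Car' button in the top right corner",
            ],
        },
    },
}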
88
tours/management/commands/generate_ui_map.py
Normal file
@ -0,0 +1,88 @@
from django.core.management.base import BaseCommand
from django.urls import get_resolver
import os
import re
import json
from django.template.loaders.app_directories import get_app_template_dirs


class Command(BaseCommand):
    help = "Generate UI element map for IntroJS tours"

    def handle(self, *args, **kwargs):
        output_file = os.path.join('static', 'js', 'tours', 'ui_element_map.json')

        ui_map = {
            "pages": {},
            "common_elements": {
                "navigation": {
                    "inventory": "#inventory-menu, .inventory-nav, nav .inventory",
                    "finance": "#finance-menu, .finance-nav, nav .finance",
                    "customers": "#customers-menu, .customers-nav, nav .customers"
                },
                "actions": {
                    "add": ".btn-add, .add-button, button:contains('Add')",
                    "edit": ".btn-edit, .edit-button, button:contains('Edit')",
                    "save": ".btn-save, button[type='submit'], #save-button",
                    "cancel": ".btn-cancel, #cancel-button, button:contains('Cancel')",
                    "delete": ".btn-delete, .delete-button, button:contains('Delete')"
                },
                "forms": {
                    "search": "#search-form, .search-input, input[name='q']",
                    "date_range": ".date-range, input[type='date']",
                    "dropdown": "select, .dropdown, .select-field"
                }
            }
        }

        # Extract URL patterns to identify pages
        resolver = get_resolver()
        for url_pattern in resolver.url_patterns:
            if hasattr(url_pattern, 'name') and url_pattern.name:
                pattern_name = url_pattern.name
                # Skip admin and API URLs
                if pattern_name.startswith(('admin:', 'api:')):
                    continue

                ui_map["pages"][pattern_name] = {
                    "url_pattern": str(url_pattern.pattern),
                    "elements": {}
                }

        # Scan templates for UI elements with IDs
        template_dirs = get_app_template_dirs('templates')
        for template_dir in template_dirs:
            for root, dirs, files in os.walk(template_dir):
                for file in files:
                    if not file.endswith(('.html', '.htm')):
                        continue

                    try:
                        with open(os.path.join(root, file), 'r', encoding='utf-8') as f:
                            content = f.read()

                        # Try to identify the page/view this template is for
                        template_path = os.path.relpath(os.path.join(root, file), template_dir)
                        page_key = template_path.replace('/', '_').replace('.html', '')

                        # Create page entry if it doesn't exist
                        if page_key not in ui_map["pages"]:
                            ui_map["pages"][page_key] = {
                                "template": template_path,
                                "elements": {}
                            }

                        # Extract elements with IDs
                        id_matches = re.findall(r'id=["\']([^"\']+)["\']', content)
                        for id_match in id_matches:
                            ui_map["pages"][page_key]["elements"][id_match] = f"#{id_match}"

                    except Exception as e:
                        self.stdout.write(self.style.WARNING(f"Error processing template {file}: {e}"))

        # Save UI map as JSON
        os.makedirs(os.path.dirname(output_file), exist_ok=True)
        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(ui_map, f, indent=2)

        self.stdout.write(self.style.SUCCESS(f"✅ UI element map saved to {output_file}"))
45
tours/management/commands/import_tours.py
Normal file
@ -0,0 +1,45 @@
from django.core.management.base import BaseCommand
import yaml
import os
from django.utils.text import slugify
from tours.models import Tour


class Command(BaseCommand):
    help = "Import tours from knowledge base"

    def handle(self, *args, **kwargs):
        input_file = "haikal_kb.yaml"

        try:
            with open(input_file, 'r', encoding='utf-8') as f:
                kb = yaml.safe_load(f)
        except Exception as e:
            self.stdout.write(self.style.ERROR(f"Error reading knowledge base file: {e}"))
            return

        workflows = kb.get("user_workflows", {})

        tours_created = 0

        for workflow_name, workflow in workflows.items():
            slug = slugify(workflow_name)
            tour_file = f"{slug}_tour.json"

            tour, created = Tour.objects.update_or_create(
                slug=slug,
                defaults={
                    'name': workflow_name,
                    'description': workflow.get('description', ''),
                    'tour_file': tour_file,
                    'is_active': True
                }
            )

            if created:
                tours_created += 1
                self.stdout.write(self.style.SUCCESS(f"Created tour: {workflow_name}"))
            else:
                self.stdout.write(self.style.SUCCESS(f"Updated tour: {workflow_name}"))

        self.stdout.write(self.style.SUCCESS(f"✅ Imported {tours_created} tours from knowledge base"))
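
generate_tours writes the JSON step files and import_tours registers the matching Tour rows, so they are meant to run in that order. A minimal sketch of driving both from Python (for example in a deploy script), using Django's call_command:

# Sketch only: run the two commands in the order the tours app expects.
from django.core.management import call_command

call_command('generate_tours')  # writes static/js/tours/<slug>_tour.json files
call_command('import_tours')    # upserts Tour rows pointing at those files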
40
tours/migrations/0001_initial.py
Normal file
@ -0,0 +1,40 @@
# Generated by Django 5.2.1 on 2025-06-06 14:05

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Tour',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('description', models.TextField(blank=True)),
                ('slug', models.SlugField(unique=True)),
                ('tour_file', models.CharField(max_length=255)),
                ('is_active', models.BooleanField(default=True)),
            ],
        ),
        migrations.CreateModel(
            name='TourCompletion',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('completed_on', models.DateTimeField(auto_now_add=True)),
                ('tour', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tours.tour')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'unique_together': {('tour', 'user')},
            },
        ),
    ]
0
tours/migrations/__init__.py
Normal file
22
tours/models.py
Normal file
@ -0,0 +1,22 @@
from django.conf import settings
from django.db import models


class Tour(models.Model):
    name = models.CharField(max_length=100)
    description = models.TextField(blank=True)
    slug = models.SlugField(unique=True)
    tour_file = models.CharField(max_length=255)
    is_active = models.BooleanField(default=True)

    def __str__(self):
        return self.name


class TourCompletion(models.Model):
    tour = models.ForeignKey(Tour, on_delete=models.CASCADE)
    # Reference the configured user model, matching the generated migration above.
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    completed_on = models.DateTimeField(auto_now_add=True)

    class Meta:
        unique_together = ('tour', 'user')
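
A small sketch of how the completion data can be queried per user, for example to hide already-finished guides in a template context; the helper name is illustrative and not part of this commit:

# Sketch: slugs of the guides a given user has already finished.
def completed_tour_slugs(user):
    return list(
        TourCompletion.objects.filter(user=user)
        .values_list('tour__slug', flat=True)
    )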
3
tours/tests.py
Normal file
@ -0,0 +1,3 @@
from django.test import TestCase

# Create your tests here.
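
tests.py is still a stub. A minimal sketch of a first test for the completion endpoint, assuming the default auth user model and the URL names defined in tours/urls.py below:

# Sketch only, not part of this commit.
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse
from tours.models import Tour, TourCompletion


class MarkTourCompletedTests(TestCase):
    def test_post_records_completion(self):
        user = get_user_model().objects.create_user('demo', password='pw')
        tour = Tour.objects.create(name='Add New Car', slug='add-new-car',
                                   tour_file='add-new-car_tour.json')
        self.client.force_login(user)
        response = self.client.post(reverse('mark_tour_complete', args=[tour.slug]))
        self.assertEqual(response.status_code, 200)
        self.assertTrue(TourCompletion.objects.filter(tour=tour, user=user).exists())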
9
tours/urls.py
Normal file
@ -0,0 +1,9 @@
from django.urls import path
from . import views

urlpatterns = [
    path('', views.tour_list, name='tour_list'),
    path('data/<slug:slug>/', views.get_tour_data, name='get_tour_data'),
    path('complete/<slug:slug>/', views.mark_tour_completed, name='mark_tour_complete'),
    path('start/<slug:slug>/', views.start_tour_view, name='start_tour'),
]
55
tours/views.py
Normal file
@ -0,0 +1,55 @@
import os
import json
from django.shortcuts import render, get_object_or_404
from django.http import JsonResponse
from django.contrib.auth.decorators import login_required
from django.conf import settings
from .models import Tour, TourCompletion


@login_required
def tour_list(request):
    tours = Tour.objects.filter(is_active=True)
    return render(request, 'tours/tour_list.html', {'tours': tours})


@login_required
def get_tour_data(request, slug):
    tour = get_object_or_404(Tour, slug=slug, is_active=True)

    # Check if user has already completed this tour
    completed = TourCompletion.objects.filter(tour=tour, user=request.user).exists()

    # Load the tour data from JSON file
    tour_file_path = os.path.join(settings.BASE_DIR, 'static', 'js', 'tours', tour.tour_file)

    try:
        with open(tour_file_path, 'r') as f:
            tour_data = json.load(f)
    except (FileNotFoundError, json.JSONDecodeError):
        return JsonResponse({'error': 'Tour data not found or invalid'}, status=404)

    return JsonResponse({
        'tour': tour_data,
        'completed': completed
    })


@login_required
def mark_tour_completed(request, slug):
    if request.method != 'POST':
        return JsonResponse({'error': 'Method not allowed'}, status=405)

    tour = get_object_or_404(Tour, slug=slug, is_active=True)

    # Mark the tour as completed for this user
    TourCompletion.objects.get_or_create(tour=tour, user=request.user)

    return JsonResponse({'status': 'success'})


@login_required
def start_tour_view(request, slug):
    tour = get_object_or_404(Tour, slug=slug, is_active=True)
    # Render the landing page from which the tour auto-starts
    return render(request, 'tours/start_tour.html', {'tour': tour})