Initial commit: SIBU 2.0 MISSION
This commit is contained in:
2
backend/app/core/__init__.py
Normal file
2
backend/app/core/__init__.py
Normal file
@ -0,0 +1,2 @@
|
||||
"""Core configuration and utilities."""
|
||||
|
||||
46
backend/app/core/config.py
Normal file
46
backend/app/core/config.py
Normal file
@ -0,0 +1,46 @@
|
||||
"""Configuration settings using pydantic-settings."""
|
||||
import os
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
from typing import Literal
|
||||
|
||||
|
||||
def get_env_file() -> str:
    """Resolve which dotenv file to load for the current ENVIRONMENT.

    Builds ``.env.<ENVIRONMENT>`` (defaulting to "development") and falls
    back to ``.env.development`` when that file does not exist on disk.
    """
    candidate = f".env.{os.getenv('ENVIRONMENT', 'development')}"
    # Missing per-environment file -> fall back to the development defaults.
    return candidate if os.path.exists(candidate) else ".env.development"
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Application settings loaded from environment variables.

    Values come from the process environment (case-insensitive) and from
    the dotenv file selected by ``get_env_file()``; unknown keys are
    silently ignored (``extra="ignore"``).
    """

    # Database
    # Async SQLAlchemy URL; other modules swap "+asyncpg" for "+psycopg2"
    # when they need a synchronous engine.
    database_url: str = "postgresql+asyncpg://sibu:sibu@localhost:5432/sibu"

    # Google Maps (for server-side APIs)
    google_maps_api_key: str = ""
    google_maps_url_signing_secret: str = ""

    # Environment
    environment: Literal["development", "production", "testing"] = "development"
    debug: bool = False

    # Security
    # NOTE(review): insecure defaults — must be overridden outside development.
    admin_password: str = "admin"  # Default for development, override in .env
    secret_key: str = "insecure-secret-key-dev"  # Default for development, override in .env

    model_config = SettingsConfigDict(
        env_file=get_env_file(),
        env_file_encoding="utf-8",
        case_sensitive=False,
        extra="ignore",
    )


# Global settings instance
settings = Settings()
|
||||
27
backend/app/core/database.py
Normal file
27
backend/app/core/database.py
Normal file
@ -0,0 +1,27 @@
|
||||
"""Database connection and session management."""
|
||||
from sqlmodel import SQLModel, create_engine, Session
|
||||
from typing import Generator
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
|
||||
# Create database engine
# Convert asyncpg URL to psycopg2 for synchronous operations
# (SQLModel's create_engine/Session are synchronous APIs).
database_url = settings.database_url.replace("+asyncpg", "+psycopg2")
engine = create_engine(
    database_url,
    echo=settings.debug,  # log emitted SQL when debug mode is on
    future=True,
)
|
||||
|
||||
|
||||
def init_db() -> None:
    """Initialize database by creating all tables.

    Idempotent: ``create_all`` only creates tables that do not exist yet.
    """
    SQLModel.metadata.create_all(engine)
|
||||
|
||||
|
||||
def get_session() -> Generator[Session, None, None]:
    """Dependency for getting database session.

    Yields a session bound to the module-level engine; the context manager
    closes it once the caller is done.
    """
    with Session(engine) as session:
        yield session
|
||||
|
||||
251
backend/app/core/export_database_to_seeder.py
Normal file
251
backend/app/core/export_database_to_seeder.py
Normal file
@ -0,0 +1,251 @@
|
||||
"""Export current database data and generate a seeder script."""
|
||||
from sqlmodel import Session, select
|
||||
from typing import Dict, Any
|
||||
import json
|
||||
|
||||
from app.core.database import engine
|
||||
from app.models.route import Route
|
||||
from app.models.bus_stop import BusStop
|
||||
from app.models.route_stop import RouteStop
|
||||
from app.models.bus_schedule import BusSchedule
|
||||
|
||||
|
||||
def export_all_data() -> Dict[str, Any]:
    """Export all data from the database.

    Serializes every Route, BusStop, RouteStop and BusSchedule row into
    JSON-friendly dicts: UUIDs as strings, enums as their ``.value``,
    times as "HH:MM:SS" strings.
    """
    with Session(engine) as session:
        # Export routes
        routes = session.exec(select(Route)).all()
        routes_data = []
        for route in routes:
            routes_data.append({
                "id": str(route.id),
                "name": route.name,
                "description": route.description,
                "origin_city": route.origin_city,
                "destination_city": route.destination_city,
                "distance_km": route.distance_km,
                "estimated_duration_minutes": route.estimated_duration_minutes,
                # Fall back to "active" when the status column is NULL.
                "status": route.status.value if route.status else "active",
            })

        # Export bus stops
        bus_stops = session.exec(select(BusStop)).all()
        bus_stops_data = []
        for stop in bus_stops:
            bus_stops_data.append({
                "id": str(stop.id),
                "name": stop.name,
                "latitude": stop.latitude,
                "longitude": stop.longitude,
                "city": stop.city,
                "address": stop.address,
                # Fall back to "regular" when stop_type is NULL.
                "stop_type": stop.stop_type.value if stop.stop_type else "regular",
                "has_shelter": stop.has_shelter,
                "has_seating": stop.has_seating,
                "is_accessible": stop.is_accessible,
            })

        # Export route stops (join table: which stops a route visits, in order)
        route_stops = session.exec(select(RouteStop)).all()
        route_stops_data = []
        for route_stop in route_stops:
            route_stops_data.append({
                "id": str(route_stop.id),
                "route_id": str(route_stop.route_id),
                "stop_id": str(route_stop.stop_id),
                "stop_order": route_stop.stop_order,
                "travel_time_minutes": route_stop.travel_time_minutes,
                "is_pickup_point": route_stop.is_pickup_point,
                "is_dropoff_point": route_stop.is_dropoff_point,
            })

        # Export bus schedules
        bus_schedules = session.exec(select(BusSchedule)).all()
        bus_schedules_data = []
        for schedule in bus_schedules:
            bus_schedules_data.append({
                "id": str(schedule.id),
                "route_id": str(schedule.route_id),
                # Time-of-day serialized as "HH:MM:SS"; None when unset.
                "departure_time": schedule.departure_time.strftime("%H:%M:%S") if schedule.departure_time else None,
                "frequency_minutes": schedule.frequency_minutes,
                # Fall back to "weekday" when schedule_type is NULL.
                "schedule_type": schedule.schedule_type.value if schedule.schedule_type else "weekday",
                "is_active": schedule.is_active,
                "notes": schedule.notes,
            })

        return {
            "routes": routes_data,
            "bus_stops": bus_stops_data,
            "route_stops": route_stops_data,
            "bus_schedules": bus_schedules_data,
        }
|
||||
|
||||
|
||||
def generate_seeder_code(data: Dict[str, Any]) -> str:
    """Generate Python seeder code from exported data.

    Returns the full source text of a standalone ``seed.py`` module that
    re-inserts the exported rows (keeping their original UUIDs) in
    dependency order: routes -> bus stops -> route stops -> schedules.

    NOTE(review): indentation inside the generated-code string templates
    was reconstructed — the diff view this was recovered from stripped
    leading whitespace inside string literals. Verify the emitted module
    against a known-good ``seed.py``.
    """

    # Static preamble of the generated module (string literal — nothing
    # here executes in this process).
    code = '''"""Database seeding script generated from current database."""
from sqlmodel import Session, select, create_engine
from uuid import UUID
from datetime import time

from app.core.config import settings
from app.models.route import Route, RouteStatus
from app.models.bus_stop import BusStop, StopType
from app.models.route_stop import RouteStop
from app.models.bus_schedule import BusSchedule, BusScheduleType


def seed_database():
    """Seed the database with exported data."""
    # Use synchronous engine for seeding (replace asyncpg with psycopg2)
    sync_database_url = settings.database_url.replace("+asyncpg", "+psycopg2")
    sync_engine = create_engine(sync_database_url, echo=False)

    with Session(sync_engine) as session:
        # Check if data already exists
        try:
            existing_routes = session.exec(select(Route)).first()
            if existing_routes:
                print("Database already has data. Skipping seed.")
                print("To reseed, drop the tables first or use: make db-reset")
                return
        except Exception as e:
            # If tables don't exist yet, that's fine - we'll create the data
            print(f"Note: {e}")
            print("Proceeding with seed...")

'''

    # Generate routes
    code += "        # Insert Routes\n"
    code += "        routes = [\n"
    for route in data["routes"]:
        # Enum member names are upper-case with underscores.
        status_enum = route["status"].upper().replace("-", "_")
        code += f'''            Route(
                id=UUID("{route["id"]}"),
                name={repr(route["name"])},
                description={repr(route["description"])},
                origin_city={repr(route["origin_city"])},
                destination_city={repr(route["destination_city"])},
                distance_km={route["distance_km"] if route["distance_km"] is not None else "None"},
                estimated_duration_minutes={route["estimated_duration_minutes"] if route["estimated_duration_minutes"] is not None else "None"},
                status=RouteStatus.{status_enum},
            ),
'''
    code += "        ]\n"
    code += "        for route in routes:\n"
    code += "            session.add(route)\n"
    code += "        session.flush()  # Flush routes so we can reference them in foreign keys\n\n"

    # Generate bus stops
    code += "        # Insert Bus Stops\n"
    code += "        bus_stops = [\n"
    for stop in data["bus_stops"]:
        stop_type_enum = stop["stop_type"].upper().replace("-", "_")
        # NOTE(review): falsy address ("" as well as None) is emitted as
        # None — confirm empty-string addresses should be dropped.
        code += f'''            BusStop(
                id=UUID("{stop["id"]}"),
                name={repr(stop["name"])},
                latitude={stop["latitude"]},
                longitude={stop["longitude"]},
                city={repr(stop["city"])},
                address={repr(stop["address"]) if stop["address"] else "None"},
                stop_type=StopType.{stop_type_enum},
                has_shelter={stop["has_shelter"]},
                has_seating={stop["has_seating"]},
                is_accessible={stop["is_accessible"]},
            ),
'''
    code += "        ]\n"
    code += "        for stop in bus_stops:\n"
    code += "            session.add(stop)\n"
    code += "        session.flush()  # Flush stops so we can reference them in route_stops\n\n"

    # Generate route stops
    code += "        # Insert Route Stops\n"
    code += "        route_stops = [\n"
    for route_stop in data["route_stops"]:
        code += f'''            RouteStop(
                route_id=UUID("{route_stop["route_id"]}"),
                stop_id=UUID("{route_stop["stop_id"]}"),
                stop_order={route_stop["stop_order"]},
                travel_time_minutes={route_stop["travel_time_minutes"] if route_stop["travel_time_minutes"] is not None else "None"},
                is_pickup_point={route_stop["is_pickup_point"]},
                is_dropoff_point={route_stop["is_dropoff_point"]},
            ),
'''
    code += "        ]\n"
    code += "        for route_stop in route_stops:\n"
    code += "            session.add(route_stop)\n"
    code += "        session.flush()  # Flush route_stops before adding schedules\n\n"

    # Generate bus schedules
    code += "        # Insert Bus Schedules\n"
    code += "        bus_schedules = [\n"
    for schedule in data["bus_schedules"]:
        if schedule["departure_time"]:
            hour, minute, second = schedule["departure_time"].split(":")
            # Convert to int to remove leading zeros
            hour_int = int(hour)
            minute_int = int(minute)
            # NOTE(review): seconds are parsed but discarded — generated
            # times always have second == 0. Confirm schedules never carry
            # a seconds component.
            time_str = f"time({hour_int}, {minute_int})"
        else:
            time_str = "None"

        schedule_type_enum = schedule["schedule_type"].upper().replace("-", "_")
        # NOTE(review): falsy notes ("" as well as None) become None.
        code += f'''            BusSchedule(
                route_id=UUID("{schedule["route_id"]}"),
                departure_time={time_str},
                frequency_minutes={schedule["frequency_minutes"] if schedule["frequency_minutes"] is not None else "None"},
                schedule_type=BusScheduleType.{schedule_type_enum},
                is_active={schedule["is_active"]},
                notes={repr(schedule["notes"]) if schedule["notes"] else "None"},
            ),
'''
    code += "        ]\n"
    code += "        for schedule in bus_schedules:\n"
    code += "            session.add(schedule)\n\n"

    code += "        session.commit()\n"
    code += '        print("Database seeded successfully!")\n\n'

    # Script entry point of the generated module.
    code += '''
if __name__ == "__main__":
    seed_database()
'''

    return code
|
||||
|
||||
|
||||
def main():
    """Export the current database and write a JSON dump plus a seeder module."""
    print("Exporting database data...")

    try:
        data = export_all_data()

        # Persist a raw JSON snapshot alongside the generated code.
        json_file = "database_export.json"
        with open(json_file, "w", encoding="utf-8") as f:
            json.dump(data, f, indent=2, default=str, ensure_ascii=False)
        print(f"✅ Exported data to {json_file}")
        for table in ("routes", "bus_stops", "route_stops", "bus_schedules"):
            print(f" - {len(data[table])} {table.replace('_', ' ')}")

        # Render the seeder module and write it where the app expects it.
        # NOTE(review): path is relative to the CWD — run from backend/.
        seeder_file = "app/core/seed.py"
        with open(seeder_file, "w", encoding="utf-8") as f:
            f.write(generate_seeder_code(data))
        print(f"✅ Generated seeder script: {seeder_file}")

    except Exception as e:
        # Surface the failure, then re-raise so the exit code is non-zero.
        print(f"❌ Error exporting database: {e}")
        raise
|
||||
|
||||
92
backend/app/core/export_supabase_data.py
Normal file
92
backend/app/core/export_supabase_data.py
Normal file
@ -0,0 +1,92 @@
|
||||
"""Script to export route data from Supabase to update seed script."""
|
||||
import os
|
||||
from supabase import create_client, Client
|
||||
from typing import Dict, Any
|
||||
import json
|
||||
|
||||
def get_supabase_client() -> Client:
    """Build a Supabase client from SUPABASE_URL / SUPABASE_ANON_KEY.

    Raises:
        ValueError: if either variable is missing from the environment.
    """
    url = os.getenv("SUPABASE_URL")
    key = os.getenv("SUPABASE_ANON_KEY")

    if url and key:
        return create_client(url, key)

    raise ValueError("SUPABASE_URL and SUPABASE_ANON_KEY must be set in environment")
|
||||
|
||||
def export_route_data(route_name: str = "Boquete>David") -> Dict[str, Any]:
    """Fetch a single route and its ordered stops from Supabase.

    Raises:
        ValueError: if no route named ``route_name`` exists.
    """
    client = get_supabase_client()

    # Look up the route row by its exact name.
    matches = client.table("routes").select("*").eq("name", route_name).execute().data
    if not matches:
        raise ValueError(f"Route '{route_name}' not found in Supabase")
    route_row = matches[0]

    # Pull every route_stop joined with its bus stop, in visiting order.
    stops = (
        client.table("route_stops")
        .select("*, bus_stops(*)")
        .eq("route_id", route_row["id"])
        .order("stop_order")
        .execute()
        .data
    )

    return {
        "route": route_row,
        "route_stops": stops,
        "total_stops": len(stops),
    }
|
||||
|
||||
def export_all_routes() -> Dict[str, Any]:
    """Fetch every route (with its ordered stops) from Supabase, keyed by name."""
    client = get_supabase_client()

    all_data = {}
    for route_row in client.table("routes").select("*").execute().data:
        # One joined query per route: its stops in visiting order.
        stops = (
            client.table("route_stops")
            .select("*, bus_stops(*)")
            .eq("route_id", route_row["id"])
            .order("stop_order")
            .execute()
            .data
        )
        all_data[route_row["name"]] = {
            "route": route_row,
            "route_stops": stops,
            "total_stops": len(stops),
        }

    return all_data
|
||||
|
||||
if __name__ == "__main__":
    import sys

    # CLI: "--all" exports every route; otherwise the first argument (or
    # the default "Boquete>David") selects a single route.
    if len(sys.argv) > 1 and sys.argv[1] == "--all":
        # Export all routes
        data = export_all_routes()
        output_file = "supabase_export_all.json"
    else:
        # Export specific route
        route_name = sys.argv[1] if len(sys.argv) > 1 else "Boquete>David"
        data = export_route_data(route_name)
        output_file = f"supabase_export_{route_name.replace('>', '_')}.json"

    # Save to JSON file
    with open(output_file, "w") as f:
        json.dump(data, f, indent=2, default=str)

    print(f"✅ Exported data to {output_file}")
    # Single-route export carries "total_stops" at top level; the --all
    # export is a mapping keyed by route name.
    if isinstance(data, dict) and "total_stops" in data:
        print(f" Total stops: {data['total_stops']}")
    elif isinstance(data, dict):
        for route_name, route_data in data.items():
            print(f" {route_name}: {route_data['total_stops']} stops")
|
||||
|
||||
99
backend/app/core/generate_route_stops.py
Normal file
99
backend/app/core/generate_route_stops.py
Normal file
@ -0,0 +1,99 @@
|
||||
"""Helper script to generate intermediate stops along a route."""
|
||||
from typing import List, Tuple
|
||||
|
||||
def interpolate_point(
    start: Tuple[float, float],
    end: Tuple[float, float],
    fraction: float
) -> Tuple[float, float]:
    """Linearly interpolate between two (lat, lng) coordinate pairs.

    ``fraction`` of 0.0 yields ``start``; 1.0 yields ``end``.
    """
    start_lat, start_lng = start
    end_lat, end_lng = end
    return (
        start_lat + (end_lat - start_lat) * fraction,
        start_lng + (end_lng - start_lng) * fraction,
    )
|
||||
|
||||
def generate_intermediate_stops(
    start_coords: Tuple[float, float],
    end_coords: Tuple[float, float],
    num_stops: int,
    start_name: str = "Start",
    end_name: str = "End",
    city: str = "Route"
) -> List[dict]:
    """Generate evenly spaced stops along the line between two points.

    Args:
        start_coords: (latitude, longitude) of start point
        end_coords: (latitude, longitude) of end point
        num_stops: Total number of stops to generate (including start and end)
        start_name: Name of the start stop
        end_name: Name of the end stop
        city: City name for intermediate stops

    Returns:
        List of stop dictionaries with name, lat, lng, city
    """
    def make_stop(name, lat, lng):
        # Uniform stop-record shape used throughout this module.
        return {"name": name, "lat": lat, "lng": lng, "city": city}

    first = make_stop(start_name, start_coords[0], start_coords[1])
    last = make_stop(end_name, end_coords[0], end_coords[1])

    # Degenerate request: always return at least the two endpoints.
    if num_stops < 2:
        return [first, last]

    stops = [first]
    for i in range(1, num_stops - 1):
        fraction = i / (num_stops - 1)
        # Linear interpolation between the two endpoints (inlined).
        lat = start_coords[0] + (end_coords[0] - start_coords[0]) * fraction
        lng = start_coords[1] + (end_coords[1] - start_coords[1]) * fraction

        # Every 10th stop is a "main" stop, every 5th an intersection.
        if i % 10 == 0:
            name = f"Parada Principal {i // 10}"
        elif i % 5 == 0:
            name = f"Intersección {i // 5}"
        else:
            name = f"Parada {i}"

        stops.append(make_stop(name, lat, lng))

    stops.append(last)
    return stops
|
||||
|
||||
# Example: Generate 61 stops for Boquete>David route
if __name__ == "__main__":
    # Boquete coordinates (Terminal)
    boquete_start = (8.7697, -82.4328)
    # David coordinates (Terminal)
    david_end = (8.4177, -82.4270)

    # 61 stops total, endpoints included.
    stops = generate_intermediate_stops(
        boquete_start,
        david_end,
        num_stops=61,
        start_name="Terminal de Boquete",
        end_name="Terminal de David",
        city="Ruta Boquete-David"
    )

    print(f"Generated {len(stops)} stops:")
    for i, stop in enumerate(stops, 1):
        print(f"{i:2d}. {stop['name']:30s} ({stop['lat']:.6f}, {stop['lng']:.6f})")
|
||||
|
||||
223
backend/app/core/import_supabase_coordinates.py
Normal file
223
backend/app/core/import_supabase_coordinates.py
Normal file
@ -0,0 +1,223 @@
|
||||
"""Script to import bus stop coordinates from Supabase that follow actual roads."""
|
||||
import os
|
||||
from pathlib import Path
|
||||
from supabase import create_client, Client
|
||||
from sqlmodel import Session, select, create_engine
|
||||
import sys
|
||||
|
||||
from app.core.config import settings, get_env_file
|
||||
from app.models.bus_stop import BusStop
|
||||
from app.models.route import Route
|
||||
from app.models.route_stop import RouteStop
|
||||
|
||||
|
||||
def load_env_file():
    """Load environment variables from the active dotenv file, if it exists."""
    env_path = Path(get_env_file())

    # Resolve relative paths against the backend/ directory (three levels
    # up from this module: app/core/<file> -> backend/).
    if not env_path.is_absolute():
        env_path = Path(__file__).parent.parent.parent / env_path

    if not env_path.exists():
        print(f"⚠️ Warning: {env_path} not found, using system environment variables")
        return

    # Imported lazily so the dependency is only needed when a file exists.
    from dotenv import load_dotenv
    load_dotenv(env_path)
    print(f"✓ Loaded environment from {env_path}")
|
||||
|
||||
|
||||
def get_supabase_client() -> Client:
    """Create Supabase client from environment variables.

    Loads the active dotenv file first so local credentials are picked up.

    Raises:
        ValueError: if the Supabase URL or anon key is missing.
    """
    # Load .env.development file first
    load_env_file()

    url = os.getenv("SUPABASE_URL")
    key = os.getenv("SUPABASE_ANON_KEY")

    if url and key:
        return create_client(url, key)

    raise ValueError(
        "SUPABASE_URL and SUPABASE_ANON_KEY must be set in environment or .env.development file.\n"
        f"Checked file: {get_env_file()}"
    )
|
||||
|
||||
|
||||
def import_coordinates_from_supabase(route_name: str = "Boquete>David", supabase_route_name: str = None):
    """Import bus stop coordinates from Supabase for a specific route.

    Matches local stops to Supabase stops positionally (by order), then
    overwrites local lat/lng where they differ by more than ~1e-4 degrees.

    Args:
        route_name: Route name in local database format (e.g., "Boquete>David")
        supabase_route_name: Route name in Supabase format (e.g., "Boquete – David").
            If None, will try to find it automatically.
    """
    supabase = get_supabase_client()

    # If supabase_route_name is provided, use it directly
    if supabase_route_name:
        route_response = supabase.table("routes").select("*").eq("name", supabase_route_name).execute()
    else:
        # Get route from Supabase - try exact match first, then try variations
        route_response = supabase.table("routes").select("*").eq("name", route_name).execute()

        # If not found, try with different separators (Supabase uses " – " em dash)
        if not route_response.data:
            # Try with different separators
            variations = [
                route_name.replace(">", " – "),  # Em dash (Supabase format)
                route_name.replace(">", "-"),  # Regular dash
                route_name.replace(">", " to "),  # " to "
                route_name.replace(">", " -> "),  # " -> "
            ]
            for variant in variations:
                route_response = supabase.table("routes").select("*").eq("name", variant).execute()
                if route_response.data:
                    print(f"Found route with name variation: '{variant}'")
                    break

    # If still not found, list available routes to aid debugging
    if not route_response.data:
        all_routes = supabase.table("routes").select("name").execute()
        available = [r["name"] for r in all_routes.data] if all_routes.data else []
        raise ValueError(
            f"Route '{route_name}' not found in Supabase.\n"
            f"Available routes: {', '.join(available) if available else 'None found'}"
        )

    supabase_route = route_response.data[0]
    supabase_route_id = supabase_route["id"]

    # Get all route stops with stop details from Supabase
    # Old app uses 'stops' table with 'lat' and 'lng' columns, and 'seq' for order
    # Try different query formats to match Supabase schema
    try:
        # Try the format used by old Flutter app: stops:stop_id with seq
        route_stops_response = supabase.table("route_stops").select(
            "seq, stops:stop_id(id, name, lat, lng)"
        ).eq("route_id", supabase_route_id).order("seq").execute()

    except Exception as e1:
        try:
            # Try with stop_order instead of seq
            route_stops_response = supabase.table("route_stops").select(
                "stop_order, stops:stop_id(id, name, lat, lng)"
            ).eq("route_id", supabase_route_id).order("stop_order").execute()

        except Exception as e2:
            try:
                # Try bus_stops table (new schema)
                route_stops_response = supabase.table("route_stops").select(
                    "stop_order, bus_stops(*)"
                ).eq("route_id", supabase_route_id).order("stop_order").execute()

            except Exception as e3:
                # All three schema variants failed — report every error.
                raise Exception(f"Could not query Supabase: {e1}, {e2}, {e3}")

    if not route_stops_response.data:
        print(f"No stops found for route '{route_name}' in Supabase")
        return

    # Connect to local database (synchronous driver for this script)
    sync_database_url = settings.database_url.replace("+asyncpg", "+psycopg2")
    sync_engine = create_engine(sync_database_url, echo=False)

    with Session(sync_engine) as session:
        # Find the route in local database
        local_route = session.exec(select(Route).where(Route.name == route_name)).first()
        if not local_route:
            print(f"Route '{route_name}' not found in local database. Please create it first.")
            return

        print(f"Found route '{route_name}' in local database (ID: {local_route.id})")

        # Get local route stops ordered by stop_order
        local_route_stops = session.exec(
            select(RouteStop, BusStop)
            .join(BusStop, RouteStop.stop_id == BusStop.id)
            .where(RouteStop.route_id == local_route.id)
            .order_by(RouteStop.stop_order)
        ).all()

        # Positional matching assumes both sides share the same ordering.
        if len(local_route_stops) != len(route_stops_response.data):
            print(f"⚠️ Warning: Local database has {len(local_route_stops)} stops, Supabase has {len(route_stops_response.data)} stops")
            print(" Updating coordinates for matching stops...")

        # Update coordinates for each stop
        updated_count = 0
        for i, supabase_stop_data in enumerate(route_stops_response.data):
            # Extra Supabase stops beyond the local count are ignored.
            if i >= len(local_route_stops):
                break

            # Try different possible field names for the stop data
            supabase_stop = (
                supabase_stop_data.get("bus_stops") or
                supabase_stop_data.get("stops") or
                supabase_stop_data  # If the stop data is directly in the response
            )
            if not supabase_stop or not isinstance(supabase_stop, dict):
                continue

            # Get the local stop
            local_route_stop, local_bus_stop = local_route_stops[i]

            # Update coordinates from Supabase
            # Supabase may use 'lat'/'lng' (old schema) or 'latitude'/'longitude' (new schema)
            new_latitude = supabase_stop.get("latitude") or supabase_stop.get("lat")
            new_longitude = supabase_stop.get("longitude") or supabase_stop.get("lng")

            if new_latitude is None or new_longitude is None:
                print(f"⚠️ Skipping stop {i+1}: missing coordinates in Supabase")
                continue

            # Only write when the difference exceeds ~1e-4 degrees (~11 m).
            if abs(local_bus_stop.latitude - new_latitude) > 0.0001 or \
               abs(local_bus_stop.longitude - new_longitude) > 0.0001:
                local_bus_stop.latitude = new_latitude
                local_bus_stop.longitude = new_longitude
                session.add(local_bus_stop)
                updated_count += 1
                print(f"✓ Updated stop {i+1} ({local_bus_stop.name}): "
                      f"({new_latitude:.6f}, {new_longitude:.6f})")

        session.commit()
        print(f"\n✅ Successfully updated {updated_count} stops with coordinates from Supabase")
|
||||
|
||||
|
||||
def import_all_routes():
    """Import coordinates for every route found in Supabase."""
    supabase = get_supabase_client()

    routes = supabase.table("routes").select("*").execute().data
    print(f"Found {len(routes)} routes in Supabase")

    # Supabase route names use separators like " – "; local names use ">".
    separators = (" – ", " - ", " to ", " -> ")
    banner = "=" * 60

    for route in routes:
        remote_name = route["name"]  # Format: "Boquete – David"
        local_name = remote_name
        for sep in separators:
            local_name = local_name.replace(sep, ">")

        print(f"\n{banner}")
        print(f"Importing coordinates for route: {remote_name}")
        print(f"Looking for local route: {local_name}")
        print(f"{banner}")
        try:
            import_coordinates_from_supabase(local_name, supabase_route_name=remote_name)
        except Exception as e:
            # One failing route must not abort the whole import run.
            print(f"❌ Error importing route '{remote_name}': {e}")
            continue
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI: "--all" imports every route; otherwise the first argument
    # (or the default "Boquete>David") selects a single route.
    if len(sys.argv) > 1 and sys.argv[1] == "--all":
        # Import all routes
        import_all_routes()
    else:
        # Import specific route
        route_name = sys.argv[1] if len(sys.argv) > 1 else "Boquete>David"
        import_coordinates_from_supabase(route_name)
|
||||
|
||||
46
backend/app/core/security.py
Normal file
46
backend/app/core/security.py
Normal file
@ -0,0 +1,46 @@
|
||||
import bcrypt
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Any, Union, Optional
|
||||
from jose import jwt
|
||||
from app.core.config import settings
|
||||
|
||||
# Signing algorithm for every JWT issued and verified by this module.
ALGORITHM = "HS256"


def create_access_token(
    subject: Union[str, Any],
    role: str,
    full_name: str,
    expires_delta: Optional[timedelta] = None
) -> str:
    """Issue a signed JWT carrying the subject, role and display name.

    Falls back to a 24-hour lifetime when ``expires_delta`` is not given
    (or is falsy, e.g. ``timedelta(0)``).
    """
    lifetime = expires_delta if expires_delta else timedelta(minutes=1440)
    claims = {
        "exp": datetime.now(timezone.utc) + lifetime,
        "sub": str(subject),
        "role": role,
        "full_name": full_name,
    }
    return jwt.encode(claims, settings.secret_key, algorithm=ALGORITHM)
|
||||
|
||||
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plaintext password against its stored bcrypt hash."""
    candidate = plain_password.encode('utf-8')
    stored = hashed_password.encode('utf-8')
    return bcrypt.checkpw(candidate, stored)
|
||||
|
||||
def get_password_hash(password: str) -> str:
    """Hash a plaintext password with bcrypt using a fresh random salt."""
    digest = bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt())
    return digest.decode('utf-8')
|
||||
|
||||
def get_token_payload(token: str) -> dict:
    """Decode a JWT and return its claims; empty dict when invalid.

    NOTE(review): the broad ``except Exception`` also swallows non-JWT
    failures (e.g. misconfiguration) — consider narrowing to
    ``jose.JWTError``.
    """
    try:
        return jwt.decode(token, settings.secret_key, algorithms=[ALGORITHM])
    except Exception:
        # Invalid/expired/garbled tokens are treated as "no payload".
        return {}
|
||||
5900
backend/app/core/seed.py
Normal file
5900
backend/app/core/seed.py
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user