diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 00000000..2105d6b3
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,22 @@
+# Set default behavior to automatically normalize line endings
+* text=auto
+
+# Python files
+*.py text diff=python eol=lf
+*.pyw text diff=python eol=lf
+
+# Web files
+*.html text eol=lf
+*.css text eol=lf
+*.js text eol=lf
+*.json text eol=lf
+
+# Config files
+*.yml text eol=lf
+*.yaml text eol=lf
+*.toml text eol=lf
+*.ini text eol=lf
+*.cfg text eol=lf
+.gitattributes text eol=lf
+.gitignore text eol=lf
+.pre-commit-config.yaml text eol=lf
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..c9b7aa49
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,43 @@
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v5.0.0
+ hooks:
+ - id: trailing-whitespace
+ exclude: |
+ (?x)^(
+ .*\.(md|rst)$|
+ migrations/.*\.py|
+ .*\.(css|js|html)$
+ )$
+ - id: end-of-file-fixer
+ exclude: |
+ (?x)^(
+ migrations/.*\.py|
+ .*\.(css|js|html)$
+ )$
+ - id: mixed-line-ending
+ args: [--fix=lf]
+ - id: check-yaml
+ - id: check-added-large-files
+ - id: check-merge-conflict
+
+- repo: https://github.com/pycqa/flake8
+ rev: 6.1.0
+ hooks:
+ - id: flake8
+ args: [--max-line-length=120, "--extend-ignore=E203,W503,E501"]
+ exclude: migrations/
+
+- repo: https://github.com/psf/black
+ rev: 23.12.1
+ hooks:
+ - id: black
+ args: [--line-length=120]
+ exclude: migrations/
+
+- repo: https://github.com/pycqa/isort
+ rev: 5.13.2
+ hooks:
+ - id: isort
+ args: [--profile=black, --line-length=120]
+ exclude: migrations/
diff --git a/apps/stations/admin.py b/apps/stations/admin.py
index b3a7e7e9..d6e2d8b0 100644
--- a/apps/stations/admin.py
+++ b/apps/stations/admin.py
@@ -6,7 +6,7 @@
from django.db.models import Count
import markupsafe
-from egypt_metro import settings
+from metro import settings
from .models import Line, Station, LineStation
from apps.stations.management.commands.populate_metro_data import Command as MetroDataCommand
diff --git a/apps/trains/admin.py b/apps/trains/admin.py
new file mode 100644
index 00000000..10bdc3be
--- /dev/null
+++ b/apps/trains/admin.py
@@ -0,0 +1,353 @@
+# apps/trains/admin.py
+
+from django.contrib import admin
+from django.db.models import Avg, Sum
+from django.urls import reverse
+from django.utils.html import format_html
+from import_export.admin import ImportExportModelAdmin
+from rangefilter.filters import DateRangeFilter
+
+from .models import ActualSchedule, CrowdMeasurement, Schedule, Train, TrainCar
+
+
+class TrainCarInline(admin.TabularInline):
+ model = TrainCar
+ extra = 0
+ min_num = 1
+ max_num = 10
+ show_change_link = True
+ fields = (
+ "car_number",
+ "capacity",
+ "current_load",
+ "is_operational",
+ "load_percentage",
+ "crowd_status",
+ )
+ readonly_fields = ("load_percentage", "crowd_status")
+
+ def load_percentage(self, obj):
+ return format_html("{:.1f}%", obj.load_percentage)
+
+ def crowd_status(self, obj):
+ status = obj.crowd_status
+ colors = {
+ "EMPTY": "green",
+ "LIGHT": "lightgreen",
+ "MODERATE": "orange",
+ "CROWDED": "orangered",
+ "PACKED": "red",
+ }
+ return format_html('<span style="color: {};">{}</span>', colors.get(status, "black"), status)
+
+
+class ScheduleInline(admin.TabularInline):
+ model = Schedule
+ extra = 0
+ show_change_link = True
+ fields = (
+ "station",
+ "arrival_time",
+ "departure_time",
+ "day_type",
+ "sequence_number",
+ "is_active",
+ )
+ ordering = ("day_type", "sequence_number")
+
+
+@admin.register(Train)
+class TrainAdmin(ImportExportModelAdmin):
+ list_display = (
+ "train_id",
+ "line_link",
+ "status_badge",
+ "current_station_link",
+ "direction",
+ "speed",
+ "car_count",
+ "total_passengers",
+ "last_updated",
+ )
+ list_filter = (
+ "line",
+ "status",
+ "has_air_conditioning",
+ "direction",
+ ("last_updated", DateRangeFilter),
+ )
+ search_fields = ("train_id", "current_station__name", "line__name")
+ readonly_fields = ("last_updated", "total_passengers", "average_load")
+ inlines = [TrainCarInline, ScheduleInline]
+ fieldsets = (
+ (
+ "Basic Information",
+ {"fields": ("train_id", "line", "status", "has_air_conditioning", "number_of_cars")},
+ ),
+ (
+ "Current Status",
+ {"fields": ("current_station", "next_station", "direction", "speed", "last_updated")},
+ ),
+ ("Location", {"fields": ("latitude", "longitude"), "classes": ("collapse",)}),
+ ("Statistics", {"fields": ("total_passengers", "average_load"), "classes": ("collapse",)}),
+ )
+ actions = ["mark_as_maintenance", "mark_as_in_service"]
+
+ def line_link(self, obj):
+ url = reverse("admin:stations_line_change", args=[obj.line.id])
+ return format_html('<a href="{}">{}</a>', url, obj.line.name)
+
+ line_link.short_description = "Line"
+
+ def current_station_link(self, obj):
+ if obj.current_station:
+ url = reverse("admin:stations_station_change", args=[obj.current_station.id])
+ return format_html('<a href="{}">{}</a>', url, obj.current_station.name)
+ return "-"
+
+ current_station_link.short_description = "Current Station"
+
+ def status_badge(self, obj):
+ colors = {
+ "IN_SERVICE": "green",
+ "DELAYED": "orange",
+ "MAINTENANCE": "red",
+ "OUT_OF_SERVICE": "gray",
+ }
+ return format_html(
+ '<span style="background-color: {}; color: white; padding: 3px 10px; border-radius: 10px;">'
+ "{}"
+ "</span>",
+ colors.get(obj.status, "black"),
+ obj.status,
+ )
+
+ status_badge.short_description = "Status"
+
+ def car_count(self, obj):
+ return obj.cars.count()
+
+ car_count.short_description = "Cars"
+
+ def total_passengers(self, obj):
+ return obj.cars.aggregate(total=Sum("current_load"))["total"] or 0
+
+ total_passengers.short_description = "Total Passengers"
+
+ def average_load(self, obj):
+ avg = obj.cars.aggregate(avg=Avg("current_load"))["avg"] or 0
+ return f"{avg:.1f} passengers/car"
+
+ average_load.short_description = "Average Load"
+
+ def mark_as_maintenance(self, request, queryset):
+ updated = queryset.update(status="MAINTENANCE")
+ self.message_user(request, f"{updated} trains marked as under maintenance.")
+
+ mark_as_maintenance.short_description = "Mark selected trains as under maintenance"
+
+ def mark_as_in_service(self, request, queryset):
+ updated = queryset.update(status="IN_SERVICE")
+ self.message_user(request, f"{updated} trains marked as in service.")
+
+ mark_as_in_service.short_description = "Mark selected trains as in service"
+
+
+@admin.register(TrainCar)
+class TrainCarAdmin(admin.ModelAdmin):
+ list_display = (
+ "train_link",
+ "car_number",
+ "capacity",
+ "current_load",
+ "load_percentage_display",
+ "crowd_status_badge",
+ "is_operational",
+ )
+ list_filter = ("is_operational", "train__line")
+ search_fields = ("train__train_id",)
+ readonly_fields = ("last_updated",)
+ actions = ["mark_as_operational", "mark_as_non_operational"]
+
+ def load_percentage_display(self, obj):
+ percentage = obj.load_percentage
+ width = min(int(percentage), 100)
+ color = self._get_percentage_color(percentage)
+
+ return format_html(
+ '<div style="width: 100px; border: 1px solid #ccc; border-radius: 3px;">'
+ '<div style="width: {}px; background-color: {}; text-align: center;">{}</div>'
+ "</div>",
+ width,
+ color,
+ "{:.1f}%".format(percentage),
+ )
+
+ load_percentage_display.short_description = "Load"
+
+ def crowd_status_badge(self, obj):
+ status = obj.crowd_status
+ colors = {
+ "EMPTY": "#28a745",
+ "LIGHT": "#98d85b",
+ "MODERATE": "#ffc107",
+ "CROWDED": "#fd7e14",
+ "PACKED": "#dc3545",
+ }
+ return format_html(
+ '<span style="background-color: {}; color: white; padding: 3px 8px; border-radius: 10px;">{}</span>',
+ colors.get(status, "black"),
+ status,
+ )
+
+ crowd_status_badge.short_description = "Status"
+
+ def _get_percentage_color(self, percentage):
+ if percentage < 30:
+ return "#28a745" # green
+ elif percentage < 50:
+ return "#98d85b" # light green
+ elif percentage < 70:
+ return "#ffc107" # yellow
+ elif percentage < 90:
+ return "#fd7e14" # orange
+ return "#dc3545" # red
+
+ def train_link(self, obj):
+ url = reverse("admin:trains_train_change", args=[obj.train.id])
+ return format_html('<a href="{}">{}</a>', url, obj.train.train_id)
+
+ train_link.short_description = "Train"
+
+ def mark_as_operational(self, request, queryset):
+ updated = queryset.update(is_operational=True)
+ self.message_user(request, "{} cars marked as operational.".format(updated))
+
+ mark_as_operational.short_description = "Mark selected cars as operational"
+
+ def mark_as_non_operational(self, request, queryset):
+ updated = queryset.update(is_operational=False)
+ self.message_user(request, "{} cars marked as non-operational.".format(updated))
+
+ mark_as_non_operational.short_description = "Mark selected cars as non-operational"
+
+
+@admin.register(Schedule)
+class ScheduleAdmin(ImportExportModelAdmin):
+ list_display = (
+ "train_link",
+ "station_link",
+ "arrival_time",
+ "departure_time",
+ "day_type",
+ "sequence_number",
+ "is_active",
+ )
+ list_filter = ("day_type", "is_active", "train__line", ("last_updated", DateRangeFilter))
+ search_fields = ("train__train_id", "station__name")
+ ordering = ("train", "day_type", "sequence_number")
+ actions = ["activate_schedules", "deactivate_schedules"]
+ readonly_fields = ("last_updated",)
+
+ def train_link(self, obj):
+ url = reverse("admin:trains_train_change", args=[obj.train.id])
+ return format_html('<a href="{}">{}</a>', url, obj.train.train_id)
+
+ train_link.short_description = "Train"
+
+ def station_link(self, obj):
+ url = reverse("admin:stations_station_change", args=[obj.station.id])
+ return format_html('<a href="{}">{}</a>', url, obj.station.name)
+
+ station_link.short_description = "Station"
+
+ def activate_schedules(self, request, queryset):
+ updated = queryset.update(is_active=True)
+ self.message_user(request, f"{updated} schedules activated.")
+
+ activate_schedules.short_description = "Activate selected schedules"
+
+ def deactivate_schedules(self, request, queryset):
+ updated = queryset.update(is_active=False)
+ self.message_user(request, f"{updated} schedules deactivated.")
+
+ deactivate_schedules.short_description = "Deactivate selected schedules"
+
+
+@admin.register(ActualSchedule)
+class ActualScheduleAdmin(admin.ModelAdmin):
+ list_display = (
+ "schedule_link",
+ "status_badge",
+ "actual_arrival",
+ "actual_departure",
+ "delay_minutes",
+ "created_at",
+ )
+ list_filter = ("status", ("created_at", DateRangeFilter), "schedule__train__line")
+ search_fields = ("schedule__train__train_id", "schedule__station__name")
+ readonly_fields = ("delay_minutes", "created_at", "updated_at")
+ fieldsets = (
+ ("Schedule Information", {"fields": ("schedule", "status", "reason")}),
+ ("Timing Information", {"fields": ("actual_arrival", "actual_departure", "delay_minutes")}),
+ ("Metadata", {"fields": ("created_at", "updated_at"), "classes": ("collapse",)}),
+ )
+
+ def schedule_link(self, obj):
+ url = reverse("admin:trains_schedule_change", args=[obj.schedule.id])
+ return format_html('<a href="{}">{} - {}</a>', url, obj.schedule.train.train_id, obj.schedule.station.name)
+
+ schedule_link.short_description = "Schedule"
+
+ def status_badge(self, obj):
+ colors = {
+ "ON_TIME": "green",
+ "DELAYED": "orange",
+ "CANCELLED": "red",
+ "SKIPPED": "gray",
+ "DIVERTED": "purple",
+ }
+ return format_html(
+ '<span style="background-color: {}; color: white; padding: 3px 10px; border-radius: 10px;">'
+ "{}"
+ "</span>",
+ colors.get(obj.status, "black"),
+ obj.status,
+ )
+
+ status_badge.short_description = "Status"
+
+
+@admin.register(CrowdMeasurement)
+class CrowdMeasurementAdmin(admin.ModelAdmin):
+ list_display = (
+ "train_car_link",
+ "timestamp",
+ "passenger_count",
+ "crowd_percentage",
+ "confidence_score",
+ "measurement_method",
+ )
+ list_filter = ("measurement_method", ("timestamp", DateRangeFilter), "train_car__train__line")
+ search_fields = ("train_car__train__train_id",)
+ readonly_fields = ("timestamp",)
+
+ def train_car_link(self, obj):
+ url = reverse("admin:trains_traincar_change", args=[obj.train_car.id])
+ return format_html(
+ '<a href="{}">{} - Car {}</a>',
+ url,
+ obj.train_car.train.train_id,
+ obj.train_car.car_number,
+ )
+
+ train_car_link.short_description = "Train Car"
+
+
+# Register any additional models or customize admin site
+admin.site.site_header = "Train Management System"
+admin.site.site_title = "Train Management System"
+admin.site.index_title = "Administration"
diff --git a/apps/trains/api/filters.py b/apps/trains/api/filters.py
new file mode 100644
index 00000000..ec3d3cee
--- /dev/null
+++ b/apps/trains/api/filters.py
@@ -0,0 +1,14 @@
+# apps/trains/api/filters.py
+
+from django_filters import rest_framework as filters
+from ..models import Train
+
+
+class TrainFilter(filters.FilterSet):
+ line = filters.CharFilter(field_name='line__name', lookup_expr='icontains')
+ status = filters.CharFilter(field_name='status')
+ has_ac = filters.BooleanFilter(field_name='has_air_conditioning')
+
+ class Meta:
+ model = Train
+ fields = ['line', 'status', 'has_ac']
diff --git a/apps/trains/api/pagination.py b/apps/trains/api/pagination.py
new file mode 100644
index 00000000..2c07c208
--- /dev/null
+++ b/apps/trains/api/pagination.py
@@ -0,0 +1,9 @@
+# apps/trains/api/pagination.py
+
+from rest_framework.pagination import PageNumberPagination
+
+
+class TrainPagination(PageNumberPagination):
+ page_size = 20
+ page_size_query_param = 'page_size'
+ max_page_size = 100
diff --git a/apps/trains/api/permissions.py b/apps/trains/api/permissions.py
new file mode 100644
index 00000000..2c3eb7dd
--- /dev/null
+++ b/apps/trains/api/permissions.py
@@ -0,0 +1,13 @@
+# apps/trains/api/permissions.py
+
+from rest_framework import permissions
+
+
+class IsTrainOperator(permissions.BasePermission):
+ def has_permission(self, request, view):
+ return request.user and request.user.has_perm('trains.can_operate_train')
+
+
+class IsScheduleManager(permissions.BasePermission):
+ def has_permission(self, request, view):
+ return request.user and request.user.has_perm('trains.can_manage_schedules')
diff --git a/apps/trains/api/serializers.py b/apps/trains/api/serializers.py
new file mode 100644
index 00000000..460957f1
--- /dev/null
+++ b/apps/trains/api/serializers.py
@@ -0,0 +1,70 @@
+# apps/trains/api/serializers.py
+
+from rest_framework import serializers
+
+from ..models import CrowdMeasurement, Schedule, Train
+
+
+class TrainSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Train
+ fields = "__all__"
+
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data["line_name"] = instance.line.name
+ data["current_station_name"] = instance.current_station.name if instance.current_station else None
+ return data
+
+
+class ScheduleSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Schedule
+ fields = "__all__"
+
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data["station_name"] = instance.station.name
+ data["train_id"] = instance.train.train_id
+ return data
+
+
+class TrainStatusSerializer(serializers.Serializer):
+ train_id = serializers.CharField()
+ current_location = serializers.DictField()
+ speed = serializers.FloatField()
+ status = serializers.CharField()
+ next_station = serializers.CharField()
+ estimated_arrival = serializers.DateTimeField()
+ crowd_level = serializers.CharField()
+
+
+class CrowdLevelSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = CrowdMeasurement
+ fields = ["timestamp", "passenger_count", "crowd_percentage", "confidence_score"]
+
+
+class TrainDetailSerializer(serializers.ModelSerializer):
+ schedule = ScheduleSerializer(many=True, read_only=True, source="schedules")
+ crowd_measurements = serializers.SerializerMethodField()
+
+ class Meta:
+ model = Train
+ fields = [
+ "train_id",
+ "line",
+ "status",
+ "current_station",
+ "next_station",
+ "last_updated",
+ "schedule",
+ "crowd_measurements",
+ ]
+
+ def get_crowd_measurements(self, obj):
+ # CrowdMeasurement is related to TrainCar, not Train, so collect measurements across the train's cars.
+ measurements = CrowdMeasurement.objects.filter(train_car__train=obj)
+ return CrowdLevelSerializer(measurements, many=True).data
+
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data["line_name"] = instance.line.name
+ data["current_station_name"] = instance.current_station.name if instance.current_station else None
+ data["next_station_name"] = instance.next_station.name if instance.next_station else None
+ return data
diff --git a/apps/trains/api/swagger.py b/apps/trains/api/swagger.py
new file mode 100644
index 00000000..2936d9b5
--- /dev/null
+++ b/apps/trains/api/swagger.py
@@ -0,0 +1,26 @@
+# apps/trains/api/swagger.py
+
+from drf_spectacular.types import OpenApiTypes
+from drf_spectacular.utils import OpenApiParameter, extend_schema
+
+from apps.trains.api.serializers import TrainSerializer
+
+train_list_docs = extend_schema(
+ summary="List all trains",
+ description="Get a list of all trains with optional filtering",
+ parameters=[
+ OpenApiParameter(
+ name="line_id",
+ type=OpenApiTypes.STR,
+ location=OpenApiParameter.QUERY,
+ description="Filter trains by line ID",
+ ),
+ OpenApiParameter(
+ name="status",
+ type=OpenApiTypes.STR,
+ location=OpenApiParameter.QUERY,
+ description="Filter trains by status",
+ ),
+ ],
+ responses={200: TrainSerializer(many=True)},
+)
diff --git a/apps/trains/api/urls.py b/apps/trains/api/urls.py
new file mode 100644
index 00000000..d1c84daf
--- /dev/null
+++ b/apps/trains/api/urls.py
@@ -0,0 +1,53 @@
+# apps/trains/api/urls.py
+
+from django.urls import include, path
+from rest_framework.routers import DefaultRouter
+
+from . import views
+
+# Initialize the router
+router = DefaultRouter()
+router.register(r"trains", views.TrainViewSet, basename="train")
+router.register(r"schedules", views.ScheduleViewSet, basename="schedule")
+
+# Define URL patterns
+urlpatterns = [
+ # Include router URLs
+ path("", include(router.urls)),
+ # Train-specific endpoints
+ path("/crowd/", views.TrainCrowdView.as_view(), name="train-crowd"),
+ path("/schedule/", views.TrainScheduleView.as_view(), name="train-schedule"),
+ # Additional train views (if needed)
+ path("list/", views.TrainListView.as_view(), name="train-list-custom"),
+ path("/details/", views.TrainDetailView.as_view(), name="train-detail-custom"),
+]
+
+# API Endpoint Documentation
+"""
+Available Endpoints:
+
+Default Router Endpoints:
+- GET /trains/ - List all trains
+- POST /trains/ - Create a new train
+- GET /trains/{id}/ - Retrieve a train
+- PUT /trains/{id}/ - Update a train
+- PATCH /trains/{id}/ - Partially update a train
+- DELETE /trains/{id}/ - Delete a train
+- GET /schedules/ - List all schedules
+- POST /schedules/ - Create a new schedule
+- GET /schedules/{id}/ - Retrieve a schedule
+- PUT /schedules/{id}/ - Update a schedule
+- PATCH /schedules/{id}/ - Partially update a schedule
+- DELETE /schedules/{id}/ - Delete a schedule
+
+Custom Endpoints:
+- GET /trains/{train_id}/crowd/ - Get crowd information for a train
+- GET /trains/{train_id}/schedule/ - Get schedule for a train
+- GET /trains/list/ - Custom train listing endpoint
+- GET /trains/{train_id}/details/ - Custom train details endpoint
+
+Router Additional Actions:
+- GET /trains/train_status/ - Get train status
+- GET /trains/line_trains/ - Get trains by line
+- GET /schedules/station_schedule/ - Get schedule by station
+"""
diff --git a/apps/trains/api/views.py b/apps/trains/api/views.py
new file mode 100644
index 00000000..c9a0fc5c
--- /dev/null
+++ b/apps/trains/api/views.py
@@ -0,0 +1,385 @@
+# apps/trains/api/views.py
+
+import logging
+
+from django.core.cache import cache
+from django.db.models import Prefetch
+from django.shortcuts import get_object_or_404
+from django.utils import timezone
+from drf_spectacular.utils import OpenApiParameter, extend_schema
+from rest_framework import status, status as drf_status, viewsets
+from rest_framework.decorators import action
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+from ..models import Schedule, Train, TrainCar
+from ..services.crowd_service import CrowdService
+from ..services.train_service import TrainService
+from .filters import TrainFilter
+from .serializers import CrowdLevelSerializer, ScheduleSerializer, TrainDetailSerializer, TrainSerializer
+
+logger = logging.getLogger(__name__)
+
+
+@extend_schema(tags=["Trains"])
+class TrainViewSet(viewsets.ModelViewSet):
+ """
+ ViewSet for managing train operations.
+ Provides CRUD operations and additional actions for train management.
+ """
+
+ serializer_class = TrainSerializer
+ permission_classes = [IsAuthenticated]
+ filterset_class = TrainFilter
+ search_fields = ["train_id", "line__name"]
+ ordering_fields = ["train_id", "status", "last_updated"]
+ train_service = TrainService()
+
+ def get_queryset(self):
+ """Get optimized queryset with related fields"""
+ return (
+ Train.objects.all()
+ .select_related("line", "current_station", "next_station")
+ .prefetch_related(Prefetch("cars", queryset=TrainCar.objects.select_related("train")))
+ )
+
+ @extend_schema(
+ summary="List all trains",
+ parameters=[
+ OpenApiParameter(name="line_id", type=str, description="Filter by line ID"),
+ OpenApiParameter(name="status", type=str, description="Filter by train status"),
+ OpenApiParameter(name="search", type=str, description="Search trains"),
+ OpenApiParameter(name="ordering", type=str, description="Order results"),
+ ],
+ )
+ def list(self, request):
+ """Get list of trains with filtering, searching, and ordering"""
+ try:
+ queryset = self.filter_queryset(self.get_queryset())
+ page = self.paginate_queryset(queryset)
+
+ if page is not None:
+ serializer = self.get_serializer(page, many=True)
+ return self.get_paginated_response(serializer.data)
+
+ serializer = self.get_serializer(queryset, many=True)
+ return Response({"status": "success", "count": queryset.count(), "data": serializer.data})
+ except Exception as e:
+ logger.error(f"Error listing trains: {str(e)}")
+ return Response(
+ {"error": "Failed to retrieve trains"},
+ status=drf_status.HTTP_500_INTERNAL_SERVER_ERROR,
+ )
+
+ @extend_schema(summary="Get train details", responses={200: TrainDetailSerializer})
+ def retrieve(self, request, pk=None):
+ """Get detailed information about a specific train"""
+ try:
+ train = self.get_object()
+ serializer = TrainDetailSerializer(train)
+
+ # Get cached status
+ train_status = self.get_train_status(train.train_id)
+
+ # Get crowd information
+ crowd_service = CrowdService()
+ crowd_info = crowd_service.get_crowd_levels(train)
+
+ return Response(
+ {
+ "status": "success",
+ "data": {
+ **serializer.data,
+ "current_status": train_status,
+ "crowd_info": crowd_info,
+ },
+ }
+ )
+ except Train.DoesNotExist:
+ return Response({"error": "Train not found"}, status=drf_status.HTTP_404_NOT_FOUND)
+ except Exception as e:
+ logger.error(f"Error retrieving train: {str(e)}")
+ return Response(
+ {"error": "Failed to retrieve train details"},
+ status=drf_status.HTTP_500_INTERNAL_SERVER_ERROR,
+ )
+
+ def get_train_status(self, train_id):
+ """Get cached train status with automatic refresh"""
+ cache_key = f"train_status_{train_id}"
+ cached_status = cache.get(cache_key)
+
+ if not cached_status:
+ try:
+ status = self.train_service.calculate_train_status(train_id)
+ cache.set(cache_key, status, timeout=30)
+ return status
+ except Exception as e:
+ logger.error(f"Error calculating train status: {str(e)}")
+ return None
+
+ return cached_status
+
+ @extend_schema(
+ summary="Get train status",
+ parameters=[OpenApiParameter(name="train_id", type=str, required=True)],
+ )
+ @action(detail=False, methods=["GET"])
+ def train_status(self, request):
+ """Get current status of a specific train"""
+ train_id = request.query_params.get("train_id")
+ if not train_id:
+ return Response(
+ {"error": "train_id is required"},
+ status=drf_status.HTTP_400_BAD_REQUEST,
+ )
+
+ try:
+ status_data = self.get_train_status(train_id)
+ if not status_data:
+ return Response(
+ {"error": "Failed to retrieve train status"},
+ status=drf_status.HTTP_404_NOT_FOUND,
+ )
+
+ return Response({"status": "success", "data": status_data, "timestamp": timezone.now()})
+ except Exception as e:
+ logger.error(f"Error getting train status: {str(e)}")
+ return Response(
+ {"error": "Failed to retrieve train status"},
+ status=drf_status.HTTP_500_INTERNAL_SERVER_ERROR,
+ )
+
+ @extend_schema(
+ summary="Get trains by line",
+ parameters=[OpenApiParameter(name="line_id", type=str, required=True)],
+ )
+ @action(detail=False, methods=["GET"])
+ def line_trains(self, request):
+ """Get all trains for a specific line with real-time information"""
+ line_id = request.query_params.get("line_id")
+ if not line_id:
+ return Response({"error": "line_id is required"}, status=drf_status.HTTP_400_BAD_REQUEST)
+
+ try:
+ trains = self.get_queryset().filter(line_id=line_id)
+ serializer = self.get_serializer(trains, many=True)
+
+ # Enhance with real-time data
+ enhanced_data = []
+ for train_data in serializer.data:
+ train_id = train_data["train_id"]
+ status = self.get_train_status(train_id)
+ enhanced_data.append({**train_data, "real_time_status": status})
+
+ return Response(
+ {
+ "status": "success",
+ "count": len(enhanced_data),
+ "data": enhanced_data,
+ "timestamp": timezone.now(),
+ }
+ )
+ except Exception as e:
+ logger.error(f"Error getting line trains: {str(e)}")
+ return Response(
+ {"error": "Failed to retrieve line trains"},
+ status=drf_status.HTTP_500_INTERNAL_SERVER_ERROR,
+ )
+
+
+@extend_schema(tags=["Schedules"])
+class ScheduleViewSet(viewsets.ModelViewSet):
+ """
+ ViewSet for managing train schedules.
+ Provides CRUD operations and additional actions for schedule management.
+ """
+
+ serializer_class = ScheduleSerializer
+ permission_classes = [IsAuthenticated]
+
+ def get_queryset(self):
+ return Schedule.objects.all().select_related("train", "station").order_by("sequence_number")
+
+ @extend_schema(
+ summary="Get station schedule",
+ parameters=[OpenApiParameter(name="station_id", type=str, required=True)],
+ )
+ @action(detail=False, methods=["GET"])
+ def station_schedule(self, request):
+ """Get schedule for a specific station with real-time updates"""
+ station_id = request.query_params.get("station_id")
+ if not station_id:
+ return Response({"error": "station_id is required"}, status=status.HTTP_400_BAD_REQUEST)
+
+ try:
+ schedules = self.get_queryset().filter(station_id=station_id, is_active=True)
+ serializer = self.get_serializer(schedules, many=True)
+
+ return Response(
+ {
+ "status": "success",
+ "count": schedules.count(),
+ "data": serializer.data,
+ "timestamp": timezone.now(),
+ }
+ )
+ except Exception as e:
+ logger.error(f"Error getting station schedule: {str(e)}")
+ return Response(
+ {"error": "Failed to retrieve station schedule"},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ )
+
+
+class TrainCrowdView(APIView):
+ """
+ API View for managing train crowd levels.
+ """
+
+ permission_classes = [IsAuthenticated]
+ serializer_class = CrowdLevelSerializer
+ crowd_service = CrowdService()
+
+ @extend_schema(summary="Get crowd levels", responses={200: CrowdLevelSerializer})
+ def get(self, request, train_id):
+ """Get current crowd levels for a train with historical data"""
+ try:
+ train = get_object_or_404(Train, train_id=train_id)
+
+ # Get current and historical crowd data
+ current_data = self.crowd_service.get_crowd_levels(train)
+ historical_data = self.crowd_service.get_historical_data(train)
+
+ return Response(
+ {
+ "status": "success",
+ "data": {"current": current_data, "historical": historical_data},
+ "timestamp": timezone.now(),
+ }
+ )
+ except Exception as e:
+ logger.error(f"Error getting crowd levels: {str(e)}")
+ return Response(
+ {"error": "Failed to retrieve crowd levels"},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ )
+
+
+@extend_schema(tags=["Trains"])
+class TrainListView(APIView):
+ """
+ API View for custom train listing.
+ """
+
+ permission_classes = [IsAuthenticated]
+ serializer_class = TrainSerializer
+
+ @extend_schema(summary="List trains with custom format", responses={200: TrainSerializer(many=True)})
+ def get(self, request):
+ try:
+ trains = Train.objects.all().select_related("line", "current_station", "next_station")
+ serializer = self.serializer_class(trains, many=True)
+ return Response(
+ {
+ "status": "success",
+ "count": trains.count(),
+ "data": serializer.data,
+ "timestamp": timezone.now(),
+ }
+ )
+ except Exception as e:
+ logger.error(f"Error in custom train list: {str(e)}")
+ return Response({"error": "Failed to retrieve trains"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+
+@extend_schema(tags=["Trains"])
+class TrainDetailView(APIView):
+ """
+ API View for custom train details.
+ """
+
+ permission_classes = [IsAuthenticated]
+ serializer_class = TrainDetailSerializer
+
+ @extend_schema(summary="Get detailed train information", responses={200: TrainDetailSerializer})
+ def get(self, request, train_id):
+ try:
+ train = get_object_or_404(Train, train_id=train_id)
+ serializer = self.serializer_class(train)
+
+ # Get additional information
+ train_service = TrainService()
+ crowd_service = CrowdService()
+
+ return Response(
+ {
+ "status": "success",
+ "data": {
+ **serializer.data,
+ "real_time_status": train_service.calculate_train_status(train_id),
+ "crowd_info": crowd_service.get_crowd_levels(train),
+ },
+ "timestamp": timezone.now(),
+ }
+ )
+ except Train.DoesNotExist:
+ return Response({"error": "Train not found"}, status=status.HTTP_404_NOT_FOUND)
+ except Exception as e:
+ logger.error(f"Error in custom train detail: {str(e)}")
+ return Response(
+ {"error": "Failed to retrieve train details"},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ )
+
+
+@extend_schema(tags=["Schedules"])
+class TrainScheduleView(APIView):
+ """
+ API View for retrieving train schedules.
+ """
+
+ permission_classes = [IsAuthenticated]
+ serializer_class = ScheduleSerializer
+
+ @extend_schema(summary="Get train schedule", responses={200: ScheduleSerializer(many=True)})
+ def get(self, request, train_id):
+ """Get schedule for a specific train"""
+ try:
+ train = get_object_or_404(Train, train_id=train_id)
+
+ # Get train schedules
+ schedules = (
+ Schedule.objects.filter(train=train, is_active=True)
+ .select_related("station")
+ .order_by("sequence_number")
+ )
+
+ serializer = self.serializer_class(schedules, many=True)
+
+ # Get real-time updates
+ train_service = TrainService()
+ current_status = train_service.calculate_train_status(train_id)
+
+ # Calculate estimated arrival times
+ schedule_data = []
+ for schedule in serializer.data:
+ estimated_time = train_service.calculate_estimated_arrival(train_id, schedule["station"])
+ schedule_data.append({**schedule, "estimated_arrival": estimated_time})
+
+ return Response(
+ {
+ "status": "success",
+ "data": {
+ "schedule": schedule_data,
+ "current_status": current_status,
+ "last_updated": timezone.now(),
+ },
+ }
+ )
+ except Train.DoesNotExist:
+ return Response({"error": "Train not found"}, status=status.HTTP_404_NOT_FOUND)
+ except Exception as e:
+ logger.error(f"Error getting train schedule: {str(e)}")
+ return Response(
+ {"error": "Failed to retrieve train schedule"},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ )
diff --git a/apps/trains/apps.py b/apps/trains/apps.py
new file mode 100644
index 00000000..cad67e87
--- /dev/null
+++ b/apps/trains/apps.py
@@ -0,0 +1,8 @@
+# apps/trains/apps.py
+
+from django.apps import AppConfig
+
+
+class TrainsConfig(AppConfig):
+ default_auto_field = "django.db.models.BigAutoField"
+ name = "apps.trains"
diff --git a/apps/trains/constants.py b/apps/trains/constants.py
new file mode 100644
index 00000000..6a907d56
--- /dev/null
+++ b/apps/trains/constants.py
@@ -0,0 +1,105 @@
+CARS_PER_TRAIN = 10
+STATION_STOP_TIME = 120 # seconds
+
+# Line configurations with actual station counts
+LINE_CONFIG = {
+ 'LINE_1': {
+ 'has_ac_percentage': 50,
+ 'total_trains': 30,
+ 'speed_limit': 80,
+ 'station_stop_time': 120,
+ 'total_stations': 35,
+ 'color_code': '#FF0000',
+ 'directions': [
+ ('HELWAN', 'Helwan'),
+ ('MARG', 'El-Marg'),
+ ]
+ },
+ 'LINE_2': {
+ 'has_ac_percentage': 50,
+ 'total_trains': 20,
+ 'speed_limit': 80,
+ 'station_stop_time': 120,
+ 'total_stations': 20,
+ 'color_code': '#0000FF',
+ 'directions': [
+ ('SHOBRA', 'Shobra El Kheima'),
+ ('MONIB', 'El-Monib'),
+ ]
+ },
+ 'LINE_3': {
+ 'has_ac_percentage': 100,
+ 'total_trains': 25,
+ 'speed_limit': 100,
+ 'station_stop_time': 120,
+ 'total_stations': 34,
+ 'color_code': '#00FF00',
+ 'directions': [
+ ('ADLY', 'Adly Mansour'),
+ ('KIT_KAT', 'Kit Kat'),
+ ]
+ }
+}
+
+# Train status choices
+TRAIN_STATUS_CHOICES = [
+ ('IN_SERVICE', 'In Service'),
+ ('DELAYED', 'Delayed'),
+ ('MAINTENANCE', 'Under Maintenance'),
+ ('OUT_OF_SERVICE', 'Out of Service'),
+]
+
+DIRECTION_CHOICES = [
+ ('NORTHBOUND', 'Northbound'),
+ ('SOUTHBOUND', 'Southbound'),
+]
+
+# Train types
+TRAIN_TYPES = [
+ ('AC', 'Air Conditioned'),
+ ('NON_AC', 'Non Air Conditioned')
+]
+
+# Crowd levels with realistic thresholds
+CROWD_LEVELS = {
+ 'EMPTY': (0, 20),
+ 'LIGHT': (21, 40),
+ 'MODERATE': (41, 60),
+ 'CROWDED': (61, 80),
+ 'PACKED': (81, 100),
+}
+
+# Car capacity based on actual metro cars
+CAR_CAPACITY = {
+ 'SEATED': 40,
+ 'STANDING': 140,
+ 'TOTAL': 180,
+ 'CRUSH': 220,
+}
+
+# Time windows for peak hours
+PEAK_HOURS = {
+ 'MORNING': {
+ 'start': '07:00',
+ 'end': '10:00',
+ },
+ 'EVENING': {
+ 'start': '16:00',
+ 'end': '19:00',
+ }
+}
+
+# Average speeds
+AVERAGE_SPEEDS = {
+ 'NORMAL': 60,
+ 'PEAK': 45,
+ 'OFF_PEAK': 70,
+}
+
+# Station dwell times
+DWELL_TIMES = {
+ 'NORMAL': 30,
+ 'INTERCHANGE': 45,
+ 'TERMINAL': 60,
+ 'PEAK_FACTOR': 1.5,
+}
diff --git a/apps/trains/consumers.py b/apps/trains/consumers.py
new file mode 100644
index 00000000..3dbbd9e5
--- /dev/null
+++ b/apps/trains/consumers.py
@@ -0,0 +1,244 @@
+# apps/trains/consumers.py
+
+import json
+import logging
+from channels.generic.websocket import AsyncWebsocketConsumer
+from channels.exceptions import DenyConnection
+from channels.db import database_sync_to_async
+from .services.train_service import TrainService
+from .models import Train
+
+logger = logging.getLogger(__name__)
+
+
+class TrainConsumer(AsyncWebsocketConsumer):
+ """
+ WebSocket consumer for real-time train updates.
+ Handles:
+ - Live train location updates
+ - Crowd level updates
+ - Schedule changes/delays
+ - Service alerts
+ """
+
+ async def connect(self):
+ """Handle WebSocket connection and authentication"""
+ try:
+ # Get train ID from URL route
+ self.train_id = self.scope['url_route']['kwargs']['train_id']
+
+ # Verify train exists
+ train_exists = await self.verify_train()
+ if not train_exists:
+ logger.warning(f"Attempted connection to non-existent train: {self.train_id}")
+ raise DenyConnection()
+
+ # Set up group names for different update types
+ self.base_group = f'train_{self.train_id}'
+ self.location_group = f'{self.base_group}_location'
+ self.crowd_group = f'{self.base_group}_crowd'
+ self.schedule_group = f'{self.base_group}_schedule'
+
+ # Join all relevant groups
+ for group in [self.base_group, self.location_group, self.crowd_group, self.schedule_group]:
+ await self.channel_layer.group_add(group, self.channel_name)
+
+ await self.accept()
+
+ # Send initial data
+ await self.send_initial_data()
+ logger.info(f"WebSocket connection established for train: {self.train_id}")
+
+ except DenyConnection:
+ raise
+ except Exception as e:
+ logger.error(f"Error establishing WebSocket connection: {e}")
+ raise DenyConnection()
+
+ async def disconnect(self, close_code):
+ """Handle WebSocket disconnection"""
+ try:
+ # Leave all groups
+ for group in [self.base_group, self.location_group, self.crowd_group, self.schedule_group]:
+ await self.channel_layer.group_discard(group, self.channel_name)
+
+ logger.info(f"WebSocket connection closed for train: {self.train_id}")
+ except Exception as e:
+ logger.error(f"Error during WebSocket disconnection: {e}")
+
+ async def receive(self, text_data):
+ """Handle incoming WebSocket messages"""
+ try:
+ data = json.loads(text_data)
+ message_type = data.get('type')
+ message_data = data.get('data', {})
+
+ # Handle different types of updates
+ handlers = {
+ 'location_update': self.handle_location_update,
+ 'crowd_update': self.handle_crowd_update,
+ 'schedule_update': self.handle_schedule_update,
+ 'service_alert': self.handle_service_alert,
+ 'request_status': self.handle_status_request
+ }
+
+ handler = handlers.get(message_type)
+ if handler:
+ await handler(message_data)
+ else:
+ logger.warning(f"Unknown message type received: {message_type}")
+
+ except json.JSONDecodeError:
+ logger.error("Invalid JSON received")
+ except Exception as e:
+ logger.error(f"Error processing message: {e}")
+ await self.send_error("Error processing message")
+
+ @database_sync_to_async
+ def verify_train(self):
+ """Verify train exists in database"""
+ try:
+ return Train.objects.filter(train_id=self.train_id).exists()
+ except Exception:
+ return False
+
+ async def send_initial_data(self):
+ """Send initial train status data"""
+ try:
+ train_service = TrainService()
+ status = await database_sync_to_async(train_service.get_train_status)(self.train_id)
+
+ if status:
+ await self.send(text_data=json.dumps({
+ 'type': 'initial_data',
+ 'data': status
+ }))
+ except Exception as e:
+ logger.error(f"Error sending initial data: {e}")
+ await self.send_error("Error fetching initial data")
+
+ async def handle_location_update(self, data):
+ """Handle train location updates"""
+ try:
+ # Validate location data
+ if self.validate_location_data(data):
+ await self.channel_layer.group_send(
+ self.location_group,
+ {
+ 'type': 'location_update',
+ 'data': {
+ 'train_id': self.train_id,
+ 'latitude': data['latitude'],
+ 'longitude': data['longitude'],
+ 'speed': data.get('speed'),
+ 'timestamp': data.get('timestamp')
+ }
+ }
+ )
+ except Exception as e:
+ logger.error(f"Error handling location update: {e}")
+ await self.send_error("Error updating location")
+
+ async def handle_crowd_update(self, data):
+ """Handle crowd level updates"""
+ try:
+ # This will be integrated with AI model later
+ await self.channel_layer.group_send(
+ self.crowd_group,
+ {
+ 'type': 'crowd_update',
+ 'data': {
+ 'train_id': self.train_id,
+ 'car_number': data.get('car_number'),
+ 'passenger_count': data.get('passenger_count'),
+ 'timestamp': data.get('timestamp')
+ }
+ }
+ )
+ except Exception as e:
+ logger.error(f"Error handling crowd update: {e}")
+ await self.send_error("Error updating crowd data")
+
+ async def handle_schedule_update(self, data):
+ """Handle schedule updates"""
+ try:
+ await self.channel_layer.group_send(
+ self.schedule_group,
+ {
+ 'type': 'schedule_update',
+ 'data': {
+ 'train_id': self.train_id,
+ 'status': data.get('status'),
+ 'delay': data.get('delay'),
+ 'next_station': data.get('next_station'),
+ 'estimated_arrival': data.get('estimated_arrival')
+ }
+ }
+ )
+ except Exception as e:
+ logger.error(f"Error handling schedule update: {e}")
+ await self.send_error("Error updating schedule")
+
+ async def handle_service_alert(self, data):
+ """Handle service alerts"""
+ try:
+ await self.channel_layer.group_send(
+ self.base_group,
+ {
+ 'type': 'service_alert',
+ 'data': {
+ 'train_id': self.train_id,
+ 'alert_type': data.get('alert_type'),
+ 'message': data.get('message'),
+ 'severity': data.get('severity', 'info')
+ }
+ }
+ )
+ except Exception as e:
+ logger.error(f"Error handling service alert: {e}")
+ await self.send_error("Error sending service alert")
+
+ async def handle_status_request(self, data):
+ """Handle requests for current train status"""
+ try:
+ train_service = TrainService()
+ status = await database_sync_to_async(train_service.get_train_status)(self.train_id)
+
+ if status:
+ await self.send(text_data=json.dumps({
+ 'type': 'status_update',
+ 'data': status
+ }))
+ except Exception as e:
+ logger.error(f"Error handling status request: {e}")
+ await self.send_error("Error fetching status")
+
+ # Message type handlers
+ async def location_update(self, event):
+ """Send location update to WebSocket"""
+ await self.send(text_data=json.dumps(event['data']))
+
+ async def crowd_update(self, event):
+ """Send crowd update to WebSocket"""
+ await self.send(text_data=json.dumps(event['data']))
+
+ async def schedule_update(self, event):
+ """Send schedule update to WebSocket"""
+ await self.send(text_data=json.dumps(event['data']))
+
+ async def service_alert(self, event):
+ """Send service alert to WebSocket"""
+ await self.send(text_data=json.dumps(event['data']))
+
+ async def send_error(self, message):
+ """Send error message to WebSocket"""
+ await self.send(text_data=json.dumps({
+ 'type': 'error',
+ 'message': message
+ }))
+
+ @staticmethod
+ def validate_location_data(data):
+ """Validate location update data"""
+ required_fields = ['latitude', 'longitude']
+ return all(field in data for field in required_fields)
diff --git a/apps/trains/docs/ai_integration.md b/apps/trains/docs/ai_integration.md
new file mode 100644
index 00000000..11ad1c5b
--- /dev/null
+++ b/apps/trains/docs/ai_integration.md
@@ -0,0 +1,27 @@
+# apps/trains/docs/ai_integration.md
+
+## AI Integration Guide
+
+## Overview
+
+This document describes the integration points between the Metro system and the AI crowd detection service.
+
+## Endpoints
+
+- Crowd Detection: `/api/detect-crowd`
+- Crowd Prediction: `/api/predict-crowd`
+
+## Authentication
+
+Bearer token authentication is required for all AI service endpoints.
+
+## Data Formats
+
+### Request Format
+
+```json
+{
+ "camera_id": "string",
+ "timestamp": "ISO datetime",
+ "image_data": "base64 string"
+}
+```
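+
+### Example Request (illustrative)
+
+A minimal sketch of a detection call, assuming the `requests` package is installed and that
+the service base URL and token come from environment variables (the variable names and the
+image file are placeholders, not part of the documented contract):
+
+```python
+import base64
+import os
+from datetime import datetime, timezone
+from pathlib import Path
+
+import requests
+
+AI_BASE_URL = os.environ["AI_SERVICE_URL"]  # assumed configuration name
+AI_TOKEN = os.environ["AI_SERVICE_TOKEN"]  # assumed configuration name
+
+payload = {
+ "camera_id": "platform_cam_001",
+ "timestamp": datetime.now(timezone.utc).isoformat(),
+ "image_data": base64.b64encode(Path("frame.jpg").read_bytes()).decode(),
+}
+
+response = requests.post(
+ f"{AI_BASE_URL}/api/detect-crowd",
+ json=payload,
+ headers={"Authorization": f"Bearer {AI_TOKEN}"},
+ timeout=10,
+)
+response.raise_for_status()
+print(response.json())  # expected keys: passenger_count, confidence_score, timestamp
+```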
diff --git a/apps/trains/docs/api_requirements.py b/apps/trains/docs/api_requirements.py
new file mode 100644
index 00000000..23120737
--- /dev/null
+++ b/apps/trains/docs/api_requirements.py
@@ -0,0 +1,14 @@
+# apps/trains/docs/api_requirements.py
+
+AI_SERVICE_REQUIREMENTS = {
+ 'endpoints': {
+ 'crowd_detection': '/api/detect-crowd',
+ 'prediction': '/api/predict-crowd'
+ },
+ 'authentication': 'Bearer token',
+ 'response_format': {
+ 'passenger_count': 'integer',
+ 'confidence_score': 'float',
+ 'timestamp': 'ISO format datetime'
+ }
+}
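+
+# Illustrative response conforming to the format above (values are examples only):
+#   {"passenger_count": 87, "confidence_score": 0.92, "timestamp": "2025-01-15T08:30:00+00:00"}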
diff --git a/apps/trains/management/commands/generate_api_docs.py b/apps/trains/management/commands/generate_api_docs.py
new file mode 100644
index 00000000..e805a058
--- /dev/null
+++ b/apps/trains/management/commands/generate_api_docs.py
@@ -0,0 +1,18 @@
+# apps/trains/management/commands/generate_api_docs.py
+
+from django.core.management.base import BaseCommand
+from drf_spectacular.generators import SchemaGenerator
+from drf_spectacular.renderers import OpenAPIRenderer
+
+
+class Command(BaseCommand):
+ help = 'Generate API documentation'
+
+ def handle(self, *args, **options):
+ generator = SchemaGenerator()
+ schema = generator.get_schema()
+ renderer = OpenAPIRenderer()
+ output = renderer.render(schema, None, None)
+
+ with open('docs/api/swagger.yaml', 'w') as f:
+ f.write(output.decode())
diff --git a/apps/trains/management/commands/initialize_trains.py b/apps/trains/management/commands/initialize_trains.py
new file mode 100644
index 00000000..f29ed571
--- /dev/null
+++ b/apps/trains/management/commands/initialize_trains.py
@@ -0,0 +1,193 @@
+import logging
+import random
+
+from django.core.management.base import BaseCommand
+from django.db import transaction
+from django.utils import timezone
+
+from apps.stations.models import Line, Station
+from apps.trains.constants import AVERAGE_SPEEDS, CAR_CAPACITY, CARS_PER_TRAIN, LINE_CONFIG, PEAK_HOURS
+from apps.trains.models import Train
+from apps.trains.models.crowd import TrainCar
+
+logger = logging.getLogger(__name__)
+
+
+class Command(BaseCommand):
+ help = "Initialize trains with realistic data based on actual metro lines"
+
+ def handle(self, *args, **kwargs):
+ with transaction.atomic():
+ self.stdout.write("Initializing trains...")
+
+ # Delete existing trains
+ Train.objects.all().delete()
+
+ # Get all lines and log their names
+ lines = Line.objects.all()
+ self.stdout.write(f"Found lines: {', '.join([line.name for line in lines])}")
+
+ for line in lines:
+ line_name = f"LINE_{line.name}"
+ config = LINE_CONFIG.get(line_name)
+
+ if not config:
+ self.stdout.write(
+ self.style.WARNING(
+ f"No config for line {line.name}. " f'Available configs: {", ".join(LINE_CONFIG.keys())}'
+ )
+ )
+ continue
+
+ # Get actual stations for this line
+ stations = Station.objects.filter(lines=line).order_by("station_lines__order")
+
+ self.stdout.write(f"Processing {line.name}: Found {stations.count()} stations")
+
+ if not stations.exists():
+ self.stdout.write(self.style.WARNING(f"No stations found for {line.name}"))
+ continue
+
+ # Calculate trains needed
+ total_trains = config["total_trains"]
+ ac_trains = int((config["has_ac_percentage"] / 100) * total_trains)
+ station_spacing = max(1, stations.count() // total_trains)
+
+ self.stdout.write(
+ f"Creating {ac_trains} AC and {total_trains - ac_trains} " f"non-AC trains for {line.name}"
+ )
+
+ try:
+ # Create AC trains
+ for i in range(ac_trains):
+ station_index = (i * station_spacing) % stations.count()
+ train = self._create_train(line, i + 1, True, stations, station_index, config)
+ self._create_cars(train, is_peak=self._is_peak_hour())
+
+ # Create non-AC trains
+ for i in range(total_trains - ac_trains):
+ station_index = ((i + ac_trains) * station_spacing) % stations.count()
+ train = self._create_train(line, i + ac_trains + 1, False, stations, station_index, config)
+ self._create_cars(train, is_peak=self._is_peak_hour())
+
+ self.stdout.write(self.style.SUCCESS(f"Created {total_trains} trains for {line.name}"))
+ except Exception as e:
+ self.stdout.write(self.style.ERROR(f"Error creating trains for {line.name}: {str(e)}"))
+
+ def _create_train(self, line, number, has_ac, stations, station_index, config):
+ """Create a train with realistic data"""
+ try:
+ current_station = stations[station_index]
+ next_station = stations[station_index + 1] if station_index < len(stations) - 1 else stations[0]
+
+ # Get direction based on station order
+ direction = self._determine_direction(line, current_station, next_station)
+
+ # Determine status
+ is_peak = self._is_peak_hour()
+ status = "IN_SERVICE" if (is_peak or random.random() > 0.1) else random.choice(["DELAYED", "MAINTENANCE"])
+
+ # Convert and round coordinates to Decimal with 6 decimal places
+ from decimal import ROUND_DOWN, Decimal
+
+ try:
+ lat = Decimal(str(current_station.latitude)).quantize(Decimal("0.000001"), rounding=ROUND_DOWN)
+ lon = Decimal(str(current_station.longitude)).quantize(Decimal("0.000001"), rounding=ROUND_DOWN)
+ except Exception as e:
+ self.stdout.write(self.style.WARNING(f"Error converting coordinates: {e}"))
+ lat = Decimal("0.000000")
+ lon = Decimal("0.000000")
+
+ train = Train.objects.create(
+ train_id=f'{line.name}_{number:03d}_{"AC" if has_ac else "NONAC"}',
+ line=line,
+ has_air_conditioning=has_ac,
+ number_of_cars=CARS_PER_TRAIN,
+ current_station=current_station,
+ next_station=next_station,
+ direction=direction,
+ status=status,
+ speed=self._calculate_speed(config, is_peak),
+ latitude=lat,
+ longitude=lon,
+ last_updated=timezone.now(),
+ )
+
+ self.stdout.write(f"Created train: {train.train_id}")
+ return train
+
+ except Exception as e:
+ self.stdout.write(self.style.ERROR(f"Error creating train: {str(e)}"))
+ raise
+
+ def _create_cars(self, train, is_peak):
+ """Create cars for a train with appropriate capacity settings"""
+ try:
+ cars_created = []
+ for car_number in range(1, train.number_of_cars + 1):
+ # Calculate initial load based on peak hours
+ initial_load = random.randint(0, int(CAR_CAPACITY["TOTAL"] * (0.8 if is_peak else 0.4)))
+
+ car = TrainCar.objects.create(
+ train=train,
+ car_number=car_number,
+ capacity=CAR_CAPACITY["TOTAL"],
+ current_load=initial_load,
+ is_operational=True,
+ )
+ cars_created.append(car)
+
+ self.stdout.write(f"Created {len(cars_created)} cars for train {train.train_id}")
+ return cars_created
+
+ except Exception as e:
+ self.stdout.write(self.style.ERROR(f"Error creating cars for train {train.train_id}: {str(e)}"))
+ return []
+
+ def _is_peak_hour(self):
+ """
+ Determine if current time is during peak hours
+ Returns True if current time is during morning or evening peak hours
+ """
+ current_time = timezone.localtime().time()
+
+ # Convert peak hour strings to time objects
+ morning_start = timezone.datetime.strptime(PEAK_HOURS["MORNING"]["start"], "%H:%M").time()
+ morning_end = timezone.datetime.strptime(PEAK_HOURS["MORNING"]["end"], "%H:%M").time()
+ evening_start = timezone.datetime.strptime(PEAK_HOURS["EVENING"]["start"], "%H:%M").time()
+ evening_end = timezone.datetime.strptime(PEAK_HOURS["EVENING"]["end"], "%H:%M").time()
+
+ # Check if current time is in either morning or evening peak hours
+ is_morning_peak = morning_start <= current_time <= morning_end
+ is_evening_peak = evening_start <= current_time <= evening_end
+
+ return is_morning_peak or is_evening_peak
+
+ def _calculate_speed(self, config, is_peak):
+ """
+ Calculate train speed based on configuration and peak hours
+ """
+ base_speed = config["speed_limit"]
+ if is_peak:
+ return min(AVERAGE_SPEEDS["PEAK"], base_speed)
+ return min(AVERAGE_SPEEDS["NORMAL"], base_speed)
+
+ def _determine_direction(self, line, current_station, next_station):
+ """Determine train direction based on station order"""
+ try:
+ current_order = current_station.get_station_order(line)
+ next_order = next_station.get_station_order(line)
+
+ line_config = LINE_CONFIG[f"LINE_{line.name}"]
+ directions = line_config["directions"]
+
+ # Default to first direction if order comparison fails
+ if current_order is None or next_order is None:
+ return directions[0][0]
+
+ return directions[0][0] if next_order > current_order else directions[1][0]
+
+ except Exception as e:
+ self.stdout.write(self.style.ERROR(f"Error determining direction: {str(e)}"))
+ # Return default direction
+ return LINE_CONFIG[f"LINE_{line.name}"]["directions"][0][0]
diff --git a/apps/trains/models/__init__.py b/apps/trains/models/__init__.py
new file mode 100644
index 00000000..900bd555
--- /dev/null
+++ b/apps/trains/models/__init__.py
@@ -0,0 +1,11 @@
+from .train import Train
+from .schedule import Schedule, ActualSchedule
+from .crowd import TrainCar, CrowdMeasurement
+
+__all__ = [
+ 'Train',
+ 'Schedule',
+ 'ActualSchedule',
+ 'TrainCar',
+ 'CrowdMeasurement'
+]
diff --git a/apps/trains/models/crowd.py b/apps/trains/models/crowd.py
new file mode 100644
index 00000000..1f8334d1
--- /dev/null
+++ b/apps/trains/models/crowd.py
@@ -0,0 +1,123 @@
+# apps/trains/models/crowd.py
+
+from django.core.exceptions import ValidationError
+from django.core.validators import MaxValueValidator, MinValueValidator
+from django.db import models
+from django.utils import timezone
+
+from ..constants import CAR_CAPACITY, CROWD_LEVELS
+
+
+class TrainCar(models.Model):
+ train = models.ForeignKey("Train", on_delete=models.CASCADE, related_name="cars")
+ car_number = models.IntegerField(validators=[MinValueValidator(1), MaxValueValidator(10)])
+ capacity = models.IntegerField(default=CAR_CAPACITY["TOTAL"])
+ current_load = models.IntegerField(default=0, validators=[MinValueValidator(0)])
+ is_operational = models.BooleanField(default=True)
+ last_updated = models.DateTimeField(auto_now=True)
+
+ class Meta:
+ unique_together = ("train", "car_number")
+ ordering = ["car_number"]
+ indexes = [
+ models.Index(fields=["train", "car_number"]),
+ models.Index(fields=["is_operational"]),
+ ]
+
+ def __str__(self):
+ return f"{self.train.train_id} - Car {self.car_number}"
+
+ def clean(self):
+ if self.current_load > self.capacity:
+ raise ValidationError("Current load cannot exceed capacity")
+ if self.current_load < 0:
+ raise ValidationError("Current load cannot be negative")
+
+ def save(self, *args, **kwargs):
+ self.full_clean()
+ super().save(*args, **kwargs)
+
+ # Create crowd measurement record
+ CrowdMeasurement.objects.create(
+ train_car=self,
+ passenger_count=self.current_load,
+ crowd_percentage=self.load_percentage,
+ confidence_score=0.95, # Default high confidence for direct measurements
+ measurement_method="WEIGHT_SENSOR",
+ )
+
+ @property
+ def load_percentage(self):
+ """Calculate the current load percentage"""
+ return (self.current_load / self.capacity) * 100 if self.capacity else 0
+
+ @property
+ def crowd_status(self):
+ """Determine crowd status based on load percentage"""
+ percentage = self.load_percentage
+ for status, (min_val, max_val) in CROWD_LEVELS.items():
+ if min_val <= percentage <= max_val:
+ return status
+ return "UNKNOWN"
+
+ def update_load(self, count, method="WEIGHT_SENSOR", confidence=0.95):
+ """Update passenger count with measurement tracking"""
+ if count > self.capacity:
+ raise ValidationError(f"Count {count} exceeds car capacity {self.capacity}")
+
+ self.current_load = count
+ self.last_updated = timezone.now()
+ self.save()
+
+ return CrowdMeasurement.objects.create(
+ train_car=self,
+ passenger_count=count,
+ crowd_percentage=self.load_percentage,
+ confidence_score=confidence,
+ measurement_method=method,
+ )
+
+
+class CrowdMeasurement(models.Model):
+ """Track crowd measurements over time with different methods"""
+
+ MEASUREMENT_METHODS = [
+ ("AI_CAMERA", "AI Camera Detection"),
+ ("WEIGHT_SENSOR", "Weight Sensor"),
+ ("MANUAL", "Manual Count"),
+ ("ESTIMATED", "AI Estimated"),
+ ]
+
+ train_car = models.ForeignKey(TrainCar, on_delete=models.CASCADE, related_name="measurements")
+ timestamp = models.DateTimeField(auto_now_add=True)
+ passenger_count = models.IntegerField()
+ crowd_percentage = models.FloatField(validators=[MinValueValidator(0), MaxValueValidator(100)])
+ confidence_score = models.FloatField(
+ validators=[MinValueValidator(0), MaxValueValidator(1)],
+ help_text="Confidence level of the measurement (0-1)",
+ )
+ measurement_method = models.CharField(
+ max_length=20, choices=MEASUREMENT_METHODS, help_text="Method used to measure crowd levels"
+ )
+
+ class Meta:
+ indexes = [
+ models.Index(fields=["timestamp"]),
+ models.Index(fields=["train_car", "timestamp"]),
+ models.Index(fields=["measurement_method"]),
+ ]
+ ordering = ["-timestamp"]
+
+ def __str__(self):
+ return f"{self.train_car} - {self.passenger_count} passengers ({self.measurement_method})"
+
+ @property
+ def is_reliable(self):
+ """Check if measurement is considered reliable"""
+ return self.confidence_score >= 0.8
+
+ @classmethod
+ def get_recent_measurements(cls, train_car, minutes=15):
+ """Get recent measurements for a train car"""
+ time_threshold = timezone.now() - timezone.timedelta(minutes=minutes)
+ return cls.objects.filter(train_car=train_car, timestamp__gte=time_threshold).order_by("-timestamp")
diff --git a/apps/trains/models/schedule.py b/apps/trains/models/schedule.py
new file mode 100644
index 00000000..f4f4b192
--- /dev/null
+++ b/apps/trains/models/schedule.py
@@ -0,0 +1,76 @@
+# apps/trains/models/schedule.py
+
+import datetime
+
+from django.core.validators import MinValueValidator
+from django.db import models
+from django.utils import timezone
+
+
+class Schedule(models.Model):
+ DAY_TYPES = [
+ ("WEEKDAY", "Weekday"),
+ ("SATURDAY", "Saturday"),
+ ("SUNDAY", "Sunday"),
+ ("HOLIDAY", "Holiday"),
+ ]
+
+ train = models.ForeignKey("Train", on_delete=models.CASCADE, related_name="schedules")
+ station = models.ForeignKey("stations.Station", on_delete=models.CASCADE)
+ arrival_time = models.TimeField()
+ departure_time = models.TimeField()
+ day_type = models.CharField(max_length=10, choices=DAY_TYPES)
+ sequence_number = models.PositiveIntegerField(validators=[MinValueValidator(1)])
+ is_active = models.BooleanField(default=True) # Added this field
+ last_updated = models.DateTimeField(auto_now=True) # Added this field
+
+ class Meta:
+ ordering = ["sequence_number"]
+ indexes = [
+ models.Index(fields=["train", "day_type"]),
+ models.Index(fields=["station", "arrival_time"]),
+ ]
+
+ def __str__(self):
+ return f"{self.train.train_id} - {self.station.name} ({self.day_type})"
+
+
+class ActualSchedule(models.Model):
+ STATUS_CHOICES = [
+ ("ON_TIME", "On Time"),
+ ("DELAYED", "Delayed"),
+ ("CANCELLED", "Cancelled"),
+ ("SKIPPED", "Station Skipped"),
+ ("DIVERTED", "Train Diverted"),
+ ]
+
+ schedule = models.ForeignKey(Schedule, on_delete=models.CASCADE)
+ actual_arrival = models.DateTimeField(null=True)
+ actual_departure = models.DateTimeField(null=True)
+ delay_minutes = models.IntegerField(default=0)
+ status = models.CharField(max_length=20, choices=STATUS_CHOICES, default="ON_TIME")
+ reason = models.TextField(blank=True, null=True)
+ created_at = models.DateTimeField(auto_now_add=True)
+ updated_at = models.DateTimeField(auto_now=True)
+
+ class Meta:
+ indexes = [
+ models.Index(fields=["schedule", "status"]),
+ models.Index(fields=["created_at"]),
+ ]
+
+ def save(self, *args, **kwargs):
+ # Calculate delay if actual arrival time is provided
+ if self.actual_arrival and self.schedule.arrival_time:
+ scheduled_time = datetime.datetime.combine(self.actual_arrival.date(), self.schedule.arrival_time)
+ scheduled_time = timezone.make_aware(scheduled_time)
+ delay = self.actual_arrival - scheduled_time
+ self.delay_minutes = max(0, int(delay.total_seconds() / 60))
+
+ # Update status based on delay
+ if self.delay_minutes == 0:
+ self.status = "ON_TIME"
+ elif self.delay_minutes > 0:
+ self.status = "DELAYED"
+
+ super().save(*args, **kwargs)
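+
+
+# Example (sketch): how save() derives the delay. Assuming a Schedule row whose
+# arrival_time is 10:00 and an actual arrival of 10:07 on the same day, the create
+# below would store delay_minutes=7 and status="DELAYED". `schedule` and `arrival_dt`
+# are placeholders for existing objects.
+#
+#   ActualSchedule.objects.create(schedule=schedule, actual_arrival=arrival_dt)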
diff --git a/apps/trains/models/train.py b/apps/trains/models/train.py
new file mode 100644
index 00000000..20bbb17c
--- /dev/null
+++ b/apps/trains/models/train.py
@@ -0,0 +1,100 @@
+from decimal import Decimal
+
+from django.core.exceptions import ValidationError
+from django.core.validators import DecimalValidator
+from django.db import models
+from django.utils import timezone
+
+from ..constants import CARS_PER_TRAIN, DIRECTION_CHOICES, LINE_CONFIG, PEAK_HOURS, TRAIN_STATUS_CHOICES, TRAIN_TYPES
+
+
+class Train(models.Model):
+ train_id = models.CharField(max_length=50, unique=True)
+ line = models.ForeignKey("stations.Line", on_delete=models.CASCADE)
+ number_of_cars = models.IntegerField(default=CARS_PER_TRAIN)
+ has_air_conditioning = models.BooleanField(default=False)
+ train_type = models.CharField(max_length=10, choices=TRAIN_TYPES, default="NON_AC")
+ current_station = models.ForeignKey(
+ "stations.Station", on_delete=models.SET_NULL, null=True, related_name="current_trains"
+ )
+ next_station = models.ForeignKey(
+ "stations.Station", on_delete=models.SET_NULL, null=True, related_name="incoming_trains"
+ )
+ direction = models.CharField(max_length=20, choices=DIRECTION_CHOICES)
+ status = models.CharField(max_length=20, choices=TRAIN_STATUS_CHOICES, default="IN_SERVICE")
+ latitude = models.DecimalField(
+ max_digits=9,
+ decimal_places=6,
+ validators=[DecimalValidator(9, 6)],
+ default=Decimal("0.000000"), # Add default value
+ )
+ longitude = models.DecimalField(
+ max_digits=9,
+ decimal_places=6,
+ validators=[DecimalValidator(9, 6)],
+ default=Decimal("0.000000"), # Add default value
+ )
+ speed = models.FloatField(default=0)
+ last_updated = models.DateTimeField(auto_now=True)
+
+ class Meta:
+ indexes = [
+ models.Index(fields=["train_id"]),
+ models.Index(fields=["line", "status"]),
+ models.Index(fields=["current_station"]),
+ ]
+
+ def __str__(self):
+ return f"{self.train_id} ({self.line.name})"
+
+ def clean(self):
+ if self.line_id:
+ line_name = f"LINE_{self.line.name}"
+ line_config = LINE_CONFIG.get(line_name)
+
+ if line_config:
+ # Validate AC requirement
+ if line_config["has_ac_percentage"] == 100 and not self.has_air_conditioning:
+ raise ValidationError(f"All trains on {self.line.name} must have air conditioning")
+
+ # Validate direction
+ valid_directions = [d[0] for d in line_config["directions"]]
+ if self.direction and self.direction not in valid_directions:
+ raise ValidationError(
+ f"Invalid direction for {self.line.name}. " f"Valid choices are: {', '.join(valid_directions)}"
+ )
+
+ # Validate speed
+ if self.speed < 0:
+ raise ValidationError("Speed cannot be negative")
+ if line_config and self.speed > line_config["speed_limit"]:
+ raise ValidationError(f"Speed exceeds line limit of {line_config['speed_limit']} km/h")
+
+ def save(self, *args, **kwargs):
+ self.train_type = "AC" if self.has_air_conditioning else "NON_AC"
+ self.full_clean()
+ super().save(*args, **kwargs)
+
+ def get_next_station_arrival(self):
+ """Calculate estimated arrival time at next station"""
+ if not (self.current_station and self.next_station and self.speed):
+ return None
+
+ distance = self.current_station.distance_to(self.next_station)
+ time_in_hours = distance / (self.speed * 1000) # Convert speed to m/h
+ return timezone.now() + timezone.timedelta(hours=time_in_hours)
+
+ def is_delayed(self):
+ """Check if train is delayed"""
+ return self.status == "DELAYED"
+
+ def is_peak_hour(self):
+ """Check if current time is during peak hours"""
+ current_time = timezone.now().time()
+
+ for period, times in PEAK_HOURS.items():
+ start = timezone.datetime.strptime(times["start"], "%H:%M").time()
+ end = timezone.datetime.strptime(times["end"], "%H:%M").time()
+ if start <= current_time <= end:
+ return True
+ return False
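+
+
+# Example (sketch): Train.save() calls full_clean(), so an out-of-range speed or a
+# direction not listed for the line in LINE_CONFIG raises ValidationError instead of
+# being written to the database. Values below are illustrative only.
+#
+#   train.speed = 250
+#   train.save()  # -> ValidationError("Speed exceeds line limit of ... km/h")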
diff --git a/apps/trains/services/ai_service.py b/apps/trains/services/ai_service.py
new file mode 100644
index 00000000..f45b5451
--- /dev/null
+++ b/apps/trains/services/ai_service.py
@@ -0,0 +1,118 @@
+# apps/trains/services/ai_service.py
+
+import logging
+import aiohttp
+from django.conf import settings
+from django.core.cache import cache
+from django.utils import timezone
+
+logger = logging.getLogger(__name__)
+
+
+class AIService:
+ """
+ Service to interact with external AI model API
+ This service acts as a bridge between your backend and the AI model service
+ """
+
+ def __init__(self):
+ self.api_base_url = settings.AI_SERVICE_URL
+ self.api_key = settings.AI_SERVICE_API_KEY
+ self.cache_timeout = 300 # 5 minutes
+
+ async def process_camera_feed(self, camera_id, image_data):
+ """Process camera feed through external AI service"""
+ try:
+ cache_key = f'camera_processing_{camera_id}'
+
+ # Check cache first
+ cached_result = cache.get(cache_key)
+ if cached_result:
+ return cached_result
+
+ # Prepare the request to the AI service
+ headers = {
+ 'Authorization': f'Bearer {self.api_key}',
+ 'Content-Type': 'application/json'
+ }
+
+ payload = {
+ 'camera_id': camera_id,
+ 'image_data': image_data,
+ 'timestamp': timezone.now().isoformat()
+ }
+
+ # Make request to AI service
+ response = await self._make_api_request(
+ endpoint='/process-crowd',
+ method='POST',
+ data=payload,
+ headers=headers
+ )
+
+ if response and response.get('success'):
+ result = {
+ 'passenger_count': response['passenger_count'],
+ 'confidence_score': response['confidence_score'],
+ 'timestamp': response['timestamp']
+ }
+ cache.set(cache_key, result, self.cache_timeout)
+ return result
+
+ return None
+
+ except Exception as e:
+ logger.error(f"Error processing camera feed: {e}")
+ return None
+
+ async def get_crowd_prediction(self, train_car_id, timestamp):
+ """Get crowd prediction from AI service"""
+ try:
+ cache_key = f'crowd_prediction_{train_car_id}_{timestamp}'
+
+ # Check cache
+ cached_prediction = cache.get(cache_key)
+ if cached_prediction:
+ return cached_prediction
+
+ # Request prediction from AI service
+ response = await self._make_api_request(
+ endpoint='/predict-crowd',
+ method='GET',
+ params={
+ 'train_car_id': train_car_id,
+ 'timestamp': timestamp.isoformat()
+ }
+ )
+
+ if response and response.get('success'):
+ prediction = {
+ 'predicted_count': response['predicted_count'],
+ 'confidence': response['confidence'],
+ 'factors': response.get('factors', [])
+ }
+ cache.set(cache_key, prediction, self.cache_timeout)
+ return prediction
+
+ return None
+
+ except Exception as e:
+ logger.error(f"Error getting crowd prediction: {e}")
+ return None
+
+ async def _make_api_request(self, endpoint, method='GET', data=None, params=None, headers=None):
+ """Make request to AI service API"""
+ try:
+ url = f"{self.api_base_url}{endpoint}"
+
+ async with aiohttp.ClientSession() as session:
+ if method == 'GET':
+ async with session.get(url, params=params, headers=headers) as response:
+ return await response.json()
+ elif method == 'POST':
+ async with session.post(url, json=data, headers=headers) as response:
+ return await response.json()
+
+ except Exception as e:
+ logger.error(f"API request error: {e}")
+ return None
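+
+
+# Example (sketch): the service is async, so callers outside an event loop need asyncio.
+# `image_b64` is a placeholder for base64-encoded frame data; the endpoint names above
+# are assumptions about the external AI service.
+#
+#   import asyncio
+#   result = asyncio.run(AIService().process_camera_feed("cam-1", image_b64))
+#   if result:
+#       print(result["passenger_count"], result["confidence_score"])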
diff --git a/apps/trains/services/crowd_service.py b/apps/trains/services/crowd_service.py
new file mode 100644
index 00000000..b6d024d7
--- /dev/null
+++ b/apps/trains/services/crowd_service.py
@@ -0,0 +1,75 @@
+# apps/trains/services/crowd_service.py
+
+from django.db.models import Avg
+from django.utils import timezone
+from django.core.cache import cache
+from ..models import TrainCar, CrowdMeasurement
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class CrowdService:
+ CACHE_TIMEOUT = 300 # 5 minutes
+
+ @staticmethod
+ def update_crowd_level(train_car_id, passenger_count, method='AI_CAMERA', confidence=0.95):
+ """Update crowd level for a train car"""
+ try:
+ train_car = TrainCar.objects.get(id=train_car_id)
+
+ # Create crowd measurement
+ measurement = CrowdMeasurement.objects.create(
+ train_car=train_car,
+ passenger_count=passenger_count,
+ crowd_percentage=(passenger_count / train_car.capacity) * 100,
+ confidence_score=confidence,
+ measurement_method=method
+ )
+
+ # Update train car current load
+ train_car.current_load = passenger_count
+ train_car.save()
+
+ # Clear cache
+ cache.delete(f'crowd_level_{train_car_id}')
+
+ return measurement
+ except Exception as e:
+ logger.error(f"Error updating crowd level: {e}")
+ raise
+
+ @staticmethod
+ def get_crowd_history(train_car_id, hours=24):
+ """Get crowd level history for a train car"""
+ time_threshold = timezone.now() - timezone.timedelta(hours=hours)
+
+ return CrowdMeasurement.objects.filter(
+ train_car_id=train_car_id,
+ timestamp__gte=time_threshold
+ ).order_by('timestamp')
+
+ @staticmethod
+ def get_line_crowding(line_id):
+ """Get crowding information for an entire line"""
+ return TrainCar.objects.filter(
+ train__line_id=line_id
+ ).aggregate(
+ avg_load=Avg('current_load'),
+ avg_percentage=Avg('current_load') * 100 / Avg('capacity')
+ )
+
+ @staticmethod
+ def predict_crowding(train_car_id, timestamp):
+ """Predict crowd levels for a specific time"""
+ # Get historical data for the same day of week and hour.
+ # Django's __week_day lookup uses 1 (Sunday) .. 7 (Saturday), so convert from Python's weekday().
+ day_of_week = (timestamp.weekday() + 2) % 7 or 7
+ time_of_day = timestamp.time()
+
+ historical_data = CrowdMeasurement.objects.filter(
+ train_car_id=train_car_id,
+ timestamp__week_day=day_of_week,
+ timestamp__hour=time_of_day.hour
+ ).aggregate(Avg('passenger_count'))
+
+ return historical_data['passenger_count__avg'] or 0
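+
+
+# Example (sketch): recording a new AI-camera reading and reading back the last 24 hours
+# of history. The id 42 is only illustrative; any existing TrainCar primary key works.
+#
+#   CrowdService.update_crowd_level(42, passenger_count=180, confidence=0.9)
+#   history = CrowdService.get_crowd_history(42, hours=24)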
diff --git a/apps/trains/services/schedule_service.py b/apps/trains/services/schedule_service.py
new file mode 100644
index 00000000..e88bd76d
--- /dev/null
+++ b/apps/trains/services/schedule_service.py
@@ -0,0 +1,164 @@
+from datetime import datetime, date
+from typing import Optional
+from django.utils import timezone
+from django.core.exceptions import ValidationError
+from django.db.models import QuerySet
+
+from ..models.schedule import Schedule, ActualSchedule
+
+
+class ScheduleService:
+ """Service class for managing train schedules and timing."""
+
+ def get_train_schedule(self, train_id: str, schedule_date: date) -> QuerySet[Schedule]:
+ """
+ Get schedule for specific train and date.
+
+ Args:
+ train_id (str): Train identifier
+ schedule_date (date): Date for schedule
+
+ Returns:
+ QuerySet[Schedule]: Ordered schedule entries
+ """
+ day_type = self._get_day_type(schedule_date)
+ return Schedule.objects.filter(
+ train__train_id=train_id,
+ day_type=day_type,
+ is_active=True
+ ).select_related(
+ 'station'
+ ).order_by('sequence_number')
+
+ def get_station_schedule(
+ self,
+ station_id: int,
+ schedule_date: date
+ ) -> QuerySet[Schedule]:
+ """
+ Get schedule for specific station and date.
+
+ Args:
+ station_id (int): Station identifier
+ schedule_date (date): Date for schedule
+
+ Returns:
+ QuerySet[Schedule]: Ordered schedule entries
+ """
+ day_type = self._get_day_type(schedule_date)
+ return Schedule.objects.filter(
+ station_id=station_id,
+ day_type=day_type,
+ is_active=True
+ ).select_related(
+ 'train'
+ ).order_by('arrival_time')
+
+ def record_actual_arrival(
+ self,
+ schedule_id: int,
+ arrival_time: datetime,
+ reason: Optional[str] = None
+ ) -> ActualSchedule:
+ """
+ Record actual arrival time and calculate delay.
+
+ Args:
+ schedule_id (int): Schedule identifier
+ arrival_time (datetime): Actual arrival time
+ reason (Optional[str]): Reason for delay if any
+
+ Returns:
+ ActualSchedule: Created actual schedule record
+
+ Raises:
+ ValidationError: If arrival time is invalid
+ """
+ try:
+ schedule = Schedule.objects.get(id=schedule_id)
+
+ if arrival_time < timezone.now() - timezone.timedelta(hours=1):
+ raise ValidationError("Arrival time cannot be more than 1 hour in the past")
+
+ actual = ActualSchedule.objects.create(
+ schedule=schedule,
+ actual_arrival=arrival_time,
+ reason=reason
+ )
+
+ self._calculate_delay(actual)
+ return actual
+
+ except Schedule.DoesNotExist:
+ raise ValidationError(f"Schedule with ID {schedule_id} not found")
+
+ def _get_day_type(self, schedule_date: date) -> str:
+ """
+ Determine day type (weekday, weekend, holiday).
+
+ Args:
+ schedule_date (date): Date to check
+
+ Returns:
+ str: Day type identifier
+ """
+ if self._is_holiday(schedule_date):
+ return 'HOLIDAY'
+
+ weekday = schedule_date.weekday()
+ if weekday == 5:
+ return 'SATURDAY'
+ if weekday == 6:
+ return 'SUNDAY'
+ return 'WEEKDAY'
+
+ def _calculate_delay(self, actual_schedule: ActualSchedule) -> None:
+ """
+ Calculate delay in minutes and update status.
+
+ Args:
+ actual_schedule (ActualSchedule): Actual schedule record
+ """
+ scheduled_time = datetime.combine(
+ actual_schedule.actual_arrival.date(),
+ actual_schedule.schedule.arrival_time
+ )
+ scheduled_time = timezone.make_aware(scheduled_time)
+
+ delay = actual_schedule.actual_arrival - scheduled_time
+ delay_minutes = delay.total_seconds() / 60
+
+ actual_schedule.delay_minutes = max(0, int(delay_minutes))
+ actual_schedule.status = self._get_delay_status(delay_minutes)
+ actual_schedule.save(update_fields=['delay_minutes', 'status'])
+
+ def _get_delay_status(self, delay_minutes: float) -> str:
+ """
+ Determine delay status based on minutes.
+
+ Args:
+ delay_minutes (float): Delay in minutes
+
+ Returns:
+ str: Delay status (one of ActualSchedule.STATUS_CHOICES)
+ """
+ # Keep the value within ActualSchedule.STATUS_CHOICES so it fits the model's
+ # status field: delays of up to two minutes still count as on time.
+ if delay_minutes <= 2:
+ return 'ON_TIME'
+ return 'DELAYED'
+
+ def _is_holiday(self, check_date: date) -> bool:
+ """
+ Check if date is a holiday.
+
+ Args:
+ check_date (date): Date to check
+
+ Returns:
+ bool: True if holiday
+ """
+ # Implement holiday checking logic here
+ return False
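+
+
+# Example (sketch): fetching today's stops for a train and recording an actual arrival.
+# "T001" and the first stop are placeholders for existing rows.
+#
+#   service = ScheduleService()
+#   stops = service.get_train_schedule("T001", date.today())
+#   actual = service.record_actual_arrival(stops[0].id, timezone.now())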
diff --git a/apps/trains/services/train_service.py b/apps/trains/services/train_service.py
new file mode 100644
index 00000000..ca14c151
--- /dev/null
+++ b/apps/trains/services/train_service.py
@@ -0,0 +1,193 @@
+# apps/trains/services/train_service.py
+
+from django.db.models import Count, Q
+from django.utils import timezone
+from django.core.cache import cache
+from apps.stations.models import Line
+from ..models import Train, TrainCar
+from ..constants import LINE_CONFIG, CROWD_LEVELS, PEAK_HOURS
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class TrainService:
+ CACHE_TIMEOUT = 300 # 5 minutes
+
+
+    def get_train_status(self, train_id):
+ """Get comprehensive train status with caching"""
+ cache_key = f"train_status_{train_id}"
+ status = cache.get(cache_key)
+
+ if not status:
+ try:
+ train = Train.objects.select_related(
+ "line", "current_station", "next_station"
+ ).get(train_id=train_id)
+
+ status = {
+ "train_id": train.train_id,
+ "line": train.line.name,
+ "current_station": {
+ "name": (
+ train.current_station.name if train.current_station else None
+ ),
+ "arrival_time": self._format_time(train.get_next_station_arrival()),
+ },
+ "next_station": {
+ "name": train.next_station.name if train.next_station else None,
+ "estimated_arrival": self._format_time(
+ train.get_next_station_arrival()
+ ),
+ },
+ "status": train.status,
+ "direction": train.direction,
+ "has_ac": train.has_air_conditioning,
+ "speed": train.speed,
+ "location": {
+ "latitude": float(train.latitude) if train.latitude else None,
+ "longitude": float(train.longitude) if train.longitude else None,
+ },
+ "crowd_levels": self.get_crowd_levels(train),
+ "is_peak_hour": train.is_peak_hour(),
+ "last_updated": train.last_updated.isoformat(),
+ }
+
+ cache.set(cache_key, status, self.CACHE_TIMEOUT)
+ except Train.DoesNotExist:
+ logger.warning(f"Train not found: {train_id}")
+ return None
+ except Exception as e:
+ logger.error(f"Error getting train status: {e}")
+ return None
+
+ return status
+
+
+    def get_crowd_levels(self, train):
+ """Get crowd levels for all cars"""
+ cache_key = f"crowd_levels_{train.id}"
+ crowd_levels = cache.get(cache_key)
+
+ if not crowd_levels:
+ try:
+ cars = TrainCar.objects.filter(train=train)
+ crowd_levels = [
+ {
+ "car_number": car.car_number,
+ "capacity": car.capacity,
+ "current_load": car.current_load,
+ "crowd_percentage": (car.current_load / car.capacity) * 100,
+ "status": self._get_crowd_status(car.current_load, car.capacity),
+ }
+ for car in cars
+ ]
+ cache.set(cache_key, crowd_levels, self.CACHE_TIMEOUT)
+ except Exception as e:
+ logger.error(f"Error getting crowd levels: {e}")
+ return []
+
+ return crowd_levels
+
+
+    def update_train_location(
+        self, train_id, station_id=None, latitude=None, longitude=None, speed=None
+    ):
+ """Update train location and related data"""
+ try:
+ train = Train.objects.get(train_id=train_id)
+
+ if station_id:
+ train.current_station_id = station_id
+
+ if latitude is not None and longitude is not None:
+ train.latitude = latitude
+ train.longitude = longitude
+
+ if speed is not None:
+ train.speed = speed
+
+ train.last_updated = timezone.now()
+ train.save()
+
+ # Invalidate cache
+ cache.delete(f"train_status_{train_id}")
+ cache.delete(f"crowd_levels_{train.id}")
+
+ return self.get_train_status(train_id)
+ except Exception as e:
+ logger.error(f"Error updating train location: {e}")
+ return None
+
+
+    def validate_line_ac_distribution(self, line_id):
+ """Validate AC distribution for a specific line"""
+ try:
+ line_stats = Train.objects.filter(line_id=line_id).aggregate(
+ total_trains=Count("id"),
+ ac_trains=Count("id", filter=Q(has_air_conditioning=True)),
+ )
+
+ line = Line.objects.get(id=line_id)
+ line_name = f"LINE_{line.name}"
+ config = LINE_CONFIG.get(line_name)
+
+ if not config:
+ return True
+
+ required_ac_trains = int(
+ (config["has_ac_percentage"] / 100) * line_stats["total_trains"]
+ )
+
+ return line_stats["ac_trains"] == required_ac_trains
+ except Exception as e:
+ logger.error(f"Error validating AC distribution: {e}")
+ return False
+
+
+    def get_line_trains(self, line_id):
+ """Get all trains for a specific line"""
+ cache_key = f"line_trains_{line_id}"
+ trains_data = cache.get(cache_key)
+
+ if not trains_data:
+ try:
+ trains = Train.objects.filter(line_id=line_id).select_related(
+ "line", "current_station", "next_station"
+ )
+ trains_data = [self.get_train_status(train.train_id) for train in trains]
+ cache.set(cache_key, trains_data, self.CACHE_TIMEOUT)
+ except Exception as e:
+ logger.error(f"Error getting line trains: {e}")
+ return []
+
+ return trains_data
+
+
+    def _get_crowd_status(self, current_load, capacity):
+ """Determine crowd status based on load percentage"""
+ percentage = (current_load / capacity) * 100
+ for status, (min_val, max_val) in CROWD_LEVELS.items():
+ if min_val <= percentage <= max_val:
+ return status
+ return "UNKNOWN"
+
+
+    def _format_time(self, dt):
+ """Format datetime for API response"""
+ return dt.isoformat() if dt else None
+
+
+    def is_peak_hour(self):
+ """Check if current time is during peak hours"""
+ current_time = timezone.now().time()
+
+ for period, times in PEAK_HOURS.items():
+ start = timezone.datetime.strptime(times["start"], "%H:%M").time()
+ end = timezone.datetime.strptime(times["end"], "%H:%M").time()
+ if start <= current_time <= end:
+ return True
+
+ return False
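+
+
+# Example (sketch): typical read/update path. Status is cached for CACHE_TIMEOUT seconds
+# and invalidated by update_train_location(). "T001" and the coordinates are illustrative.
+#
+#   service = TrainService()
+#   status = service.get_train_status("T001")
+#   service.update_train_location("T001", latitude=30.0444, longitude=31.2357, speed=40)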
diff --git a/apps/trains/tests/factories.py b/apps/trains/tests/factories.py
new file mode 100644
index 00000000..e355f0c8
--- /dev/null
+++ b/apps/trains/tests/factories.py
@@ -0,0 +1,60 @@
+import factory
+from factory.django import DjangoModelFactory
+
+from apps.stations.models import Line, Station
+from apps.trains.models import Train, TrainCar
+
+from ..constants import LINE_CONFIG
+
+
+class LineFactory(DjangoModelFactory):
+ class Meta:
+ model = Line
+
+ name = factory.Sequence(lambda n: f"Line {n}")
+ color_code = factory.Sequence(lambda n: f"#FF{n:04d}")
+
+
+class StationFactory(DjangoModelFactory):
+ class Meta:
+ model = Station
+
+ name = factory.Sequence(lambda n: f"Station {n}")
+ latitude = factory.Faker("latitude")
+ longitude = factory.Faker("longitude")
+
+
+class TrainFactory(DjangoModelFactory):
+ class Meta:
+ model = Train
+
+ # LazyAttributeSequence exposes both the built object and a per-factory counter
+ train_id = factory.LazyAttributeSequence(lambda obj, n: f"{obj.line.name}_AC_{n}")
+
+ line = factory.SubFactory(LineFactory)
+ has_air_conditioning = True
+ current_station = factory.SubFactory(StationFactory)
+ next_station = factory.SubFactory(StationFactory)
+
+ @factory.lazy_attribute
+ def direction(self):
+ line_name = f"LINE_{self.line.name}"
+ line_config = LINE_CONFIG.get(line_name, {})
+ directions = line_config.get("directions", [])
+ return directions[0][0] if directions else "HELWAN"
+
+ status = "IN_SERVICE"
+ latitude = factory.Faker("latitude")
+ longitude = factory.Faker("longitude")
+ speed = factory.Faker("pyfloat", min_value=0, max_value=80)
+
+
+class TrainCarFactory(DjangoModelFactory):
+ class Meta:
+ model = TrainCar
+
+ train = factory.SubFactory(TrainFactory)
+ car_number = factory.Sequence(lambda n: n + 1)
+ capacity = 300
+ current_load = factory.Faker("pyint", min_value=0, max_value=300)
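+
+
+# Example (sketch): the factories compose, so one call builds the full object graph
+# (line, stations, train) a test needs.
+#
+#   train = TrainFactory(has_air_conditioning=True)
+#   cars = TrainCarFactory.create_batch(4, train=train)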
diff --git a/apps/trains/tests/test_api.py b/apps/trains/tests/test_api.py
new file mode 100644
index 00000000..a3ac0248
--- /dev/null
+++ b/apps/trains/tests/test_api.py
@@ -0,0 +1,29 @@
+# apps/trains/tests/test_api.py
+
+from django.test import TestCase
+from django.urls import reverse
+from rest_framework.test import APIClient
+from rest_framework import status
+import sys
+import os
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '../../..'))
+from apps.trains.tests.factories import TrainFactory
+
+
+class TrainAPITests(TestCase):
+ def setUp(self):
+ self.client = APIClient()
+ self.train = TrainFactory()
+ self.url = reverse('train-list')
+
+ def test_list_trains(self):
+ response = self.client.get(self.url)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(len(response.data['data']), 1)
+
+ def test_train_detail(self):
+ url = reverse('train-detail', kwargs={'pk': self.train.pk})
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(response.data['data']['train_id'], self.train.train_id)
diff --git a/apps/trains/tests/test_websocket.py b/apps/trains/tests/test_websocket.py
new file mode 100644
index 00000000..cf67ac69
--- /dev/null
+++ b/apps/trains/tests/test_websocket.py
@@ -0,0 +1,13 @@
+# apps/trains/tests/test_websocket.py
+from channels.testing import WebsocketCommunicator
+from django.test import TestCase
+
+from apps.trains.consumers import TrainConsumer
+
+
+class TrainWebsocketTests(TestCase):
+ async def test_websocket_connection(self):
+ communicator = WebsocketCommunicator(TrainConsumer.as_asgi(), "/ws/train/test_train_id/")
+ connected, _ = await communicator.connect()
+ self.assertTrue(connected)
+ await communicator.disconnect()
diff --git a/egypt_metro/__init__.py b/apps/trains/utils/validators.py
similarity index 100%
rename from egypt_metro/__init__.py
rename to apps/trains/utils/validators.py
diff --git a/apps/users/tests.py b/apps/users/tests.py
index 7ce503c2..e69de29b 100644
--- a/apps/users/tests.py
+++ b/apps/users/tests.py
@@ -1,3 +0,0 @@
-from django.test import TestCase
-
-# Create your tests here.
diff --git a/git_dangling.txt b/git_dangling.txt
new file mode 100644
index 00000000..7b609dbd
--- /dev/null
+++ b/git_dangling.txt
@@ -0,0 +1,7 @@
+dangling tree bc8230ff7c8d38b705bb248019616205728aebd4
+dangling tree 4109ddabf7eb7d720f7f1a263fd1d64daaf80569
+dangling tree 0c0c0f7d5ed0d02f016ad38ead0d86211287daea
+dangling tree 680e1f3ba5e14550904388870afb8f39b8e69d1f
+dangling tree e966d4d31891ed258391dc76eea3e4bc2faca808
+dangling blob e375c87b3d246a097b417245cc47efa8e12f3572
+dangling tree c4794c78cc57f29dc4e1b741c45c6e281b12cf88
diff --git a/main b/main
new file mode 100644
index 00000000..e69de29b
diff --git a/manage.py b/manage.py
index 2150cde0..97f8ecac 100644
--- a/manage.py
+++ b/manage.py
@@ -6,7 +6,7 @@
def main():
"""Run administrative tasks."""
- os.environ.setdefault("DJANGO_SETTINGS_MODULE", "egypt_metro.settings")
+ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "metro.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
diff --git a/metro/__init__.py b/metro/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/egypt_metro/asgi.py b/metro/asgi.py
similarity index 72%
rename from egypt_metro/asgi.py
rename to metro/asgi.py
index 5107c66a..9c0ea607 100644
--- a/egypt_metro/asgi.py
+++ b/metro/asgi.py
@@ -1,5 +1,5 @@
"""
-ASGI config for egypt_metro project.
+ASGI config for metro project.
It exposes the ASGI callable as a module-level variable named ``application``.
@@ -11,6 +11,6 @@
from django.core.asgi import get_asgi_application
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "egypt_metro.settings")
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "metro.settings")
application = get_asgi_application()
diff --git a/egypt_metro/context_processors.py b/metro/context_processors.py
similarity index 82%
rename from egypt_metro/context_processors.py
rename to metro/context_processors.py
index 54dd1aef..751c2962 100644
--- a/egypt_metro/context_processors.py
+++ b/metro/context_processors.py
@@ -1,4 +1,4 @@
-# egypt_metro/context_processors.py
+# metro/context_processors.py
# Context processors are functions that run before rendering a template.
def project_name(request):
diff --git a/egypt_metro/settings.py b/metro/settings.py
similarity index 64%
rename from egypt_metro/settings.py
rename to metro/settings.py
index 8e05d099..a705f08d 100644
--- a/egypt_metro/settings.py
+++ b/metro/settings.py
@@ -1,5 +1,5 @@
"""
-Django settings for egypt_metro project.
+Django settings for metro project.
Generated by 'django-admin startproject' using Django 5.1.3.
@@ -10,15 +10,18 @@
https://docs.djangoproject.com/en/5.1/ref/settings/
"""
-# import logging
-from pathlib import Path # File path helper
import os # Operating system dependent functionality
-import dj_database_url # type: ignore # Parse database URLs
-from dotenv import load_dotenv # Load environment variables from .env file
+
+# from decouple import config # Configuration helper
+from datetime import datetime # Date and time utilities
from datetime import timedelta # Time delta for JWT tokens
+
+# import logging
+from pathlib import Path # File path helper
+
+import dj_database_url # type: ignore # Parse database URLs
from corsheaders.defaults import default_headers # Default headers for CORS
-# from decouple import config # Configuration helper
-from datetime import datetime # Date and time utilities
+from dotenv import load_dotenv # Load environment variables from .env file
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent # Base directory for the project
@@ -32,19 +35,18 @@
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.getenv("SECRET_KEY") # Secret key for Django
DEBUG = os.getenv("DEBUG", "False") == "True" # Default to False
-# ALLOWED_HOSTS = config("ALLOWED_HOSTS", default="").split(",")
ALLOWED_HOSTS = [
- '127.0.0.1',
- 'localhost',
- 'backend-54v5.onrender.com', # Your production domain
+ "127.0.0.1",
+ "localhost",
+ "backend-54v5.onrender.com", # Your production domain
]
DEBUG = True
BASE_URL = os.getenv("BASE_URL") # Base URL for the project
JWT_SECRET = os.getenv("JWT_SECRET") # Secret key for JWT tokens
CSRF_TRUSTED_ORIGINS = [
- 'https://backend-54v5.onrender.com',
- 'http://127.0.0.1:8000',
- 'http://localhost:8000',
+ "https://backend-54v5.onrender.com",
+ "http://127.0.0.1:8000",
+ "http://localhost:8000",
]
# Set API start time to the application's boot time
@@ -58,7 +60,6 @@
"django.contrib.sessions", # Sessions framework
"django.contrib.messages", # Messages framework
"django.contrib.staticfiles", # Static files
-
# External packages
"allauth", # Authentication
"allauth.account", # Account management
@@ -67,14 +68,17 @@
"rest_framework", # REST framework
"rest_framework_simplejwt", # JWT authentication
"corsheaders", # CORS headers
- 'drf_yasg', # Swagger
- "constance", # Dynamic settings
+ "drf_yasg", # Swagger
+ "constance", # Dynamic settings
"constance.backends.database", # Database backend for Constance
-
+ "channels", # Channels
+ "import_export", # Import and export data
+ "rangefilter", # Range filter for Django admin
# Custom apps
"apps.users.apps.UsersConfig", # Users app
"apps.stations.apps.StationsConfig", # Stations app
"apps.routes.apps.RoutesConfig", # Routes app
+ "apps.trains.apps.TrainsConfig", # Trains app
]
# Middleware configuration
@@ -91,13 +95,28 @@
"allauth.account.middleware.AccountMiddleware", # Account middleware
]
-ROOT_URLCONF = "egypt_metro.urls" # Root URL configuration
-WSGI_APPLICATION = "egypt_metro.wsgi.application" # WSGI application
+ROOT_URLCONF = "metro.urls" # Root URL configuration
+WSGI_APPLICATION = "metro.wsgi.application" # WSGI application
+
+# AI Model Settings
+AI_MODEL_PATH = "path/to/your/trained/model"
+AI_MODEL_CONFIDENCE_THRESHOLD = 0.8
+
+# AI Service Configuration
+AI_SERVICE_URL = "http://your-ai-service-url/api" # Your friend's AI service URL
+AI_SERVICE_API_KEY = "your-api-key" # API key for authentication
+AI_SERVICE_TIMEOUT = 30 # seconds
+
+# For production Redis
+# if ENVIRONMENT == 'prod':
+# REDIS_HOST = os.getenv('REDIS_HOST', 'your-production-redis-host')
+# REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
+# else:
+# REDIS_HOST = os.getenv('REDIS_HOST', 'redis')
+# REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
# CORS settings
-CORS_ALLOW_ALL_ORIGINS = (
- os.getenv("CORS_ALLOW_ALL_ORIGINS", "False") == "True"
-)
+CORS_ALLOW_ALL_ORIGINS = os.getenv("CORS_ALLOW_ALL_ORIGINS", "False") == "True"
if not CORS_ALLOW_ALL_ORIGINS:
CORS_ALLOWED_ORIGINS = [
"https://backend-54v5.onrender.com",
@@ -133,7 +152,7 @@
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
- "DIRS": [os.path.join(BASE_DIR, 'templates')], # Add template directories here
+ "DIRS": [os.path.join(BASE_DIR, "templates")], # Add template directories here
"APP_DIRS": True, # Enable app templates
"OPTIONS": {
"context_processors": [
@@ -141,8 +160,8 @@
"django.template.context_processors.request", # Request context processor
"django.contrib.auth.context_processors.auth", # Auth context processor
"django.contrib.messages.context_processors.messages", # Messages context processor
- 'django.template.context_processors.static', # Static context processor
- 'egypt_metro.context_processors.project_name',
+ "django.template.context_processors.static", # Static context processor
+ "metro.context_processors.project_name",
],
},
},
@@ -154,6 +173,27 @@
# Custom User Model
AUTH_USER_MODEL = "users.User"
+# Add ASGI application
+ASGI_APPLICATION = "config.asgi.application"
+
+# Add Channel Layers configuration
+CHANNEL_LAYERS = {
+ "default": {
+ "BACKEND": "channels_redis.core.RedisChannelLayer",
+ "CONFIG": {
+ "hosts": [(os.getenv("REDIS_HOST", "127.0.0.1"), 6379)],
+ },
+ },
+}
+
+# API Documentation
+SPECTACULAR_SETTINGS = {
+ "TITLE": "Metro API",
+ "DESCRIPTION": "API documentation for Metro Train System",
+ "VERSION": "1.0.0",
+ "SERVE_INCLUDE_SCHEMA": False,
+}
+
# Parse the DATABASE_URL environment variable
default_db_config = dj_database_url.config(
default=os.getenv("DATABASE_URL"), # Load from .env file or environment
@@ -172,15 +212,15 @@
"PASSWORD": default_db_config.get("PASSWORD", os.getenv("DB_PASSWORD")),
"HOST": default_db_config.get("HOST", os.getenv("DB_HOST")),
"PORT": default_db_config.get("PORT", os.getenv("DB_PORT")),
- "CONN_MAX_AGE": default_db_config.get("CONN_MAX_AGE", 0), # Reuse connections for up to 600 seconds
+ "CONN_MAX_AGE": default_db_config.get("CONN_MAX_AGE", 0), # Reuse connections up to 600 sec
"OPTIONS": {
**default_db_config.get("OPTIONS", {}), # Merge existing options
"options": "-c search_path=public", # Specify the default schema
- 'connect_timeout': 30, # Increase the connection timeout (in seconds)
- 'keepalives': 1, # Enable TCP keepalives
- 'keepalives_idle': 60, # Increase this value
- 'keepalives_interval': 10,
- 'keepalives_count': 5,
+ "connect_timeout": 30, # Increase the connection timeout (in seconds)
+ "keepalives": 1, # Enable TCP keepalives
+ "keepalives_idle": 60, # Increase this value
+ "keepalives_interval": 10,
+ "keepalives_count": 5,
},
"DISABLE_SERVER_SIDE_CURSORS": True, # Optimize for specific queries
}
@@ -193,24 +233,16 @@
# }
# }
-
-# Security Settings General
-# SESSION_COOKIE_HTTPONLY = True # Prevent JavaScript access to session cookies
-# CSRF_COOKIE_HTTPONLY = True # Prevent JavaScript access to CSRF cookies
-# SESSION_COOKIE_SAMESITE = "Lax" # Set SameSite cookie attribute
-# CSRF_COOKIE_SAMESITE = "Lax" # Set SameSite cookie attribute
-# CSRF_TRUSTED_ORIGINS = os.getenv("CSRF_TRUSTED_ORIGINS", "").split(",")
-# SESSION_COOKIE_DOMAIN = os.getenv("SESSION_COOKIE_DOMAIN", None)
-# CSRF_COOKIE_DOMAIN = os.getenv("CSRF_COOKIE_DOMAIN", None)
-
# Enforce additional production-specific settings
if ENVIRONMENT == "prod":
- DATABASES["default"]["OPTIONS"].update({
- "sslmode": "require", # Enforce SSL for secure connections
- })
+ DATABASES["default"]["OPTIONS"].update(
+ {
+ "sslmode": "require", # Enforce SSL for secure connections
+ }
+ )
# Security settings Production
- CSRF_COOKIE_SECURE = True # Ensure CSRF cookies are only sent over HTTPS
- SESSION_COOKIE_SECURE = True # Ensure session cookies are only sent over HTTPS
+ CSRF_COOKIE_SECURE = True # Ensure CSRF cookies are only sent over HTTPS
+ SESSION_COOKIE_SECURE = True # Ensure session cookies are only sent over HTTPS
# SECURE_BROWSER_XSS_FILTER = True # Enable XSS protection for browsers
# SECURE_CONTENT_TYPE_NOSNIFF = True # Prevent content type sniffing
# SECURE_HSTS_SECONDS = 31536000 # 1 year in seconds
@@ -219,7 +251,7 @@
SECURE_SSL_REDIRECT = True # Redirect HTTP to HTTPS
# # Proxy Settings
# USE_X_FORWARDED_HOST = True
- SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
+ SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
# SECURE_REFERRER_POLICY = "same-origin" # Referrer policy
# X_FRAME_OPTIONS = "DENY" # Prevent framing of site content
@@ -234,16 +266,20 @@
AUTH_PASSWORD_VALIDATORS = [
{
- "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", # User attribute similarity validator
+ "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
+ # User attribute similarity validator
},
{
- "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", # Minimum length validator
+ "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
+ # Minimum length validator
},
{
- "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", # Common password validator
+ "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
+ # Common password validator
},
{
- "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", # Numeric password validator
+ "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
+ # Numeric password validator
},
]
@@ -253,33 +289,42 @@
]
REST_FRAMEWORK = {
+ "DEFAULT_VERSIONING_CLASS": "rest_framework.versioning.URLPathVersioning",
+ "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.PageNumberPagination",
+ "PAGE_SIZE": 20,
"DEFAULT_AUTHENTICATION_CLASSES": (
"rest_framework_simplejwt.authentication.JWTAuthentication",
"rest_framework.authentication.TokenAuthentication",
"rest_framework.authentication.SessionAuthentication", # For session-based authentication
- 'rest_framework.authentication.BasicAuthentication',
+ "rest_framework.authentication.BasicAuthentication",
),
"DEFAULT_PERMISSION_CLASSES": (
"rest_framework.permissions.IsAuthenticated", # Default to authenticated users
- 'rest_framework.permissions.AllowAny', # Allow any user
+ "rest_framework.permissions.AllowAny", # Allow any user
),
- 'DEFAULT_RENDERER_CLASSES': [
- 'rest_framework.renderers.JSONRenderer', # Default renderer
- 'rest_framework.renderers.BrowsableAPIRenderer', # Browsable API renderer
+ "DEFAULT_RENDERER_CLASSES": [
+ "rest_framework.renderers.JSONRenderer", # Default renderer
+ "rest_framework.renderers.BrowsableAPIRenderer", # Browsable API renderer
# 'drf_yasg.renderers.SwaggerJSONRenderer', # Swagger JSON renderer
# 'drf_yasg.renderers.OpenAPIRenderer', # OpenAPI renderer
],
+ "DEFAULT_FILTER_BACKENDS": [
+ "django_filters.rest_framework.DjangoFilterBackend",
+ "rest_framework.filters.SearchFilter",
+ "rest_framework.filters.OrderingFilter",
+ ],
"DEFAULT_THROTTLE_CLASSES": [
"rest_framework.throttling.UserRateThrottle",
"rest_framework.throttling.AnonRateThrottle",
],
"DEFAULT_THROTTLE_RATES": {
- 'anon': '60/minute', # Anonymous users can make 60 requests per minute
- 'user': '120/minute', # Authenticated users can make 120 requests per minute
- 'station_lookup': '10/second', # For specific station lookup endpoints
- 'route_planning': '30/minute', # For route and trip planning APIs
- 'ticket_booking': '15/minute', # For ticket booking and QR code generation
+ "anon": "60/minute", # Anonymous users can make 60 requests per minute
+ "user": "120/minute", # Authenticated users can make 120 requests per minute
+ "station_lookup": "10/second", # For specific station lookup endpoints
+ "route_planning": "30/minute", # For route and trip planning APIs
+ "ticket_booking": "15/minute", # For ticket booking and QR code generation
},
+ "DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema",
}
SIMPLE_JWT = {
@@ -293,41 +338,65 @@
LOGS_DIR.mkdir(exist_ok=True)
LOGGING = {
- 'version': 1,
- 'disable_existing_loggers': False,
- 'formatters': {
- 'verbose': {
- 'format': '{levelname} {asctime} {module} {message}',
- 'style': '{',
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "verbose": {
+ "format": "{levelname} {asctime} {module} {message}",
+ "style": "{",
},
},
- 'handlers': {
- 'file': {
- 'level': 'INFO',
- 'class': 'logging.FileHandler',
- 'filename': 'logs/debug.log',
- 'formatter': 'verbose',
+ "handlers": {
+ "file": {
+ "level": "INFO",
+ "class": "logging.FileHandler",
+ "filename": "logs/debug.log",
+ "formatter": "verbose",
},
- 'console': {
- 'level': 'INFO',
- 'class': 'logging.StreamHandler',
- 'formatter': 'verbose',
+ "console": {
+ "level": "INFO",
+ "class": "logging.StreamHandler",
+ "formatter": "verbose",
},
},
- 'loggers': {
- 'apps.stations': {
- 'handlers': ['file', 'console'],
- 'level': 'INFO',
- 'propagate': True,
+ "loggers": {
+ "apps.stations": {
+ "handlers": ["file", "console"],
+ "level": "INFO",
+ "propagate": True,
},
- 'apps.routes': {
- 'handlers': ['file', 'console'],
- 'level': 'INFO',
- 'propagate': True,
+ "apps.routes": {
+ "handlers": ["file", "console"],
+ "level": "INFO",
+ "propagate": True,
},
},
}
+# Add Cache configuration
+# CACHES = {
+# 'default': {
+# 'BACKEND': 'django.core.cache.backends.redis.RedisCache',
+# 'LOCATION': f"redis://{os.getenv('REDIS_HOST', '127.0.0.1')}:6379/1",
+# 'OPTIONS': {
+# 'CLIENT_CLASS': 'django_redis.client.DefaultClient',
+# 'PARSER_CLASS': 'redis.connection.HiredisParser',
+# 'SOCKET_TIMEOUT': 5,
+# 'SOCKET_CONNECT_TIMEOUT': 5,
+# 'CONNECTION_POOL_CLASS': 'redis.connection.BlockingConnectionPool',
+# 'CONNECTION_POOL_CLASS_KWARGS': {
+# 'max_connections': 50,
+# 'timeout': 20,
+# },
+# 'MAX_CONNECTIONS': 1000,
+# 'RETRY_ON_TIMEOUT': True,
+# },
+# }
+# }
+
+# Cache time to live is 15 minutes
+CACHE_TTL = 60 * 15
+
# Cache configuration
if ENVIRONMENT == "prod":
CACHES = {
@@ -339,10 +408,7 @@
else:
CACHES = {
"default": {
- "BACKEND": os.getenv(
- "CACHE_BACKEND",
- "django.core.cache.backends.locmem.LocMemCache"
- ),
+ "BACKEND": os.getenv("CACHE_BACKEND", "django.core.cache.backends.locmem.LocMemCache"),
"LOCATION": os.getenv("CACHE_LOCATION", "unique-snowflake"),
}
}
@@ -357,12 +423,12 @@
# Session Settings
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db" # Cached database session engine
SESSION_CACHE_ALIAS = "default" # Cache alias for sessions
-SESSION_COOKIE_AGE = 3600 # Session cookie age in seconds (1 hour)
+SESSION_COOKIE_AGE = 3600 # Session cookie age in seconds (1 hour)
SESSION_EXPIRE_AT_BROWSER_CLOSE = ENVIRONMENT == "dev" # True for development, False for production
SESSION_SAVE_EVERY_REQUEST = True # Save session data on every request
-HANDLER404 = "egypt_metro.views.custom_404" # Custom 404 handler
-HANDLER500 = "egypt_metro.views.custom_500" # Custom 500 handler
+HANDLER404 = "metro.views.custom_404" # Custom 404 handler
+HANDLER500 = "metro.views.custom_500" # Custom 500 handler
# Internationalization
# https://docs.djangoproject.com/en/5.1/topics/i18n/
@@ -384,14 +450,12 @@
STATIC_URL = "/static/" # URL for static files
-STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles') # Folder where static files will be collected
+STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles") # Folder where static files will be collected
-STATICFILES_STORAGE = (
- "whitenoise.storage.CompressedManifestStaticFilesStorage" # Static files storage
-)
+STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage" # Static files storage
STATICFILES_DIRS = [
- os.path.join(BASE_DIR, 'static'),
+ os.path.join(BASE_DIR, "static"),
]
# Media files (optional, if your project uses media uploads)
diff --git a/egypt_metro/urls.py b/metro/urls.py
similarity index 90%
rename from egypt_metro/urls.py
rename to metro/urls.py
index 33af48e5..4cc00d00 100644
--- a/egypt_metro/urls.py
+++ b/metro/urls.py
@@ -1,5 +1,5 @@
"""
-URL configuration for egypt_metro project.
+URL configuration for metro project.
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/5.1/topics/http/urls/
@@ -14,16 +14,17 @@
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
-# egypt_metro/urls.py
+# metro/urls.py
import logging
from django.contrib import admin
from django.urls import path, include
-from egypt_metro import settings
+from metro import settings
from .views import health_check, home
# from django.conf.urls.static import static
from django.views.generic import RedirectView
from drf_yasg.views import get_schema_view
+from drf_spectacular.views import SpectacularAPIView, SpectacularSwaggerView
from drf_yasg import openapi
from rest_framework.permissions import AllowAny
from django.conf.urls.static import static
@@ -69,6 +70,7 @@
path("api/users/", include("apps.users.urls")), # User
path("api/stations/", include("apps.stations.urls")), # Stations
path('api/routes/', include('apps.routes.urls')), # Routes
+ path('api/trains/', include('apps.trains.urls')), # Trains
# Miscellaneous
path("health/", health_check, name="health_check"), # Health check
@@ -83,6 +85,9 @@
), # Swagger UI
path("redoc/", schema_view.with_ui("redoc", cache_timeout=0), name="schema-redoc"), # ReDoc
+
+ path('api/schema/', SpectacularAPIView.as_view(), name='schema'),
+ path('api/docs/', SpectacularSwaggerView.as_view(url_name='schema'), name='swagger-ui'),
]
if settings.DEBUG:
diff --git a/egypt_metro/views.py b/metro/views.py
similarity index 99%
rename from egypt_metro/views.py
rename to metro/views.py
index 6a5789e2..8d81c64b 100644
--- a/egypt_metro/views.py
+++ b/metro/views.py
@@ -1,4 +1,4 @@
-# egypt_metro/views.py
+# metro/views.py
import logging
import os
diff --git a/egypt_metro/wsgi.py b/metro/wsgi.py
similarity index 72%
rename from egypt_metro/wsgi.py
rename to metro/wsgi.py
index 45edea93..8594c798 100644
--- a/egypt_metro/wsgi.py
+++ b/metro/wsgi.py
@@ -1,5 +1,5 @@
"""
-WSGI config for egypt_metro project.
+WSGI config for metro project.
It exposes the WSGI callable as a module-level variable named ``application``.
@@ -11,6 +11,6 @@
from django.core.wsgi import get_wsgi_application
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "egypt_metro.settings")
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "metro.settings")
application = get_wsgi_application()
diff --git a/pyproject.toml b/pyproject.toml
index d195b33c..9a4485ce 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -129,5 +129,5 @@ build-backend = "poetry.core.masonry.api"
# Add this section to specify the package directory
packages = [
- { include = "egypt_metro" }, # Main package directory
+ { include = "metro" }, # Main package directory
]
diff --git a/recovered_file.txt b/recovered_file.txt
new file mode 100644
index 00000000..e375c87b
--- /dev/null
+++ b/recovered_file.txt
@@ -0,0 +1,74 @@
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v5.0.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: mixed-line-ending
+ args: [--fix=lf]
+ - id: check-yaml
+ - id: check-added-large-files
+ args: [--maxkb=500]
+ - id: check-merge-conflict
+ - id: debug-statements
+ - id: requirements-txt-fixer
+
+- repo: https://github.com/pycqa/flake8
+ rev: 7.1.2
+ hooks:
+ - id: flake8
+ args: [
+ --max-line-length=100,
+ --extend-ignore=E203,
+ --exclude=.git,__pycache__,build,dist
+ ]
+ additional_dependencies: [
+ 'flake8-docstrings',
+ 'flake8-bugbear',
+ 'flake8-comprehensions',
+ ]
+
+- repo: https://github.com/psf/black
+ rev: 25.1.0
+ hooks:
+ - id: black
+ args: [
+ --line-length=100,
+ --target-version=py39,
+ --skip-string-normalization,
+ ]
+ language_version: python3
+
+- repo: https://github.com/pycqa/isort
+ rev: 6.0.0
+ hooks:
+ - id: isort
+ args: [
+ --profile=black,
+ --line-length=100,
+ --multi-line=3,
+ --filter-files,
+ ]
+
+- repo: https://github.com/pre-commit/mirrors-mypy
+ rev: v1.15.0
+ hooks:
+ - id: mypy
+ additional_dependencies: [
+ 'types-all',
+ 'django-stubs',
+ 'djangorestframework-stubs',
+ ]
+ args: [
+ --ignore-missing-imports,
+ --disallow-untyped-defs,
+ --disallow-incomplete-defs,
+ --check-untyped-defs,
+ --disallow-untyped-decorators,
+ --no-implicit-optional,
+ --warn-redundant-casts,
+ --warn-unused-ignores,
+ --warn-return-any,
+ --strict-optional,
+ --strict-equality,
+ ]
\ No newline at end of file
diff --git a/render.yaml b/render.yaml
index ea18a5d4..314e19ca 100644
--- a/render.yaml
+++ b/render.yaml
@@ -14,7 +14,7 @@ services:
poetry install --no-dev && \
python manage.py collectstatic --noinput && \
python manage.py migrate --noinput
- startCommand: gunicorn egypt_metro.wsgi:application --bind 0.0.0.0:$PORT --workers=3 --threads=2 --timeout=120
+ startCommand: gunicorn metro.wsgi:application --bind 0.0.0.0:$PORT --workers=3 --threads=2 --timeout=120
envVars:
- key: ENVIRONMENT # Environment for loading specific config
value: prod
diff --git a/requirements.txt b/requirements.txt
index 1153dd9e..6f039951 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,6 @@
+aiohappyeyeballs==2.4.6
+aiohttp==3.11.12
+aiosignal==1.3.2
anyio==4.7.0
asgiref==3.8.1
asttokens==2.4.1
@@ -6,11 +9,15 @@ atomicwrites==1.4.1
attrs==24.3.0
autopep8==2.3.1
beautifulsoup4==4.12.3
+black==25.1.0
build==1.2.2.post1
CacheControl==0.14.2
cachetools==5.5.0
certifi==2025.1.31
cffi==1.17.1
+cfgv==3.4.0
+channels==4.0.0
+channels-redis==4.1.0
charset-normalizer==3.4.1
cleo==2.1.0
click==8.1.8
@@ -20,26 +27,36 @@ crashtest==0.4.1
cryptography==44.0.0
debugpy==1.8.5
decorator==5.1.1
+diff-match-patch==20241021
distlib==0.3.9
dj-database-url==2.3.0
Django==4.2.18
+django-admin-rangefilter==0.13.2
django-allauth==65.3.0
django-constance==4.1.3
django-cors-headers==4.6.0
django-db-geventpool==4.0.7
django-debug-toolbar==4.4.6
django-environ==0.11.2
+django-extensions==3.2.3
+django-filter==25.1
+django-import-export==4.3.5
django-redis==5.4.0
django-silk==5.3.2
djangorestframework==3.15.2
djangorestframework-simplejwt==5.3.1
docker==7.1.0
+drf-spectacular==0.28.0
drf-yasg==1.21.8
dulwich==0.22.7
executing==2.1.0
+factory_boy==3.3.3
+Faker==36.1.1
fastapi==0.68.2
fastjsonschema==2.21.1
filelock==3.17.0
+flake8==7.1.2
+frozenlist==1.5.0
geographiclib==2.0
geopy==2.4.1
git-filter-repo==2.45.0
@@ -52,35 +69,47 @@ h11==0.14.0
httpcore==0.16.3
httplib2==0.22.0
httpx==0.23.3
+identify==2.6.7
idna==3.10
inflection==0.5.1
iniconfig==2.0.0
installer==0.7.0
ipykernel==6.29.5
ipython==8.27.0
+isort==6.0.0
jaraco.classes==3.4.0
jaraco.context==6.0.1
jaraco.functools==4.1.0
jedi==0.19.1
Jinja2==3.1.5
+jsonschema==4.23.0
+jsonschema-specifications==2024.10.1
jupyter_client==8.6.3
jupyter_core==5.7.2
keyring==25.6.0
MarkupSafe==3.0.2
matplotlib-inline==0.1.7
+mccabe==0.7.0
more-itertools==10.6.0
msgpack==1.1.0
+multidict==6.1.0
+mypy-extensions==1.0.0
nest-asyncio==1.6.0
+nodeenv==1.9.1
+numpy==2.2.3
oauthlib==3.2.2
packaging==24.2
parso==0.8.4
+pathspec==0.12.1
pipdeptree==2.24.0
pkginfo==1.12.0
platformdirs==4.3.6
pluggy==1.5.0
poetry==2.0.1
poetry-core==2.0.1
+pre_commit==4.1.0
prompt_toolkit==3.0.47
+propcache==0.2.1
psutil==6.0.0
psycopg2==2.9.10
psycopg2-binary==2.9.10
@@ -91,6 +120,7 @@ pyasn1_modules==0.4.1
pycodestyle==2.12.1
pycparser==2.22
pydantic==1.10.19
+pyflakes==3.2.0
Pygments==2.18.0
PyJWT==2.10.1
pyparsing==3.1.4
@@ -102,16 +132,18 @@ python-dateutil==2.9.0.post0
python-decouple==3.8
python-dotenv==1.0.1
pytz==2024.2
-pywin32; sys_platform == "win32" and python_version >= "3.6"
+pywin32==308; sys_platform == "win32"
pywin32-ctypes==0.2.3
PyYAML==6.0.2
pyzmq==26.2.0
RapidFuzz==3.12.1
redis==5.2.1
+referencing==0.36.2
requests==2.32.3
requests-oauthlib==2.0.0
requests-toolbelt==1.0.0
rfc3986==1.5.0
+rpds-py==0.22.3
rsa==4.9
shellingham==1.5.4
simplejwt==2.0.1
@@ -121,9 +153,11 @@ soupsieve==2.6
sqlparse==0.5.2
stack-data==0.6.3
starlette==0.14.2
+tablib==3.8.0
toml==0.10.2
tomlkit==0.13.2
tornado==6.4.1
+tqdm==4.67.1
traitlets==5.14.3
trove-classifiers==2025.1.15.22
typing==3.7.4.3
@@ -136,3 +170,4 @@ virtualenv==20.29.1
waitress==3.0.2
wcwidth==0.2.13
whitenoise==6.7.0
+yarl==1.18.3
diff --git a/tash list b/tash list
new file mode 100644
index 00000000..1250959d
--- /dev/null
+++ b/tash list
@@ -0,0 +1,314 @@
+
+ SSUUMMMMAARRYY OOFF LLEESSSS CCOOMMMMAANNDDSS
+
+ Commands marked with * may be preceded by a number, _N.
+ Notes in parentheses indicate the behavior if _N is given.
+ A key preceded by a caret indicates the Ctrl key; thus ^K is ctrl-K.
+
+ h H Display this help.
+ q :q Q :Q ZZ Exit.
+ ---------------------------------------------------------------------------
+
+ MMOOVVIINNGG
+
+ e ^E j ^N CR * Forward one line (or _N lines).
+ y ^Y k ^K ^P * Backward one line (or _N lines).
+ f ^F ^V SPACE * Forward one window (or _N lines).
+ b ^B ESC-v * Backward one window (or _N lines).
+ z * Forward one window (and set window to _N).
+ w * Backward one window (and set window to _N).
+ ESC-SPACE * Forward one window, but don't stop at end-of-file.
+ d ^D * Forward one half-window (and set half-window to _N).
+ u ^U * Backward one half-window (and set half-window to _N).
+ ESC-) RightArrow * Right one half screen width (or _N positions).
+ ESC-( LeftArrow * Left one half screen width (or _N positions).
+ ESC-} ^RightArrow Right to last column displayed.
+ ESC-{ ^LeftArrow Left to first column.
+ F Forward forever; like "tail -f".
+ ESC-F Like F but stop when search pattern is found.
+ r ^R ^L Repaint screen.
+ R Repaint screen, discarding buffered input.
+ ---------------------------------------------------
+ Default "window" is the screen height.
+ Default "half-window" is half of the screen height.
+ ---------------------------------------------------------------------------
+
+ SSEEAARRCCHHIINNGG
+
+ /_p_a_t_t_e_r_n * Search forward for (_N-th) matching line.
+ ?_p_a_t_t_e_r_n * Search backward for (_N-th) matching line.
+ n * Repeat previous search (for _N-th occurrence).
+ N * Repeat previous search in reverse direction.
+ ESC-n * Repeat previous search, spanning files.
+ ESC-N * Repeat previous search, reverse dir. & spanning files.
+ ^O^N ^On * Search forward for (_N-th) OSC8 hyperlink.
+ ^O^P ^Op * Search backward for (_N-th) OSC8 hyperlink.
+ ^O^L ^Ol Jump to the currently selected OSC8 hyperlink.
+ ESC-u Undo (toggle) search highlighting.
+ ESC-U Clear search highlighting.
+ &_p_a_t_t_e_r_n * Display only matching lines.
+ ---------------------------------------------------
+ A search pattern may begin with one or more of:
+ ^N or ! Search for NON-matching lines.
+ ^E or * Search multiple files (pass thru END OF FILE).
+ ^F or @ Start search at FIRST file (for /) or last file (for ?).
+ ^K Highlight matches, but don't move (KEEP position).
+ ^R Don't use REGULAR EXPRESSIONS.
+ ^S _n Search for match in _n-th parenthesized subpattern.
+ ^W WRAP search if no match found.
+ ^L Enter next character literally into pattern.
+ ---------------------------------------------------------------------------
+
+ JJUUMMPPIINNGG
+
+ g < ESC-< * Go to first line in file (or line _N).
+ G > ESC-> * Go to last line in file (or line _N).
+ p % * Go to beginning of file (or _N percent into file).
+ t * Go to the (_N-th) next tag.
+ T * Go to the (_N-th) previous tag.
+ { ( [ * Find close bracket } ) ].
+ } ) ] * Find open bracket { ( [.
+ ESC-^F _<_c_1_> _<_c_2_> * Find close bracket _<_c_2_>.
+ ESC-^B _<_c_1_> _<_c_2_> * Find open bracket _<_c_1_>.
+ ---------------------------------------------------
+ Each "find close bracket" command goes forward to the close bracket
+ matching the (_N-th) open bracket in the top line.
+ Each "find open bracket" command goes backward to the open bracket
+ matching the (_N-th) close bracket in the bottom line.
+
+ m_<_l_e_t_t_e_r_> Mark the current top line with .
+ M_<_l_e_t_t_e_r_> Mark the current bottom line with .
+ '_<_l_e_t_t_e_r_> Go to a previously marked position.
+ '' Go to the previous position.
+ ^X^X Same as '.
+ ESC-m_<_l_e_t_t_e_r_> Clear a mark.
+ ---------------------------------------------------
+ A mark is any upper-case or lower-case letter.
+ Certain marks are predefined:
+ ^ means beginning of the file
+ $ means end of the file
+ ---------------------------------------------------------------------------
+
+ CCHHAANNGGIINNGG FFIILLEESS
+
+ :e [_f_i_l_e] Examine a new file.
+ ^X^V Same as :e.
+ :n * Examine the (_N-th) next file from the command line.
+ :p * Examine the (_N-th) previous file from the command line.
+ :x * Examine the first (or _N-th) file from the command line.
+ ^O^O Open the currently selected OSC8 hyperlink.
+ :d Delete the current file from the command line list.
+ = ^G :f Print current file name.
+ ---------------------------------------------------------------------------
+
+ MMIISSCCEELLLLAANNEEOOUUSS CCOOMMMMAANNDDSS
+
+ -_<_f_l_a_g_> Toggle a command line option [see OPTIONS below].
+ --_<_n_a_m_e_> Toggle a command line option, by name.
+ __<_f_l_a_g_> Display the setting of a command line option.
+ ___<_n_a_m_e_> Display the setting of an option, by name.
+ +_c_m_d Execute the less cmd each time a new file is examined.
+
+ !_c_o_m_m_a_n_d Execute the shell command with $SHELL.
+ #_c_o_m_m_a_n_d Execute the shell command, expanded like a prompt.
+ |XX_c_o_m_m_a_n_d Pipe file between current pos & mark XX to shell command.
+ s _f_i_l_e Save input to a file.
+ v Edit the current file with $VISUAL or $EDITOR.
+ V Print version number of "less".
+ ---------------------------------------------------------------------------
+
+ OOPPTTIIOONNSS
+
+ Most options may be changed either on the command line,
+ or from within less by using the - or -- command.
+ Options may be given in one of two forms: either a single
+ character preceded by a -, or a name preceded by --.
+
+ -? ........ --help
+ Display help (from command line).
+ -a ........ --search-skip-screen
+ Search skips current screen.
+ -A ........ --SEARCH-SKIP-SCREEN
+ Search starts just after target line.
+ -b [_N] .... --buffers=[_N]
+ Number of buffers.
+ -B ........ --auto-buffers
+ Don't automatically allocate buffers for pipes.
+ -c ........ --clear-screen
+ Repaint by clearing rather than scrolling.
+ -d ........ --dumb
+ Dumb terminal.
+ -D xx_c_o_l_o_r . --color=xx_c_o_l_o_r
+ Set screen colors.
+ -e -E .... --quit-at-eof --QUIT-AT-EOF
+ Quit at end of file.
+ -f ........ --force
+ Force open non-regular files.
+ -F ........ --quit-if-one-screen
+ Quit if entire file fits on first screen.
+ -g ........ --hilite-search
+ Highlight only last match for searches.
+ -G ........ --HILITE-SEARCH
+ Don't highlight any matches for searches.
+ -h [_N] .... --max-back-scroll=[_N]
+ Backward scroll limit.
+ -i ........ --ignore-case
+ Ignore case in searches that do not contain uppercase.
+ -I ........ --IGNORE-CASE
+ Ignore case in all searches.
+ -j [_N] .... --jump-target=[_N]
+ Screen position of target lines.
+ -J ........ --status-column
+ Display a status column at left edge of screen.
+ -k _f_i_l_e ... --lesskey-file=_f_i_l_e
+ Use a compiled lesskey file.
+ -K ........ --quit-on-intr
+ Exit less in response to ctrl-C.
+ -L ........ --no-lessopen
+ Ignore the LESSOPEN environment variable.
+ -m -M .... --long-prompt --LONG-PROMPT
+ Set prompt style.
+ -n ......... --line-numbers
+ Suppress line numbers in prompts and messages.
+ -N ......... --LINE-NUMBERS
+ Display line number at start of each line.
+ -o [file] .. --log-file=[file]
+ Copy to log file (standard input only).
+ -O [file] .. --LOG-FILE=[file]
+ Copy to log file (unconditionally overwrite).
+ -p pattern . --pattern=[pattern]
+ Start at pattern (from command line).
+ -P [prompt] --prompt=[prompt]
+ Define new prompt.
+ -q -Q .... --quiet --QUIET --silent --SILENT
+ Quiet the terminal bell.
+ -r -R .... --raw-control-chars --RAW-CONTROL-CHARS
+ Output "raw" control characters.
+ -s ........ --squeeze-blank-lines
+ Squeeze multiple blank lines.
+ -S ........ --chop-long-lines
+ Chop (truncate) long lines rather than wrapping.
+ -t tag .... --tag=[tag]
+ Find a tag.
+ -T [tagsfile] --tag-file=[tagsfile]
+ Use an alternate tags file.
+ -u -U .... --underline-special --UNDERLINE-SPECIAL
+ Change handling of backspaces, tabs and carriage returns.
+ -V ........ --version
+ Display the version number of "less".
+ -w ........ --hilite-unread
+ Highlight first new line after forward-screen.
+ -W ........ --HILITE-UNREAD
+ Highlight first new line after any forward movement.
+ -x [N[,...]] --tabs=[N[,...]]
+ Set tab stops.
+ -X ........ --no-init
+ Don't use termcap init/deinit strings.
+ -y [N] .... --max-forw-scroll=[N]
+ Forward scroll limit.
+ -z [N] .... --window=[N]
+ Set size of window.
+ -" [c[c]] . --quotes=[c[c]]
+ Set shell quote characters.
+ -~ ........ --tilde
+ Don't display tildes after end of file.
+ -# [N] .... --shift=[N]
+ Set horizontal scroll amount (0 = one half screen width).
+
+ --exit-follow-on-close
+ Exit F command on a pipe when writer closes pipe.
+ --file-size
+ Automatically determine the size of the input file.
+ --follow-name
+ The F command changes files if the input file is renamed.
+ --header=[L[,C[,N]]]
+ Use L lines (starting at line N) and C columns as headers.
+ --incsearch
+ Search file as each pattern character is typed in.
+ --intr=[C]
+ Use C instead of ^X to interrupt a read.
+ --lesskey-context=text
+ Use lesskey source file contents.
+ --lesskey-src=file
+ Use a lesskey source file.
+ --line-num-width=[N]
+ Set the width of the -N line number field to N characters.
+ --match-shift=[N]
+ Show at least N characters to the left of a search match.
+ --modelines=[N]
+ Read N lines from the input file and look for vim modelines.
+ --mouse
+ Enable mouse input.
+ --no-keypad
+ Don't send termcap keypad init/deinit strings.
+ --no-histdups
+ Remove duplicates from command history.
+ --no-number-headers
+ Don't give line numbers to header lines.
+ --no-search-header-lines
+ Searches do not include header lines.
+ --no-search-header-columns
+ Searches do not include header columns.
+ --no-search-headers
+ Searches do not include header lines or columns.
+ --no-vbell
+ Disable the terminal's visual bell.
+ --redraw-on-quit
+ Redraw final screen when quitting.
+ --rscroll=[C]
+ Set the character used to mark truncated lines.
+ --save-marks
+ Retain marks across invocations of less.
+ --search-options=[EFKNRW-]
+ Set default options for every search.
+ --show-preproc-errors
+ Display a message if preprocessor exits with an error status.
+ --proc-backspace
+ Process backspaces for bold/underline.
+ --PROC-BACKSPACE
+ Treat backspaces as control characters.
+ --proc-return
+ Delete carriage returns before newline.
+ --PROC-RETURN
+ Treat carriage returns as control characters.
+ --proc-tab
+ Expand tabs to spaces.
+ --PROC-TAB
+ Treat tabs as control characters.
+ --status-col-width=[N]
+ Set the width of the -J status column to N characters.
+ --status-line
+ Highlight or color the entire line containing a mark.
+ --use-backslash
+ Subsequent options use backslash as escape char.
+ --use-color
+ Enables colored text.
+ --wheel-lines=[N]
+ Each click of the mouse wheel moves N lines.
+ --wordwrap
+ Wrap lines at spaces.
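+
+ As an illustration, several of the long options above can be combined in a
+ single invocation (the file name and the particular combination are only
+ an example):
+
+     import subprocess
+
+     # Incremental search, mouse support with 3 lines per wheel click,
+     # and colored text, all taken from the option list above.
+     subprocess.run([
+         "less", "--incsearch", "--mouse", "--wheel-lines=3",
+         "--use-color", "app.log",
+     ])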
+
+
+ ---------------------------------------------------------------------------
+
+ LINE EDITING
+
+ These keys can be used to edit text being entered
+ on the "command line" at the bottom of the screen.
+
+ RightArrow ..................... ESC-l ... Move cursor right one character.
+ LeftArrow ...................... ESC-h ... Move cursor left one character.
+ ctrl-RightArrow ESC-RightArrow ESC-w ... Move cursor right one word.
+ ctrl-LeftArrow ESC-LeftArrow ESC-b ... Move cursor left one word.
+ HOME ........................... ESC-0 ... Move cursor to start of line.
+ END ............................ ESC-$ ... Move cursor to end of line.
+ BACKSPACE ................................ Delete char to left of cursor.
+ DELETE ......................... ESC-x ... Delete char under cursor.
+ ctrl-BACKSPACE ESC-BACKSPACE ........... Delete word to left of cursor.
+ ctrl-DELETE .... ESC-DELETE .... ESC-X ... Delete word under cursor.
+ ctrl-U ......... ESC (MS-DOS only) ....... Delete entire line.
+ UpArrow ........................ ESC-k ... Retrieve previous command line.
+ DownArrow ...................... ESC-j ... Retrieve next command line.
+ TAB ...................................... Complete filename & cycle.
+ SHIFT-TAB ...................... ESC-TAB Complete filename & reverse cycle.
+ ctrl-L ................................... Complete filename, list all.