From 8679dcea69788303ce51b5f4b8fc4a2b01618269 Mon Sep 17 00:00:00 2001 From: Shiv Tyagi Date: Sun, 18 Jan 2026 21:42:29 +0530 Subject: [PATCH 1/4] metadata_manager: add method to get feature defaults from firmware server --- metadata_manager/ap_src_meta_fetcher.py | 73 +++++++++++++++++++++++++ 1 file changed, 73 insertions(+) diff --git a/metadata_manager/ap_src_meta_fetcher.py b/metadata_manager/ap_src_meta_fetcher.py index ea0a604..33bfff0 100644 --- a/metadata_manager/ap_src_meta_fetcher.py +++ b/metadata_manager/ap_src_meta_fetcher.py @@ -478,6 +478,79 @@ def get_build_options_at_commit(self, remote: str, ) return build_options + def get_board_defaults_from_fw_server( + self, + artifacts_url: str, + board_id: str, + vehicle_id: str = None, + ) -> dict: + """ + Fetch board defaults from firmware.ardupilot.org features.txt. + + The features.txt file contains lines like: + - FEATURE_NAME (enabled features) + - !FEATURE_NAME (disabled features) + + Parameters: + artifacts_url (str): Base URL for build artifacts for a version. + board_id (str): Board identifier + vehicle_id (str): Vehicle identifier + (for special handling like Heli) + + Returns: + dict: Dictionary mapping feature define to state + (1 for enabled, 0 for disabled), or None if fetch fails + """ + import requests + + # Heli builds are stored under a separate folder + artifacts_subdir = board_id + if vehicle_id == "Heli": + artifacts_subdir += "-heli" + + features_txt_url = f"{artifacts_url}/{artifacts_subdir}/features.txt" + + try: + response = requests.get(features_txt_url, timeout=30) + response.raise_for_status() + + feature_states = {} + enabled_count = 0 + disabled_count = 0 + + for line in response.text.splitlines(): + line = line.strip() + + # Skip empty lines and comments + if not line or line.startswith('#'): + continue + + # Check if feature is disabled (prefixed with !) 
+ if line.startswith('!'): + feature_name = line[1:].strip() + if feature_name: + feature_states[feature_name] = 0 + disabled_count += 1 + else: + # Enabled feature + if line: + feature_states[line] = 1 + enabled_count += 1 + + self.logger.info( + f"Fetched board defaults from firmware server: " + f"{enabled_count} enabled, " + f"{disabled_count} disabled" + ) + + return feature_states + + except requests.RequestException as e: + self.logger.warning( + f"Failed to fetch board defaults from {features_txt_url}: {e}" + ) + return None + @staticmethod def get_singleton(): return APSourceMetadataFetcher.__singleton From 0114eb55b6ba61edc4d4c74b4da5863bf5d81467 Mon Sep 17 00:00:00 2001 From: Shiv Tyagi Date: Sun, 18 Jan 2026 21:44:51 +0530 Subject: [PATCH 2/4] web: migrate web backend to fastapi --- web/Dockerfile | 9 +- web/api/v1/__init__.py | 4 + web/api/v1/admin.py | 81 +++++++ web/api/v1/builds.py | 225 ++++++++++++++++++ web/api/v1/router.py | 17 ++ web/api/v1/vehicles.py | 253 ++++++++++++++++++++ web/app.py | 404 ------------------------------- web/core/__init__.py | 10 + web/core/config.py | 85 +++++++ web/core/logging_config.py | 85 +++++++ web/core/startup.py | 104 ++++++++ web/main.py | 154 ++++++++++++ web/requirements.txt | 14 +- web/schemas/__init__.py | 54 +++++ web/schemas/admin.py | 12 + web/schemas/builds.py | 65 +++++ web/schemas/vehicles.py | 93 ++++++++ web/services/__init__.py | 15 ++ web/services/admin.py | 115 +++++++++ web/services/builds.py | 397 +++++++++++++++++++++++++++++++ web/services/vehicles.py | 273 +++++++++++++++++++++ web/static/js/add_build.js | 448 ++++++++++++++++++++--------------- web/static/js/index.js | 14 +- web/templates/add_build.html | 16 +- web/templates/error.html | 13 - web/templates/index.html | 8 +- web/ui/__init__.py | 6 + web/ui/router.py | 66 ++++++ web/wsgi.py | 30 --- 29 files changed, 2410 insertions(+), 660 deletions(-) create mode 100644 web/api/v1/__init__.py create mode 100644 web/api/v1/admin.py create mode 100644 web/api/v1/builds.py create mode 100644 web/api/v1/router.py create mode 100644 web/api/v1/vehicles.py delete mode 100755 web/app.py create mode 100644 web/core/__init__.py create mode 100644 web/core/config.py create mode 100644 web/core/logging_config.py create mode 100644 web/core/startup.py create mode 100755 web/main.py create mode 100644 web/schemas/__init__.py create mode 100644 web/schemas/admin.py create mode 100644 web/schemas/builds.py create mode 100644 web/schemas/vehicles.py create mode 100644 web/services/__init__.py create mode 100644 web/services/admin.py create mode 100644 web/services/builds.py create mode 100644 web/services/vehicles.py delete mode 100644 web/templates/error.html create mode 100644 web/ui/__init__.py create mode 100644 web/ui/router.py delete mode 100644 web/wsgi.py diff --git a/web/Dockerfile b/web/Dockerfile index aacb445..fa3c68c 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -1,7 +1,8 @@ FROM python:3.10.16-slim-bookworm RUN apt-get update \ - && apt-get install -y --no-install-recommends git gosu + && apt-get install -y --no-install-recommends git gosu \ + && rm -rf /var/lib/apt/lists/* RUN groupadd -g 999 ardupilot && \ useradd -u 999 -g 999 -m ardupilot --shell /bin/false && \ @@ -12,5 +13,9 @@ COPY --chown=ardupilot:ardupilot . 
/app WORKDIR /app/web RUN pip install --no-cache-dir -r requirements.txt +ENV PYTHONPATH=/app + +EXPOSE 8080 + ENTRYPOINT ["./docker-entrypoint.sh"] -CMD ["gunicorn", "wsgi:application"] +CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8080"] diff --git a/web/api/v1/__init__.py b/web/api/v1/__init__.py new file mode 100644 index 0000000..6cbe7be --- /dev/null +++ b/web/api/v1/__init__.py @@ -0,0 +1,4 @@ +"""API v1 module.""" +from .router import router + +__all__ = ["router"] diff --git a/web/api/v1/admin.py b/web/api/v1/admin.py new file mode 100644 index 0000000..5ce7e89 --- /dev/null +++ b/web/api/v1/admin.py @@ -0,0 +1,81 @@ +from fastapi import APIRouter, HTTPException, Depends, status +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials + +from schemas import RefreshRemotesResponse +from services.admin import get_admin_service, AdminService + + +router = APIRouter(prefix="/admin", tags=["admin"]) +security = HTTPBearer() + + +async def verify_admin_token( + credentials: HTTPAuthorizationCredentials = Depends(security), + admin_service: AdminService = Depends(get_admin_service) +) -> None: + """ + Verify the bearer token for admin authentication. + + Args: + credentials: HTTP authorization credentials from request header + admin_service: Admin service instance + + Raises: + 401: Invalid or missing token + 500: Server configuration error (token not configured) + """ + token = credentials.credentials + try: + if not await admin_service.verify_token(token): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid authentication token" + ) + except RuntimeError as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=str(e) + ) + + +@router.post( + "/refresh_remotes", + response_model=RefreshRemotesResponse, + responses={ + 401: {"description": "Invalid or missing authentication token"}, + 500: { + "description": ( + "Server configuration error (token not configured) " + "or refresh operation failed" + ) + } + } +) +async def refresh_remotes( + _: None = Depends(verify_admin_token), + admin_service: AdminService = Depends(get_admin_service) +): + """ + Trigger a hot reset/refresh of remote metadata. 
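+
+    A minimal example call (illustrative sketch only; it assumes the API is
+    mounted under ``/api`` as configured in ``main.py`` and that the server
+    listens on port 8080):
+
+    ```
+    curl -X POST http://localhost:8080/api/v1/admin/refresh_remotes \
+         -H "Authorization: Bearer $CBS_REMOTES_RELOAD_TOKEN"
+    ```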
+ + This endpoint requires bearer token authentication in the Authorization + header: + ``` + Authorization: Bearer + ``` + + Returns: + RefreshRemotesResponse: List of remotes that were refreshed + + Raises: + 401: Invalid or missing authentication token + 500: Refresh operation failed + """ + try: + remotes = await admin_service.refresh_remotes() + return RefreshRemotesResponse(remotes=remotes) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Failed to refresh remotes: {str(e)}" + ) diff --git a/web/api/v1/builds.py b/web/api/v1/builds.py new file mode 100644 index 0000000..d0f0ab9 --- /dev/null +++ b/web/api/v1/builds.py @@ -0,0 +1,225 @@ +from typing import List, Optional +from fastapi import ( + APIRouter, + HTTPException, + Query, + Path, + status, + Depends, + Request +) +from fastapi.responses import FileResponse, PlainTextResponse + +from schemas import ( + BuildRequest, + BuildSubmitResponse, + BuildOut, +) +from services.builds import get_builds_service, BuildsService +from utils import RateLimitExceededException + +router = APIRouter(prefix="/builds", tags=["builds"]) + + +@router.post( + "", + response_model=BuildSubmitResponse, + status_code=status.HTTP_201_CREATED, + responses={ + 400: {"description": "Invalid build configuration"}, + 404: {"description": "Vehicle, board, or version not found"}, + 429: {"description": "Rate limit exceeded"} + } +) +async def create_build( + build_request: BuildRequest, + request: Request, + service: BuildsService = Depends(get_builds_service) +): + """ + Create a new build request. + + Args: + build_request: Build configuration including vehicle, board, version, + and selected features + + Returns: + Simple response with build_id, URL, and status + + Raises: + 400: Invalid build configuration + 404: Vehicle, board, or version not found + 429: Rate limit exceeded + """ + try: + # Get client IP for rate limiting + forwarded_for = request.headers.get('X-Forwarded-For', None) + if forwarded_for: + client_ip = forwarded_for.split(',')[0].strip() + else: + client_ip = request.client.host if request.client else "unknown" + + return service.create_build(build_request, client_ip) + except RateLimitExceededException as e: + raise HTTPException( + status_code=status.HTTP_429_TOO_MANY_REQUESTS, + detail=str(e) + ) + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=400, detail=str(e)) + + +@router.get("", response_model=List[BuildOut]) +async def list_builds( + vehicle_id: Optional[str] = Query( + None, description="Filter by vehicle ID" + ), + board_id: Optional[str] = Query( + None, description="Filter by board ID" + ), + state: Optional[str] = Query( + None, + description="Filter by build state (PENDING, RUNNING, SUCCESS, " + "FAILURE, CANCELLED)" + ), + limit: int = Query( + 20, ge=1, le=100, description="Maximum number of builds to return" + ), + offset: int = Query( + 0, ge=0, description="Number of builds to skip" + ), + service: BuildsService = Depends(get_builds_service) +): + """ + Get list of builds with optional filters. 
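+
+    Illustrative example (assumes the default ``/api`` mount from ``main.py``;
+    ``copter`` is only a sample vehicle id):
+    ``GET /api/v1/builds?vehicle_id=copter&state=SUCCESS&limit=10``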
+ + Args: + vehicle_id: Filter builds by vehicle + board_id: Filter builds by board + state: Filter builds by current state + limit: Maximum number of results + offset: Number of results to skip (for pagination) + + Returns: + List of builds matching the filters + """ + return service.list_builds( + vehicle_id=vehicle_id, + board_id=board_id, + state=state, + limit=limit, + offset=offset + ) + + +@router.get( + "/{build_id}", + response_model=BuildOut, + responses={ + 404: {"description": "Build not found"} + } +) +async def get_build( + build_id: str = Path(..., description="Unique build identifier"), + service: BuildsService = Depends(get_builds_service) +): + """ + Get details of a specific build. + + Args: + build_id: The unique build identifier + + Returns: + Complete build details including progress and status + + Raises: + 404: Build not found + """ + build = service.get_build(build_id) + if not build: + raise HTTPException( + status_code=404, + detail=f"Build with id '{build_id}' not found" + ) + return build + + +@router.get( + "/{build_id}/logs", + responses={ + 404: {"description": "Build not found or logs not available yet"} + } +) +async def get_build_logs( + build_id: str = Path(..., description="Unique build identifier"), + tail: Optional[int] = Query( + None, ge=1, description="Return only the last N lines" + ), + service: BuildsService = Depends(get_builds_service) +): + """ + Get build logs for a specific build. + + Args: + build_id: The unique build identifier + tail: Optional number of last lines to return + + Returns: + Build logs as text + + Raises: + 404: Build not found + 404: Logs not available yet + """ + logs = service.get_build_logs(build_id, tail) + if logs is None: + raise HTTPException( + status_code=404, + detail=f"Logs not available for build '{build_id}'" + ) + return PlainTextResponse(content=logs) + + +@router.get( + "/{build_id}/artifact", + responses={ + 404: { + "description": ( + "Build not found or artifact not available " + ) + } + } +) +async def download_artifact( + build_id: str = Path(..., description="Unique build identifier"), + service: BuildsService = Depends(get_builds_service) +): + """ + Download the build artifact (firmware binary). + + Args: + build_id: The unique build identifier + + Returns: + Binary file download + + Raises: + 404: Build not found + 404: Artifact not available (build not completed successfully) + """ + artifact_path = service.get_artifact_path(build_id) + if not artifact_path: + raise HTTPException( + status_code=404, + detail=( + f"Artifact not available for build '{build_id}'. " + "Build may not be completed or successful." + ) + ) + return FileResponse( + path=artifact_path, + media_type='application/gzip', + filename=f"{build_id}.tar.gz" + ) diff --git a/web/api/v1/router.py b/web/api/v1/router.py new file mode 100644 index 0000000..9597591 --- /dev/null +++ b/web/api/v1/router.py @@ -0,0 +1,17 @@ +""" +Main API v1 router. + +This module aggregates all v1 API endpoints and provides a single router +to be included in the main FastAPI application. +""" +from fastapi import APIRouter + +from . 
import vehicles, builds, admin + +# Create the main v1 router +router = APIRouter(prefix="/v1") + +# Include all sub-routers +router.include_router(vehicles.router) +router.include_router(builds.router) +router.include_router(admin.router) diff --git a/web/api/v1/vehicles.py b/web/api/v1/vehicles.py new file mode 100644 index 0000000..b5168d2 --- /dev/null +++ b/web/api/v1/vehicles.py @@ -0,0 +1,253 @@ +from typing import List, Optional +from fastapi import APIRouter, Depends, HTTPException, Query, Path + +from schemas import ( + VehicleBase, + VersionOut, + BoardOut, + FeatureOut, +) +from services.vehicles import get_vehicles_service, VehiclesService + +router = APIRouter(prefix="/vehicles", tags=["vehicles"]) + + +@router.get("", response_model=List[VehicleBase]) +async def list_vehicles( + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get list of all available vehicles. + + Returns: + List of vehicles with their IDs and names. + """ + return service.get_all_vehicles() + + +@router.get( + "/{vehicle_id}", + response_model=VehicleBase, + responses={ + 404: {"description": "Vehicle not found"} + } +) +async def get_vehicle( + vehicle_id: str = Path(..., description="Unique vehicle identifier"), + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get a specific vehicle by ID. + + Args: + vehicle_id: The vehicle identifier (e.g., 'copter', 'plane') + + Returns: + Vehicle details + """ + vehicle = service.get_vehicle(vehicle_id) + if not vehicle: + raise HTTPException( + status_code=404, + detail=f"Vehicle with id '{vehicle_id}' not found" + ) + return vehicle + + +# --- Version Endpoints --- +@router.get("/{vehicle_id}/versions", response_model=List[VersionOut]) +async def list_versions( + vehicle_id: str = Path(..., description="Vehicle identifier"), + type: Optional[str] = Query( + None, + description=( + "Filter by version type " + "(beta, stable, latest, tag)" + ) + ), + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get all versions available for a specific vehicle. + + Args: + vehicle_id: The vehicle identifier + type: Optional filter by version type + + Returns: + List of versions for the vehicle + """ + return service.get_versions(vehicle_id, type_filter=type) + + +@router.get( + "/{vehicle_id}/versions/{version_id}", + response_model=VersionOut, + responses={ + 404: {"description": "Version not found for the vehicle"} + } +) +async def get_version( + vehicle_id: str = Path(..., description="Vehicle identifier"), + version_id: str = Path(..., description="Version identifier"), + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get details of a specific version for a vehicle. + + Args: + vehicle_id: The vehicle identifier + version_id: The version identifier + + Returns: + Version details + """ + version = service.get_version(vehicle_id, version_id) + if not version: + raise HTTPException( + status_code=404, + detail=( + f"Version '{version_id}' not found for " + f"vehicle '{vehicle_id}'" + ) + ) + return version + + +# --- Board Endpoints --- +@router.get( + "/{vehicle_id}/versions/{version_id}/boards", + response_model=List[BoardOut], + responses={ + 404: {"description": "No boards found for the vehicle version"} + } +) +async def list_boards( + vehicle_id: str = Path(..., description="Vehicle identifier"), + version_id: str = Path(..., description="Version identifier"), + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get all boards available for a specific vehicle version. 
+ + Args: + vehicle_id: The vehicle identifier + version_id: The version identifier + + Returns: + List of boards for the vehicle version + """ + boards = service.get_boards(vehicle_id, version_id) + if not boards: + raise HTTPException( + status_code=404, + detail=( + f"No boards found for vehicle '{vehicle_id}' and " + f"version '{version_id}'" + ) + ) + + return boards + + +@router.get( + "/{vehicle_id}/versions/{version_id}/boards/{board_id}", + response_model=BoardOut, + responses={ + 404: {"description": "Board not found"} + } +) +async def get_board( + vehicle_id: str = Path(..., description="Vehicle identifier"), + version_id: str = Path(..., description="Version identifier"), + board_id: str = Path(..., description="Board identifier"), + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get details of a specific board for a vehicle version. + + Args: + vehicle_id: The vehicle identifier + version_id: The version identifier + board_id: The board identifier + + Returns: + Board details + """ + board = service.get_board(vehicle_id, version_id, board_id) + if not board: + raise HTTPException( + status_code=404, + detail=f"Board '{board_id}' not found" + ) + return board + + +# --- Feature Endpoints --- +@router.get( + "/{vehicle_id}/versions/{version_id}/boards/{board_id}/features", + response_model=List[FeatureOut] +) +async def list_features( + vehicle_id: str = Path(..., description="Vehicle identifier"), + version_id: str = Path(..., description="Version identifier"), + board_id: str = Path(..., description="Board identifier"), + category_id: Optional[str] = Query( + None, description="Filter by category ID" + ), + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get all features with defaults for a specific vehicle/version/board. + + Args: + vehicle_id: The vehicle identifier + version_id: The version identifier + board_id: The board identifier + category_id: Optional filter by category + + Returns: + List of features with default settings for the board + """ + features = service.get_features( + vehicle_id, version_id, board_id, category_id + ) + return features + + +@router.get( + "/{vehicle_id}/versions/{version_id}/boards/{board_id}/" + "features/{feature_id}", + response_model=FeatureOut, + responses={ + 404: {"description": "Feature not found"} + } +) +async def get_feature( + vehicle_id: str = Path(..., description="Vehicle identifier"), + version_id: str = Path(..., description="Version identifier"), + board_id: str = Path(..., description="Board identifier"), + feature_id: str = Path(..., description="Feature identifier"), + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get details of a specific feature for a vehicle/version/board. 
+ + Args: + vehicle_id: The vehicle identifier + version_id: The version identifier + board_id: The board identifier + feature_id: The feature identifier + + Returns: + Feature details with default settings + """ + feature = service.get_feature( + vehicle_id, version_id, board_id, feature_id + ) + if not feature: + raise HTTPException( + status_code=404, + detail=f"Feature '{feature_id}' not found" + ) + return feature diff --git a/web/app.py b/web/app.py deleted file mode 100755 index 8a4aa43..0000000 --- a/web/app.py +++ /dev/null @@ -1,404 +0,0 @@ -#!/usr/bin/env python3 - -import os -from flask import Flask, render_template, request, send_from_directory, jsonify, redirect -from threading import Thread -import sys -import requests -import signal - -from logging.config import dictConfig - -dictConfig({ - 'version': 1, - 'formatters': {'default': { - 'format': '[%(asctime)s] %(levelname)s in %(module)s: %(message)s', - }}, - 'handlers': {'wsgi': { - 'class': 'logging.StreamHandler', - 'stream': 'ext://flask.logging.wsgi_errors_stream', - 'formatter': 'default' - }}, - 'root': { - 'level': os.getenv('CBS_LOG_LEVEL', default='INFO'), - 'handlers': ['wsgi'] - } -}) - -# let app.py know about the modules in the parent directory -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) -import ap_git -import metadata_manager -import build_manager -from builder import Builder - -# run at lower priority -os.nice(20) - -import optparse -parser = optparse.OptionParser("app.py") - -parser.add_option("", "--basedir", type="string", - default=os.getenv( - key="CBS_BASEDIR", - default=os.path.abspath(os.path.join(os.path.dirname(__file__),"..","base")) - ), - help="base directory") - -cmd_opts, cmd_args = parser.parse_args() - -# define directories -basedir = os.path.abspath(cmd_opts.basedir) -sourcedir = os.path.join(basedir, 'ardupilot') -outdir_parent = os.path.join(basedir, 'artifacts') -workdir_parent = os.path.join(basedir, 'workdir') - -appdir = os.path.dirname(__file__) - -builds_dict = {} -REMOTES = None - -repo = ap_git.GitRepo.clone_if_needed( - source="https://github.com/ardupilot/ardupilot.git", - dest=sourcedir, - recurse_submodules=True, -) - -vehicles_manager = metadata_manager.VehiclesManager() -ap_src_metadata_fetcher = metadata_manager.APSourceMetadataFetcher( - ap_repo=repo, - caching_enabled=True, - redis_host=os.getenv('CBS_REDIS_HOST', default='localhost'), - redis_port=os.getenv('CBS_REDIS_PORT', default='6379'), -) -versions_fetcher = metadata_manager.VersionsFetcher( - remotes_json_path=os.path.join(basedir, 'configs', 'remotes.json'), - ap_repo=repo -) - -manager = build_manager.BuildManager( - outdir=outdir_parent, - redis_host=os.getenv('CBS_REDIS_HOST', default='localhost'), - redis_port=os.getenv('CBS_REDIS_PORT', default='6379') -) -cleaner = build_manager.BuildArtifactsCleaner() -progress_updater = build_manager.BuildProgressUpdater() - -versions_fetcher.start() -cleaner.start() -progress_updater.start() - -# Initialize builder if enabled -builder = None -builder_thread = None -if os.getenv('CBS_ENABLE_INBUILT_BUILDER', default='1') == '1': - builder = Builder( - workdir=workdir_parent, - source_repo=repo - ) - builder_thread = Thread( - target=builder.run, - daemon=True - ) - builder_thread.start() - -app = Flask(__name__, template_folder='templates') - -# Setup graceful shutdown handler -def shutdown_handler(signum=None, frame=None): - """ - Gracefully shutdown all background services. 
- """ - app.logger.info("Shutting down application gracefully...") - - # Stop all TaskRunner instances - versions_fetcher.stop() - cleaner.stop() - progress_updater.stop() - - # Request builder shutdown if it's running - if builder is not None: - builder.shutdown() - - app.logger.info("All background services stopped successfully.") - sys.exit(0) - -# Register signal handlers for graceful shutdown -signal.signal(signal.SIGINT, shutdown_handler) -signal.signal(signal.SIGTERM, shutdown_handler) - -versions_fetcher.reload_remotes_json() -app.logger.info('Python version is: %s' % sys.version) - -def get_auth_token(): - try: - # try to read the secret token from the file - with open(os.path.join(basedir, 'secrets', 'reload_token'), 'r') as file: - token = file.read().strip() - return token - except (FileNotFoundError, PermissionError): - app.logger.error("Couldn't open token file. Checking environment for token.") - # if the file does not exist, check the environment variable - return os.getenv('CBS_REMOTES_RELOAD_TOKEN') - -@app.route('/refresh_remotes', methods=['POST']) -def refresh_remotes(): - auth_token = get_auth_token() - - if auth_token is None: - app.logger.error("Couldn't retrieve authorization token") - return "Internal Server Error", 500 - - token = request.get_json().get('token') - if not token or token != auth_token: - return "Unauthorized", 401 - - versions_fetcher.reload_remotes_json() - return "Successfully refreshed remotes", 200 - -@app.route('/generate', methods=['GET', 'POST']) -def generate(): - try: - version = request.form['version'] - vehicle = request.form['vehicle'] - - version_info = versions_fetcher.get_version_info( - vehicle_id=vehicle, - version_id=version - ) - - if version_info is None: - raise Exception("Version invalid or not listed to be built for given vehicle") - - remote_name = version_info.remote_info.name - commit_ref = version_info.commit_ref - - board = request.form['board'] - boards_at_commit = ap_src_metadata_fetcher.get_boards( - remote=remote_name, - commit_ref=commit_ref, - vehicle_id=vehicle, - ) - if board not in boards_at_commit: - raise Exception("bad board") - - all_features = ap_src_metadata_fetcher.get_build_options_at_commit( - remote=remote_name, - commit_ref=commit_ref - ) - - chosen_defines = { - feature.define - for feature in all_features - if request.form.get(feature.label) == "1" - } - - git_hash = repo.commit_id_for_remote_ref( - remote=remote_name, - commit_ref=commit_ref - ) - - build_info = build_manager.BuildInfo( - vehicle_id=vehicle, - remote_info=version_info.remote_info, - git_hash=git_hash, - board=board, - selected_features=chosen_defines - ) - - forwarded_for = request.headers.get('X-Forwarded-For', None) - if forwarded_for: - client_ip = forwarded_for.split(',')[0].strip() - else: - client_ip = request.remote_addr - - build_id = manager.submit_build( - build_info=build_info, - client_ip=client_ip, - ) - - app.logger.info('Redirecting to /viewlog') - return redirect('/viewlog/'+build_id) - - except Exception as ex: - app.logger.error(ex) - return render_template('error.html', ex=ex) - -@app.route('/add_build') -def add_build(): - app.logger.info('Rendering add_build.html') - return render_template('add_build.html') - - -def filter_build_options_by_category(build_options, category): - return sorted([f for f in build_options if f.category == category], key=lambda x: x.description.lower()) - -def parse_build_categories(build_options): - return sorted(list(set([f.category for f in build_options]))) - -@app.route('/', 
defaults={'token': None}, methods=['GET']) -@app.route('/viewlog/', methods=['GET']) -def home(token): - if token: - app.logger.info("Showing log for build id " + token) - app.logger.info('Rendering index.html') - return render_template('index.html', token=token) - -@app.route("/builds//artifacts/") -def download_file(build_id, name): - path = os.path.join( - basedir, - 'artifacts', - build_id, - ) - app.logger.info('Downloading %s/%s' % (path, name)) - return send_from_directory(path, name, as_attachment=False) - -@app.route("/boards_and_features//", methods=['GET']) -def boards_and_features(vehicle_id, version_id): - version_info = versions_fetcher.get_version_info( - vehicle_id=vehicle_id, - version_id=version_id - ) - - if version_info is None: - return "Bad request. Version not allowed to build for the vehicle.", 400 - - remote_name = version_info.remote_info.name - commit_reference = version_info.commit_ref - - app.logger.info('Board list and build options requested for %s %s' % (vehicle_id, version_id)) - # getting board list for the branch - with repo.get_checkout_lock(): - boards = ap_src_metadata_fetcher.get_boards( - remote=remote_name, - commit_ref=commit_reference, - vehicle_id=vehicle_id, - ) - - options = ap_src_metadata_fetcher.get_build_options_at_commit( - remote=remote_name, - commit_ref=commit_reference - ) # this is a list of Feature() objects defined in build_options.py - - # parse the set of categories from these objects - categories = parse_build_categories(options) - features = [] - for category in categories: - filtered_options = filter_build_options_by_category(options, category) - category_options = [] # options belonging to a given category - for option in filtered_options: - category_options.append({ - 'label' : option.label, - 'description' : option.description, - 'default' : option.default, - 'define' : option.define, - 'dependency' : option.dependency, - }) - features.append({ - 'name' : category, - 'options' : category_options, - }) - # creating result dictionary - result = { - 'boards' : boards, - 'default_board' : boards[0], - 'features' : features, - } - # return jsonified result dict - return jsonify(result) - -@app.route("/get_versions/", methods=['GET']) -def get_versions(vehicle_id): - versions = list() - for version_info in versions_fetcher.get_versions_for_vehicle(vehicle_id=vehicle_id): - if version_info.release_type == "latest": - title = f"Latest ({version_info.remote_info.name})" - else: - title = f"{version_info.release_type} {version_info.version_number} ({version_info.remote_info.name})" - versions.append({ - "title": title, - "id": version_info.version_id, - }) - - return jsonify(sorted(versions, key=lambda x: x['title'])) - -@app.route("/get_vehicles") -def get_vehicles(): - vehicles = [ - {"id": vehicle.id, "name": vehicle.name} - for vehicle in vehicles_manager.get_all_vehicles() - ] - return jsonify(sorted(vehicles, key=lambda x: x['id'])) - -@app.route("/get_defaults///", methods = ['GET']) -def get_deafults(vehicle_id, version_id, board_name): - vehicle = vehicles_manager.get_vehicle_by_id(vehicle_id) - if vehicle is None: - return "Invalid vehicle ID", 400 - # Heli is built on copter boards with -heli suffix - if vehicle_id == "heli": - board_name += "-heli" - - version_info = versions_fetcher.get_version_info( - vehicle_id=vehicle_id, - version_id=version_id - ) - - if version_info is None: - return "Bad request. Version is not allowed for builds for the %s." 
% vehicle.name, 400 - - artifacts_dir = version_info.ap_build_artifacts_url - - if artifacts_dir is None: - return "Couldn't find artifacts for requested release/branch/commit on ardupilot server", 404 - - url_to_features_txt = artifacts_dir + '/' + board_name + '/features.txt' - response = requests.get(url_to_features_txt, timeout=30) - - if not response.status_code == 200: - return ("Could not retrieve features.txt for given vehicle, version and board combination (Status Code: %d, url: %s)" % (response.status_code, url_to_features_txt), response.status_code) - # split response by new line character to get a list of defines - result = response.text.split('\n') - # omit the last two elements as they are always blank - return jsonify(result[:-2]) - -@app.route('/builds', methods=['GET']) -def get_all_builds(): - all_build_ids = manager.get_all_build_ids() - all_build_info = [ - { - **manager.get_build_info(build_id).to_dict(), - 'build_id': build_id - } - for build_id in all_build_ids - ] - - all_build_info_sorted = sorted( - all_build_info, - key=lambda x: x['time_created'], - reverse=True, - ) - - return ( - jsonify(all_build_info_sorted), - 200 - ) - -@app.route('/builds/', methods=['GET']) -def get_build_by_id(build_id): - if not manager.build_exists(build_id): - response = { - 'error': f'build with id {build_id} does not exist.', - } - return jsonify(response), 200 - - response = { - **manager.get_build_info(build_id).to_dict(), - 'build_id': build_id - } - - return jsonify(response), 200 - -if __name__ == '__main__': - app.run() diff --git a/web/core/__init__.py b/web/core/__init__.py new file mode 100644 index 0000000..1028ac4 --- /dev/null +++ b/web/core/__init__.py @@ -0,0 +1,10 @@ +""" +Core application components. +""" +from .config import get_settings +from .startup import initialize_application + +__all__ = [ + "get_settings", + "initialize_application", +] diff --git a/web/core/config.py b/web/core/config.py new file mode 100644 index 0000000..f322b0e --- /dev/null +++ b/web/core/config.py @@ -0,0 +1,85 @@ +""" +Application configuration and settings. 
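+
+Settings are read from ``CBS_*`` environment variables with defaults that
+mirror the previous Flask app; ``get_settings()`` returns a single cached
+instance via ``functools.lru_cache``.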
+""" +import os +from pathlib import Path +from functools import lru_cache + + +class Settings: + """Application settings.""" + + def __init__(self): + # Application + self.app_name: str = "CustomBuild API" + self.app_version: str = "1.0.0" + self.debug: bool = False + + # Paths + self.base_dir: str = os.getenv( + "CBS_BASEDIR", + default=str(Path(__file__).parent.parent.parent.parent / "base") + ) + + # Redis + self.redis_host: str = os.getenv( + 'CBS_REDIS_HOST', + default='localhost' + ) + self.redis_port: str = os.getenv( + 'CBS_REDIS_PORT', + default='6379' + ) + + # Logging + self.log_level: str = os.getenv('CBS_LOG_LEVEL', default='INFO') + + # ArduPilot Git Repository + self.ap_git_url: str = "https://github.com/ardupilot/ardupilot.git" + + @property + def source_dir(self) -> str: + """ArduPilot source directory.""" + return os.path.join(self.base_dir, 'ardupilot') + + @property + def artifacts_dir(self) -> str: + """Build artifacts directory.""" + return os.path.join(self.base_dir, 'artifacts') + + @property + def outdir_parent(self) -> str: + """Build output directory (same as artifacts_dir).""" + return self.artifacts_dir + + @property + def workdir_parent(self) -> str: + """Work directory parent.""" + return os.path.join(self.base_dir, 'workdir') + + @property + def remotes_json_path(self) -> str: + """Path to remotes.json configuration.""" + return os.path.join(self.base_dir, 'configs', 'remotes.json') + + @property + def admin_token_file_path(self) -> str: + """Path to admin token secret file.""" + return os.path.join(self.base_dir, 'secrets', 'reload_token') + + @property + def enable_inbuilt_builder(self) -> bool: + """Whether to enable the inbuilt builder.""" + return os.getenv('CBS_ENABLE_INBUILT_BUILDER', '1') == '1' + + @property + def admin_token_env(self) -> str: + """Token required to reload remotes.json via API.""" + env = os.getenv('CBS_REMOTES_RELOAD_TOKEN', '') + return env if env != '' else None + + +@lru_cache() +def get_settings() -> Settings: + """Get cached settings instance.""" + return Settings() diff --git a/web/core/logging_config.py b/web/core/logging_config.py new file mode 100644 index 0000000..24a7bb0 --- /dev/null +++ b/web/core/logging_config.py @@ -0,0 +1,85 @@ +""" +Logging configuration for the application. +""" +import logging +import logging.config +import os +import sys + + +def setup_logging(log_level: str = None): + """ + Configure logging for the application and all imported modules. + + This must be called BEFORE importing any modules that use logging, + to ensure they all use the same logging configuration. + + Args: + log_level: The logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL). + If None, reads from CBS_LOG_LEVEL environment variable. 
+ """ + if log_level is None: + log_level = os.getenv('CBS_LOG_LEVEL', default='INFO') + + # Configure logging with dictConfig for consistency with Flask app + logging_config = { + 'version': 1, + 'disable_existing_loggers': False, + 'formatters': { + 'default': { + 'format': ( + '[%(asctime)s] %(levelname)s in %(module)s: ' + '%(message)s' + ), + 'datefmt': '%Y-%m-%d %H:%M:%S', + }, + 'detailed': { + 'format': ( + '[%(asctime)s] %(levelname)s ' + '[%(name)s.%(funcName)s:%(lineno)d] %(message)s' + ), + 'datefmt': '%Y-%m-%d %H:%M:%S', + }, + }, + 'handlers': { + 'console': { + 'class': 'logging.StreamHandler', + 'stream': sys.stdout, + 'formatter': 'default', + 'level': log_level.upper(), + }, + }, + 'root': { + 'level': log_level.upper(), + 'handlers': ['console'], + }, + 'loggers': { + 'uvicorn': { + 'level': 'INFO', + 'handlers': ['console'], + 'propagate': False, + }, + 'uvicorn.access': { + 'level': 'INFO', + 'handlers': ['console'], + 'propagate': False, + }, + 'uvicorn.error': { + 'level': 'INFO', + 'handlers': ['console'], + 'propagate': False, + }, + 'fastapi': { + 'level': log_level.upper(), + 'handlers': ['console'], + 'propagate': False, + }, + }, + } + + logging.config.dictConfig(logging_config) + + # Log that logging has been configured + logger = logging.getLogger(__name__) + logger.info(f"Logging configured with level: {log_level.upper()}") + logger.info(f"Python version: {sys.version}") diff --git a/web/core/startup.py b/web/core/startup.py new file mode 100644 index 0000000..08c7f7d --- /dev/null +++ b/web/core/startup.py @@ -0,0 +1,104 @@ +""" +Application startup utilities. + +Handles initial setup of required directories and configuration files. +This module ensures the application environment is properly configured +before the main application starts. +""" +import os +import logging + +logger = logging.getLogger(__name__) + + +def ensure_base_structure(base_dir: str) -> None: + """ + Ensure required base directory structure exists. + + Creates necessary subdirectories for artifacts, configs, workdir, + and secrets if they don't already exist. + + Args: + base_dir: The base directory path (typically from CBS_BASEDIR) + """ + if not base_dir: + logger.warning("Base directory not specified, skipping initialization") + return + + # Define required subdirectories + subdirs = [ + 'artifacts', + 'configs', + 'workdir', + 'secrets', + ] + + for subdir in subdirs: + path = os.path.join(base_dir, subdir) + os.makedirs(path, exist_ok=True) + logger.debug(f"Ensured directory exists: {path}") + + +def ensure_remotes_json(base_dir: str, remote_name: str = "ardupilot") -> None: + """ + Ensure remotes.json configuration file exists. + + If the remotes.json file doesn't exist, creates it by fetching release + information from the specified remote. + + Args: + base_dir: The base directory path (typically from CBS_BASEDIR) + remote_name: The remote repository name to fetch releases from + """ + if not base_dir: + logger.warning( + "Base directory not specified, " + "skipping remotes.json initialization" + ) + return + + remotes_json_path = os.path.join(base_dir, 'configs', 'remotes.json') + + if not os.path.isfile(remotes_json_path): + logger.info( + f"remotes.json not found at {remotes_json_path}, " + f"creating it..." 
+ ) + try: + from scripts import fetch_releases + fetch_releases.run( + base_dir=base_dir, + remote_name=remote_name, + ) + logger.info("Successfully created remotes.json") + except Exception as e: + logger.error(f"Failed to create remotes.json: {e}") + raise + else: + logger.debug(f"remotes.json already exists at {remotes_json_path}") + + +def initialize_application(base_dir: str) -> None: + """ + Initialize the application environment. + + Performs all necessary setup operations including: + - Creating required directory structure + - Ensuring remotes.json configuration exists + + Args: + base_dir: The base directory path (typically from CBS_BASEDIR) + """ + if not base_dir: + logger.warning("CBS_BASEDIR not set, skipping initialization") + return + + logger.info(f"Initializing application with base directory: {base_dir}") + + # Ensure directory structure + ensure_base_structure(base_dir) + + # Ensure remotes.json exists + ensure_remotes_json(base_dir) + + logger.info("Application initialization complete") diff --git a/web/main.py b/web/main.py new file mode 100755 index 0000000..4f7adbf --- /dev/null +++ b/web/main.py @@ -0,0 +1,154 @@ +#!/usr/bin/env python3 + +""" +Main FastAPI application entry point. +""" +from contextlib import asynccontextmanager +from pathlib import Path +import threading +import os +import argparse + +from fastapi import FastAPI +from fastapi.staticfiles import StaticFiles + +from api.v1 import router as v1_router +from ui import router as ui_router +from core.config import get_settings +from core.startup import initialize_application +from core.logging_config import setup_logging + +import ap_git +import metadata_manager +import build_manager + +setup_logging() + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """ + Lifespan context manager for startup and shutdown events. 
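+
+    On startup it clones or reuses the ArduPilot checkout, constructs the
+    metadata fetchers and build manager, optionally starts the in-built
+    builder thread, and stores the shared instances on ``app.state`` for the
+    per-request service dependencies; on shutdown it stops those background
+    workers and joins the builder thread.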
+ """ + # Startup + settings = get_settings() + + initialize_application(settings.base_dir) + + repo = ap_git.GitRepo.clone_if_needed( + source=settings.ap_git_url, + dest=settings.source_dir, + recurse_submodules=True, + ) + + vehicles_manager = metadata_manager.VehiclesManager() + + ap_src_metadata_fetcher = metadata_manager.APSourceMetadataFetcher( + ap_repo=repo, + caching_enabled=True, + redis_host=settings.redis_host, + redis_port=settings.redis_port, + ) + + versions_fetcher = metadata_manager.VersionsFetcher( + remotes_json_path=settings.remotes_json_path, + ap_repo=repo + ) + versions_fetcher.reload_remotes_json() + + build_mgr = build_manager.BuildManager( + outdir=settings.outdir_parent, + redis_host=settings.redis_host, + redis_port=settings.redis_port + ) + + cleaner = build_manager.BuildArtifactsCleaner() + progress_updater = build_manager.BuildProgressUpdater() + + inbuilt_builder = None + inbuilt_builder_thread = None + if settings.enable_inbuilt_builder: + from builder.builder import Builder # noqa: E402 + inbuilt_builder = Builder( + workdir=settings.workdir_parent, + source_repo=repo + ) + inbuilt_builder_thread = threading.Thread( + target=inbuilt_builder.run, + daemon=True + ) + inbuilt_builder_thread.start() + + versions_fetcher.start() + cleaner.start() + progress_updater.start() + + app.state.repo = repo + app.state.ap_src_metadata_fetcher = ap_src_metadata_fetcher + app.state.versions_fetcher = versions_fetcher + app.state.vehicles_manager = vehicles_manager + app.state.build_manager = build_mgr + app.state.inbuilt_builder = inbuilt_builder + app.state.inbuilt_builder_thread = inbuilt_builder_thread + + yield + + # Shutdown + versions_fetcher.stop() + cleaner.stop() + progress_updater.stop() + if inbuilt_builder is not None: + inbuilt_builder.shutdown() + if (inbuilt_builder_thread is not None and + inbuilt_builder_thread.is_alive()): + inbuilt_builder_thread.join() + + +# Create FastAPI application +app = FastAPI( + title="CustomBuild API", + description="API for ArduPilot Custom Firmware Builder", + version="1.0.0", + docs_url="/api/docs", + redoc_url="/api/redoc", + lifespan=lifespan, +) + +# Mount static files +WEB_ROOT = Path(__file__).resolve().parent +app.mount( + "/static", + StaticFiles(directory=str(WEB_ROOT / "static")), + name="static" +) + +# Include API v1 router +app.include_router(v1_router, prefix="/api") + +# Include Web UI router +app.include_router(ui_router) + + +@app.get("/health") +async def health_check(): + """Health check endpoint.""" + return {"status": "healthy"} + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="CustomBuild API Server") + parser.add_argument( + "--port", + type=int, + default=int(os.getenv("WEB_PORT", 8080)), + help="Port to run the server on (default: 8080 or WEB_PORT env var)" + ) + args = parser.parse_args() + + import uvicorn + uvicorn.run( + "main:app", + host="0.0.0.0", + port=args.port, + reload=True + ) diff --git a/web/requirements.txt b/web/requirements.txt index cd57da3..c513415 100644 --- a/web/requirements.txt +++ b/web/requirements.txt @@ -1,6 +1,10 @@ -flask -requests -jsonschema -dill==0.3.8 +fastapi==0.104.1 +uvicorn[standard]==0.24.0 +pydantic==2.5.0 redis==5.2.1 -gunicorn==21.1 +requests==2.31.0 +jsonschema==4.20.0 +dill==0.3.8 +packaging==25.0 +jinja2==3.1.2 +python-multipart==0.0.6 diff --git a/web/schemas/__init__.py b/web/schemas/__init__.py new file mode 100644 index 0000000..3f9340a --- /dev/null +++ b/web/schemas/__init__.py @@ -0,0 +1,54 @@ +""" +API schemas for 
the CustomBuild application. + +This module exports all Pydantic models used for request/response validation +across the API endpoints. +""" + +# Admin schemas +from .admin import ( + RefreshRemotesResponse, +) + +# Build schemas +from .builds import ( + RemoteInfo, + BuildProgress, + BuildRequest, + BuildSubmitResponse, + BuildOut, +) + +# Vehicle schemas +from .vehicles import ( + VehicleBase, + VersionBase, + VersionOut, + BoardBase, + BoardOut, + CategoryBase, + FeatureDefault, + FeatureBase, + FeatureOut, +) + +__all__ = [ + # Admin + "RefreshRemotesResponse", + # Builds + "RemoteInfo", + "BuildProgress", + "BuildRequest", + "BuildSubmitResponse", + "BuildOut", + # Vehicles + "VehicleBase", + "VersionBase", + "VersionOut", + "BoardBase", + "BoardOut", + "CategoryBase", + "FeatureDefault", + "FeatureBase", + "FeatureOut", +] diff --git a/web/schemas/admin.py b/web/schemas/admin.py new file mode 100644 index 0000000..ab8068e --- /dev/null +++ b/web/schemas/admin.py @@ -0,0 +1,12 @@ +from typing import List + +from pydantic import BaseModel, Field + + +# --- Refresh Remotes Response --- +class RefreshRemotesResponse(BaseModel): + """Response schema for remote refresh operation.""" + remotes: List[str] = Field( + ..., + description="List of remotes discovered in remotes.json file" + ) diff --git a/web/schemas/builds.py b/web/schemas/builds.py new file mode 100644 index 0000000..57b59bb --- /dev/null +++ b/web/schemas/builds.py @@ -0,0 +1,65 @@ +from typing import List, Literal + +from pydantic import BaseModel, Field +from schemas.vehicles import VehicleBase, BoardBase, RemoteInfo + + +# --- Build Progress --- +class BuildProgress(BaseModel): + """Build progress and status information.""" + percent: int = Field( + ..., ge=0, le=100, description="Build completion percentage" + ) + state: Literal[ + "PENDING", "RUNNING", "SUCCESS", "FAILURE", "ERROR" + ] = Field(..., description="Current build state") + + +# --- Build Request --- +class BuildRequest(BaseModel): + """Schema for creating a new build request.""" + vehicle_id: str = Field( + ..., description="Vehicle ID to build for" + ) + board_id: str = Field( + ..., description="Board ID to build for" + ) + version_id: str = Field( + ..., description="Version ID for build source code" + ) + selected_features: List[str] = Field( + default_factory=list, + description="Feature IDs to enable for this build" + ) + + +# --- Build Submit Response --- +class BuildSubmitResponse(BaseModel): + """Response schema for build submission.""" + build_id: str = Field(..., description="Unique build identifier") + url: str = Field(..., description="URL to get build details") + status: Literal["submitted"] = Field( + ..., description="Build submission status" + ) + + +# --- Build Output --- +class BuildOut(BaseModel): + """Complete build information output schema.""" + build_id: str = Field(..., description="Unique build identifier") + vehicle: VehicleBase = Field(..., description="Target vehicle information") + board: BoardBase = Field(..., description="Target board information") + git_hash: str = Field(..., description="Git commit hash used for build") + remote_info: RemoteInfo = Field( + ..., description="Source repository information" + ) + selected_features: List[str] = Field( + default_factory=list, + description="Enabled feature flags for this build" + ) + progress: BuildProgress = Field( + ..., description="Current build status and progress" + ) + time_created: float = Field( + ..., description="Unix timestamp when build was created" + ) diff --git 
a/web/schemas/vehicles.py b/web/schemas/vehicles.py new file mode 100644 index 0000000..64ac43c --- /dev/null +++ b/web/schemas/vehicles.py @@ -0,0 +1,93 @@ +# app/schemas/vehicles.py +from typing import Literal, Optional + +from pydantic import BaseModel, Field + + +# --- Vehicles --- +class VehicleBase(BaseModel): + id: str = Field(..., description="Unique vehicle identifier") + name: str = Field(..., description="Vehicle display name") + + +# --- Remote Information --- +class RemoteInfo(BaseModel): + """Git remote repository information.""" + name: str = Field(..., description="Remote name (e.g., 'ardupilot')") + url: str = Field(..., description="Git repository URL") + + +# --- Versions --- +class VersionBase(BaseModel): + id: str = Field(..., description="Unique version identifier") + name: str = Field(..., description="Version display name") + type: Literal["beta", "stable", "latest", "tag"] = Field( + ..., description="Version type classification" + ) + remote: RemoteInfo = Field( + ..., description="Git remote repository information for the version" + ) + commit_ref: Optional[str] = Field( + None, description="Git reference (tag, branch name, or commit SHA)" + ) + + +class VersionOut(VersionBase): + vehicle_id: str = Field( + ..., description="Vehicle identifier associated with this version" + ) + + +# --- Boards --- +class BoardBase(BaseModel): + id: str = Field(..., description="Unique board identifier") + name: str = Field(..., description="Board display name") + + +class BoardOut(BoardBase): + vehicle_id: str = Field(..., description="Associated vehicle identifier") + version_id: str = Field(..., description="Associated version identifier") + + +# --- Features --- +class CategoryBase(BaseModel): + id: str = Field(..., description="Unique category identifier") + name: str = Field(..., description="Category display name") + description: Optional[str] = Field( + None, description="Category description" + ) + + +class FeatureDefault(BaseModel): + enabled: bool = Field( + ..., description="Whether feature is enabled by default" + ) + source: Literal["firmware-server", "build-options-py"] = Field( + ..., + description=( + "Source of the default value: 'firmware-server' from " + "firmware.ardupilot.org, 'build-options-py' from git repository" + ) + ) + + +class FeatureBase(BaseModel): + id: str = Field(..., description="Unique feature identifier/flag name") + name: str = Field(..., description="Feature display name") + category: CategoryBase = Field(..., description="Feature category") + description: Optional[str] = Field( + None, description="Feature description" + ) + + +class FeatureOut(FeatureBase): + vehicle_id: str = Field(..., description="Associated vehicle identifier") + version_id: str = Field(..., description="Associated version identifier") + board_id: str = Field(..., description="Associated board identifier") + default: FeatureDefault = Field( + ..., description="Default state for this feature on this board" + ) + dependencies: list[str] = Field( + default_factory=list, + description="List of feature IDs that this feature depends on" + ) diff --git a/web/services/__init__.py b/web/services/__init__.py new file mode 100644 index 0000000..d801d2e --- /dev/null +++ b/web/services/__init__.py @@ -0,0 +1,15 @@ +""" +Business logic services for the application. 
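+
+Each service is constructed per request by its ``get_*_service`` factory,
+which pulls the shared singletons (git repo, metadata fetchers, build
+manager) off ``request.app.state``.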
+""" +from .vehicles import get_vehicles_service, VehiclesService +from .builds import get_builds_service, BuildsService +from .admin import get_admin_service, AdminService + +__all__ = [ + "get_vehicles_service", + "VehiclesService", + "get_builds_service", + "BuildsService", + "get_admin_service", + "AdminService", +] diff --git a/web/services/admin.py b/web/services/admin.py new file mode 100644 index 0000000..83aa79f --- /dev/null +++ b/web/services/admin.py @@ -0,0 +1,115 @@ +""" +Admin service for handling administrative operations. +""" +import logging +from typing import Optional, List + +from fastapi import Request + +from core.config import get_settings + +logger = logging.getLogger(__name__) + + +class AdminService: + """Service for managing administrative operations.""" + + def __init__(self, versions_fetcher=None): + """ + Initialize the admin service. + + Args: + versions_fetcher: VersionsFetcher instance for managing remotes + """ + self.versions_fetcher = versions_fetcher + self.settings = get_settings() + + def get_auth_token(self) -> Optional[str]: + """ + Retrieve the authorization token from file or environment. + + Returns: + The authorization token if found, None otherwise + """ + try: + # Try to read the secret token from the file + token_file_path = self.settings.admin_token_file_path + with open(token_file_path, 'r') as file: + token = file.read().strip() + return token + except (FileNotFoundError, PermissionError) as e: + logger.error( + f"Couldn't open token file at " + f"{self.settings.admin_token_file_path}: {e}. " + "Checking environment for token." + ) + # If the file does not exist or no permission, check environment + return self.settings.admin_token_env + except Exception as e: + logger.error( + f"Unexpected error reading token file at " + f"{self.settings.admin_token_file_path}: {e}. " + "Checking environment for token." + ) + # For any other error, fall back to environment variable + return self.settings.admin_token_env + + async def verify_token(self, token: str) -> bool: + """ + Verify that the provided token matches the expected admin token. + + Args: + token: The token to verify + + Returns: + True if token is valid, False otherwise + + Raises: + RuntimeError: If admin token is not configured on server + """ + expected_token = self.get_auth_token() + + if expected_token is None: + logger.error("No admin token configured") + raise RuntimeError("Admin token not configured on server") + + return token == expected_token + + async def refresh_remotes(self) -> List[str]: + """ + Trigger a refresh of remote metadata. + + Returns: + List of remote names that were refreshed + + Raises: + Exception: If refresh operation fails + """ + logger.info("Triggering remote metadata refresh") + + # Reload remotes.json + self.versions_fetcher.reload_remotes_json() + + # Get list of remotes that are now available + remotes_info = self.versions_fetcher.get_all_remotes_info() + remotes_refreshed = [remote.name for remote in remotes_info] + + logger.info( + f"Successfully refreshed {len(remotes_refreshed)} remotes: " + f"{remotes_refreshed}" + ) + + return remotes_refreshed + + +def get_admin_service(request: Request) -> AdminService: + """ + Get AdminService instance with dependencies from app state. 
+ + Args: + request: FastAPI Request object + + Returns: + AdminService instance initialized with app state dependencies + """ + return AdminService(versions_fetcher=request.app.state.versions_fetcher) diff --git a/web/services/builds.py b/web/services/builds.py new file mode 100644 index 0000000..691577b --- /dev/null +++ b/web/services/builds.py @@ -0,0 +1,397 @@ +""" +Builds service for handling build-related business logic. +""" +import logging +import os +from fastapi import Request +from typing import List, Optional + +from schemas import ( + BuildRequest, + BuildSubmitResponse, + BuildOut, + BuildProgress, + RemoteInfo, +) +from schemas.vehicles import VehicleBase, BoardBase + +# Import external modules +# pylint: disable=wrong-import-position +import build_manager # noqa: E402 + +logger = logging.getLogger(__name__) + + +class BuildsService: + """Service for managing firmware builds.""" + + def __init__( + self, + build_manager=None, + versions_fetcher=None, + ap_src_metadata_fetcher=None, + repo=None, + vehicles_manager=None + ): + self.manager = build_manager + self.versions_fetcher = versions_fetcher + self.ap_src_metadata_fetcher = ap_src_metadata_fetcher + self.repo = repo + self.vehicles_manager = vehicles_manager + + def create_build( + self, + build_request: BuildRequest, + client_ip: str + ) -> BuildSubmitResponse: + """ + Create a new build request. + + Args: + build_request: Build configuration + client_ip: Client IP address for rate limiting + + Returns: + Simple response with build_id and URL + + Raises: + ValueError: If validation fails + """ + # Validate version_id + if not build_request.version_id: + raise ValueError("version_id is required") + + # Validate vehicle + vehicle_id = build_request.vehicle_id + if not vehicle_id: + raise ValueError("vehicle_id is required") + + # Get version info using version_id + version_info = self.versions_fetcher.get_version_info( + vehicle_id=vehicle_id, + version_id=build_request.version_id + ) + if version_info is None: + raise ValueError("Invalid version_id for vehicle") + + remote_name = version_info.remote_info.name + commit_ref = version_info.commit_ref + + # Validate remote + remote_info = self.versions_fetcher.get_remote_info(remote_name) + if remote_info is None: + raise ValueError(f"Remote {remote_name} is not whitelisted") + + # Validate board + board_name = build_request.board_id + if not board_name: + raise ValueError("board_id is required") + + # Check board exists at this version + with self.repo.get_checkout_lock(): + boards_at_commit = self.ap_src_metadata_fetcher.get_boards( + remote=remote_name, + commit_ref=commit_ref, + vehicle_id=vehicle_id, + ) + + if board_name not in boards_at_commit: + raise ValueError("Invalid board for this version") + + # Get git hash + git_hash = self.repo.commit_id_for_remote_ref( + remote=remote_name, + commit_ref=commit_ref + ) + + # Map feature labels (IDs from API) to defines + # (required by build manager) + selected_feature_defines = set() + if build_request.selected_features: + # Get build options to map labels to defines + with self.repo.get_checkout_lock(): + options = ( + self.ap_src_metadata_fetcher + .get_build_options_at_commit( + remote=remote_name, + commit_ref=commit_ref + ) + ) + + # Create label to define mapping + label_to_define = { + option.label: option.define for option in options + } + + # Map each selected feature label to its define + for feature_label in build_request.selected_features: + if feature_label in label_to_define: + selected_feature_defines.add( + 
label_to_define[feature_label] + ) + else: + logger.warning( + f"Feature label '{feature_label}' not found in " + f"build options for {vehicle_id} {remote_name} " + f"{commit_ref}" + ) + + # Create build info + build_info = build_manager.BuildInfo( + vehicle_id=vehicle_id, + remote_info=remote_info, + git_hash=git_hash, + board=board_name, + selected_features=selected_feature_defines + ) + + # Submit build + build_id = self.manager.submit_build( + build_info=build_info, + client_ip=client_ip, + ) + + # Return simple submission response + return BuildSubmitResponse( + build_id=build_id, + url=f"/api/v1/builds/{build_id}", + status="submitted" + ) + + def list_builds( + self, + vehicle_id: Optional[str] = None, + board_id: Optional[str] = None, + state: Optional[str] = None, + limit: int = 20, + offset: int = 0 + ) -> List[BuildOut]: + """ + Get list of builds with optional filters. + + Args: + vehicle_id: Filter by vehicle + board_id: Filter by board + state: Filter by build state + limit: Maximum results + offset: Results to skip + + Returns: + List of builds + """ + all_build_ids = self.manager.get_all_build_ids() + all_builds = [] + + for build_id in all_build_ids: + build_info = self.manager.get_build_info(build_id) + if build_info is None: + continue + + # Apply filters + if (vehicle_id and + build_info.vehicle_id.lower() != vehicle_id.lower()): + continue + if board_id and build_info.board != board_id: + continue + if state and build_info.progress.state.name != state: + continue + + all_builds.append( + self._build_info_to_output(build_id, build_info) + ) + + # Sort by creation time (newest first) + all_builds.sort(key=lambda x: x.time_created, reverse=True) + + # Apply pagination + return all_builds[offset:offset + limit] + + def get_build(self, build_id: str) -> Optional[BuildOut]: + """ + Get details of a specific build. + + Args: + build_id: The unique build identifier + + Returns: + Build details or None if not found + """ + if not self.manager.build_exists(build_id): + return None + + build_info = self.manager.get_build_info(build_id) + if build_info is None: + return None + + return self._build_info_to_output(build_id, build_info) + + def get_build_logs( + self, + build_id: str, + tail: Optional[int] = None + ) -> Optional[str]: + """ + Get build logs for a specific build. + + Args: + build_id: The unique build identifier + tail: Optional number of last lines to return + + Returns: + Build logs as text or None if not found/available + """ + if not self.manager.build_exists(build_id): + return None + + log_path = self.manager.get_build_log_path(build_id) + if not os.path.exists(log_path): + return None + + try: + with open(log_path, 'r') as f: + if tail: + # Read last N lines + lines = f.readlines() + return ''.join(lines[-tail:]) + else: + return f.read() + except Exception as e: + logger.error(f"Error reading log file for build {build_id}: {e}") + return None + + def get_artifact_path(self, build_id: str) -> Optional[str]: + """ + Get the path to the build artifact. 
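+
+        Only builds that finished in the SUCCESS state have an artifact, so
+        callers must handle ``None``. As a rough, illustrative sketch (not
+        part of this change; ``HTTPException`` and ``FileResponse`` are the
+        standard FastAPI helpers), a download route could wrap the result
+        like:
+
+            path = service.get_artifact_path(build_id)
+            if path is None:
+                raise HTTPException(status_code=404, detail="Artifact not found")
+            return FileResponse(path, filename=os.path.basename(path))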
+ + Args: + build_id: The unique build identifier + + Returns: + Path to artifact or None if not available + """ + if not self.manager.build_exists(build_id): + return None + + build_info = self.manager.get_build_info(build_id) + if build_info is None: + return None + + # Only return artifact if build was successful + if build_info.progress.state.name != "SUCCESS": + return None + + artifact_path = self.manager.get_build_archive_path(build_id) + if os.path.exists(artifact_path): + return artifact_path + + return None + + def _build_info_to_output( + self, + build_id: str, + build_info + ) -> BuildOut: + """ + Convert BuildInfo object to BuildOut schema. + + Args: + build_id: The build identifier + build_info: BuildInfo object from build_manager + + Returns: + BuildOut schema object + """ + # Convert build_manager.BuildProgress to schema BuildProgress + progress = BuildProgress( + percent=build_info.progress.percent, + state=build_info.progress.state.name + ) + + # Convert RemoteInfo + remote_info = RemoteInfo( + name=build_info.remote_info.name, + url=build_info.remote_info.url + ) + + # Map feature defines back to labels for API response + selected_feature_labels = [] + if build_info.selected_features: + try: + # Get build options to map defines back to labels + with self.repo.get_checkout_lock(): + options = ( + self.ap_src_metadata_fetcher + .get_build_options_at_commit( + remote=build_info.remote_info.name, + commit_ref=build_info.git_hash + ) + ) + + # Create define to label mapping + define_to_label = { + option.define: option.label for option in options + } + + # Map each selected feature define to its label + for feature_define in build_info.selected_features: + if feature_define in define_to_label: + selected_feature_labels.append( + define_to_label[feature_define] + ) + else: + # Fallback: use define if label not found + logger.warning( + f"Feature define '{feature_define}' not " + f"found in build options for build " + f"{build_id}" + ) + selected_feature_labels.append(feature_define) + except Exception as e: + logger.error( + f"Error mapping feature defines to labels for " + f"build {build_id}: {e}" + ) + # Fallback: use defines as-is + selected_feature_labels = list( + build_info.selected_features + ) + + vehicle = self.vehicles_manager.get_vehicle_by_id( + build_info.vehicle_id + ) + + return BuildOut( + build_id=build_id, + vehicle=VehicleBase( + id=build_info.vehicle_id, + name=vehicle.name + ), + board=BoardBase( + id=build_info.board, + name=build_info.board # Board name is same as board ID for now + ), + git_hash=build_info.git_hash, + remote_info=remote_info, + selected_features=selected_feature_labels, + progress=progress, + time_created=build_info.time_created, + ) + + +def get_builds_service(request: Request) -> BuildsService: + """ + Get BuildsService instance with dependencies from app state. + + Args: + request: FastAPI Request object + + Returns: + BuildsService instance initialized with app state dependencies + """ + return BuildsService( + build_manager=request.app.state.build_manager, + versions_fetcher=request.app.state.versions_fetcher, + ap_src_metadata_fetcher=request.app.state.ap_src_metadata_fetcher, + repo=request.app.state.repo, + vehicles_manager=request.app.state.vehicles_manager, + ) diff --git a/web/services/vehicles.py b/web/services/vehicles.py new file mode 100644 index 0000000..9332293 --- /dev/null +++ b/web/services/vehicles.py @@ -0,0 +1,273 @@ +""" +Vehicles service for handling vehicle-related business logic. 
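+
+A rough usage sketch (the variable names on the right-hand side are
+illustrative, not part of this change):
+
+    service = VehiclesService(
+        vehicle_manager=vehicles_manager,
+        versions_fetcher=versions_fetcher,
+        ap_src_metadata_fetcher=metadata_fetcher,
+        repo=repo,
+    )
+    vehicles = service.get_all_vehicles()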
+""" +import logging +from typing import List, Optional +from fastapi import Request + +from schemas import ( + VehicleBase, + RemoteInfo, + VersionOut, + BoardOut, + FeatureOut, + CategoryBase, + FeatureDefault, +) + + +logger = logging.getLogger(__name__) + + +class VehiclesService: + """Service for managing vehicles, versions, boards, and features.""" + + def __init__(self, vehicle_manager=None, + versions_fetcher=None, + ap_src_metadata_fetcher=None, + repo=None): + self.vehicles_manager = vehicle_manager + self.versions_fetcher = versions_fetcher + self.ap_src_metadata_fetcher = ap_src_metadata_fetcher + self.repo = repo + + def get_all_vehicles(self) -> List[VehicleBase]: + """Get list of all available vehicles.""" + logger.info('Fetching all vehicles') + vehicles = self.vehicles_manager.get_all_vehicles() + # Sort by name for consistent ordering + sorted_vehicles = sorted(vehicles, key=lambda v: v.name) + logger.info(f'Found vehicles: {[v.name for v in sorted_vehicles]}') + return [ + VehicleBase(id=vehicle.id, name=vehicle.name) + for vehicle in sorted_vehicles + ] + + def get_vehicle(self, vehicle_id: str) -> Optional[VehicleBase]: + """Get a specific vehicle by ID.""" + vehicle = self.vehicles_manager.get_vehicle_by_id(vehicle_id) + if vehicle: + return VehicleBase(id=vehicle.id, name=vehicle.name) + return None + + def get_versions( + self, + vehicle_id: str, + type_filter: Optional[str] = None + ) -> List[VersionOut]: + """Get all versions available for a specific vehicle.""" + versions = [] + + for version_info in self.versions_fetcher.get_versions_for_vehicle( + vehicle_id=vehicle_id + ): + # Apply type filter if provided + if type_filter and version_info.release_type != type_filter: + continue + + if version_info.release_type == "latest": + title = f"Latest ({version_info.remote_info.name})" + else: + rel_type = version_info.release_type + ver_num = version_info.version_number + remote = version_info.remote_info.name + title = f"{rel_type} {ver_num} ({remote})" + + versions.append(VersionOut( + id=version_info.version_id, + name=title, + type=version_info.release_type, + remote=RemoteInfo( + name=version_info.remote_info.name, + url=version_info.remote_info.url, + ), + commit_ref=version_info.commit_ref, + vehicle_id=vehicle_id, + )) + + # Sort by name + return sorted(versions, key=lambda x: x.name) + + def get_version( + self, + vehicle_id: str, + version_id: str + ) -> Optional[VersionOut]: + """Get details of a specific version for a vehicle.""" + versions = self.get_versions(vehicle_id) + for version in versions: + if version.id == version_id: + return version + return None + + def get_boards( + self, + vehicle_id: str, + version_id: str + ) -> List[BoardOut]: + """Get all boards available for a specific vehicle version.""" + # Get version info + version_info = self.versions_fetcher.get_version_info( + vehicle_id=vehicle_id, + version_id=version_id + ) + if not version_info: + return [] + + logger.info( + f'Board list requested for {vehicle_id} ' + f'{version_info.remote_info.name} {version_info.commit_ref}' + ) + + # Get boards list + with self.repo.get_checkout_lock(): + boards = self.ap_src_metadata_fetcher.get_boards( + remote=version_info.remote_info.name, + commit_ref=version_info.commit_ref, + vehicle_id=vehicle_id, + ) + + return [ + BoardOut( + id=board, + name=board, + vehicle_id=vehicle_id, + version_id=version_id + ) + for board in boards + ] + + def get_board( + self, + vehicle_id: str, + version_id: str, + board_id: str + ) -> Optional[BoardOut]: + """Get 
details of a specific board for a vehicle version.""" + boards = self.get_boards(vehicle_id, version_id) + for board in boards: + if board.id == board_id: + return board + return None + + def get_features( + self, + vehicle_id: str, + version_id: str, + board_id: str, + category_id: Optional[str] = None + ) -> List[FeatureOut]: + """ + Get all features with defaults for a specific + vehicle version/board. + """ + # Get version info + version_info = self.versions_fetcher.get_version_info( + vehicle_id=vehicle_id, + version_id=version_id + ) + if not version_info: + return [] + + logger.info( + f'Features requested for {vehicle_id} ' + f'{version_info.remote_info.name} {version_info.commit_ref}' + ) + + # Get build options from source + with self.repo.get_checkout_lock(): + options = self.ap_src_metadata_fetcher.get_build_options_at_commit( + remote=version_info.remote_info.name, + commit_ref=version_info.commit_ref + ) + + # Try to fetch board-specific defaults from firmware-server + board_defaults = None + artifacts_dir = version_info.ap_build_artifacts_url + if artifacts_dir is not None: + board_defaults = ( + self.ap_src_metadata_fetcher.get_board_defaults_from_fw_server( + artifacts_url=artifacts_dir, + board_id=board_id, + vehicle_id=vehicle_id, + ) + ) + + # Build feature list + features = [] + for option in options: + # Apply category filter if provided + if category_id and option.category != category_id: + continue + + # Determine default state and source + if board_defaults and option.define in board_defaults: + # Override with firmware server data + default_enabled = (board_defaults[option.define] != 0) + default_source = 'firmware-server' + else: + # Use build-options-py fallback + default_enabled = (option.default != 0) + default_source = 'build-options-py' + + # Parse dependencies (comma-separated labels) + dependencies = [] + if option.dependency: + dependencies = [ + label.strip() + for label in option.dependency.split(',') + ] + + features.append(FeatureOut( + id=option.label, + name=option.label, + category=CategoryBase( + id=option.category, + name=option.category, + description=None + ), + description=option.description, + vehicle_id=vehicle_id, + version_id=version_id, + board_id=board_id, + default=FeatureDefault( + enabled=default_enabled, + source=default_source + ), + dependencies=dependencies + )) + + # Sort by name + return sorted(features, key=lambda x: x.category.name) + + def get_feature( + self, + vehicle_id: str, + version_id: str, + board_id: str, + feature_id: str + ) -> Optional[FeatureOut]: + """Get details of a specific feature for a vehicle version/board.""" + features = self.get_features(vehicle_id, version_id, board_id) + for feature in features: + if feature.id == feature_id: + return feature + return None + + +def get_vehicles_service(request: Request) -> VehiclesService: + """ + Get VehiclesService instance with dependencies from app state. 
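+
+    Intended as a FastAPI dependency provider. A rough sketch of how a route
+    might use it (the router and handler names are illustrative, not part of
+    this change):
+
+        from fastapi import APIRouter, Depends
+
+        router = APIRouter()
+
+        @router.get("/vehicles")
+        def list_vehicles(
+            service: VehiclesService = Depends(get_vehicles_service),
+        ):
+            return service.get_all_vehicles()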
+ + Args: + request: FastAPI Request object + + Returns: + VehiclesService instance initialized with app state dependencies + """ + return VehiclesService( + vehicle_manager=request.app.state.vehicles_manager, + versions_fetcher=request.app.state.versions_fetcher, + ap_src_metadata_fetcher=request.app.state.ap_src_metadata_fetcher, + repo=request.app.state.repo, + ) diff --git a/web/static/js/add_build.js b/web/static/js/add_build.js index 22d6b59..b3caba8 100644 --- a/web/static/js/add_build.js +++ b/web/static/js/add_build.js @@ -1,152 +1,156 @@ const Features = (() => { - let features = {}; - let defines_dictionary = {}; - let labels_dictionary = {}; - let category_dictionary = {}; + let features = []; // Flat array of feature objects from API + let features_by_id = {}; // Map feature IDs to feature objects + let categories_grouped = {}; // Features grouped by category name let selected_options = 0; function resetDictionaries() { // clear old dictionaries - defines_dictionary = {}; - labels_dictionary = {}; - category_dictionary = {}; - - features.forEach((category) => { - category_dictionary[category.name] = category; - category['options'].forEach((option) => { - defines_dictionary[option.define] = labels_dictionary[option.label] = option; - }); - }); - } - - function store_category_in_options() { - features.forEach((category) => { - category['options'].forEach((option) => { - option.category_name = category.name; - }); + features_by_id = {}; + categories_grouped = {}; + + // Build lookup maps from flat feature array + features.forEach((feature) => { + features_by_id[feature.id] = feature; + + // Group by category + const cat_name = feature.category.name; + if (!categories_grouped[cat_name]) { + categories_grouped[cat_name] = { + name: cat_name, + description: feature.category.description, + features: [] + }; + } + categories_grouped[cat_name].features.push(feature); }); } function updateRequiredFor() { - features.forEach((category) => { - category['options'].forEach((option) => { - if (option.dependency != null) { - option.dependency.split(',').forEach((dependency) => { - let dep = getOptionByLabel(dependency); - if (dep.requiredFor == undefined) { - dep.requiredFor = []; - } - dep.requiredFor.push(option.label); - }); - } - }); + features.forEach((feature) => { + if (feature.dependencies && feature.dependencies.length > 0) { + feature.dependencies.forEach((dependency_id) => { + let dep = getOptionById(dependency_id); + if (dep && dep.requiredFor == undefined) { + dep.requiredFor = []; + } + if (dep) { + dep.requiredFor.push(feature.id); + } + }); + } }); } function reset(new_features) { features = new_features; + selected_options = 0; resetDictionaries(); updateRequiredFor(); - store_category_in_options(); } - function getOptionByDefine(define) { - return defines_dictionary[define]; + function getOptionById(id) { + return features_by_id[id]; } - function getOptionByLabel(label) { - return labels_dictionary[label]; + function getCategoryByName(category_name) { + return categories_grouped[category_name]; } - function getCategoryByName(category_name) { - return category_dictionary[category_name]; + function getAllCategories() { + return Object.values(categories_grouped); } function getCategoryIdByName(category_name) { return 'category_'+category_name.split(" ").join("_"); } - function featureIsDisabledByDefault(feature_label) { - return getOptionByLabel(feature_label).default == 0; + function featureIsDisabledByDefault(feature_id) { + let feature = getOptionById(feature_id); + return 
feature && !feature.default.enabled; } - function featureisEnabledByDefault(feature_label) { - return !featureIsDisabledByDefault(feature_label); + function featureisEnabledByDefault(feature_id) { + return !featureIsDisabledByDefault(feature_id); } - function updateDefaults(defines_array) { - // updates default on the basis of define array passed - // the define array consists define in format, EXAMPLE_DEFINE or !EXAMPLE_DEFINE - // we update the defaults in features object by processing those defines - for (let i=0; i { + feature.dependencies.forEach((dependency_id) => { const check = true; - checkUncheckOptionByLabel(child, check); + checkUncheckOptionById(dependency_id, check); }); } - function handleOptionStateChange(feature_label, triggered_by_ui) { - if (document.getElementById(feature_label).checked) { + function handleOptionStateChange(feature_id, triggered_by_ui) { + // feature_id is the feature ID from the API + let element = document.getElementById(feature_id); + if (!element) return; + + let feature = getOptionById(feature_id); + if (!feature) return; + + if (element.checked) { selected_options += 1; - enableDependenciesForFeature(feature_label); + enableDependenciesForFeature(feature.id); } else { selected_options -= 1; if (triggered_by_ui) { - askToDisableDependentsForFeature(feature_label); + askToDisableDependentsForFeature(feature.id); } else { - disabledDependentsForFeature(feature_label); + disabledDependentsForFeature(feature.id); } } - updateCategoryCheckboxState(getOptionByLabel(feature_label).category_name); + updateCategoryCheckboxState(feature.category.name); updateGlobalCheckboxState(); } - function askToDisableDependentsForFeature(feature_label) { - let enabled_dependent_features = getEnabledDependentFeaturesFor(feature_label); + function askToDisableDependentsForFeature(feature_id) { + let enabled_dependent_features = getEnabledDependentFeaturesFor(feature_id); if (enabled_dependent_features.length <= 0) { return; } - document.getElementById('modalBody').innerHTML = "The feature(s) "+enabled_dependent_features.join(", ")+" is/are dependant on "+feature_label+"" + + let feature = getOptionById(feature_id); + let feature_display_name = feature ? feature.name : feature_id; + + // Get display names for dependent features + let dependent_names = enabled_dependent_features.map(dep_id => { + let dep_feature = getOptionById(dep_id); + return dep_feature ? dep_feature.name : dep_id; + }); + + document.getElementById('modalBody').innerHTML = "The feature(s) "+dependent_names.join(", ")+" is/are dependant on "+feature_display_name+"" + " and hence will be disabled too.
Do you want to continue?"; - document.getElementById('modalDisableButton').onclick = () => { disabledDependentsForFeature(feature_label); }; + document.getElementById('modalDisableButton').onclick = () => { disabledDependentsForFeature(feature_id); }; document.getElementById('modalCancelButton').onclick = document.getElementById('modalCloseButton').onclick = () => { - const check = true; - checkUncheckOptionByLabel(feature_label, check); + const check = true; + if (feature) { + checkUncheckOptionById(feature.id, check); + } }; var confirmationModal = bootstrap.Modal.getOrCreateInstance(document.getElementById('dependencyCheckModal')); confirmationModal.show(); } - function disabledDependentsForFeature(feature_label) { - let feature = getOptionByLabel(feature_label); + function disabledDependentsForFeature(feature_id) { + let feature = getOptionById(feature_id); - if (feature.requiredFor == undefined) { + if (!feature || feature.requiredFor == undefined) { return; } let dependents = feature.requiredFor; - dependents.forEach((dependent) => { + dependents.forEach((dependent_id) => { const check = false; - checkUncheckOptionByLabel(dependent, false); + checkUncheckOptionById(dependent_id, check); }); } @@ -155,12 +159,14 @@ const Features = (() => { if (category == undefined) { console.log("Could not find category by given name"); + return; } let checked_options_count = 0; - category.options.forEach((option) => { - let element = document.getElementById(option.label); + category.features.forEach((feature) => { + // Use ID to find the element + let element = document.getElementById(feature.id); if (element && element.checked) { checked_options_count += 1; @@ -170,6 +176,7 @@ const Features = (() => { let category_checkbox_element = document.getElementById(getCategoryIdByName(category_name)); if (category_checkbox_element == undefined) { console.log("Could not find element for given category"); + return; } let indeterminate_state = false; @@ -177,7 +184,7 @@ const Features = (() => { case 0: category_checkbox_element.checked = false; break; - case category.options.length: + case category.features.length: category_checkbox_element.checked = true; break; default: @@ -189,7 +196,7 @@ const Features = (() => { } function updateGlobalCheckboxState() { - const total_options = Object.keys(defines_dictionary).length; + const total_options = Object.keys(features_by_id).length; let global_checkbox = document.getElementById("check-uncheck-all"); let indeterminate_state = false; @@ -208,31 +215,40 @@ const Features = (() => { global_checkbox.indeterminate = indeterminate_state; } - function getEnabledDependentFeaturesHelper(feature_label, visited, dependent_features) { - if (visited[feature_label] != undefined || document.getElementById(feature_label).checked == false) { + function getEnabledDependentFeaturesHelper(feature_id, visited, dependent_features) { + if (visited[feature_id] != undefined) { + return; + } + + let feature = getOptionById(feature_id); + if (!feature) return; + + // Use ID to check the checkbox + let element = document.getElementById(feature.id); + if (!element || element.checked == false) { return; } - visited[feature_label] = true; - dependent_features.push(feature_label); + visited[feature_id] = true; + dependent_features.push(feature_id); - let feature = getOptionByLabel(feature_label); if (feature.requiredFor == null) { return; } - feature.requiredFor.forEach((dependent_feature) => { - getEnabledDependentFeaturesHelper(dependent_feature, visited, dependent_features); + 
feature.requiredFor.forEach((dependent_feature_id) => { + getEnabledDependentFeaturesHelper(dependent_feature_id, visited, dependent_features); }); } - function getEnabledDependentFeaturesFor(feature_label) { + function getEnabledDependentFeaturesFor(feature_id) { let dependent_features = []; let visited = {}; - if (getOptionByLabel(feature_label).requiredFor) { - getOptionByLabel(feature_label).requiredFor.forEach((dependent_feature) => { - getEnabledDependentFeaturesHelper(dependent_feature, visited, dependent_features); + let feature = getOptionById(feature_id); + if (feature && feature.requiredFor) { + feature.requiredFor.forEach((dependent_feature_id) => { + getEnabledDependentFeaturesHelper(dependent_feature_id, visited, dependent_features); }); } @@ -240,43 +256,43 @@ const Features = (() => { } function applyDefaults() { - features.forEach(category => { - category['options'].forEach(option => { - const check = featureisEnabledByDefault(option.label); - checkUncheckOptionByLabel(option.label, check); - }); + features.forEach(feature => { + const check = featureisEnabledByDefault(feature.id); + checkUncheckOptionById(feature.id, check); }); } - function checkUncheckOptionByLabel(label, check) { - let element = document.getElementById(label); + function checkUncheckOptionById(id, check) { + let feature = getOptionById(id); + if (!feature) return; + + // Use ID to find the element + let element = document.getElementById(feature.id); if (element == undefined || element.checked == check) { return; } element.checked = check; const triggered_by_ui = false; - handleOptionStateChange(label, triggered_by_ui); + handleOptionStateChange(feature.id, triggered_by_ui); } function checkUncheckAll(check) { - features.forEach(category => { + getAllCategories().forEach(category => { checkUncheckCategory(category.name, check); }); } function checkUncheckCategory(category_name, check) { - getCategoryByName(category_name).options.forEach(option => { - checkUncheckOptionByLabel(option.label, check); + getCategoryByName(category_name).features.forEach(feature => { + checkUncheckOptionById(feature.id, check); }); } - return {reset, handleOptionStateChange, getCategoryIdByName, updateDefaults, applyDefaults, checkUncheckAll, checkUncheckCategory}; + return {reset, handleOptionStateChange, getCategoryIdByName, applyDefaults, checkUncheckAll, checkUncheckCategory, getOptionById}; })(); var init_categories_expanded = false; -var pending_update_calls = 0; // to keep track of unresolved Promises - function init() { fetchVehicles(); } @@ -309,9 +325,8 @@ function fetchVehicles() { // following elemets will be blocked (disabled) when we make the request let elements_to_block = ['vehicle', 'version', 'board', 'submit', 'reset_def', 'exp_col_button']; enableDisableElementsById(elements_to_block, false); - let request_url = '/get_vehicles'; + let request_url = '/api/v1/vehicles'; setSpinnerToDiv('vehicle_list', 'Fetching vehicles...'); - pending_update_calls += 1; sendAjaxRequestForJsonResponse(request_url) .then((json_response) => { let all_vehicles = json_response; @@ -323,8 +338,6 @@ function fetchVehicles() { }) .finally(() => { enableDisableElementsById(elements_to_block, true); - pending_update_calls -= 1; - fetchAndUpdateDefaults(); }); } @@ -341,9 +354,8 @@ function onVehicleChange(new_vehicle_id) { // following elemets will be blocked (disabled) when we make the request let elements_to_block = ['vehicle', 'version', 'board', 'submit', 'reset_def', 'exp_col_button']; 
enableDisableElementsById(elements_to_block, false); - let request_url = '/get_versions/'+new_vehicle_id; + let request_url = '/api/v1/vehicles/'+new_vehicle_id+'/versions'; setSpinnerToDiv('version_list', 'Fetching versions...'); - pending_update_calls += 1; sendAjaxRequestForJsonResponse(request_url) .then((json_response) => { let all_versions = json_response; @@ -356,8 +368,6 @@ function onVehicleChange(new_vehicle_id) { }) .finally(() => { enableDisableElementsById(elements_to_block, true); - pending_update_calls -= 1; - fetchAndUpdateDefaults(); }); } @@ -376,40 +386,39 @@ function onVersionChange(new_version) { enableDisableElementsById(elements_to_block, false); let vehicle_id = document.getElementById("vehicle").value; let version_id = new_version; - let request_url = `/boards_and_features/${vehicle_id}/${version_id}`; - - // create a temporary container to set spinner inside it + + // Fetch boards first + let boards_url = `/api/v1/vehicles/${vehicle_id}/versions/${version_id}/boards`; + setSpinnerToDiv('board_list', 'Fetching boards...'); + + // Clear build options and show loading state let temp_container = document.createElement('div'); temp_container.id = "temp_container"; temp_container.setAttribute('class', 'container-fluid w-25 mt-3'); - let features_list_element = document.getElementById('build_options'); // append the temp container to the main features_list container + let features_list_element = document.getElementById('build_options'); features_list_element.innerHTML = ""; features_list_element.appendChild(temp_container); setSpinnerToDiv('temp_container', 'Fetching features...'); - setSpinnerToDiv('board_list', 'Fetching boards...'); - pending_update_calls += 1; - sendAjaxRequestForJsonResponse(request_url) - .then((json_response) => { - let boards = json_response.boards; - let new_board = json_response.default_board; - let new_features = json_response.features; - Features.reset(new_features); + + // Fetch boards + sendAjaxRequestForJsonResponse(boards_url) + .then((boards_response) => { + // Keep full board objects with id and name + let boards = boards_response; + let new_board = boards.length > 0 ? boards[0].id : null; updateBoards(boards, new_board); - fillBuildOptions(new_features); }) .catch((message) => { - console.log("Boards and features update failed. "+message); + console.log("Boards update failed. "+message); }) .finally(() => { enableDisableElementsById(elements_to_block, true); - pending_update_calls -= 1; - fetchAndUpdateDefaults(); }); } function updateBoards(all_boards, new_board) { let board_element = document.getElementById('board'); - let old_board = board_element ? board.value : ''; + let old_board = board_element ? 
board_element.value : ''; fillBoards(all_boards, new_board); if (old_board != new_board) { onBoardChange(new_board); @@ -417,48 +426,40 @@ function updateBoards(all_boards, new_board) { } function onBoardChange(new_board) { - fetchAndUpdateDefaults(); -} - -function fetchAndUpdateDefaults() { - // return early if there is an unresolved promise (i.e., there is an ongoing ajax call) - if (pending_update_calls > 0) { - return; - } - elements_to_block = ['reset_def']; - document.getElementById('reset_def').innerHTML = 'Fetching defaults'; - enableDisableElementsById(elements_to_block, false); + // When board changes, fetch features for the new board + let vehicle_id = document.getElementById('vehicle').value; let version_id = document.getElementById('version').value; - let vehicle = document.getElementById('vehicle').value; - let board = document.getElementById('board').value; - - let request_url = '/get_defaults/'+vehicle+'/'+version_id+'/'+board; - sendAjaxRequestForJsonResponse(request_url) - .then((json_response) => { - Features.updateDefaults(json_response); + + let temp_container = document.createElement('div'); + temp_container.id = "temp_container"; + temp_container.setAttribute('class', 'container-fluid w-25 mt-3'); + let features_list_element = document.getElementById('build_options'); + features_list_element.innerHTML = ""; + features_list_element.appendChild(temp_container); + setSpinnerToDiv('temp_container', 'Fetching features...'); + + let features_url = `/api/v1/vehicles/${vehicle_id}/versions/${version_id}/boards/${new_board}/features`; + sendAjaxRequestForJsonResponse(features_url) + .then((features_response) => { + Features.reset(features_response); + fillBuildOptions(features_response); + Features.applyDefaults(); }) .catch((message) => { - console.log("Default reset failed. "+message); - }) - .finally(() => { - if (document.getElementById('auto_apply_def').checked) { - Features.applyDefaults(); - } - enableDisableElementsById(elements_to_block, true); - document.getElementById('reset_def').innerHTML = 'Reset feature defaults'; + console.log("Features update failed. "+message); }); } -function fillBoards(boards, default_board) { +function fillBoards(boards, default_board_id) { let output = document.getElementById('board_list'); output.innerHTML = '' + ''; let boardList = document.getElementById("board") boards.forEach(board => { let opt = document.createElement('option'); - opt.value = board; - opt.innerHTML = board; - opt.selected = (board === default_board); + opt.value = board.id; + opt.innerHTML = board.name; + opt.selected = (board.id === default_board_id); boardList.appendChild(opt); }); } @@ -487,13 +488,13 @@ var toggle_all_categories = (() => { return toggle_method; })(); -function createCategoryCard(category_name, options, expanded) { +function createCategoryCard(category_name, features_in_category, expanded) { options_html = ""; - options.forEach(option => { + features_in_category.forEach(feature => { options_html += '
' + - '' + - '
'; }); @@ -534,7 +535,7 @@ function createCategoryCard(category_name, options, expanded) { return card_element; } -function fillBuildOptions(buildOptions) { +function fillBuildOptions(features) { let output = document.getElementById('build_options'); output.innerHTML = `
@@ -543,7 +544,20 @@ function fillBuildOptions(buildOptions) {
`; - buildOptions.forEach((category, cat_idx) => { + // Group features by category + let categories_map = {}; + features.forEach(feature => { + const cat_name = feature.category.name; + if (!categories_map[cat_name]) { + categories_map[cat_name] = []; + } + categories_map[cat_name].push(feature); + }); + + // Convert to array and display + let categories = Object.entries(categories_map).map(([name, feats]) => ({name, features: feats})); + + categories.forEach((category, cat_idx) => { if (cat_idx % 4 == 0) { let new_row = document.createElement('div'); new_row.setAttribute('class', 'row'); @@ -552,7 +566,7 @@ function fillBuildOptions(buildOptions) { } let col_element = document.createElement('div'); col_element.setAttribute('class', 'col-md-3 col-sm-6 mb-2'); - col_element.appendChild(createCategoryCard(category['name'], category['options'], init_categories_expanded)); + col_element.appendChild(createCategoryCard(category.name, category.features, init_categories_expanded)); document.getElementById('category_'+parseInt(cat_idx/4)+'_row').appendChild(col_element); }); } @@ -617,27 +631,21 @@ function sortVersions(versions) { } versions.sort((a, b) => { - const version_a_type = a.title.split(" ")[0].toLowerCase(); - const version_b_type = b.title.split(" ")[0].toLowerCase(); - // sort the version types in order mentioned above - if (version_a_type != version_b_type) { - return order[version_a_type] - order[version_b_type]; + if (a.type != b.type) { + return order[a.type] - order[b.type]; } // for numbered versions, do reverse sorting to make sure recent versions come first - if (version_a_type == "stable" || version_b_type == "beta") { - const version_a_num = a.title.split(" ")[1]; - const version_b_num = b.title.split(" ")[1]; - - return compareVersionNums(version_a_num, version_b_num); + if (a.type == "stable" || b.type == "beta") { + return compareVersionNums(a.name.split(" ")[1], b.name.split(" ")[1]); } - return a.title.localeCompare(b.title); + return a.name.localeCompare(b.name); }); // Push the first stable version in the list to the top - const firstStableIndex = versions.findIndex(v => v.title.split(" ")[0].toLowerCase() === "stable"); + const firstStableIndex = versions.findIndex(v => v.name.split(" ")[0].toLowerCase() === "stable"); if (firstStableIndex !== -1) { const stableVersion = versions.splice(firstStableIndex, 1)[0]; versions.unshift(stableVersion); @@ -655,8 +663,78 @@ function fillVersions(versions, version_to_select) { versions.forEach(version => { opt = document.createElement('option'); opt.value = version.id; - opt.innerHTML = version.title; + opt.innerHTML = version.name; opt.selected = (version.id === version_to_select); versionList.appendChild(opt); }); } + +// Handle form submission +async function handleFormSubmit(event) { + event.preventDefault(); + + const submitButton = document.getElementById('submit'); + const originalButtonText = submitButton.innerHTML; + + try { + // Disable submit button and show loading state + submitButton.disabled = true; + submitButton.innerHTML = 'Submitting...'; + + // Collect form data + const vehicle_id = document.getElementById('vehicle').value; + const version_id = document.getElementById('version').value; + const board_id = document.getElementById('board').value; + + // Collect selected features - checkboxes now have feature IDs directly + const selected_features = []; + const checkboxes = document.querySelectorAll('.feature-checkbox:checked'); + checkboxes.forEach(checkbox => { + // The checkbox ID is already the feature 
define (ID) + selected_features.push(checkbox.id); + }); + + // Create build request payload + const buildRequest = { + vehicle_id: vehicle_id, + version_id: version_id, + board_id: board_id, + selected_features: selected_features + }; + + // Send POST request to API + const response = await fetch('/api/v1/builds', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(buildRequest) + }); + + if (!response.ok) { + const errorData = await response.json(); + throw new Error(errorData.detail || 'Failed to submit build'); + } + + const result = await response.json(); + + // Redirect to viewlog page + window.location.href = `/viewlog/${result.build_id}`; + + } catch (error) { + console.error('Error submitting build:', error); + alert('Failed to submit build: ' + error.message); + + // Re-enable submit button + submitButton.disabled = false; + submitButton.innerHTML = originalButtonText; + } +} + +// Initialize form submission handler +document.addEventListener('DOMContentLoaded', () => { + const buildForm = document.getElementById('build-form'); + if (buildForm) { + buildForm.addEventListener('submit', handleFormSubmit); + } +}); diff --git a/web/static/js/index.js b/web/static/js/index.js index aaba266..a72126e 100644 --- a/web/static/js/index.js +++ b/web/static/js/index.js @@ -8,7 +8,7 @@ function init() { function refresh_builds() { var xhr = new XMLHttpRequest(); - xhr.open('GET', "/builds"); + xhr.open('GET', "/api/v1/builds"); // disable cache, thanks to: https://stackoverflow.com/questions/22356025/force-cache-control-no-cache-in-chrome-via-xmlhttprequest-on-f5-reload xhr.setRequestHeader("Cache-Control", "no-cache, no-store, max-age=0"); @@ -82,8 +82,8 @@ function updateBuildsTable(builds) { ${build_info['progress']['state']} ${build_age} ${build_info['git_hash'].substring(0,8)} - ${build_info['board']} - ${build_info['vehicle_id']} + ${build_info['board']['name']} + ${build_info['vehicle']['name']} ${features_string.substring(0, 100)}... @@ -98,7 +98,7 @@ function updateBuildsTable(builds) { - @@ -151,7 +151,7 @@ const LogFetch = (() => { } var xhr = new XMLHttpRequest(); - xhr.open('GET', `/builds/${build_id}/artifacts/build.log`); + xhr.open('GET', `/api/v1/builds/${build_id}/logs`); // disable cache, thanks to: https://stackoverflow.com/questions/22356025/force-cache-control-no-cache-in-chrome-via-xmlhttprequest-on-f5-reload xhr.setRequestHeader("Cache-Control", "no-cache, no-store, max-age=0"); @@ -204,7 +204,7 @@ async function tryAutoDownload(buildId) { } try { - const apiUrl = `/builds/${buildId}` + const apiUrl = `/api/v1/builds/${buildId}` const response = await fetch(apiUrl); const data = await response.json(); @@ -212,7 +212,7 @@ async function tryAutoDownload(buildId) { if (previousState === "RUNNING" && currentState === "SUCCESS") { console.log("Build completed successfully. Starting download..."); - document.getElementById(`${buildId}-download-btn`).click(); + window.location.href = `/api/v1/builds/${buildId}/artifact`; } // Stop running if the build is in a terminal state diff --git a/web/templates/add_build.html b/web/templates/add_build.html index 1255975..6c97488 100644 --- a/web/templates/add_build.html +++ b/web/templates/add_build.html @@ -20,7 +20,7 @@ - + @@ -33,7 +33,7 @@
@@ -50,7 +50,7 @@ ADD NEW BUILD
-
+
@@ -91,17 +91,13 @@ - + diff --git a/web/templates/error.html b/web/templates/error.html deleted file mode 100644 index a1f5075..0000000 --- a/web/templates/error.html +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - ArduPilot Custom Firmware Builder - - - -

ArduPilot Custom Firmware Builder

-

Error Occured: {{ex}}

- - diff --git a/web/templates/index.html b/web/templates/index.html index 962bca6..d9436b7 100644 --- a/web/templates/index.html +++ b/web/templates/index.html @@ -20,7 +20,7 @@ - + @@ -34,7 +34,7 @@
- ArduPilot Custom Firmware Builder @@ -125,7 +125,7 @@ - + {% if token != None %}