diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..5229a18 --- /dev/null +++ b/.flake8 @@ -0,0 +1,14 @@ +[flake8] +max-line-length = 127 +max-complexity = 10 +exclude = + .git, + __pycache__, + .pytest_cache, + *.pyc, + .venv, + venv, + env, + base/, + redis_data/, + .vagrant/ diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml new file mode 100644 index 0000000..a54c106 --- /dev/null +++ b/.github/workflows/linting.yml @@ -0,0 +1,27 @@ +name: Linting + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + flake8: + runs-on: ubuntu-24.04 + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.10' + + - name: Install flake8 + run: pip install flake8 + + - name: Run flake8 + run: | + flake8 . --count --show-source --statistics diff --git a/README.md b/README.md index 25471ac..e91ff27 100644 --- a/README.md +++ b/README.md @@ -105,24 +105,28 @@ To run the ArduPilot Custom Firmware Builder locally without Docker, ensure you ``` 5. **Execute the Application:** - - For a development environment, run: + - For a development environment with auto-reload, run: ```bash - ./web/app.py + python3 web/main.py + ``` + To change the port, use the `--port` argument: + ```bash + python3 web/main.py --port 9000 ``` - For a production environment, use: ```bash - gunicorn web.wsgi:application + uvicorn web.main:app --host 0.0.0.0 --port 8080 ``` - During the coding and testing phases, use the development environment to easily debug and make changes. When deploying the app for end users, use the production environment to ensure better performance, scalability, and security. + During the coding and testing phases, use the development environment to easily debug and make changes with auto-reload enabled. When deploying the app for end users, use the production environment to ensure better performance, scalability, and security. 
- The application will automatically set up the required base directory at `./base` upon first execution. You may customize this path by using the `--basedir` option with the above commands or by setting the `CBS_BASEDIR` environment variable. + The application will automatically set up the required base directory at `./base` upon first execution. You may customize this path by setting the `CBS_BASEDIR` environment variable. 6. **Access the Web Interface:** - Once the application is running, you can access the interface in your web browser at http://localhost:5000 if running directly using app.py (development environment), or at http://localhost:8000 if using Gunicorn (production environment). + Once the application is running, you can access the interface in your web browser at http://localhost:8080. - To change the default port when running with app.py, modify the `app.run()` call in web/app.py file by passing `port=` as an argument. For Gunicorn, refer to the [commonly used arguments](https://docs.gunicorn.org/en/latest/run.html#commonly-used-arguments) section of the Gunicorn documentation to specify a different port. + The default port is 8080, or the value of the `WEB_PORT` environment variable if set. You can override this by passing the `--port` argument when running the application directly (e.g., `python3 web/main.py --port 9000`) or when using uvicorn (e.g., `uvicorn web.main:app --port 5000`). Refer to the [uvicorn documentation](https://www.uvicorn.org/) for additional configuration options. 
## Directory Structure The default directory structure is established as follows: diff --git a/build_manager/manager.py b/build_manager/manager.py index 4143f64..315025f 100644 --- a/build_manager/manager.py +++ b/build_manager/manager.py @@ -15,6 +15,7 @@ class BuildState(Enum): SUCCESS = 2 FAILURE = 3 ERROR = 4 + TIMED_OUT = 5 class BuildProgress: @@ -44,6 +45,7 @@ def to_dict(self) -> dict: class BuildInfo: def __init__(self, vehicle_id: str, + version_id: str, remote_info: RemoteInfo, git_hash: str, board: str, @@ -55,6 +57,7 @@ def __init__(self, Parameters: vehicle_id (str): The vehicle ID associated with the build. + version_id (str): The version ID associated with the build. remote_info (RemoteInfo): The remote repository containing the source commit to build on. git_hash (str): The git commit hash to build on. @@ -62,6 +65,7 @@ def __init__(self, selected_features (set): Set of features selected for the build. """ self.vehicle_id = vehicle_id + self.version_id = version_id self.remote_info = remote_info self.git_hash = git_hash self.board = board @@ -71,16 +75,19 @@ def __init__(self, percent=0 ) self.time_created = time.time() + self.time_started = None # when build state becomes RUNNING def to_dict(self) -> dict: return { 'vehicle_id': self.vehicle_id, + 'version_id': self.version_id, 'remote_info': self.remote_info.to_dict(), 'git_hash': self.git_hash, 'board': self.board, 'selected_features': list(self.selected_features), 'progress': self.progress.to_dict(), 'time_created': self.time_created, + 'time_started': getattr(self, 'time_started', None), } @@ -353,6 +360,27 @@ def __update_build_info(self, keepttl=True ) + def update_build_time_started(self, + build_id: str, + time_started: float) -> None: + """ + Update the build's time_started timestamp. + + Parameters: + build_id (str): The ID of the build to update. + time_started (float): The timestamp when the build started running. 
+ """ + build_info = self.get_build_info(build_id=build_id) + + if build_info is None: + raise ValueError(f"Build with id {build_id} not found.") + + build_info.time_started = time_started + self.__update_build_info( + build_id=build_id, + build_info=build_info + ) + def update_build_progress_percent(self, build_id: str, percent: int) -> None: diff --git a/build_manager/progress_updater.py b/build_manager/progress_updater.py index c6ddd35..ee15504 100644 --- a/build_manager/progress_updater.py +++ b/build_manager/progress_updater.py @@ -6,6 +6,9 @@ BuildManager as bm, BuildState ) +import time + +CBS_BUILD_TIMEOUT_SEC = int(os.getenv('CBS_BUILD_TIMEOUT_SEC', 900)) class BuildProgressUpdater: @@ -157,6 +160,28 @@ def __refresh_running_build_state(self, build_id: str) -> BuildState: raise RuntimeError( "This method should only be called for running builds." ) + # Set time_started if not already set + if build_info.time_started is None: + start_time = time.time() + bm.get_singleton().update_build_time_started( + build_id=build_id, + time_started=start_time + ) + self.logger.info( + f"Build {build_id} started running at {start_time}" + ) + build_info.time_started = start_time + + # Check for timeout + elapsed = time.time() - build_info.time_started + if elapsed > CBS_BUILD_TIMEOUT_SEC: + self.logger.warning( + f"Build {build_id} timed out after {elapsed:.0f} seconds" + ) + build_info.error_message = ( + f"Build exceeded {CBS_BUILD_TIMEOUT_SEC // 60} minute timeout" + ) + return BuildState.TIMED_OUT # Builder ships the archive post completion # This is irrespective of SUCCESS or FAILURE @@ -213,6 +238,9 @@ def __update_build_percent(self, build_id: str) -> None: elif current_state == BuildState.ERROR: # Keep existing percentage pass + elif current_state == BuildState.TIMED_OUT: + # Keep existing percentage + pass else: raise Exception("Unhandled BuildState.") @@ -259,6 +287,9 @@ def __update_build_state(self, build_id: str) -> None: elif current_state == 
BuildState.ERROR: # ERROR is a conclusive state pass + elif current_state == BuildState.TIMED_OUT: + # TIMED_OUT is a conclusive state + pass else: raise Exception("Unhandled BuildState.") diff --git a/builder/builder.py b/builder/builder.py index e9deb70..7a16cb1 100644 --- a/builder/builder.py +++ b/builder/builder.py @@ -14,6 +14,8 @@ ) from pathlib import Path +CBS_BUILD_TIMEOUT_SEC = int(os.getenv('CBS_BUILD_TIMEOUT_SEC', 900)) + class Builder: """ @@ -366,7 +368,9 @@ def __build(self, build_id: str) -> None: logpath = bm.get_singleton().get_build_log_path(build_id) with open(logpath, "a") as build_log: # Get vehicle object - vehicle = vehm.get_singleton().get_vehicle_by_id(build_info.vehicle_id) + vehicle = vehm.get_singleton().get_vehicle_by_id( + build_info.vehicle_id + ) # Log initial configuration build_log.write( @@ -375,52 +379,65 @@ def __build(self, build_id: str) -> None: ) build_log.flush() - # Run the build steps - self.logger.info("Running waf configure") - build_log.write("Running waf configure\n") - build_log.flush() - subprocess.run( - [ - "python3", - "./waf", - "configure", - "--board", - build_info.board, - "--out", - self.__get_path_to_build_dir(build_id), - "--extra-hwdef", - self.__get_path_to_extra_hwdef(build_id), - ], - cwd=self.__get_path_to_build_src(build_id), - stdout=build_log, - stderr=build_log, - shell=False, - ) - - self.logger.info("Running clean") - build_log.write("Running clean\n") - build_log.flush() - subprocess.run( - ["python3", "./waf", "clean"], - cwd=self.__get_path_to_build_src(build_id), - stdout=build_log, - stderr=build_log, - shell=False, - ) - - self.logger.info("Running build") - build_log.write("Running build\n") - build_log.flush() - build_command = vehicle.waf_build_command - subprocess.run( - ["python3", "./waf", build_command], - cwd=self.__get_path_to_build_src(build_id), - stdout=build_log, - stderr=build_log, - shell=False, - ) - build_log.write("done build\n") - build_log.flush() + try: + # Run the 
build steps + self.logger.info("Running waf configure") + build_log.write("Running waf configure\n") + build_log.flush() + subprocess.run( + [ + "python3", + "./waf", + "configure", + "--board", + build_info.board, + "--out", + self.__get_path_to_build_dir(build_id), + "--extra-hwdef", + self.__get_path_to_extra_hwdef(build_id), + ], + cwd=self.__get_path_to_build_src(build_id), + stdout=build_log, + stderr=build_log, + shell=False, + timeout=CBS_BUILD_TIMEOUT_SEC, + ) + + self.logger.info("Running clean") + build_log.write("Running clean\n") + build_log.flush() + subprocess.run( + ["python3", "./waf", "clean"], + cwd=self.__get_path_to_build_src(build_id), + stdout=build_log, + stderr=build_log, + shell=False, + timeout=CBS_BUILD_TIMEOUT_SEC, + ) + + self.logger.info("Running build") + build_log.write("Running build\n") + build_log.flush() + build_command = vehicle.waf_build_command + subprocess.run( + ["python3", "./waf", build_command], + cwd=self.__get_path_to_build_src(build_id), + stdout=build_log, + stderr=build_log, + shell=False, + timeout=CBS_BUILD_TIMEOUT_SEC, + ) + build_log.write("done build\n") + build_log.flush() + except subprocess.TimeoutExpired: + self.logger.error( + f"Build {build_id} timed out after " + f"{CBS_BUILD_TIMEOUT_SEC} seconds." 
+ ) + build_log.write( + f"Build timed out after {CBS_BUILD_TIMEOUT_SEC} seconds.\n" + ) + build_log.flush() def shutdown(self) -> None: """ diff --git a/docker-compose.yml b/docker-compose.yml index cb55812..385df2c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -19,14 +19,15 @@ services: CBS_LOG_LEVEL: ${CBS_LOG_LEVEL:-INFO} CBS_ENABLE_INBUILT_BUILDER: 0 CBS_GITHUB_ACCESS_TOKEN: ${CBS_GITHUB_ACCESS_TOKEN} + CBS_REMOTES_RELOAD_TOKEN: ${CBS_REMOTES_RELOAD_TOKEN} PYTHONPATH: /app - GUNICORN_CMD_ARGS: --bind=0.0.0.0:80 --timeout=300 + CBS_BUILD_TIMEOUT_SEC: ${CBS_BUILD_TIMEOUT_SEC:-900} volumes: - ./base:/base:rw depends_on: - redis ports: - - "127.0.0.1:${WEB_PORT:-8080}:80" + - "127.0.0.1:${WEB_PORT:-8080}:8080" builder: build: @@ -40,6 +41,7 @@ services: CBS_BASEDIR: /base CBS_LOG_LEVEL: ${CBS_LOG_LEVEL:-INFO} PYTHONPATH: /app + CBS_BUILD_TIMEOUT_SEC: ${CBS_BUILD_TIMEOUT_SEC:-900} volumes: - ./base:/base:rw depends_on: diff --git a/metadata_manager/ap_src_meta_fetcher.py b/metadata_manager/ap_src_meta_fetcher.py index ea0a604..d6d19a3 100644 --- a/metadata_manager/ap_src_meta_fetcher.py +++ b/metadata_manager/ap_src_meta_fetcher.py @@ -5,7 +5,21 @@ import logging import ap_git import os - +import re + +class Board: + def __init__(self, id: str, name: str, attributes: dict): + self.id = id + self.name = name + self.attributes = dict(attributes) + + def to_dict(self) -> dict: + out = { + "id": self.id, + "name": self.name, + "attributes": self.attributes, + } + return out class APSourceMetadataFetcher: """ @@ -58,7 +72,9 @@ def __init__(self, ap_repo: ap_git.GitRepo, self.logger.info( f"Redis connection established with {redis_host}:{redis_port}" ) - self.__boards_key_prefix = "boards-" + # bump version to invalidate stale board metadata in cache + # (schema now includes has_can and defaults are stricter) + self.__boards_key_prefix = "boards-v4-" self.__build_options_key_prefix = "bopts-" APSourceMetadataFetcher.__singleton = self @@ -190,8 +206,8 @@ 
def __get_boards_at_commit_from_cache(self, Returns: tuple: A tuple of two lists in order: - - A list contains boards for NON-'ap_periph' targets. - - A list contains boards for the 'ap_periph' target. + - A list of Boards for NON-'ap_periph' targets. + - A list of Boards for the 'ap_periph' target. Raises: RuntimeError: If the method is called when caching is disabled. @@ -220,6 +236,11 @@ def __get_boards_at_commit_from_cache(self, self.logger.exception(e) return None + # Invalidate stale cache entries that contain dicts instead of Board objects + if non_periph_boards and isinstance(non_periph_boards[0], dict): + self.logger.debug("Stale cache entry found, treating as cache miss") + return None + return ( non_periph_boards, periph_boards @@ -237,8 +258,78 @@ def __exclude_boards_matching_patterns(self, boards: list, patterns: list): ret.append(b) return ret + def __board_has_can(self, hwdef_path: str) -> bool: + """Return True when the hwdef file advertises CAN support.""" + if not hwdef_path or not os.path.isfile(hwdef_path): + self.logger.debug( + "hwdef.dat not found while checking CAN support: %s", + hwdef_path, + ) + return False + + try: + with open(hwdef_path, "r", encoding="utf-8", errors="ignore") as hwdef_file: + hwdef_contents = hwdef_file.read() + except OSError as exc: + self.logger.warning( + "Failed to read hwdef.dat at %s: %s", + hwdef_path, + exc, + ) + return False + + combined_contents = hwdef_contents + + # If the hwdef uses an include *.inc, read that file as well so + # CAN keywords defined there are detected (e.g., CubeOrange). 
+ include_match = re.search(r"^\s*include\s+(.+\.inc)\s*$", hwdef_contents, re.MULTILINE) + if include_match: + include_name = include_match.group(1).strip() + include_path = os.path.join(os.path.dirname(hwdef_path), include_name) + if os.path.isfile(include_path): + try: + with open(include_path, "r", encoding="utf-8", errors="ignore") as inc_file: + combined_contents += "\n" + inc_file.read() + except OSError as exc: + self.logger.warning( + "Failed to read included hwdef %s: %s", + include_path, + exc, + ) + + return ( + "CAN1" in combined_contents + or "HAL_NUM_CAN_IFACES" in combined_contents + or "CAN_P1_DRIVER" in combined_contents + or "CAN_D1_DRIVER" in combined_contents + ) + + def __build_board_metadata(self, board_names: list[str], hwdef_dir: str) -> list[Board]: + board_data: list[Board] = [] + for board_name in board_names: + hwdef_path = None + if hwdef_dir: + candidate_path = os.path.join(hwdef_dir, board_name, "hwdef.dat") + if os.path.isfile(candidate_path): + hwdef_path = candidate_path + else: + self.logger.debug( + "hwdef.dat not found for board %s at %s", + board_name, + candidate_path, + ) + + has_can = self.__board_has_can(hwdef_path) if hwdef_path else False + board = Board( + id=board_name, + name=board_name, + attributes={"has_can": has_can}, + ) + board_data.append(board) + return board_data + def __get_boards_at_commit_from_repo(self, remote: str, - commit_ref: str) -> tuple[list, list]: + commit_ref: str) -> tuple[list[Board], list[Board]]: """ Returns the tuple of boards (for both non-periph and periph targets, in order) for a given commit from the git repo. @@ -249,8 +340,8 @@ def __get_boards_at_commit_from_repo(self, remote: str, Returns: tuple: A tuple of two lists in order: - - A list contains boards for NON-'ap_periph' targets. - - A list contains boards for the 'ap_periph' target. + - A list of Boards for NON-'ap_periph' targets. + - A list of Boards for the 'ap_periph' target. 
""" with self.repo.get_checkout_lock(): self.repo.checkout_remote_commit_ref( @@ -270,29 +361,31 @@ def __get_boards_at_commit_from_repo(self, remote: str, ) mod = importlib.util.module_from_spec(spec) spec.loader.exec_module(mod) + board_list = mod.BoardList() + hwdef_dir = getattr(board_list, "hwdef_dir", None) non_periph_boards = mod.AUTOBUILD_BOARDS periph_boards = mod.AP_PERIPH_BOARDS self.logger.debug(f"non_periph_boards raw: {non_periph_boards}") self.logger.debug(f"periph_boards raw: {periph_boards}") - non_periph_boards = self.__exclude_boards_matching_patterns( - boards=non_periph_boards, - patterns=['fmuv*', 'SITL*'], - ) - self.logger.debug(f"non_periph_boards filtered: {non_periph_boards}") + non_periph_boards = self.__exclude_boards_matching_patterns( + boards=non_periph_boards, + patterns=['fmuv*', 'SITL*'], + ) + self.logger.debug(f"non_periph_boards filtered: {non_periph_boards}") - non_periph_boards_sorted = sorted(non_periph_boards) - periph_boards_sorted = sorted(periph_boards) + non_periph_boards_sorted = sorted(non_periph_boards) + periph_boards_sorted = sorted(periph_boards) - self.logger.debug( - f"non_periph_boards sorted: {non_periph_boards_sorted}" - ) - self.logger.debug(f"periph_boards sorted: {periph_boards_sorted}") + self.logger.debug( + f"non_periph_boards sorted: {non_periph_boards_sorted}" + ) + self.logger.debug(f"periph_boards sorted: {periph_boards_sorted}") - return ( - non_periph_boards_sorted, - periph_boards_sorted, - ) + return ( + self.__build_board_metadata(non_periph_boards_sorted, hwdef_dir), + self.__build_board_metadata(periph_boards_sorted, hwdef_dir), + ) def __get_build_options_at_commit_from_repo(self, remote: str, @@ -334,7 +427,7 @@ def __get_build_options_at_commit_from_repo(self, return build_options def __get_boards_at_commit(self, remote: str, - commit_ref: str) -> tuple[list, list]: + commit_ref: str) -> tuple[list[Board], list[Board]]: """ Retrieves lists of boards available for building at a specified 
commit for both NON-'ap_periph' and ap_periph targets @@ -350,8 +443,8 @@ def __get_boards_at_commit(self, remote: str, Returns: tuple: A tuple of two lists in order: - - A list contains boards for NON-'ap_periph' targets. - - A list contains boards for the 'ap_periph' target. + - A list of Boards for NON-'ap_periph' targets. + - A list of Boards for the 'ap_periph' target. """ tstart = time.time() if not self.caching_enabled: @@ -376,7 +469,8 @@ def __get_boards_at_commit(self, remote: str, if cached_boards: boards = cached_boards - else: + + if not cached_boards or boards is None: self.logger.debug( "Cache miss. Fetching boards from repo for " f"commit {commid_id}." @@ -396,7 +490,7 @@ def __get_boards_at_commit(self, remote: str, return boards def get_boards(self, remote: str, commit_ref: str, - vehicle_id: str) -> list: + vehicle_id: str) -> list[Board]: """ Returns a list of boards available for building at a specified commit for given vehicle. @@ -407,7 +501,7 @@ def get_boards(self, remote: str, commit_ref: str, vehicle_id (str): The vehicle ID to get the boards list for. Returns: - list: A list of boards. + list: A list of Boards. """ non_periph_boards, periph_boards = self.__get_boards_at_commit( remote=remote, @@ -478,6 +572,79 @@ def get_build_options_at_commit(self, remote: str, ) return build_options + def get_board_defaults_from_fw_server( + self, + artifacts_url: str, + board_id: str, + vehicle_id: str = None, + ) -> dict: + """ + Fetch board defaults from firmware.ardupilot.org features.txt. + + The features.txt file contains lines like: + - FEATURE_NAME (enabled features) + - !FEATURE_NAME (disabled features) + + Parameters: + artifacts_url (str): Base URL for build artifacts for a version. 
+ board_id (str): Board identifier + vehicle_id (str): Vehicle identifier + (for special handling like Heli) + + Returns: + dict: Dictionary mapping feature define to state + (1 for enabled, 0 for disabled), or None if fetch fails + """ + import requests + + # Heli builds are stored under a separate folder + artifacts_subdir = board_id + if vehicle_id == "Heli": + artifacts_subdir += "-heli" + + features_txt_url = f"{artifacts_url}/{artifacts_subdir}/features.txt" + + try: + response = requests.get(features_txt_url, timeout=30) + response.raise_for_status() + + feature_states = {} + enabled_count = 0 + disabled_count = 0 + + for line in response.text.splitlines(): + line = line.strip() + + # Skip empty lines and comments + if not line or line.startswith('#'): + continue + + # Check if feature is disabled (prefixed with !) + if line.startswith('!'): + feature_name = line[1:].strip() + if feature_name: + feature_states[feature_name] = 0 + disabled_count += 1 + else: + # Enabled feature + if line: + feature_states[line] = 1 + enabled_count += 1 + + self.logger.info( + f"Fetched board defaults from firmware server: " + f"{enabled_count} enabled, " + f"{disabled_count} disabled" + ) + + return feature_states + + except requests.RequestException as e: + self.logger.warning( + f"Failed to fetch board defaults from {features_txt_url}: {e}" + ) + return None + @staticmethod def get_singleton(): return APSourceMetadataFetcher.__singleton diff --git a/web/Dockerfile b/web/Dockerfile index aacb445..fa3c68c 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -1,7 +1,8 @@ FROM python:3.10.16-slim-bookworm RUN apt-get update \ - && apt-get install -y --no-install-recommends git gosu + && apt-get install -y --no-install-recommends git gosu \ + && rm -rf /var/lib/apt/lists/* RUN groupadd -g 999 ardupilot && \ useradd -u 999 -g 999 -m ardupilot --shell /bin/false && \ @@ -12,5 +13,9 @@ COPY --chown=ardupilot:ardupilot . 
/app WORKDIR /app/web RUN pip install --no-cache-dir -r requirements.txt +ENV PYTHONPATH=/app + +EXPOSE 8080 + ENTRYPOINT ["./docker-entrypoint.sh"] -CMD ["gunicorn", "wsgi:application"] +CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8080"] diff --git a/web/api/v1/__init__.py b/web/api/v1/__init__.py new file mode 100644 index 0000000..6cbe7be --- /dev/null +++ b/web/api/v1/__init__.py @@ -0,0 +1,4 @@ +"""API v1 module.""" +from .router import router + +__all__ = ["router"] diff --git a/web/api/v1/admin.py b/web/api/v1/admin.py new file mode 100644 index 0000000..5ce7e89 --- /dev/null +++ b/web/api/v1/admin.py @@ -0,0 +1,81 @@ +from fastapi import APIRouter, HTTPException, Depends, status +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials + +from schemas import RefreshRemotesResponse +from services.admin import get_admin_service, AdminService + + +router = APIRouter(prefix="/admin", tags=["admin"]) +security = HTTPBearer() + + +async def verify_admin_token( + credentials: HTTPAuthorizationCredentials = Depends(security), + admin_service: AdminService = Depends(get_admin_service) +) -> None: + """ + Verify the bearer token for admin authentication. 
+ + Args: + credentials: HTTP authorization credentials from request header + admin_service: Admin service instance + + Raises: + 401: Invalid or missing token + 500: Server configuration error (token not configured) + """ + token = credentials.credentials + try: + if not await admin_service.verify_token(token): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid authentication token" + ) + except RuntimeError as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=str(e) + ) + + +@router.post( + "/refresh_remotes", + response_model=RefreshRemotesResponse, + responses={ + 401: {"description": "Invalid or missing authentication token"}, + 500: { + "description": ( + "Server configuration error (token not configured) " + "or refresh operation failed" + ) + } + } +) +async def refresh_remotes( + _: None = Depends(verify_admin_token), + admin_service: AdminService = Depends(get_admin_service) +): + """ + Trigger a hot reset/refresh of remote metadata. 
+ + This endpoint requires bearer token authentication in the Authorization + header: + ``` + Authorization: Bearer + ``` + + Returns: + RefreshRemotesResponse: List of remotes that were refreshed + + Raises: + 401: Invalid or missing authentication token + 500: Refresh operation failed + """ + try: + remotes = await admin_service.refresh_remotes() + return RefreshRemotesResponse(remotes=remotes) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Failed to refresh remotes: {str(e)}" + ) diff --git a/web/api/v1/builds.py b/web/api/v1/builds.py new file mode 100644 index 0000000..d0f0ab9 --- /dev/null +++ b/web/api/v1/builds.py @@ -0,0 +1,225 @@ +from typing import List, Optional +from fastapi import ( + APIRouter, + HTTPException, + Query, + Path, + status, + Depends, + Request +) +from fastapi.responses import FileResponse, PlainTextResponse + +from schemas import ( + BuildRequest, + BuildSubmitResponse, + BuildOut, +) +from services.builds import get_builds_service, BuildsService +from utils import RateLimitExceededException + +router = APIRouter(prefix="/builds", tags=["builds"]) + + +@router.post( + "", + response_model=BuildSubmitResponse, + status_code=status.HTTP_201_CREATED, + responses={ + 400: {"description": "Invalid build configuration"}, + 404: {"description": "Vehicle, board, or version not found"}, + 429: {"description": "Rate limit exceeded"} + } +) +async def create_build( + build_request: BuildRequest, + request: Request, + service: BuildsService = Depends(get_builds_service) +): + """ + Create a new build request. 
+ + Args: + build_request: Build configuration including vehicle, board, version, + and selected features + + Returns: + Simple response with build_id, URL, and status + + Raises: + 400: Invalid build configuration + 404: Vehicle, board, or version not found + 429: Rate limit exceeded + """ + try: + # Get client IP for rate limiting + forwarded_for = request.headers.get('X-Forwarded-For', None) + if forwarded_for: + client_ip = forwarded_for.split(',')[0].strip() + else: + client_ip = request.client.host if request.client else "unknown" + + return service.create_build(build_request, client_ip) + except RateLimitExceededException as e: + raise HTTPException( + status_code=status.HTTP_429_TOO_MANY_REQUESTS, + detail=str(e) + ) + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + raise HTTPException(status_code=400, detail=str(e)) + + +@router.get("", response_model=List[BuildOut]) +async def list_builds( + vehicle_id: Optional[str] = Query( + None, description="Filter by vehicle ID" + ), + board_id: Optional[str] = Query( + None, description="Filter by board ID" + ), + state: Optional[str] = Query( + None, + description="Filter by build state (PENDING, RUNNING, SUCCESS, " + "FAILURE, CANCELLED)" + ), + limit: int = Query( + 20, ge=1, le=100, description="Maximum number of builds to return" + ), + offset: int = Query( + 0, ge=0, description="Number of builds to skip" + ), + service: BuildsService = Depends(get_builds_service) +): + """ + Get list of builds with optional filters. 
+ + Args: + vehicle_id: Filter builds by vehicle + board_id: Filter builds by board + state: Filter builds by current state + limit: Maximum number of results + offset: Number of results to skip (for pagination) + + Returns: + List of builds matching the filters + """ + return service.list_builds( + vehicle_id=vehicle_id, + board_id=board_id, + state=state, + limit=limit, + offset=offset + ) + + +@router.get( + "/{build_id}", + response_model=BuildOut, + responses={ + 404: {"description": "Build not found"} + } +) +async def get_build( + build_id: str = Path(..., description="Unique build identifier"), + service: BuildsService = Depends(get_builds_service) +): + """ + Get details of a specific build. + + Args: + build_id: The unique build identifier + + Returns: + Complete build details including progress and status + + Raises: + 404: Build not found + """ + build = service.get_build(build_id) + if not build: + raise HTTPException( + status_code=404, + detail=f"Build with id '{build_id}' not found" + ) + return build + + +@router.get( + "/{build_id}/logs", + responses={ + 404: {"description": "Build not found or logs not available yet"} + } +) +async def get_build_logs( + build_id: str = Path(..., description="Unique build identifier"), + tail: Optional[int] = Query( + None, ge=1, description="Return only the last N lines" + ), + service: BuildsService = Depends(get_builds_service) +): + """ + Get build logs for a specific build. 
+ + Args: + build_id: The unique build identifier + tail: Optional number of last lines to return + + Returns: + Build logs as text + + Raises: + 404: Build not found + 404: Logs not available yet + """ + logs = service.get_build_logs(build_id, tail) + if logs is None: + raise HTTPException( + status_code=404, + detail=f"Logs not available for build '{build_id}'" + ) + return PlainTextResponse(content=logs) + + +@router.get( + "/{build_id}/artifact", + responses={ + 404: { + "description": ( + "Build not found or artifact not available " + ) + } + } +) +async def download_artifact( + build_id: str = Path(..., description="Unique build identifier"), + service: BuildsService = Depends(get_builds_service) +): + """ + Download the build artifact (firmware binary). + + Args: + build_id: The unique build identifier + + Returns: + Binary file download + + Raises: + 404: Build not found + 404: Artifact not available (build not completed successfully) + """ + artifact_path = service.get_artifact_path(build_id) + if not artifact_path: + raise HTTPException( + status_code=404, + detail=( + f"Artifact not available for build '{build_id}'. " + "Build may not be completed or successful." + ) + ) + return FileResponse( + path=artifact_path, + media_type='application/gzip', + filename=f"{build_id}.tar.gz" + ) diff --git a/web/api/v1/router.py b/web/api/v1/router.py new file mode 100644 index 0000000..9597591 --- /dev/null +++ b/web/api/v1/router.py @@ -0,0 +1,17 @@ +""" +Main API v1 router. + +This module aggregates all v1 API endpoints and provides a single router +to be included in the main FastAPI application. +""" +from fastapi import APIRouter + +from . 
import vehicles, builds, admin + +# Create the main v1 router +router = APIRouter(prefix="/v1") + +# Include all sub-routers +router.include_router(vehicles.router) +router.include_router(builds.router) +router.include_router(admin.router) diff --git a/web/api/v1/vehicles.py b/web/api/v1/vehicles.py new file mode 100644 index 0000000..b5168d2 --- /dev/null +++ b/web/api/v1/vehicles.py @@ -0,0 +1,253 @@ +from typing import List, Optional +from fastapi import APIRouter, Depends, HTTPException, Query, Path + +from schemas import ( + VehicleBase, + VersionOut, + BoardOut, + FeatureOut, +) +from services.vehicles import get_vehicles_service, VehiclesService + +router = APIRouter(prefix="/vehicles", tags=["vehicles"]) + + +@router.get("", response_model=List[VehicleBase]) +async def list_vehicles( + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get list of all available vehicles. + + Returns: + List of vehicles with their IDs and names. + """ + return service.get_all_vehicles() + + +@router.get( + "/{vehicle_id}", + response_model=VehicleBase, + responses={ + 404: {"description": "Vehicle not found"} + } +) +async def get_vehicle( + vehicle_id: str = Path(..., description="Unique vehicle identifier"), + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get a specific vehicle by ID. 
+ + Args: + vehicle_id: The vehicle identifier (e.g., 'copter', 'plane') + + Returns: + Vehicle details + """ + vehicle = service.get_vehicle(vehicle_id) + if not vehicle: + raise HTTPException( + status_code=404, + detail=f"Vehicle with id '{vehicle_id}' not found" + ) + return vehicle + + +# --- Version Endpoints --- +@router.get("/{vehicle_id}/versions", response_model=List[VersionOut]) +async def list_versions( + vehicle_id: str = Path(..., description="Vehicle identifier"), + type: Optional[str] = Query( + None, + description=( + "Filter by version type " + "(beta, stable, latest, tag)" + ) + ), + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get all versions available for a specific vehicle. + + Args: + vehicle_id: The vehicle identifier + type: Optional filter by version type + + Returns: + List of versions for the vehicle + """ + return service.get_versions(vehicle_id, type_filter=type) + + +@router.get( + "/{vehicle_id}/versions/{version_id}", + response_model=VersionOut, + responses={ + 404: {"description": "Version not found for the vehicle"} + } +) +async def get_version( + vehicle_id: str = Path(..., description="Vehicle identifier"), + version_id: str = Path(..., description="Version identifier"), + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get details of a specific version for a vehicle. 
+ + Args: + vehicle_id: The vehicle identifier + version_id: The version identifier + + Returns: + Version details + """ + version = service.get_version(vehicle_id, version_id) + if not version: + raise HTTPException( + status_code=404, + detail=( + f"Version '{version_id}' not found for " + f"vehicle '{vehicle_id}'" + ) + ) + return version + + +# --- Board Endpoints --- +@router.get( + "/{vehicle_id}/versions/{version_id}/boards", + response_model=List[BoardOut], + responses={ + 404: {"description": "No boards found for the vehicle version"} + } +) +async def list_boards( + vehicle_id: str = Path(..., description="Vehicle identifier"), + version_id: str = Path(..., description="Version identifier"), + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get all boards available for a specific vehicle version. + + Args: + vehicle_id: The vehicle identifier + version_id: The version identifier + + Returns: + List of boards for the vehicle version + """ + boards = service.get_boards(vehicle_id, version_id) + if not boards: + raise HTTPException( + status_code=404, + detail=( + f"No boards found for vehicle '{vehicle_id}' and " + f"version '{version_id}'" + ) + ) + + return boards + + +@router.get( + "/{vehicle_id}/versions/{version_id}/boards/{board_id}", + response_model=BoardOut, + responses={ + 404: {"description": "Board not found"} + } +) +async def get_board( + vehicle_id: str = Path(..., description="Vehicle identifier"), + version_id: str = Path(..., description="Version identifier"), + board_id: str = Path(..., description="Board identifier"), + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get details of a specific board for a vehicle version. 
+ + Args: + vehicle_id: The vehicle identifier + version_id: The version identifier + board_id: The board identifier + + Returns: + Board details + """ + board = service.get_board(vehicle_id, version_id, board_id) + if not board: + raise HTTPException( + status_code=404, + detail=f"Board '{board_id}' not found" + ) + return board + + +# --- Feature Endpoints --- +@router.get( + "/{vehicle_id}/versions/{version_id}/boards/{board_id}/features", + response_model=List[FeatureOut] +) +async def list_features( + vehicle_id: str = Path(..., description="Vehicle identifier"), + version_id: str = Path(..., description="Version identifier"), + board_id: str = Path(..., description="Board identifier"), + category_id: Optional[str] = Query( + None, description="Filter by category ID" + ), + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get all features with defaults for a specific vehicle/version/board. + + Args: + vehicle_id: The vehicle identifier + version_id: The version identifier + board_id: The board identifier + category_id: Optional filter by category + + Returns: + List of features with default settings for the board + """ + features = service.get_features( + vehicle_id, version_id, board_id, category_id + ) + return features + + +@router.get( + "/{vehicle_id}/versions/{version_id}/boards/{board_id}/" + "features/{feature_id}", + response_model=FeatureOut, + responses={ + 404: {"description": "Feature not found"} + } +) +async def get_feature( + vehicle_id: str = Path(..., description="Vehicle identifier"), + version_id: str = Path(..., description="Version identifier"), + board_id: str = Path(..., description="Board identifier"), + feature_id: str = Path(..., description="Feature identifier"), + service: VehiclesService = Depends(get_vehicles_service) +): + """ + Get details of a specific feature for a vehicle/version/board. 
+ + Args: + vehicle_id: The vehicle identifier + version_id: The version identifier + board_id: The board identifier + feature_id: The feature identifier + + Returns: + Feature details with default settings + """ + feature = service.get_feature( + vehicle_id, version_id, board_id, feature_id + ) + if not feature: + raise HTTPException( + status_code=404, + detail=f"Feature '{feature_id}' not found" + ) + return feature diff --git a/web/app.py b/web/app.py deleted file mode 100755 index 8a4aa43..0000000 --- a/web/app.py +++ /dev/null @@ -1,404 +0,0 @@ -#!/usr/bin/env python3 - -import os -from flask import Flask, render_template, request, send_from_directory, jsonify, redirect -from threading import Thread -import sys -import requests -import signal - -from logging.config import dictConfig - -dictConfig({ - 'version': 1, - 'formatters': {'default': { - 'format': '[%(asctime)s] %(levelname)s in %(module)s: %(message)s', - }}, - 'handlers': {'wsgi': { - 'class': 'logging.StreamHandler', - 'stream': 'ext://flask.logging.wsgi_errors_stream', - 'formatter': 'default' - }}, - 'root': { - 'level': os.getenv('CBS_LOG_LEVEL', default='INFO'), - 'handlers': ['wsgi'] - } -}) - -# let app.py know about the modules in the parent directory -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) -import ap_git -import metadata_manager -import build_manager -from builder import Builder - -# run at lower priority -os.nice(20) - -import optparse -parser = optparse.OptionParser("app.py") - -parser.add_option("", "--basedir", type="string", - default=os.getenv( - key="CBS_BASEDIR", - default=os.path.abspath(os.path.join(os.path.dirname(__file__),"..","base")) - ), - help="base directory") - -cmd_opts, cmd_args = parser.parse_args() - -# define directories -basedir = os.path.abspath(cmd_opts.basedir) -sourcedir = os.path.join(basedir, 'ardupilot') -outdir_parent = os.path.join(basedir, 'artifacts') -workdir_parent = os.path.join(basedir, 'workdir') - -appdir = 
os.path.dirname(__file__) - -builds_dict = {} -REMOTES = None - -repo = ap_git.GitRepo.clone_if_needed( - source="https://github.com/ardupilot/ardupilot.git", - dest=sourcedir, - recurse_submodules=True, -) - -vehicles_manager = metadata_manager.VehiclesManager() -ap_src_metadata_fetcher = metadata_manager.APSourceMetadataFetcher( - ap_repo=repo, - caching_enabled=True, - redis_host=os.getenv('CBS_REDIS_HOST', default='localhost'), - redis_port=os.getenv('CBS_REDIS_PORT', default='6379'), -) -versions_fetcher = metadata_manager.VersionsFetcher( - remotes_json_path=os.path.join(basedir, 'configs', 'remotes.json'), - ap_repo=repo -) - -manager = build_manager.BuildManager( - outdir=outdir_parent, - redis_host=os.getenv('CBS_REDIS_HOST', default='localhost'), - redis_port=os.getenv('CBS_REDIS_PORT', default='6379') -) -cleaner = build_manager.BuildArtifactsCleaner() -progress_updater = build_manager.BuildProgressUpdater() - -versions_fetcher.start() -cleaner.start() -progress_updater.start() - -# Initialize builder if enabled -builder = None -builder_thread = None -if os.getenv('CBS_ENABLE_INBUILT_BUILDER', default='1') == '1': - builder = Builder( - workdir=workdir_parent, - source_repo=repo - ) - builder_thread = Thread( - target=builder.run, - daemon=True - ) - builder_thread.start() - -app = Flask(__name__, template_folder='templates') - -# Setup graceful shutdown handler -def shutdown_handler(signum=None, frame=None): - """ - Gracefully shutdown all background services. 
- """ - app.logger.info("Shutting down application gracefully...") - - # Stop all TaskRunner instances - versions_fetcher.stop() - cleaner.stop() - progress_updater.stop() - - # Request builder shutdown if it's running - if builder is not None: - builder.shutdown() - - app.logger.info("All background services stopped successfully.") - sys.exit(0) - -# Register signal handlers for graceful shutdown -signal.signal(signal.SIGINT, shutdown_handler) -signal.signal(signal.SIGTERM, shutdown_handler) - -versions_fetcher.reload_remotes_json() -app.logger.info('Python version is: %s' % sys.version) - -def get_auth_token(): - try: - # try to read the secret token from the file - with open(os.path.join(basedir, 'secrets', 'reload_token'), 'r') as file: - token = file.read().strip() - return token - except (FileNotFoundError, PermissionError): - app.logger.error("Couldn't open token file. Checking environment for token.") - # if the file does not exist, check the environment variable - return os.getenv('CBS_REMOTES_RELOAD_TOKEN') - -@app.route('/refresh_remotes', methods=['POST']) -def refresh_remotes(): - auth_token = get_auth_token() - - if auth_token is None: - app.logger.error("Couldn't retrieve authorization token") - return "Internal Server Error", 500 - - token = request.get_json().get('token') - if not token or token != auth_token: - return "Unauthorized", 401 - - versions_fetcher.reload_remotes_json() - return "Successfully refreshed remotes", 200 - -@app.route('/generate', methods=['GET', 'POST']) -def generate(): - try: - version = request.form['version'] - vehicle = request.form['vehicle'] - - version_info = versions_fetcher.get_version_info( - vehicle_id=vehicle, - version_id=version - ) - - if version_info is None: - raise Exception("Version invalid or not listed to be built for given vehicle") - - remote_name = version_info.remote_info.name - commit_ref = version_info.commit_ref - - board = request.form['board'] - boards_at_commit = 
ap_src_metadata_fetcher.get_boards( - remote=remote_name, - commit_ref=commit_ref, - vehicle_id=vehicle, - ) - if board not in boards_at_commit: - raise Exception("bad board") - - all_features = ap_src_metadata_fetcher.get_build_options_at_commit( - remote=remote_name, - commit_ref=commit_ref - ) - - chosen_defines = { - feature.define - for feature in all_features - if request.form.get(feature.label) == "1" - } - - git_hash = repo.commit_id_for_remote_ref( - remote=remote_name, - commit_ref=commit_ref - ) - - build_info = build_manager.BuildInfo( - vehicle_id=vehicle, - remote_info=version_info.remote_info, - git_hash=git_hash, - board=board, - selected_features=chosen_defines - ) - - forwarded_for = request.headers.get('X-Forwarded-For', None) - if forwarded_for: - client_ip = forwarded_for.split(',')[0].strip() - else: - client_ip = request.remote_addr - - build_id = manager.submit_build( - build_info=build_info, - client_ip=client_ip, - ) - - app.logger.info('Redirecting to /viewlog') - return redirect('/viewlog/'+build_id) - - except Exception as ex: - app.logger.error(ex) - return render_template('error.html', ex=ex) - -@app.route('/add_build') -def add_build(): - app.logger.info('Rendering add_build.html') - return render_template('add_build.html') - - -def filter_build_options_by_category(build_options, category): - return sorted([f for f in build_options if f.category == category], key=lambda x: x.description.lower()) - -def parse_build_categories(build_options): - return sorted(list(set([f.category for f in build_options]))) - -@app.route('/', defaults={'token': None}, methods=['GET']) -@app.route('/viewlog/', methods=['GET']) -def home(token): - if token: - app.logger.info("Showing log for build id " + token) - app.logger.info('Rendering index.html') - return render_template('index.html', token=token) - -@app.route("/builds//artifacts/") -def download_file(build_id, name): - path = os.path.join( - basedir, - 'artifacts', - build_id, - ) - 
app.logger.info('Downloading %s/%s' % (path, name)) - return send_from_directory(path, name, as_attachment=False) - -@app.route("/boards_and_features//", methods=['GET']) -def boards_and_features(vehicle_id, version_id): - version_info = versions_fetcher.get_version_info( - vehicle_id=vehicle_id, - version_id=version_id - ) - - if version_info is None: - return "Bad request. Version not allowed to build for the vehicle.", 400 - - remote_name = version_info.remote_info.name - commit_reference = version_info.commit_ref - - app.logger.info('Board list and build options requested for %s %s' % (vehicle_id, version_id)) - # getting board list for the branch - with repo.get_checkout_lock(): - boards = ap_src_metadata_fetcher.get_boards( - remote=remote_name, - commit_ref=commit_reference, - vehicle_id=vehicle_id, - ) - - options = ap_src_metadata_fetcher.get_build_options_at_commit( - remote=remote_name, - commit_ref=commit_reference - ) # this is a list of Feature() objects defined in build_options.py - - # parse the set of categories from these objects - categories = parse_build_categories(options) - features = [] - for category in categories: - filtered_options = filter_build_options_by_category(options, category) - category_options = [] # options belonging to a given category - for option in filtered_options: - category_options.append({ - 'label' : option.label, - 'description' : option.description, - 'default' : option.default, - 'define' : option.define, - 'dependency' : option.dependency, - }) - features.append({ - 'name' : category, - 'options' : category_options, - }) - # creating result dictionary - result = { - 'boards' : boards, - 'default_board' : boards[0], - 'features' : features, - } - # return jsonified result dict - return jsonify(result) - -@app.route("/get_versions/", methods=['GET']) -def get_versions(vehicle_id): - versions = list() - for version_info in versions_fetcher.get_versions_for_vehicle(vehicle_id=vehicle_id): - if version_info.release_type 
== "latest": - title = f"Latest ({version_info.remote_info.name})" - else: - title = f"{version_info.release_type} {version_info.version_number} ({version_info.remote_info.name})" - versions.append({ - "title": title, - "id": version_info.version_id, - }) - - return jsonify(sorted(versions, key=lambda x: x['title'])) - -@app.route("/get_vehicles") -def get_vehicles(): - vehicles = [ - {"id": vehicle.id, "name": vehicle.name} - for vehicle in vehicles_manager.get_all_vehicles() - ] - return jsonify(sorted(vehicles, key=lambda x: x['id'])) - -@app.route("/get_defaults///", methods = ['GET']) -def get_deafults(vehicle_id, version_id, board_name): - vehicle = vehicles_manager.get_vehicle_by_id(vehicle_id) - if vehicle is None: - return "Invalid vehicle ID", 400 - # Heli is built on copter boards with -heli suffix - if vehicle_id == "heli": - board_name += "-heli" - - version_info = versions_fetcher.get_version_info( - vehicle_id=vehicle_id, - version_id=version_id - ) - - if version_info is None: - return "Bad request. Version is not allowed for builds for the %s." 
% vehicle.name, 400 - - artifacts_dir = version_info.ap_build_artifacts_url - - if artifacts_dir is None: - return "Couldn't find artifacts for requested release/branch/commit on ardupilot server", 404 - - url_to_features_txt = artifacts_dir + '/' + board_name + '/features.txt' - response = requests.get(url_to_features_txt, timeout=30) - - if not response.status_code == 200: - return ("Could not retrieve features.txt for given vehicle, version and board combination (Status Code: %d, url: %s)" % (response.status_code, url_to_features_txt), response.status_code) - # split response by new line character to get a list of defines - result = response.text.split('\n') - # omit the last two elements as they are always blank - return jsonify(result[:-2]) - -@app.route('/builds', methods=['GET']) -def get_all_builds(): - all_build_ids = manager.get_all_build_ids() - all_build_info = [ - { - **manager.get_build_info(build_id).to_dict(), - 'build_id': build_id - } - for build_id in all_build_ids - ] - - all_build_info_sorted = sorted( - all_build_info, - key=lambda x: x['time_created'], - reverse=True, - ) - - return ( - jsonify(all_build_info_sorted), - 200 - ) - -@app.route('/builds/', methods=['GET']) -def get_build_by_id(build_id): - if not manager.build_exists(build_id): - response = { - 'error': f'build with id {build_id} does not exist.', - } - return jsonify(response), 200 - - response = { - **manager.get_build_info(build_id).to_dict(), - 'build_id': build_id - } - - return jsonify(response), 200 - -if __name__ == '__main__': - app.run() diff --git a/web/core/__init__.py b/web/core/__init__.py new file mode 100644 index 0000000..1028ac4 --- /dev/null +++ b/web/core/__init__.py @@ -0,0 +1,10 @@ +""" +Core application components. 
+""" +from .config import get_settings +from .startup import initialize_application + +__all__ = [ + "get_settings", + "initialize_application", +] diff --git a/web/core/config.py b/web/core/config.py new file mode 100644 index 0000000..f322b0e --- /dev/null +++ b/web/core/config.py @@ -0,0 +1,85 @@ +""" +Application configuration and settings. +""" +import os +from pathlib import Path +from functools import lru_cache + + +class Settings: + """Application settings.""" + + def __init__(self): + # Application + self.app_name: str = "CustomBuild API" + self.app_version: str = "1.0.0" + self.debug: bool = False + + # Paths + self.base_dir: str = os.getenv( + "CBS_BASEDIR", + default=str(Path(__file__).parent.parent.parent.parent / "base") + ) + + # Redis + self.redis_host: str = os.getenv( + 'CBS_REDIS_HOST', + default='localhost' + ) + self.redis_port: str = os.getenv( + 'CBS_REDIS_PORT', + default='6379' + ) + + # Logging + self.log_level: str = os.getenv('CBS_LOG_LEVEL', default='INFO') + + # ArduPilot Git Repository + self.ap_git_url: str = "https://github.com/ardupilot/ardupilot.git" + + @property + def source_dir(self) -> str: + """ArduPilot source directory.""" + return os.path.join(self.base_dir, 'ardupilot') + + @property + def artifacts_dir(self) -> str: + """Build artifacts directory.""" + return os.path.join(self.base_dir, 'artifacts') + + @property + def outdir_parent(self) -> str: + """Build output directory (same as artifacts_dir).""" + return self.artifacts_dir + + @property + def workdir_parent(self) -> str: + """Work directory parent.""" + return os.path.join(self.base_dir, 'workdir') + + @property + def remotes_json_path(self) -> str: + """Path to remotes.json configuration.""" + return os.path.join(self.base_dir, 'configs', 'remotes.json') + + @property + def admin_token_file_path(self) -> str: + """Path to admin token secret file.""" + return os.path.join(self.base_dir, 'secrets', 'reload_token') + + @property + def enable_inbuilt_builder(self) -> 
bool: + """Whether to enable the inbuilt builder.""" + return os.getenv('CBS_ENABLE_INBUILT_BUILDER', '1') == '1' + + @property + def admin_token_env(self) -> str: + """Token required to reload remotes.json via API.""" + env = os.getenv('CBS_REMOTES_RELOAD_TOKEN', '') + return env if env != '' else None + + +@lru_cache() +def get_settings() -> Settings: + """Get cached settings instance.""" + return Settings() diff --git a/web/core/logging_config.py b/web/core/logging_config.py new file mode 100644 index 0000000..24a7bb0 --- /dev/null +++ b/web/core/logging_config.py @@ -0,0 +1,85 @@ +""" +Logging configuration for the application. +""" +import logging +import logging.config +import os +import sys + + +def setup_logging(log_level: str = None): + """ + Configure logging for the application and all imported modules. + + This must be called BEFORE importing any modules that use logging, + to ensure they all use the same logging configuration. + + Args: + log_level: The logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL). + If None, reads from CBS_LOG_LEVEL environment variable. 
+ """ + if log_level is None: + log_level = os.getenv('CBS_LOG_LEVEL', default='INFO') + + # Configure logging with dictConfig for consistency with Flask app + logging_config = { + 'version': 1, + 'disable_existing_loggers': False, + 'formatters': { + 'default': { + 'format': ( + '[%(asctime)s] %(levelname)s in %(module)s: ' + '%(message)s' + ), + 'datefmt': '%Y-%m-%d %H:%M:%S', + }, + 'detailed': { + 'format': ( + '[%(asctime)s] %(levelname)s ' + '[%(name)s.%(funcName)s:%(lineno)d] %(message)s' + ), + 'datefmt': '%Y-%m-%d %H:%M:%S', + }, + }, + 'handlers': { + 'console': { + 'class': 'logging.StreamHandler', + 'stream': sys.stdout, + 'formatter': 'default', + 'level': log_level.upper(), + }, + }, + 'root': { + 'level': log_level.upper(), + 'handlers': ['console'], + }, + 'loggers': { + 'uvicorn': { + 'level': 'INFO', + 'handlers': ['console'], + 'propagate': False, + }, + 'uvicorn.access': { + 'level': 'INFO', + 'handlers': ['console'], + 'propagate': False, + }, + 'uvicorn.error': { + 'level': 'INFO', + 'handlers': ['console'], + 'propagate': False, + }, + 'fastapi': { + 'level': log_level.upper(), + 'handlers': ['console'], + 'propagate': False, + }, + }, + } + + logging.config.dictConfig(logging_config) + + # Log that logging has been configured + logger = logging.getLogger(__name__) + logger.info(f"Logging configured with level: {log_level.upper()}") + logger.info(f"Python version: {sys.version}") diff --git a/web/core/startup.py b/web/core/startup.py new file mode 100644 index 0000000..08c7f7d --- /dev/null +++ b/web/core/startup.py @@ -0,0 +1,104 @@ +""" +Application startup utilities. + +Handles initial setup of required directories and configuration files. +This module ensures the application environment is properly configured +before the main application starts. +""" +import os +import logging + +logger = logging.getLogger(__name__) + + +def ensure_base_structure(base_dir: str) -> None: + """ + Ensure required base directory structure exists. 
+ + Creates necessary subdirectories for artifacts, configs, workdir, + and secrets if they don't already exist. + + Args: + base_dir: The base directory path (typically from CBS_BASEDIR) + """ + if not base_dir: + logger.warning("Base directory not specified, skipping initialization") + return + + # Define required subdirectories + subdirs = [ + 'artifacts', + 'configs', + 'workdir', + 'secrets', + ] + + for subdir in subdirs: + path = os.path.join(base_dir, subdir) + os.makedirs(path, exist_ok=True) + logger.debug(f"Ensured directory exists: {path}") + + +def ensure_remotes_json(base_dir: str, remote_name: str = "ardupilot") -> None: + """ + Ensure remotes.json configuration file exists. + + If the remotes.json file doesn't exist, creates it by fetching release + information from the specified remote. + + Args: + base_dir: The base directory path (typically from CBS_BASEDIR) + remote_name: The remote repository name to fetch releases from + """ + if not base_dir: + logger.warning( + "Base directory not specified, " + "skipping remotes.json initialization" + ) + return + + remotes_json_path = os.path.join(base_dir, 'configs', 'remotes.json') + + if not os.path.isfile(remotes_json_path): + logger.info( + f"remotes.json not found at {remotes_json_path}, " + f"creating it..." + ) + try: + from scripts import fetch_releases + fetch_releases.run( + base_dir=base_dir, + remote_name=remote_name, + ) + logger.info("Successfully created remotes.json") + except Exception as e: + logger.error(f"Failed to create remotes.json: {e}") + raise + else: + logger.debug(f"remotes.json already exists at {remotes_json_path}") + + +def initialize_application(base_dir: str) -> None: + """ + Initialize the application environment. 
+ + Performs all necessary setup operations including: + - Creating required directory structure + - Ensuring remotes.json configuration exists + + Args: + base_dir: The base directory path (typically from CBS_BASEDIR) + """ + if not base_dir: + logger.warning("CBS_BASEDIR not set, skipping initialization") + return + + logger.info(f"Initializing application with base directory: {base_dir}") + + # Ensure directory structure + ensure_base_structure(base_dir) + + # Ensure remotes.json exists + ensure_remotes_json(base_dir) + + logger.info("Application initialization complete") diff --git a/web/main.py b/web/main.py new file mode 100755 index 0000000..4f7adbf --- /dev/null +++ b/web/main.py @@ -0,0 +1,154 @@ +#!/usr/bin/env python3 + +""" +Main FastAPI application entry point. +""" +from contextlib import asynccontextmanager +from pathlib import Path +import threading +import os +import argparse + +from fastapi import FastAPI +from fastapi.staticfiles import StaticFiles + +from api.v1 import router as v1_router +from ui import router as ui_router +from core.config import get_settings +from core.startup import initialize_application +from core.logging_config import setup_logging + +import ap_git +import metadata_manager +import build_manager + +setup_logging() + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """ + Lifespan context manager for startup and shutdown events. 
+ """ + # Startup + settings = get_settings() + + initialize_application(settings.base_dir) + + repo = ap_git.GitRepo.clone_if_needed( + source=settings.ap_git_url, + dest=settings.source_dir, + recurse_submodules=True, + ) + + vehicles_manager = metadata_manager.VehiclesManager() + + ap_src_metadata_fetcher = metadata_manager.APSourceMetadataFetcher( + ap_repo=repo, + caching_enabled=True, + redis_host=settings.redis_host, + redis_port=settings.redis_port, + ) + + versions_fetcher = metadata_manager.VersionsFetcher( + remotes_json_path=settings.remotes_json_path, + ap_repo=repo + ) + versions_fetcher.reload_remotes_json() + + build_mgr = build_manager.BuildManager( + outdir=settings.outdir_parent, + redis_host=settings.redis_host, + redis_port=settings.redis_port + ) + + cleaner = build_manager.BuildArtifactsCleaner() + progress_updater = build_manager.BuildProgressUpdater() + + inbuilt_builder = None + inbuilt_builder_thread = None + if settings.enable_inbuilt_builder: + from builder.builder import Builder # noqa: E402 + inbuilt_builder = Builder( + workdir=settings.workdir_parent, + source_repo=repo + ) + inbuilt_builder_thread = threading.Thread( + target=inbuilt_builder.run, + daemon=True + ) + inbuilt_builder_thread.start() + + versions_fetcher.start() + cleaner.start() + progress_updater.start() + + app.state.repo = repo + app.state.ap_src_metadata_fetcher = ap_src_metadata_fetcher + app.state.versions_fetcher = versions_fetcher + app.state.vehicles_manager = vehicles_manager + app.state.build_manager = build_mgr + app.state.inbuilt_builder = inbuilt_builder + app.state.inbuilt_builder_thread = inbuilt_builder_thread + + yield + + # Shutdown + versions_fetcher.stop() + cleaner.stop() + progress_updater.stop() + if inbuilt_builder is not None: + inbuilt_builder.shutdown() + if (inbuilt_builder_thread is not None and + inbuilt_builder_thread.is_alive()): + inbuilt_builder_thread.join() + + +# Create FastAPI application +app = FastAPI( + title="CustomBuild 
API", + description="API for ArduPilot Custom Firmware Builder", + version="1.0.0", + docs_url="/api/docs", + redoc_url="/api/redoc", + lifespan=lifespan, +) + +# Mount static files +WEB_ROOT = Path(__file__).resolve().parent +app.mount( + "/static", + StaticFiles(directory=str(WEB_ROOT / "static")), + name="static" +) + +# Include API v1 router +app.include_router(v1_router, prefix="/api") + +# Include Web UI router +app.include_router(ui_router) + + +@app.get("/health") +async def health_check(): + """Health check endpoint.""" + return {"status": "healthy"} + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="CustomBuild API Server") + parser.add_argument( + "--port", + type=int, + default=int(os.getenv("WEB_PORT", 8080)), + help="Port to run the server on (default: 8080 or WEB_PORT env var)" + ) + args = parser.parse_args() + + import uvicorn + uvicorn.run( + "main:app", + host="0.0.0.0", + port=args.port, + reload=True + ) diff --git a/web/requirements.txt b/web/requirements.txt index cd57da3..789a92e 100644 --- a/web/requirements.txt +++ b/web/requirements.txt @@ -1,6 +1,10 @@ -flask -requests -jsonschema -dill==0.3.8 +fastapi==0.104.1 +uvicorn==0.40.0 +pydantic==2.5.0 redis==5.2.1 -gunicorn==21.1 +requests==2.31.0 +jsonschema==4.20.0 +dill==0.3.8 +packaging==25.0 +jinja2==3.1.2 +python-multipart==0.0.6 diff --git a/web/schemas/__init__.py b/web/schemas/__init__.py new file mode 100644 index 0000000..9202cac --- /dev/null +++ b/web/schemas/__init__.py @@ -0,0 +1,56 @@ +""" +API schemas for the CustomBuild application. + +This module exports all Pydantic models used for request/response validation +across the API endpoints. 
+""" + +# Admin schemas +from .admin import ( + RefreshRemotesResponse, +) + +# Build schemas +from .builds import ( + BuildVersionInfo, + RemoteInfo, + BuildProgress, + BuildRequest, + BuildSubmitResponse, + BuildOut, +) + +# Vehicle schemas +from .vehicles import ( + VehicleBase, + VersionBase, + VersionOut, + BoardBase, + BoardOut, + CategoryBase, + FeatureDefault, + FeatureBase, + FeatureOut, +) + +__all__ = [ + # Admin + "RefreshRemotesResponse", + # Builds + "BuildVersionInfo", + "RemoteInfo", + "BuildProgress", + "BuildRequest", + "BuildSubmitResponse", + "BuildOut", + # Vehicles + "VehicleBase", + "VersionBase", + "VersionOut", + "BoardBase", + "BoardOut", + "CategoryBase", + "FeatureDefault", + "FeatureBase", + "FeatureOut", +] diff --git a/web/schemas/admin.py b/web/schemas/admin.py new file mode 100644 index 0000000..ab8068e --- /dev/null +++ b/web/schemas/admin.py @@ -0,0 +1,12 @@ +from typing import List + +from pydantic import BaseModel, Field + + +# --- Refresh Remotes Response --- +class RefreshRemotesResponse(BaseModel): + """Response schema for remote refresh operation.""" + remotes: List[str] = Field( + ..., + description="List of remotes discovered in remotes.json file" + ) diff --git a/web/schemas/builds.py b/web/schemas/builds.py new file mode 100644 index 0000000..861d92c --- /dev/null +++ b/web/schemas/builds.py @@ -0,0 +1,74 @@ +from typing import List, Literal + +from pydantic import BaseModel, Field +from schemas.vehicles import VehicleBase, BoardBase, RemoteInfo + + +# --- Build Progress --- +class BuildProgress(BaseModel): + """Build progress and status information.""" + percent: int = Field( + ..., ge=0, le=100, description="Build completion percentage" + ) + state: Literal[ + "PENDING", "RUNNING", "SUCCESS", "FAILURE", "ERROR", "TIMED_OUT" + ] = Field(..., description="Current build state") + + +# --- Build Request --- +class BuildRequest(BaseModel): + """Schema for creating a new build request.""" + vehicle_id: str = Field( + ..., 
# --- Build Output ---
class BuildOut(BaseModel):
    """Complete build information output schema."""
    # Identifier assigned when the build was submitted.
    build_id: str = Field(..., description="Unique build identifier")
    vehicle: VehicleBase = Field(..., description="Target vehicle information")
    board: BoardBase = Field(..., description="Target board information")
    # Snapshot of the source version (remote + git hash) used for the build.
    version: BuildVersionInfo = Field(
        ..., description="Version information for this build"
    )
    # Defaults to an empty list rather than None so consumers can iterate.
    selected_features: List[str] = Field(
        default_factory=list,
        description="Enabled feature flags for this build"
    )
    progress: BuildProgress = Field(
        ..., description="Current build status and progress"
    )
    # Unix epoch seconds; listings sort newest-first on this value.
    time_created: float = Field(
        ..., description="Unix timestamp when build was created"
    )
# --- Versions ---
class VersionBase(BaseModel):
    """Core version metadata shared by version schemas."""
    id: str = Field(..., description="Unique version identifier")
    name: str = Field(..., description="Version display name")
    # Closed set of release channels accepted by the API.
    type: Literal["beta", "stable", "latest", "tag"] = Field(
        ..., description="Version type classification"
    )
    remote: RemoteInfo = Field(
        ..., description="Git remote repository information for the version"
    )
    # Optional: not every version carries an explicit git ref —
    # TODO(review): confirm which version types omit it.
    commit_ref: Optional[str] = Field(
        None, description="Git reference (tag, branch name, or commit SHA)"
    )
class FeatureOut(FeatureBase):
    """Feature with per-vehicle/version/board context and default state."""
    vehicle_id: str = Field(..., description="Associated vehicle identifier")
    version_id: str = Field(..., description="Associated version identifier")
    board_id: str = Field(..., description="Associated board identifier")
    # Default on/off state plus which data source supplied it.
    default: FeatureDefault = Field(
        ..., description="Default state for this feature on this board"
    )
    # Feature IDs this feature requires; empty when standalone.
    dependencies: list[str] = Field(
        default_factory=list,
        description="List of feature IDs that this feature depends on"
    )
+ + Args: + versions_fetcher: VersionsFetcher instance for managing remotes + """ + self.versions_fetcher = versions_fetcher + self.settings = get_settings() + + def get_auth_token(self) -> Optional[str]: + """ + Retrieve the authorization token from file or environment. + + Returns: + The authorization token if found, None otherwise + """ + try: + # Try to read the secret token from the file + token_file_path = self.settings.admin_token_file_path + with open(token_file_path, 'r') as file: + token = file.read().strip() + return token + except (FileNotFoundError, PermissionError) as e: + logger.error( + f"Couldn't open token file at " + f"{self.settings.admin_token_file_path}: {e}. " + "Checking environment for token." + ) + # If the file does not exist or no permission, check environment + return self.settings.admin_token_env + except Exception as e: + logger.error( + f"Unexpected error reading token file at " + f"{self.settings.admin_token_file_path}: {e}. " + "Checking environment for token." + ) + # For any other error, fall back to environment variable + return self.settings.admin_token_env + + async def verify_token(self, token: str) -> bool: + """ + Verify that the provided token matches the expected admin token. + + Args: + token: The token to verify + + Returns: + True if token is valid, False otherwise + + Raises: + RuntimeError: If admin token is not configured on server + """ + expected_token = self.get_auth_token() + + if expected_token is None: + logger.error("No admin token configured") + raise RuntimeError("Admin token not configured on server") + + return token == expected_token + + async def refresh_remotes(self) -> List[str]: + """ + Trigger a refresh of remote metadata. 
def get_admin_service(request: Request) -> AdminService:
    """
    Build an AdminService wired to the application's shared state.

    Args:
        request: FastAPI Request object

    Returns:
        AdminService instance initialized with app state dependencies
    """
    fetcher = request.app.state.versions_fetcher
    return AdminService(versions_fetcher=fetcher)
+""" +import logging +import os +from fastapi import Request +from typing import List, Optional + +from schemas import ( + BuildRequest, + BuildSubmitResponse, + BuildOut, + BuildProgress, + RemoteInfo, + BuildVersionInfo, +) +from schemas.vehicles import VehicleBase, BoardBase + +# Import external modules +# pylint: disable=wrong-import-position +import build_manager # noqa: E402 + +logger = logging.getLogger(__name__) + + +class BuildsService: + """Service for managing firmware builds.""" + + def __init__( + self, + build_manager=None, + versions_fetcher=None, + ap_src_metadata_fetcher=None, + repo=None, + vehicles_manager=None + ): + self.manager = build_manager + self.versions_fetcher = versions_fetcher + self.ap_src_metadata_fetcher = ap_src_metadata_fetcher + self.repo = repo + self.vehicles_manager = vehicles_manager + + def create_build( + self, + build_request: BuildRequest, + client_ip: str + ) -> BuildSubmitResponse: + """ + Create a new build request. + + Args: + build_request: Build configuration + client_ip: Client IP address for rate limiting + + Returns: + Simple response with build_id and URL + + Raises: + ValueError: If validation fails + """ + # Validate version_id + if not build_request.version_id: + raise ValueError("version_id is required") + + # Validate vehicle + vehicle_id = build_request.vehicle_id + if not vehicle_id: + raise ValueError("vehicle_id is required") + + # Get version info using version_id + version_info = self.versions_fetcher.get_version_info( + vehicle_id=vehicle_id, + version_id=build_request.version_id + ) + if version_info is None: + raise ValueError("Invalid version_id for vehicle") + + remote_name = version_info.remote_info.name + commit_ref = version_info.commit_ref + + # Validate remote + remote_info = self.versions_fetcher.get_remote_info(remote_name) + if remote_info is None: + raise ValueError(f"Remote {remote_name} is not whitelisted") + + # Validate board + board_name = build_request.board_id + if not 
board_name: + raise ValueError("board_id is required") + + # Check board exists at this version + with self.repo.get_checkout_lock(): + boards_at_commit = self.ap_src_metadata_fetcher.get_boards( + remote=remote_name, + commit_ref=commit_ref, + vehicle_id=vehicle_id, + ) + + if board_name not in boards_at_commit: + raise ValueError("Invalid board for this version") + + # Get git hash + git_hash = self.repo.commit_id_for_remote_ref( + remote=remote_name, + commit_ref=commit_ref + ) + + # Map feature labels (IDs from API) to defines + # (required by build manager) + selected_feature_defines = set() + if build_request.selected_features: + # Get build options to map labels to defines + with self.repo.get_checkout_lock(): + options = ( + self.ap_src_metadata_fetcher + .get_build_options_at_commit( + remote=remote_name, + commit_ref=commit_ref + ) + ) + + # Create label to define mapping + label_to_define = { + option.label: option.define for option in options + } + + # Map each selected feature label to its define + for feature_label in build_request.selected_features: + if feature_label in label_to_define: + selected_feature_defines.add( + label_to_define[feature_label] + ) + else: + logger.warning( + f"Feature label '{feature_label}' not found in " + f"build options for {vehicle_id} {remote_name} " + f"{commit_ref}" + ) + + # Create build info + build_info = build_manager.BuildInfo( + vehicle_id=vehicle_id, + version_id=build_request.version_id, + remote_info=remote_info, + git_hash=git_hash, + board=board_name, + selected_features=selected_feature_defines + ) + + # Submit build + build_id = self.manager.submit_build( + build_info=build_info, + client_ip=client_ip, + ) + + # Return simple submission response + return BuildSubmitResponse( + build_id=build_id, + url=f"/api/v1/builds/{build_id}", + status="submitted" + ) + + def list_builds( + self, + vehicle_id: Optional[str] = None, + board_id: Optional[str] = None, + state: Optional[str] = None, + limit: int = 20, + 
offset: int = 0 + ) -> List[BuildOut]: + """ + Get list of builds with optional filters. + + Args: + vehicle_id: Filter by vehicle + board_id: Filter by board + state: Filter by build state + limit: Maximum results + offset: Results to skip + + Returns: + List of builds + """ + all_build_ids = self.manager.get_all_build_ids() + all_builds = [] + + for build_id in all_build_ids: + build_info = self.manager.get_build_info(build_id) + if build_info is None: + continue + + # Apply filters + if (vehicle_id and + build_info.vehicle_id.lower() != vehicle_id.lower()): + continue + if board_id and build_info.board != board_id: + continue + if state and build_info.progress.state.name != state: + continue + + all_builds.append( + self._build_info_to_output(build_id, build_info) + ) + + # Sort by creation time (newest first) + all_builds.sort(key=lambda x: x.time_created, reverse=True) + + # Apply pagination + return all_builds[offset:offset + limit] + + def get_build(self, build_id: str) -> Optional[BuildOut]: + """ + Get details of a specific build. + + Args: + build_id: The unique build identifier + + Returns: + Build details or None if not found + """ + if not self.manager.build_exists(build_id): + return None + + build_info = self.manager.get_build_info(build_id) + if build_info is None: + return None + + return self._build_info_to_output(build_id, build_info) + + def get_build_logs( + self, + build_id: str, + tail: Optional[int] = None + ) -> Optional[str]: + """ + Get build logs for a specific build. 
+ + Args: + build_id: The unique build identifier + tail: Optional number of last lines to return + + Returns: + Build logs as text or None if not found/available + """ + if not self.manager.build_exists(build_id): + return None + + log_path = self.manager.get_build_log_path(build_id) + if not os.path.exists(log_path): + return None + + try: + with open(log_path, 'r') as f: + if tail: + # Read last N lines + lines = f.readlines() + return ''.join(lines[-tail:]) + else: + return f.read() + except Exception as e: + logger.error(f"Error reading log file for build {build_id}: {e}") + return None + + def get_artifact_path(self, build_id: str) -> Optional[str]: + """ + Get the path to the build artifact. + + Args: + build_id: The unique build identifier + + Returns: + Path to artifact or None if not available + """ + if not self.manager.build_exists(build_id): + return None + + build_info = self.manager.get_build_info(build_id) + if build_info is None: + return None + + # Only return artifact if build was successful + if build_info.progress.state.name != "SUCCESS": + return None + + artifact_path = self.manager.get_build_archive_path(build_id) + if os.path.exists(artifact_path): + return artifact_path + + return None + + def _build_info_to_output( + self, + build_id: str, + build_info + ) -> BuildOut: + """ + Convert BuildInfo object to BuildOut schema. 
+ + Args: + build_id: The build identifier + build_info: BuildInfo object from build_manager + + Returns: + BuildOut schema object + """ + # Convert build_manager.BuildProgress to schema BuildProgress + progress = BuildProgress( + percent=build_info.progress.percent, + state=build_info.progress.state.name + ) + + # Convert RemoteInfo + remote_info = RemoteInfo( + name=build_info.remote_info.name, + url=build_info.remote_info.url + ) + + # Map feature defines back to labels for API response + selected_feature_labels = [] + if build_info.selected_features: + try: + # Get build options to map defines back to labels + with self.repo.get_checkout_lock(): + options = ( + self.ap_src_metadata_fetcher + .get_build_options_at_commit( + remote=build_info.remote_info.name, + commit_ref=build_info.git_hash + ) + ) + + # Create define to label mapping + define_to_label = { + option.define: option.label for option in options + } + + # Map each selected feature define to its label + for feature_define in build_info.selected_features: + if feature_define in define_to_label: + selected_feature_labels.append( + define_to_label[feature_define] + ) + else: + # Fallback: use define if label not found + logger.warning( + f"Feature define '{feature_define}' not " + f"found in build options for build " + f"{build_id}" + ) + selected_feature_labels.append(feature_define) + except Exception as e: + logger.error( + f"Error mapping feature defines to labels for " + f"build {build_id}: {e}" + ) + # Fallback: use defines as-is + selected_feature_labels = list( + build_info.selected_features + ) + + vehicle = self.vehicles_manager.get_vehicle_by_id( + build_info.vehicle_id + ) + + return BuildOut( + build_id=build_id, + vehicle=VehicleBase( + id=build_info.vehicle_id, + name=vehicle.name + ), + board=BoardBase( + id=build_info.board, + name=build_info.board # Board name is same as board ID for now + ), + version=BuildVersionInfo( + id=build_info.version_id, + remote_info=remote_info, + 
def get_builds_service(request: Request) -> BuildsService:
    """
    Build a BuildsService wired to the application's shared state.

    Args:
        request: FastAPI Request object

    Returns:
        BuildsService instance initialized with app state dependencies
    """
    state = request.app.state
    return BuildsService(
        build_manager=state.build_manager,
        versions_fetcher=state.versions_fetcher,
        ap_src_metadata_fetcher=state.ap_src_metadata_fetcher,
        repo=state.repo,
        vehicles_manager=state.vehicles_manager,
    )
in sorted_vehicles + ] + + def get_vehicle(self, vehicle_id: str) -> Optional[VehicleBase]: + """Get a specific vehicle by ID.""" + vehicle = self.vehicles_manager.get_vehicle_by_id(vehicle_id) + if vehicle: + return VehicleBase(id=vehicle.id, name=vehicle.name) + return None + + def get_versions( + self, + vehicle_id: str, + type_filter: Optional[str] = None + ) -> List[VersionOut]: + """Get all versions available for a specific vehicle.""" + versions = [] + + for version_info in self.versions_fetcher.get_versions_for_vehicle( + vehicle_id=vehicle_id + ): + # Apply type filter if provided + if type_filter and version_info.release_type != type_filter: + continue + + if version_info.release_type == "latest": + title = f"Latest ({version_info.remote_info.name})" + else: + rel_type = version_info.release_type + ver_num = version_info.version_number + remote = version_info.remote_info.name + title = f"{rel_type} {ver_num} ({remote})" + + versions.append(VersionOut( + id=version_info.version_id, + name=title, + type=version_info.release_type, + remote=RemoteInfo( + name=version_info.remote_info.name, + url=version_info.remote_info.url, + ), + commit_ref=version_info.commit_ref, + vehicle_id=vehicle_id, + )) + + # Sort by name + return sorted(versions, key=lambda x: x.name) + + def get_version( + self, + vehicle_id: str, + version_id: str + ) -> Optional[VersionOut]: + """Get details of a specific version for a vehicle.""" + versions = self.get_versions(vehicle_id) + for version in versions: + if version.id == version_id: + return version + return None + + def get_boards( + self, + vehicle_id: str, + version_id: str + ) -> List[BoardOut]: + """Get all boards available for a specific vehicle version.""" + # Get version info + version_info = self.versions_fetcher.get_version_info( + vehicle_id=vehicle_id, + version_id=version_id + ) + if not version_info: + return [] + + logger.info( + f'Board list requested for {vehicle_id} ' + f'{version_info.remote_info.name} 
{version_info.commit_ref}' + ) + + # Get boards list + with self.repo.get_checkout_lock(): + boards = self.ap_src_metadata_fetcher.get_boards( + remote=version_info.remote_info.name, + commit_ref=version_info.commit_ref, + vehicle_id=vehicle_id, + ) + + board_dicts = [board.to_dict() for board in boards] + return [ + BoardOut( + id=d['id'], + name=d['name'], + vehicle_id=vehicle_id, + version_id=version_id + ) + for d in board_dicts + ] + + def get_board( + self, + vehicle_id: str, + version_id: str, + board_id: str + ) -> Optional[BoardOut]: + """Get details of a specific board for a vehicle version.""" + boards = self.get_boards(vehicle_id, version_id) + for board in boards: + if board.id == board_id: + return board + return None + + def get_features( + self, + vehicle_id: str, + version_id: str, + board_id: str, + category_id: Optional[str] = None + ) -> List[FeatureOut]: + """ + Get all features with defaults for a specific + vehicle version/board. + """ + # Get version info + version_info = self.versions_fetcher.get_version_info( + vehicle_id=vehicle_id, + version_id=version_id + ) + if not version_info: + return [] + + logger.info( + f'Features requested for {vehicle_id} ' + f'{version_info.remote_info.name} {version_info.commit_ref}' + ) + + boards = self.ap_src_metadata_fetcher.get_boards( + remote=version_info.remote_info.name, + commit_ref=version_info.commit_ref, + vehicle_id=vehicle_id, + ) + board_has_can = False + for board in boards: + if board.id == board_id or board.name == board_id: + board_has_can = bool(board.attributes.get("has_can")) + break + + # Get build options from source + with self.repo.get_checkout_lock(): + options = self.ap_src_metadata_fetcher.get_build_options_at_commit( + remote=version_info.remote_info.name, + commit_ref=version_info.commit_ref + ) + + if board_has_can is False: + options = [ + option for option in options + if not option.category or ( + "CAN" not in option.category + and "DroneCAN" not in option.category + ) + ] 
+ + available_labels = {option.label for option in options} + pruned_options = [] + changed = True + while changed: + changed = False + pruned_options = [] + for option in options: + if not option.dependency: + pruned_options.append(option) + continue + + dependencies = [ + label.strip() + for label in option.dependency.split(',') + if label.strip() + ] + if all(dep in available_labels for dep in dependencies): + pruned_options.append(option) + else: + changed = True + + options = pruned_options + available_labels = {option.label for option in options} + + # Try to fetch board-specific defaults from firmware-server + board_defaults = None + artifacts_dir = version_info.ap_build_artifacts_url + if artifacts_dir is not None: + board_defaults = ( + self.ap_src_metadata_fetcher.get_board_defaults_from_fw_server( + artifacts_url=artifacts_dir, + board_id=board_id, + vehicle_id=vehicle_id, + ) + ) + + # Build feature list + features = [] + for option in options: + # Apply category filter if provided + if category_id and option.category != category_id: + continue + + # Determine default state and source + if board_defaults and option.define in board_defaults: + # Override with firmware server data + default_enabled = (board_defaults[option.define] != 0) + default_source = 'firmware-server' + else: + # Use build-options-py fallback + default_enabled = (option.default != 0) + default_source = 'build-options-py' + + # Parse dependencies (comma-separated labels) + dependencies = [] + if option.dependency: + dependencies = [ + label.strip() + for label in option.dependency.split(',') + ] + + features.append(FeatureOut( + id=option.label, + name=option.label, + category=CategoryBase( + id=option.category, + name=option.category, + description=None + ), + description=option.description, + vehicle_id=vehicle_id, + version_id=version_id, + board_id=board_id, + default=FeatureDefault( + enabled=default_enabled, + source=default_source + ), + dependencies=dependencies + )) + + # 
Sort by name + return sorted(features, key=lambda x: x.category.name) + + def get_feature( + self, + vehicle_id: str, + version_id: str, + board_id: str, + feature_id: str + ) -> Optional[FeatureOut]: + """Get details of a specific feature for a vehicle version/board.""" + features = self.get_features(vehicle_id, version_id, board_id) + for feature in features: + if feature.id == feature_id: + return feature + return None + + +def get_vehicles_service(request: Request) -> VehiclesService: + """ + Get VehiclesService instance with dependencies from app state. + + Args: + request: FastAPI Request object + + Returns: + VehiclesService instance initialized with app state dependencies + """ + return VehiclesService( + vehicle_manager=request.app.state.vehicles_manager, + versions_fetcher=request.app.state.versions_fetcher, + ap_src_metadata_fetcher=request.app.state.ap_src_metadata_fetcher, + repo=request.app.state.repo, + ) diff --git a/web/static/js/add_build.js b/web/static/js/add_build.js index 22d6b59..9643c03 100644 --- a/web/static/js/add_build.js +++ b/web/static/js/add_build.js @@ -1,152 +1,160 @@ const Features = (() => { - let features = {}; - let defines_dictionary = {}; - let labels_dictionary = {}; - let category_dictionary = {}; + let features = []; // Flat array of feature objects from API + let features_by_id = {}; // Map feature IDs to feature objects + let categories_grouped = {}; // Features grouped by category name let selected_options = 0; function resetDictionaries() { // clear old dictionaries - defines_dictionary = {}; - labels_dictionary = {}; - category_dictionary = {}; - - features.forEach((category) => { - category_dictionary[category.name] = category; - category['options'].forEach((option) => { - defines_dictionary[option.define] = labels_dictionary[option.label] = option; - }); - }); - } - - function store_category_in_options() { - features.forEach((category) => { - category['options'].forEach((option) => { - option.category_name = 
category.name; - }); + features_by_id = {}; + categories_grouped = {}; + + // Build lookup maps from flat feature array + features.forEach((feature) => { + features_by_id[feature.id] = feature; + + // Group by category + const cat_name = feature.category.name; + if (!categories_grouped[cat_name]) { + categories_grouped[cat_name] = { + name: cat_name, + description: feature.category.description, + features: [] + }; + } + categories_grouped[cat_name].features.push(feature); }); } function updateRequiredFor() { - features.forEach((category) => { - category['options'].forEach((option) => { - if (option.dependency != null) { - option.dependency.split(',').forEach((dependency) => { - let dep = getOptionByLabel(dependency); - if (dep.requiredFor == undefined) { - dep.requiredFor = []; - } - dep.requiredFor.push(option.label); - }); - } - }); + features.forEach((feature) => { + if (feature.dependencies && feature.dependencies.length > 0) { + feature.dependencies.forEach((dependency_id) => { + let dep = getOptionById(dependency_id); + if (dep && dep.requiredFor == undefined) { + dep.requiredFor = []; + } + if (dep) { + dep.requiredFor.push(feature.id); + } + }); + } }); } function reset(new_features) { features = new_features; + selected_options = 0; resetDictionaries(); updateRequiredFor(); - store_category_in_options(); } - function getOptionByDefine(define) { - return defines_dictionary[define]; + function getOptionById(id) { + return features_by_id[id]; } - function getOptionByLabel(label) { - return labels_dictionary[label]; + function getCategoryByName(category_name) { + return categories_grouped[category_name]; } - function getCategoryByName(category_name) { - return category_dictionary[category_name]; + function getAllCategories() { + return Object.values(categories_grouped); } function getCategoryIdByName(category_name) { return 'category_'+category_name.split(" ").join("_"); } - function featureIsDisabledByDefault(feature_label) { - return 
getOptionByLabel(feature_label).default == 0; + function featureIsDisabledByDefault(feature_id) { + let feature = getOptionById(feature_id); + return feature && !feature.default.enabled; } - function featureisEnabledByDefault(feature_label) { - return !featureIsDisabledByDefault(feature_label); + function featureisEnabledByDefault(feature_id) { + return !featureIsDisabledByDefault(feature_id); } - function updateDefaults(defines_array) { - // updates default on the basis of define array passed - // the define array consists define in format, EXAMPLE_DEFINE or !EXAMPLE_DEFINE - // we update the defaults in features object by processing those defines - for (let i=0; i { + feature.dependencies.forEach((dependency_id) => { const check = true; - checkUncheckOptionByLabel(child, check); + checkUncheckOptionById(dependency_id, check); }); } - function handleOptionStateChange(feature_label, triggered_by_ui) { - if (document.getElementById(feature_label).checked) { + function handleOptionStateChange(feature_id, triggered_by_ui, updateDependencies = true) { + // feature_id is the feature ID from the API + let element = document.getElementById(feature_id); + if (!element) return; + + let feature = getOptionById(feature_id); + if (!feature) return; + + if (element.checked) { selected_options += 1; - enableDependenciesForFeature(feature_label); + if (updateDependencies) { + enableDependenciesForFeature(feature.id); + } } else { selected_options -= 1; - if (triggered_by_ui) { - askToDisableDependentsForFeature(feature_label); - } else { - disabledDependentsForFeature(feature_label); + if (updateDependencies) { + if (triggered_by_ui) { + askToDisableDependentsForFeature(feature.id); + } else { + disabledDependentsForFeature(feature.id); + } } } - updateCategoryCheckboxState(getOptionByLabel(feature_label).category_name); + updateCategoryCheckboxState(feature.category.name); updateGlobalCheckboxState(); } - function askToDisableDependentsForFeature(feature_label) { - let 
enabled_dependent_features = getEnabledDependentFeaturesFor(feature_label); + function askToDisableDependentsForFeature(feature_id) { + let enabled_dependent_features = getEnabledDependentFeaturesFor(feature_id); if (enabled_dependent_features.length <= 0) { return; } - document.getElementById('modalBody').innerHTML = "The feature(s) "+enabled_dependent_features.join(", ")+" is/are dependant on "+feature_label+"" + + let feature = getOptionById(feature_id); + let feature_display_name = feature ? feature.name : feature_id; + + // Get display names for dependent features + let dependent_names = enabled_dependent_features.map(dep_id => { + let dep_feature = getOptionById(dep_id); + return dep_feature ? dep_feature.name : dep_id; + }); + + document.getElementById('modalBody').innerHTML = "The feature(s) "+dependent_names.join(", ")+" is/are dependant on "+feature_display_name+"" + " and hence will be disabled too.
Do you want to continue?"; - document.getElementById('modalDisableButton').onclick = () => { disabledDependentsForFeature(feature_label); }; + document.getElementById('modalDisableButton').onclick = () => { disabledDependentsForFeature(feature_id); }; document.getElementById('modalCancelButton').onclick = document.getElementById('modalCloseButton').onclick = () => { - const check = true; - checkUncheckOptionByLabel(feature_label, check); + const check = true; + if (feature) { + checkUncheckOptionById(feature.id, check); + } }; var confirmationModal = bootstrap.Modal.getOrCreateInstance(document.getElementById('dependencyCheckModal')); confirmationModal.show(); } - function disabledDependentsForFeature(feature_label) { - let feature = getOptionByLabel(feature_label); + function disabledDependentsForFeature(feature_id) { + let feature = getOptionById(feature_id); - if (feature.requiredFor == undefined) { + if (!feature || feature.requiredFor == undefined) { return; } let dependents = feature.requiredFor; - dependents.forEach((dependent) => { + dependents.forEach((dependent_id) => { const check = false; - checkUncheckOptionByLabel(dependent, false); + checkUncheckOptionById(dependent_id, check); }); } @@ -155,12 +163,14 @@ const Features = (() => { if (category == undefined) { console.log("Could not find category by given name"); + return; } let checked_options_count = 0; - category.options.forEach((option) => { - let element = document.getElementById(option.label); + category.features.forEach((feature) => { + // Use ID to find the element + let element = document.getElementById(feature.id); if (element && element.checked) { checked_options_count += 1; @@ -170,6 +180,7 @@ const Features = (() => { let category_checkbox_element = document.getElementById(getCategoryIdByName(category_name)); if (category_checkbox_element == undefined) { console.log("Could not find element for given category"); + return; } let indeterminate_state = false; @@ -177,7 +188,7 @@ const 
Features = (() => { case 0: category_checkbox_element.checked = false; break; - case category.options.length: + case category.features.length: category_checkbox_element.checked = true; break; default: @@ -189,7 +200,7 @@ const Features = (() => { } function updateGlobalCheckboxState() { - const total_options = Object.keys(defines_dictionary).length; + const total_options = Object.keys(features_by_id).length; let global_checkbox = document.getElementById("check-uncheck-all"); let indeterminate_state = false; @@ -208,31 +219,40 @@ const Features = (() => { global_checkbox.indeterminate = indeterminate_state; } - function getEnabledDependentFeaturesHelper(feature_label, visited, dependent_features) { - if (visited[feature_label] != undefined || document.getElementById(feature_label).checked == false) { + function getEnabledDependentFeaturesHelper(feature_id, visited, dependent_features) { + if (visited[feature_id] != undefined) { + return; + } + + let feature = getOptionById(feature_id); + if (!feature) return; + + // Use ID to check the checkbox + let element = document.getElementById(feature.id); + if (!element || element.checked == false) { return; } - visited[feature_label] = true; - dependent_features.push(feature_label); + visited[feature_id] = true; + dependent_features.push(feature_id); - let feature = getOptionByLabel(feature_label); if (feature.requiredFor == null) { return; } - feature.requiredFor.forEach((dependent_feature) => { - getEnabledDependentFeaturesHelper(dependent_feature, visited, dependent_features); + feature.requiredFor.forEach((dependent_feature_id) => { + getEnabledDependentFeaturesHelper(dependent_feature_id, visited, dependent_features); }); } - function getEnabledDependentFeaturesFor(feature_label) { + function getEnabledDependentFeaturesFor(feature_id) { let dependent_features = []; let visited = {}; - if (getOptionByLabel(feature_label).requiredFor) { - getOptionByLabel(feature_label).requiredFor.forEach((dependent_feature) => { - 
getEnabledDependentFeaturesHelper(dependent_feature, visited, dependent_features); + let feature = getOptionById(feature_id); + if (feature && feature.requiredFor) { + feature.requiredFor.forEach((dependent_feature_id) => { + getEnabledDependentFeaturesHelper(dependent_feature_id, visited, dependent_features); }); } @@ -240,47 +260,113 @@ const Features = (() => { } function applyDefaults() { - features.forEach(category => { - category['options'].forEach(option => { - const check = featureisEnabledByDefault(option.label); - checkUncheckOptionByLabel(option.label, check); - }); + features.forEach(feature => { + const check = featureisEnabledByDefault(feature.id); + checkUncheckOptionById(feature.id, check); }); } - function checkUncheckOptionByLabel(label, check) { - let element = document.getElementById(label); + function checkUncheckOptionById(id, check, updateDependencies = true) { + let feature = getOptionById(id); + if (!feature) return; + + // Use ID to find the element + let element = document.getElementById(feature.id); if (element == undefined || element.checked == check) { return; } element.checked = check; const triggered_by_ui = false; - handleOptionStateChange(label, triggered_by_ui); + handleOptionStateChange(feature.id, triggered_by_ui, updateDependencies); } function checkUncheckAll(check) { - features.forEach(category => { + getAllCategories().forEach(category => { checkUncheckCategory(category.name, check); }); } function checkUncheckCategory(category_name, check) { - getCategoryByName(category_name).options.forEach(option => { - checkUncheckOptionByLabel(option.label, check); + getCategoryByName(category_name).features.forEach(feature => { + checkUncheckOptionById(feature.id, check); }); } - return {reset, handleOptionStateChange, getCategoryIdByName, updateDefaults, applyDefaults, checkUncheckAll, checkUncheckCategory}; + return {reset, handleOptionStateChange, getCategoryIdByName, applyDefaults, checkUncheckAll, checkUncheckCategory, 
getOptionById, checkUncheckOptionById}; })(); var init_categories_expanded = false; var pending_update_calls = 0; // to keep track of unresolved Promises - -function init() { +var currentBoards = []; +var currentFeatures = []; + +var rebuildConfig = { + vehicleId: null, + versionId: null, + boardId: null, + selectedFeatures: [], + isRebuildMode: false +}; + +async function init() { + if (typeof rebuildFromBuildId !== 'undefined') { + await initRebuild(rebuildFromBuildId); + } + fetchVehicles(); } +async function initRebuild(buildId) { + try { + const buildResponse = await fetch(`/api/v1/builds/${buildId}`); + if (!buildResponse.ok) { + throw new Error('Failed to fetch build details'); + } + const buildData = await buildResponse.json(); + + if (!buildData.vehicle || !buildData.vehicle.id) { + throw new Error('Vehicle information is missing from the build'); + } + if (!buildData.version || !buildData.version.id) { + throw new Error('Version information is missing from the build'); + } + if (!buildData.board || !buildData.board.id) { + throw new Error('Board information is missing from the build'); + } + + rebuildConfig.vehicleId = buildData.vehicle.id; + rebuildConfig.versionId = buildData.version.id; + rebuildConfig.boardId = buildData.board.id; + rebuildConfig.selectedFeatures = buildData.selected_features || []; + rebuildConfig.isRebuildMode = true; + + } catch (error) { + console.error('Error loading rebuild configuration:', error); + alert('Failed to load build configuration: ' + error.message + '\n\nRedirecting to new build page...'); + window.location.href = '/add_build'; + throw error; + } +} + +function applyRebuildFeatures(featuresList) { + Features.checkUncheckAll(false); + + if (featuresList && featuresList.length > 0) { + featuresList.forEach(featureId => { + Features.checkUncheckOptionById(featureId, true, false); + }); + } +} + +function clearRebuildConfig() { + rebuildConfig.vehicleId = null; + rebuildConfig.versionId = null; + rebuildConfig.boardId = 
null; + rebuildConfig.selectedFeatures = []; + rebuildConfig.isRebuildMode = false; +} + // enables or disables the elements with ids passed as an array // if enable is true, the elements are enabled and vice-versa function enableDisableElementsById(ids, enable) { @@ -309,13 +395,24 @@ function fetchVehicles() { // following elemets will be blocked (disabled) when we make the request let elements_to_block = ['vehicle', 'version', 'board', 'submit', 'reset_def', 'exp_col_button']; enableDisableElementsById(elements_to_block, false); - let request_url = '/get_vehicles'; + let request_url = '/api/v1/vehicles'; setSpinnerToDiv('vehicle_list', 'Fetching vehicles...'); - pending_update_calls += 1; sendAjaxRequestForJsonResponse(request_url) .then((json_response) => { let all_vehicles = json_response; - let new_vehicle = all_vehicles.find(vehicle => vehicle.name === "Copter") ? "copter": all_vehicles[0].id; + + if (rebuildConfig.vehicleId) { + const vehicleExists = all_vehicles.some(v => v.id === rebuildConfig.vehicleId); + if (!vehicleExists) { + console.warn(`Rebuild vehicle '${rebuildConfig.vehicleId}' not found in available vehicles`); + alert(`Warning: The vehicle from the original build is no longer available.\n\nRedirecting to new build page...`); + window.location.href = '/add_build'; + return; + } + } + + let new_vehicle = rebuildConfig.vehicleId || + (all_vehicles.find(vehicle => vehicle.name === "Copter") ? 
"copter" : all_vehicles[0].id); updateVehicles(all_vehicles, new_vehicle); }) .catch((message) => { @@ -323,8 +420,6 @@ function fetchVehicles() { }) .finally(() => { enableDisableElementsById(elements_to_block, true); - pending_update_calls -= 1; - fetchAndUpdateDefaults(); }); } @@ -341,14 +436,24 @@ function onVehicleChange(new_vehicle_id) { // following elemets will be blocked (disabled) when we make the request let elements_to_block = ['vehicle', 'version', 'board', 'submit', 'reset_def', 'exp_col_button']; enableDisableElementsById(elements_to_block, false); - let request_url = '/get_versions/'+new_vehicle_id; + let request_url = '/api/v1/vehicles/'+new_vehicle_id+'/versions'; setSpinnerToDiv('version_list', 'Fetching versions...'); - pending_update_calls += 1; sendAjaxRequestForJsonResponse(request_url) .then((json_response) => { let all_versions = json_response; all_versions = sortVersions(all_versions); - const new_version = all_versions[0].id; + + if (rebuildConfig.versionId) { + const versionExists = all_versions.some(v => v.id === rebuildConfig.versionId); + if (!versionExists) { + console.warn(`Rebuild version '${rebuildConfig.versionId}' not found for vehicle '${new_vehicle_id}'`); + alert(`Warning: The version from the original build is no longer available.\n\nRedirecting to new build page...`); + window.location.href = '/add_build'; + return; + } + } + + const new_version = rebuildConfig.versionId || all_versions[0].id; updateVersions(all_versions, new_version); }) .catch((message) => { @@ -356,8 +461,6 @@ function onVehicleChange(new_vehicle_id) { }) .finally(() => { enableDisableElementsById(elements_to_block, true); - pending_update_calls -= 1; - fetchAndUpdateDefaults(); }); } @@ -376,40 +479,52 @@ function onVersionChange(new_version) { enableDisableElementsById(elements_to_block, false); let vehicle_id = document.getElementById("vehicle").value; let version_id = new_version; - let request_url = 
`/boards_and_features/${vehicle_id}/${version_id}`; - - // create a temporary container to set spinner inside it + + // Fetch boards first + let boards_url = `/api/v1/vehicles/${vehicle_id}/versions/${version_id}/boards`; + setSpinnerToDiv('board_list', 'Fetching boards...'); + + // Clear build options and show loading state let temp_container = document.createElement('div'); temp_container.id = "temp_container"; temp_container.setAttribute('class', 'container-fluid w-25 mt-3'); - let features_list_element = document.getElementById('build_options'); // append the temp container to the main features_list container + let features_list_element = document.getElementById('build_options'); features_list_element.innerHTML = ""; features_list_element.appendChild(temp_container); setSpinnerToDiv('temp_container', 'Fetching features...'); - setSpinnerToDiv('board_list', 'Fetching boards...'); - pending_update_calls += 1; - sendAjaxRequestForJsonResponse(request_url) - .then((json_response) => { - let boards = json_response.boards; - let new_board = json_response.default_board; - let new_features = json_response.features; - Features.reset(new_features); + + // Fetch boards + sendAjaxRequestForJsonResponse(boards_url) + .then((boards_response) => { + // Keep full board objects with id and name + let boards = boards_response; + + if (rebuildConfig.boardId) { + const boardExists = boards.some(b => b.id === rebuildConfig.boardId); + if (!boardExists) { + console.warn(`Rebuild board '${rebuildConfig.boardId}' not found for version '${version_id}'`); + alert(`Warning: The board from the original build is no longer available.\n\nRedirecting to new build page...`); + window.location.href = '/add_build'; + return; + } + } + + let new_board = rebuildConfig.boardId || (boards.length > 0 ? boards[0].id : null); updateBoards(boards, new_board); - fillBuildOptions(new_features); }) .catch((message) => { - console.log("Boards and features update failed. 
"+message); + console.log("Boards update failed. "+message); }) .finally(() => { enableDisableElementsById(elements_to_block, true); - pending_update_calls -= 1; - fetchAndUpdateDefaults(); }); } function updateBoards(all_boards, new_board) { + currentBoards = all_boards || []; let board_element = document.getElementById('board'); - let old_board = board_element ? board.value : ''; + let old_board = board_element ? board_element.value : ''; fillBoards(all_boards, new_board); if (old_board != new_board) { onBoardChange(new_board); @@ -417,48 +532,51 @@ function updateBoards(all_boards, new_board) { } function onBoardChange(new_board) { - fetchAndUpdateDefaults(); -} - -function fetchAndUpdateDefaults() { - // return early if there is an unresolved promise (i.e., there is an ongoing ajax call) - if (pending_update_calls > 0) { - return; - } - elements_to_block = ['reset_def']; - document.getElementById('reset_def').innerHTML = 'Fetching defaults'; - enableDisableElementsById(elements_to_block, false); + // When board changes, fetch features for the new board + let vehicle_id = document.getElementById('vehicle').value; let version_id = document.getElementById('version').value; - let vehicle = document.getElementById('vehicle').value; - let board = document.getElementById('board').value; - - let request_url = '/get_defaults/'+vehicle+'/'+version_id+'/'+board; - sendAjaxRequestForJsonResponse(request_url) - .then((json_response) => { - Features.updateDefaults(json_response); - }) - .catch((message) => { - console.log("Default reset failed. 
"+message); - }) - .finally(() => { - if (document.getElementById('auto_apply_def').checked) { + + let temp_container = document.createElement('div'); + temp_container.id = "temp_container"; + temp_container.setAttribute('class', 'container-fluid w-25 mt-3'); + let features_list_element = document.getElementById('build_options'); + features_list_element.innerHTML = ""; + features_list_element.appendChild(temp_container); + setSpinnerToDiv('temp_container', 'Fetching features...'); + + let features_url = `/api/v1/vehicles/${vehicle_id}/versions/${version_id}/boards/${new_board}/features`; + sendAjaxRequestForJsonResponse(features_url) + .then((features_response) => { + Features.reset(features_response); + fillBuildOptions(features_response); + + // TODO: Refactor to use a single method to apply both rebuild and default features + if (rebuildConfig.isRebuildMode) { + applyRebuildFeatures(rebuildConfig.selectedFeatures); + clearRebuildConfig(); + } else { Features.applyDefaults(); } - enableDisableElementsById(elements_to_block, true); - document.getElementById('reset_def').innerHTML = 'Reset feature defaults'; + }) + .catch((message) => { + console.log("Features update failed. "+message); }); } -function fillBoards(boards, default_board) { +function fillBoards(boards, default_board_id) { let output = document.getElementById('board_list'); output.innerHTML = '' + ''; let boardList = document.getElementById("board") boards.forEach(board => { + const boardName = (typeof board === 'object' && board !== null) ? 
board.name : board; + if (!boardName) { + return; + } let opt = document.createElement('option'); - opt.value = board; - opt.innerHTML = board; - opt.selected = (board === default_board); + opt.value = board.id; + opt.innerHTML = board.name; + opt.selected = (board.id === default_board_id); boardList.appendChild(opt); }); } @@ -487,13 +605,13 @@ var toggle_all_categories = (() => { return toggle_method; })(); -function createCategoryCard(category_name, options, expanded) { +function createCategoryCard(category_name, features_in_category, expanded) { options_html = ""; - options.forEach(option => { + features_in_category.forEach(feature => { options_html += '
' + - '' + - '
'; }); @@ -534,7 +652,7 @@ function createCategoryCard(category_name, options, expanded) { return card_element; } -function fillBuildOptions(buildOptions) { +function fillBuildOptions(features) { let output = document.getElementById('build_options'); output.innerHTML = `
@@ -543,7 +661,20 @@ function fillBuildOptions(buildOptions) {
`; - buildOptions.forEach((category, cat_idx) => { + // Group features by category + let categories_map = {}; + features.forEach(feature => { + const cat_name = feature.category.name; + if (!categories_map[cat_name]) { + categories_map[cat_name] = []; + } + categories_map[cat_name].push(feature); + }); + + // Convert to array and display + let categories = Object.entries(categories_map).map(([name, feats]) => ({name, features: feats})); + + categories.forEach((category, cat_idx) => { if (cat_idx % 4 == 0) { let new_row = document.createElement('div'); new_row.setAttribute('class', 'row'); @@ -552,7 +683,7 @@ function fillBuildOptions(buildOptions) { } let col_element = document.createElement('div'); col_element.setAttribute('class', 'col-md-3 col-sm-6 mb-2'); - col_element.appendChild(createCategoryCard(category['name'], category['options'], init_categories_expanded)); + col_element.appendChild(createCategoryCard(category.name, category.features, init_categories_expanded)); document.getElementById('category_'+parseInt(cat_idx/4)+'_row').appendChild(col_element); }); } @@ -617,27 +748,21 @@ function sortVersions(versions) { } versions.sort((a, b) => { - const version_a_type = a.title.split(" ")[0].toLowerCase(); - const version_b_type = b.title.split(" ")[0].toLowerCase(); - // sort the version types in order mentioned above - if (version_a_type != version_b_type) { - return order[version_a_type] - order[version_b_type]; + if (a.type != b.type) { + return order[a.type] - order[b.type]; } // for numbered versions, do reverse sorting to make sure recent versions come first - if (version_a_type == "stable" || version_b_type == "beta") { - const version_a_num = a.title.split(" ")[1]; - const version_b_num = b.title.split(" ")[1]; - - return compareVersionNums(version_a_num, version_b_num); + if (a.type == "stable" || b.type == "beta") { + return compareVersionNums(a.name.split(" ")[1], b.name.split(" ")[1]); } - return a.title.localeCompare(b.title); + return 
a.name.localeCompare(b.name); }); // Push the first stable version in the list to the top - const firstStableIndex = versions.findIndex(v => v.title.split(" ")[0].toLowerCase() === "stable"); + const firstStableIndex = versions.findIndex(v => v.name.split(" ")[0].toLowerCase() === "stable"); if (firstStableIndex !== -1) { const stableVersion = versions.splice(firstStableIndex, 1)[0]; versions.unshift(stableVersion); @@ -655,8 +780,78 @@ function fillVersions(versions, version_to_select) { versions.forEach(version => { opt = document.createElement('option'); opt.value = version.id; - opt.innerHTML = version.title; + opt.innerHTML = version.name; opt.selected = (version.id === version_to_select); versionList.appendChild(opt); }); } + +// Handle form submission +async function handleFormSubmit(event) { + event.preventDefault(); + + const submitButton = document.getElementById('submit'); + const originalButtonText = submitButton.innerHTML; + + try { + // Disable submit button and show loading state + submitButton.disabled = true; + submitButton.innerHTML = 'Submitting...'; + + // Collect form data + const vehicle_id = document.getElementById('vehicle').value; + const version_id = document.getElementById('version').value; + const board_id = document.getElementById('board').value; + + // Collect selected features - checkboxes now have feature IDs directly + const selected_features = []; + const checkboxes = document.querySelectorAll('.feature-checkbox:checked'); + checkboxes.forEach(checkbox => { + // The checkbox ID is already the feature define (ID) + selected_features.push(checkbox.id); + }); + + // Create build request payload + const buildRequest = { + vehicle_id: vehicle_id, + version_id: version_id, + board_id: board_id, + selected_features: selected_features + }; + + // Send POST request to API + const response = await fetch('/api/v1/builds', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(buildRequest) + }); + + 
if (!response.ok) { + const errorData = await response.json(); + throw new Error(errorData.detail || 'Failed to submit build'); + } + + const result = await response.json(); + + // Redirect to viewlog page + window.location.href = `/?build_id=${result.build_id}`; + + } catch (error) { + console.error('Error submitting build:', error); + alert('Failed to submit build: ' + error.message); + + // Re-enable submit button + submitButton.disabled = false; + submitButton.innerHTML = originalButtonText; + } +} + +// Initialize form submission handler +document.addEventListener('DOMContentLoaded', () => { + const buildForm = document.getElementById('build-form'); + if (buildForm) { + buildForm.addEventListener('submit', handleFormSubmit); + } +}); diff --git a/web/static/js/index.js b/web/static/js/index.js index aaba266..a831f02 100644 --- a/web/static/js/index.js +++ b/web/static/js/index.js @@ -8,7 +8,7 @@ function init() { function refresh_builds() { var xhr = new XMLHttpRequest(); - xhr.open('GET', "/builds"); + xhr.open('GET', "/api/v1/builds"); // disable cache, thanks to: https://stackoverflow.com/questions/22356025/force-cache-control-no-cache-in-chrome-via-xmlhttprequest-on-f5-reload xhr.setRequestHeader("Cache-Control", "no-cache, no-store, max-age=0"); @@ -67,23 +67,23 @@ function updateBuildsTable(builds) { status_color = 'success'; } else if (build_info['progress']['state'] == 'PENDING') { status_color = 'warning'; - } else if (build_info['progress']['state'] == 'FAILURE' || build_info['progress']['state'] == 'ERROR') { + } else if (build_info['progress']['state'] == 'FAILURE' || build_info['progress']['state'] == 'ERROR' || build_info['progress']['state'] == 'TIMED_OUT') { status_color = 'danger'; } const features_string = build_info['selected_features'].join(', ') const build_age = timeAgo(build_info['time_created']) - const isSuccess = build_info['progress']['state'] === 'SUCCESS'; - const downloadDisabled = !isSuccess ? 
'disabled' : ''; - const download_button_color = isSuccess ? 'primary' : 'secondary'; + const isNonTerminal = (build_info['progress']['state'] == 'PENDING' || build_info['progress']['state'] == 'RUNNING'); + const downloadDisabled = isNonTerminal ? 'disabled' : ''; + const download_button_color = isNonTerminal ? 'secondary' : 'primary'; table_body_html += ` ${build_info['progress']['state']} ${build_age} - ${build_info['git_hash'].substring(0,8)} - ${build_info['board']} - ${build_info['vehicle_id']} + ${build_info['version']['git_hash'].substring(0,8)} + ${build_info['board']['name']} + ${build_info['vehicle']['name']} ${features_string.substring(0, 100)}... @@ -98,9 +98,12 @@ function updateBuildsTable(builds) { - + `; row_num += 1; @@ -115,7 +118,7 @@ function updateBuildsTable(builds) { Vehicle Features Progress - Actions + Actions ${table_body_html} `; @@ -151,7 +154,7 @@ const LogFetch = (() => { } var xhr = new XMLHttpRequest(); - xhr.open('GET', `/builds/${build_id}/artifacts/build.log`); + xhr.open('GET', `/api/v1/builds/${build_id}/logs`); // disable cache, thanks to: https://stackoverflow.com/questions/22356025/force-cache-control-no-cache-in-chrome-via-xmlhttprequest-on-f5-reload xhr.setRequestHeader("Cache-Control", "no-cache, no-store, max-age=0"); @@ -204,7 +207,7 @@ async function tryAutoDownload(buildId) { } try { - const apiUrl = `/builds/${buildId}` + const apiUrl = `/api/v1/builds/${buildId}` const response = await fetch(apiUrl); const data = await response.json(); @@ -212,11 +215,11 @@ async function tryAutoDownload(buildId) { if (previousState === "RUNNING" && currentState === "SUCCESS") { console.log("Build completed successfully. 
Starting download..."); - document.getElementById(`${buildId}-download-btn`).click(); + window.location.href = `/api/v1/builds/${buildId}/artifact`; } // Stop running if the build is in a terminal state - if (["FAILURE", "SUCCESS", "ERROR"].includes(currentState)) { + if (["FAILURE", "SUCCESS", "ERROR", "TIMED_OUT"].includes(currentState)) { clearInterval(autoDownloadIntervalId); return; } diff --git a/web/templates/add_build.html b/web/templates/add_build.html index 1255975..ba792b0 100644 --- a/web/templates/add_build.html +++ b/web/templates/add_build.html @@ -20,7 +20,7 @@ - + @@ -33,7 +33,7 @@
@@ -50,7 +50,7 @@ ADD NEW BUILD
-
+
@@ -91,17 +91,13 @@ - + {% if rebuild_from != None %} + + {% endif %} + + diff --git a/web/templates/error.html b/web/templates/error.html deleted file mode 100644 index a1f5075..0000000 --- a/web/templates/error.html +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - ArduPilot Custom Firmware Builder - - - -

ArduPilot Custom Firmware Builder

-

Error Occured: {{ex}}

- - diff --git a/web/templates/index.html b/web/templates/index.html index 962bca6..5e3eec3 100644 --- a/web/templates/index.html +++ b/web/templates/index.html @@ -20,7 +20,7 @@ - + @@ -34,12 +34,13 @@
+ Donate Docs Included features - + - {% if token != None %} + {% if build_id != None %} {% endif %} - \ No newline at end of file + diff --git a/web/ui/__init__.py b/web/ui/__init__.py new file mode 100644 index 0000000..a0d6810 --- /dev/null +++ b/web/ui/__init__.py @@ -0,0 +1,6 @@ +""" +UI module for web interface routes. +""" +from .router import router + +__all__ = ["router"] diff --git a/web/ui/router.py b/web/ui/router.py new file mode 100644 index 0000000..5689236 --- /dev/null +++ b/web/ui/router.py @@ -0,0 +1,50 @@ +""" +Web UI routes for serving HTML templates. +""" +from fastapi import APIRouter, Request +from fastapi.responses import HTMLResponse +from fastapi.templating import Jinja2Templates +from pathlib import Path + +router = APIRouter(tags=["web"]) + +# Setup templates directory +WEB_ROOT = Path(__file__).resolve().parent.parent +templates = Jinja2Templates(directory=str(WEB_ROOT / "templates")) + + +@router.get("/", response_class=HTMLResponse) +async def index(request: Request, build_id: str = None): + """ + Render the main index page showing all builds. + + Args: + request: FastAPI Request object + build_id: Optional build ID to automatically show log modal and + trigger artifact download on build completion + + Returns: + Rendered HTML template + """ + return templates.TemplateResponse( + "index.html", + {"request": request, "build_id": build_id} + ) + + +@router.get("/add_build", response_class=HTMLResponse) +async def add_build(request: Request, rebuild_from: str = None): + """ + Render the add build page for creating new firmware builds. 
+ + Args: + request: FastAPI Request object + rebuild_from: Optional build ID to copy configuration from + + Returns: + Rendered HTML template + """ + return templates.TemplateResponse( + "add_build.html", + {"request": request, "rebuild_from": rebuild_from} + ) diff --git a/web/wsgi.py b/web/wsgi.py deleted file mode 100644 index d261916..0000000 --- a/web/wsgi.py +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python3 - -import logging -import sys -import os - -cbs_basedir = os.environ.get('CBS_BASEDIR') - -if cbs_basedir: - # Ensure base subdirectories exist - os.makedirs(os.path.join(cbs_basedir, 'artifacts'), exist_ok=True) - os.makedirs(os.path.join(cbs_basedir, 'configs'), exist_ok=True) - - # Ensure remotes.json exists - remotes_json_path = os.path.join(cbs_basedir, 'configs', 'remotes.json') - if not os.path.isfile(remotes_json_path): - print("Creating remotes.json...") - from scripts import fetch_releases - fetch_releases.run( - base_dir=os.path.join( - os.path.dirname(remotes_json_path), - '..', - ), - remote_name="ardupilot", - ) - -logging.basicConfig(stream=sys.stderr) -sys.path.insert(0, os.path.dirname(__file__)) -from app import app as application -application.secret_key = 'key'