diff --git a/.gitignore b/.gitignore index 8281cbca..45a344cf 100644 --- a/.gitignore +++ b/.gitignore @@ -9,5 +9,17 @@ logs/* .env *.egg-info/* site/ -build/ dist/ +**/metadata/*.json + +# Studio frontend build artifacts +**/node_modules/* +**/.svelte-kit/* +studio/frontend/build/ + +# Studio runtime data +studio/.executions_history.json +studio/.executions/* + +# Studio config +studio/config/custom_models.yaml diff --git a/Makefile b/Makefile index eed212f8..bb2df25f 100644 --- a/Makefile +++ b/Makefile @@ -34,6 +34,57 @@ setup-dev: ## Install development dependencies @echo "Installing SyGra Core, Extra and Development dependencies" $(UV) sync --extra dev --extra ui +######################################################################################################################## +# SYGRA STUDIO +######################################################################################################################## + +# Studio directories +STUDIO_DIR = studio +STUDIO_FRONTEND_DIR = $(STUDIO_DIR)/frontend +STUDIO_BUILD_DIR = $(STUDIO_FRONTEND_DIR)/build + +# Studio configuration (can be overridden: make studio TASKS_DIR=./my/tasks PORT=9000) +TASKS_DIR ?= ./tasks/examples +PORT ?= 8000 + +.PHONY: studio +studio: studio-build ## Launch SyGra Studio (builds frontend if needed, starts server) + @echo "๐Ÿš€ Starting SyGra Studio..." + @echo " Tasks: $(TASKS_DIR)" + @echo " Port: $(PORT)" + $(UV) run $(PYTHON) -m studio.server --svelte --tasks-dir $(TASKS_DIR) --port $(PORT) + +.PHONY: studio-build +studio-build: ## Build the Studio frontend (only if not already built) + @if [ ! -d "$(STUDIO_BUILD_DIR)" ] || [ ! -f "$(STUDIO_BUILD_DIR)/index.html" ]; then \ + echo "๐Ÿ“ฆ Building Studio frontend..."; \ + cd $(STUDIO_FRONTEND_DIR) && npm install && npm run build; \ + else \ + echo "โœ… Studio frontend already built. Use 'make studio-rebuild' to force rebuild."; \ + fi + +.PHONY: studio-rebuild +studio-rebuild: ## Force rebuild the Studio frontend + @echo "๐Ÿ”จ Rebuilding Studio frontend..." + cd $(STUDIO_FRONTEND_DIR) && npm install && npm run build + +.PHONY: studio-dev +studio-dev: ## Launch Studio in development mode (hot-reload for frontend) + @echo "๐Ÿ”ง Starting Studio in development mode..." + @echo " Backend: http://localhost:$(PORT)" + @echo " Frontend: http://localhost:5173 (with hot-reload)" + @echo "" + @echo "Run these commands in separate terminals:" + @echo " Terminal 1: $(UV) run $(PYTHON) -m studio.server --tasks-dir $(TASKS_DIR) --port $(PORT)" + @echo " Terminal 2: cd $(STUDIO_FRONTEND_DIR) && npm run dev" + +.PHONY: studio-clean +studio-clean: ## Clean Studio frontend build artifacts + @echo "๐Ÿงน Cleaning Studio frontend build..." + rm -rf $(STUDIO_BUILD_DIR) + rm -rf $(STUDIO_FRONTEND_DIR)/node_modules + rm -rf $(STUDIO_FRONTEND_DIR)/.svelte-kit + ######################################################################################################################## # TESTING ######################################################################################################################## diff --git a/docs/eval/metrics/README.md b/docs/eval/metrics/README.md index 18b14dac..dc8108b8 100644 --- a/docs/eval/metrics/README.md +++ b/docs/eval/metrics/README.md @@ -148,4 +148,3 @@ When adding new metrics documentation: 3. Explain the "why" behind design decisions 4. Cover edge cases and common pitfalls 5. 
Provide real-world use case scenarios - diff --git a/pyproject.toml b/pyproject.toml index c3bd69f6..2fe0f423 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,6 +62,9 @@ dependencies = [ "boto3>=1.40.71,<2.0.0", "google-auth>=2.43.0,<3.0.0", "google-cloud-aiplatform>=1.128.0,<2.0.0", + "fastapi (>=0.124.4,<0.125.0)", + "uvicorn (>=0.38.0,<0.39.0)", + "debugpy (>=1.8.19,<2.0.0)", ] [project.optional-dependencies] @@ -96,13 +99,16 @@ Issues = "https://github.com/ServiceNow/SyGra/issues" Discussions = "https://github.com/ServiceNow/SyGra/discussions" [tool.hatch.build.targets.wheel] -packages = ["sygra"] +packages = ["sygra", "studio"] include = [ "sygra/**/*.yaml", "sygra/**/*.yml", "sygra/**/*.jinja2", "sygra/**/*.j2", "sygra/**/*.json", + "studio/**/*.yaml", + "studio/**/*.yml", + "studio/**/*.json", ] # --- Tool configurations --- @@ -157,3 +163,6 @@ exclude = [ module = ["tests.*"] disallow_untyped_defs = false check_untyped_defs = false + +[tool.poetry.group.dev.dependencies] +uvicorn = "^0.38.0" diff --git a/studio/__init__.py b/studio/__init__.py new file mode 100644 index 00000000..9791c646 --- /dev/null +++ b/studio/__init__.py @@ -0,0 +1,126 @@ +""" +SyGra Studio Integration + +This module provides seamless integration between SyGra workflows and Studio +UI visualization. It enables: +- Converting SyGra YAML configs to visualization-friendly graph format +- Running SyGra workflows from a web UI +- Real-time workflow execution monitoring + +Usage: + # Start the UI server + from studio import run_server + run_server(tasks_dir="./tasks/examples", port=8000) + + # Or use the CLI + python -m studio.server --tasks-dir ./tasks/examples + + # Build graph from YAML + from studio import build_graph_from_yaml + graph = build_graph_from_yaml("./tasks/examples/glaive_code_assistant/graph_config.yaml") + + # Convert to OpenFlow format + from studio import convert_sygra_to_openflow + openflow = convert_sygra_to_openflow("./tasks/examples/glaive_code_assistant/graph_config.yaml") +""" + +from studio.converter import ( + SygraToStudioConverter, + convert_sygra_to_openflow, +) +from studio.graph_builder import ( + SygraGraphBuilder, + build_graph_from_yaml, + build_graph_from_config, +) +from studio.models import ( + WorkflowNode, + WorkflowEdge, + WorkflowGraph, + WorkflowExecution, + ExecutionStatus, + NodeType, + NodePosition, + NodeSize, + ModelConfig, + PromptMessage, + EdgeCondition, + NodeExecutionState, + WorkflowListItem, + ExecutionRequest, + ExecutionResponse, +) +from studio.execution_manager import ( + ExecutionManager, + ExecutionCallback, + SygraExecutionRunner, + get_execution_manager, +) + +# Server components are lazily imported to avoid circular import warnings +# when running `python -m studio.server` + +def create_server(*args, **kwargs): + """Create and return the Studio server instance (lazy import).""" + from studio.server import create_server as _create_server + return _create_server(*args, **kwargs) + + +def run_server(*args, **kwargs): + """Run the Studio server (lazy import).""" + from studio.server import run_server as _run_server + return _run_server(*args, **kwargs) + +def create_app(*args, **kwargs): + """Create the FastAPI application (lazy import).""" + from studio.api import create_app as _create_app + return _create_app(*args, **kwargs) + + +def __getattr__(name): + """Lazy import for server components.""" + if name == "create_server": + from studio.server import create_server + return create_server + elif name == "run_server": + from studio.server import 
run_server + return run_server + elif name == "create_app": + from studio.api import create_app + return create_app + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") + +__all__ = [ + # Converter + "SygraToStudioConverter", + "convert_sygra_to_openflow", + # Graph Builder + "SygraGraphBuilder", + "build_graph_from_yaml", + "build_graph_from_config", + # Models + "WorkflowNode", + "WorkflowEdge", + "WorkflowGraph", + "WorkflowExecution", + "ExecutionStatus", + "NodeType", + "NodePosition", + "NodeSize", + "ModelConfig", + "PromptMessage", + "EdgeCondition", + "NodeExecutionState", + "WorkflowListItem", + "ExecutionRequest", + "ExecutionResponse", + # Execution + "ExecutionManager", + "ExecutionCallback", + "SygraExecutionRunner", + "get_execution_manager", + # Server + "create_server", + "run_server", + "create_app", +] diff --git a/studio/api.py b/studio/api.py new file mode 100644 index 00000000..ba124851 --- /dev/null +++ b/studio/api.py @@ -0,0 +1,6058 @@ +""" +FastAPI Backend for SyGra Studio Integration. + +Provides REST API endpoints for: +- Listing available SyGra workflows +- Getting workflow graph details for visualization +- Executing workflows +- Monitoring execution progress +- Code execution and debugging +""" + +import asyncio +import os +import sys +import uuid +import subprocess +import threading +import queue +import signal +from datetime import datetime +from pathlib import Path +from typing import Any, Dict, List, Optional, Tuple + +from fastapi import FastAPI, HTTPException, WebSocket, WebSocketDisconnect, Request +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import JSONResponse, FileResponse +from pydantic import BaseModel + +import yaml +import json as json_module +import traceback +import httpx +import re + +from sygra.utils import utils as sygra_utils +from sygra.utils import constants as sygra_constants + +from studio.graph_builder import SygraGraphBuilder +from studio.converter import SygraToStudioConverter +from studio.models import ( + ExecutionRequest, + ExecutionResponse, + ExecutionStatus, + NodeExecutionState, + WorkflowCreateRequest, + WorkflowExecution, + WorkflowGraph, + WorkflowListItem, + WorkflowSaveResponse, +) +from studio.execution_storage import get_storage, ExecutionStorage + + +# Store for active executions (in-memory cache for running executions) +# Persistent storage is handled by ExecutionStorage class +_executions: Dict[str, WorkflowExecution] = {} + + +def _convert_prompts_to_yaml_format(prompts: List[Dict[str, Any]]) -> List[Dict[str, str]]: + """ + Convert prompts from frontend format to SyGra YAML format. 
+ + Frontend format: [{"role": "system", "content": "..."}, {"role": "user", "content": "..."}] + SyGra YAML format: [{"system": "..."}, {"user": "..."}] + """ + if not prompts: + return [] + + yaml_prompts = [] + for msg in prompts: + if isinstance(msg, dict): + role = msg.get('role', 'user') + content = msg.get('content', '') + yaml_prompts.append({role: content}) + elif hasattr(msg, 'role') and hasattr(msg, 'content'): + # Handle PromptMessage objects + yaml_prompts.append({msg.role: msg.content}) + return yaml_prompts + +# Scalable execution storage instance (lazy initialized) +_execution_storage: ExecutionStorage = None + +def _get_execution_storage() -> ExecutionStorage: + """Get the execution storage instance (lazy initialization).""" + global _execution_storage + if _execution_storage is None: + _execution_storage = get_storage() + return _execution_storage + +# Store for discovered workflows +_workflows: Dict[str, WorkflowGraph] = {} + +# Store for cancelled execution IDs (for signaling background tasks to stop) +_cancelled_executions: set = set() + +# Store for running processes (for actual process termination) +import multiprocessing +import json as json_module +import asyncio as asyncio_module +_running_processes: Dict[str, multiprocessing.Process] = {} + +# Job queue for sequential execution +_execution_queue: asyncio_module.Queue = None +_queue_processor_task = None +_current_running_execution: str = None + +# Persistence file for executions +_EXECUTIONS_FILE = Path(__file__).parent / ".executions_history.json" + +# Code execution stores +_code_executions: Dict[str, Dict] = {} # Active code execution sessions +_debug_sessions: Dict[str, Dict] = {} # Active debug sessions +_websocket_connections: Dict[str, WebSocket] = {} # WebSocket connections for streaming output + +# Environment variables store (loaded from .env + runtime modifications) +_env_vars: Dict[str, str] = {} +_ENV_FILE = Path(__file__).parent / ".env" # Local env file for UI-managed vars + +# ==================== Models API (Module Level) ==================== +# Models config file paths - use library constants for consistency +_BUILTIN_MODELS_CONFIG_PATH = Path(sygra_constants.MODEL_CONFIG_YAML) +_CUSTOM_MODELS_CONFIG_PATH = Path(sygra_constants.CUSTOM_MODELS_CONFIG_YAML) + +# Legacy path for backwards compatibility +_MODELS_CONFIG_PATH = _BUILTIN_MODELS_CONFIG_PATH + +# Cache for model status (lightweight - just connectivity info) +_models_status: Dict[str, Dict[str, Any]] = {} + +# Supported model types with their configurations +_MODEL_TYPES = { + "azure_openai": { + "label": "Azure OpenAI", + "description": "Azure-hosted OpenAI models (GPT-4, GPT-4o, etc.)", + "env_vars": ["URL", "TOKEN"], + }, + "openai": { + "label": "OpenAI", + "description": "OpenAI API models", + "env_vars": ["URL", "TOKEN"], + }, + "vllm": { + "label": "vLLM", + "description": "Self-hosted vLLM inference server", + "env_vars": ["URL", "TOKEN"], + }, + "ollama": { + "label": "Ollama", + "description": "Local Ollama models", + "env_vars": ["URL"], + }, + "tgi": { + "label": "TGI (Text Generation Inference)", + "description": "Hugging Face Text Generation Inference", + "env_vars": ["URL", "TOKEN"], + }, + "mistralai": { + "label": "Mistral AI", + "description": "Mistral AI API", + "env_vars": ["URL", "TOKEN"], + }, + "vertex_ai": { + "label": "Google Vertex AI", + "description": "Google Cloud Vertex AI models (Gemini)", + "env_vars": ["VERTEX_PROJECT", "VERTEX_LOCATION", "VERTEX_CREDENTIALS"], + }, + "bedrock": { + "label": "AWS Bedrock", 
+ "description": "AWS Bedrock models (Claude, Titan, etc.)", + "env_vars": ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_REGION_NAME"], + }, + "triton": { + "label": "Triton Inference Server", + "description": "NVIDIA Triton Inference Server", + "env_vars": ["URL", "TOKEN"], + } +} + + +# ==================== Model Config Functions (Using Library) ==================== +# These wrapper functions use the sygra library functions with Studio-specific additions + +def _load_builtin_models_config() -> Dict[str, Any]: + """Load builtin/core models from SyGra's models.yaml (read-only). + Uses library function: sygra_utils.load_builtin_models() + """ + return sygra_utils.load_builtin_models() + + +def _load_custom_models_config() -> Dict[str, Any]: + """Load custom user-defined models from studio config. + Uses library function: sygra_utils.load_custom_models() + """ + return sygra_utils.load_custom_models() + + +def _save_custom_models_config(config: Dict[str, Any]) -> None: + """Save custom models to studio config file. + Uses library function: sygra_utils.save_custom_models() + """ + sygra_utils.save_custom_models(config) + + +def _load_models_config_sync() -> Dict[str, Any]: + """Load all models configuration (builtin + custom), synchronous, fast. + + Note: This is a simplified version that doesn't inject env vars. + For env var injection, use sygra_utils.load_model_config() instead. + """ + builtin = _load_builtin_models_config() + custom = _load_custom_models_config() + merged = {} + merged.update(builtin) + merged.update(custom) + return merged + + +def _get_builtin_model_names() -> set: + """Get the set of builtin model names. + Uses library function: sygra_utils.get_builtin_model_names() + """ + return sygra_utils.get_builtin_model_names() + + +def _get_model_env_value(model_name: str, env_suffix: str) -> tuple: + """Get env var value trying multiple prefix formats. Returns (value, key_used). + + Studio-specific: Also checks _env_vars (UI-managed env vars) in addition + to os.environ. + """ + # First check Studio's UI-managed env vars + prefixes = [ + f"SYGRA_{model_name.upper()}", # SYGRA_GPT-4O (keep hyphens) + f"SYGRA_{sygra_utils.get_env_name(model_name)}", # SYGRA_GPT_4O (normalized) + ] + for prefix in prefixes: + env_key = f"{prefix}_{env_suffix}" + # Check Studio's _env_vars first, then os.environ + value = _env_vars.get(env_key) or os.environ.get(env_key, "") + if value: + return value, env_key + return "", f"{prefixes[0]}_{env_suffix}" + + +def _get_model_credentials_fast(model_name: str, model_config: Dict[str, Any]) -> Dict[str, Any]: + """Get credentials for a model - fast, no heavy operations. + + Studio-specific: Uses Studio's _env_vars in addition to os.environ. 
+ """ + model_type = model_config.get("model_type", "") + type_info = _MODEL_TYPES.get(model_type, {"env_vars": ["URL", "TOKEN"]}) + + creds = {} + for env_var in type_info.get("env_vars", ["URL", "TOKEN"]): + value, key = _get_model_env_value(model_name, env_var) + creds[env_var.lower()] = value + creds[f"{env_var.lower()}_env_key"] = key + creds[f"{env_var.lower()}_configured"] = bool(value) + + return creds + + +async def _ping_model_http(model_name: str, model_config: Dict[str, Any]) -> Dict[str, Any]: + """Lightweight HTTP ping - just checks endpoint reachability.""" + model_type = model_config.get("model_type", "") + url, _ = _get_model_env_value(model_name, "URL") + + if not url: + return { + "status": "unconfigured", + "status_code": None, + "latency_ms": None, + "last_checked": datetime.now().isoformat(), + "error": "URL not configured" + } + + # Health check endpoints by model type + health_endpoints = {"vllm": "/health", "tgi": "/health", "ollama": "/api/tags", "triton": "/v2/health/ready"} + + if model_type in ["azure_openai", "openai", "mistralai"]: + check_url = url.rstrip("/") + if "/openai/deployments/" in check_url: + check_url = check_url.split("/openai/deployments/")[0] + elif model_type in health_endpoints: + base_url = url.rstrip("/").rsplit("/v1", 1)[0] if "/v1" in url else url.rstrip("/") + check_url = f"{base_url}{health_endpoints[model_type]}" + else: + check_url = url.rstrip("/") + + try: + start = datetime.now() + async with httpx.AsyncClient(timeout=8.0, verify=False) as client: + resp = await client.get(check_url) + latency = (datetime.now() - start).total_seconds() * 1000 + # 401/403/404/405 means server is up (just needs auth or wrong path) + is_up = resp.status_code in [200, 401, 403, 404, 405] + return { + "status": "online" if is_up else "error", + "status_code": resp.status_code, + "latency_ms": round(latency, 2), + "last_checked": datetime.now().isoformat(), + "error": None if is_up else f"HTTP {resp.status_code}" + } + except httpx.TimeoutException: + return {"status": "timeout", "status_code": 408, "latency_ms": None, "last_checked": datetime.now().isoformat(), "error": "Timeout"} + except httpx.ConnectError: + return {"status": "offline", "status_code": None, "latency_ms": None, "last_checked": datetime.now().isoformat(), "error": "Connection refused"} + except Exception as e: + return {"status": "error", "status_code": 500, "latency_ms": None, "last_checked": datetime.now().isoformat(), "error": str(e)[:50]} + + +def _load_env_vars(): + """Load environment variables from .env files.""" + global _env_vars + + # Load from project .env files (studio/api.py -> studio/ -> project_root/) + project_root = Path(__file__).parent.parent + env_files = [ + project_root / ".env", # Project root .env (e.g., /path/to/GraSP/.env) + Path.cwd() / ".env", # Current working directory .env + Path.home() / ".env", # User home .env + _ENV_FILE, # Local UI-managed env file + ] + + for env_file in env_files: + if env_file.exists(): + try: + with open(env_file, 'r') as f: + for line in f: + line = line.strip() + if line and not line.startswith('#') and '=' in line: + key, _, value = line.partition('=') + key = key.strip() + value = value.strip().strip('"').strip("'") + if key: + _env_vars[key] = value + except Exception as e: + print(f"Warning: Failed to load {env_file}: {e}") + + +def _save_env_vars(): + """Save UI-managed environment variables to local file.""" + try: + with open(_ENV_FILE, 'w') as f: + f.write("# SyGra Studio managed environment variables\n") + f.write(f"# 
Last updated: {datetime.now().isoformat()}\n\n") + for key, value in sorted(_env_vars.items()): + # Escape values with special characters + if ' ' in value or '"' in value or "'" in value: + value = f'"{value}"' + f.write(f"{key}={value}\n") + except Exception as e: + print(f"Warning: Failed to save env vars: {e}") + + +class CodeExecutionRequest(BaseModel): + """Request model for code execution.""" + file_path: str # Path to the Python file to execute + function_name: Optional[str] = None # Specific function to run + args: Optional[List[str]] = None # Command line arguments + workflow_id: Optional[str] = None # Associated workflow + debug: bool = False # Whether to run in debug mode + breakpoints: Optional[List[int]] = None # Line numbers for breakpoints + + +class DebugAction(BaseModel): + """Request model for debug actions.""" + action: str # 'continue', 'step_over', 'step_into', 'step_out', 'stop' + session_id: str + + +def _save_executions(): + """ + Save executions to persistent storage using the new scalable storage. + + Also cleans up completed executions from in-memory dict to prevent memory leaks. + """ + try: + storage = _get_execution_storage() + to_remove = [] + + # Save completed/failed/cancelled executions to scalable storage + for exec_id, execution in _executions.items(): + if execution.status in (ExecutionStatus.COMPLETED, ExecutionStatus.FAILED, ExecutionStatus.CANCELLED): + storage.save_execution(execution) + to_remove.append(exec_id) + + # Clean up from in-memory dict after saving to storage + for exec_id in to_remove: + del _executions[exec_id] + + except Exception as e: + print(f"Warning: Failed to save executions: {e}") + + +_executions_loaded = False + +def _load_executions(): + """ + Initialize execution storage (will automatically migrate from legacy format if needed). + + This function now delegates to the scalable ExecutionStorage class which: + - Uses per-run files instead of a monolithic JSON + - Maintains a lightweight index for fast listing + - Supports pagination and lazy loading + - Automatically migrates from legacy .executions_history.json + """ + global _executions_loaded + + # Prevent duplicate loading + if _executions_loaded: + return + _executions_loaded = True + + # Initialize storage (handles migration from legacy format automatically) + storage = _get_execution_storage() + + # Note: We no longer load all executions into memory. + # Executions are loaded on-demand via storage.get_execution() + # and listed via storage.list_executions() with pagination. 
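+
+# Illustrative usage of the scalable storage as consumed later in this module
+# (the actual implementation lives in studio/execution_storage.py, outside this
+# hunk):
+#
+#     storage = _get_execution_storage()
+#     execution = storage.get_execution(execution_id)   # lazy load from per-run file
+#     page, total = storage.list_executions_full(workflow_id=None, status=None, limit=50, offset=0)
+#     storage.save_execution(execution)                 # persist a finished run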
+ + +async def _process_execution_queue(): + """Process executions from the queue one at a time.""" + global _current_running_execution, _execution_queue + + while True: + try: + # Wait for next job in queue + job = await _execution_queue.get() + execution_id, workflow, request = job + + # Skip if already cancelled while waiting in queue + if execution_id in _cancelled_executions: + execution = _executions.get(execution_id) + if execution and execution.status == ExecutionStatus.PENDING: + execution.status = ExecutionStatus.CANCELLED + execution.completed_at = datetime.now() + _save_executions() + _cancelled_executions.discard(execution_id) + _execution_queue.task_done() + continue + + # Set current running execution + _current_running_execution = execution_id + + try: + # Run the workflow + await _run_workflow(execution_id, workflow, request) + finally: + _current_running_execution = None + _execution_queue.task_done() + + except Exception as e: + print(f"Error processing execution queue: {e}") + import traceback + traceback.print_exc() + + +def _ensure_queue_initialized(): + """Ensure the execution queue and processor are initialized.""" + global _execution_queue, _queue_processor_task + + if _execution_queue is None: + _execution_queue = asyncio_module.Queue() + + # Start queue processor if not running + if _queue_processor_task is None or _queue_processor_task.done(): + try: + loop = asyncio_module.get_running_loop() + _queue_processor_task = loop.create_task(_process_execution_queue()) + except RuntimeError: + # No running loop yet, will be started when first execution is queued + pass + + +def create_app( + tasks_dir: Optional[str] = None, + cors_origins: Optional[List[str]] = None, +) -> FastAPI: + """ + Create and configure the FastAPI application. + + Args: + tasks_dir: Directory containing SyGra task workflows. + cors_origins: List of allowed CORS origins. + + Returns: + Configured FastAPI application. 
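+
+    Example (sketch):
+        app = create_app(tasks_dir="./tasks/examples")
+        # then serve it, e.g. with uvicorn:
+        #   uvicorn.run(app, host="0.0.0.0", port=8000)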
+ """ + app = FastAPI( + title="SyGra Workflow API", + description="API for visualizing and executing SyGra workflows", + version="1.0.0", + ) + + # Configure CORS + if cors_origins is None: + cors_origins = ["http://localhost:3000", "http://localhost:5173", "http://localhost:8080"] + + app.add_middleware( + CORSMiddleware, + allow_origins=cors_origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + + # Set tasks directory + if tasks_dir is None: + tasks_dir = os.environ.get( + "SYGRA_TASKS_DIR", + str(Path(__file__).parent.parent / "tasks" / "examples") # studio -> project root + ) + + app.state.tasks_dir = tasks_dir + + # Load persisted executions on startup + _load_executions() + + # Load environment variables from .env files + _load_env_vars() + + app.state.graph_builder = SygraGraphBuilder() + app.state.converter = SygraToStudioConverter() + + # Register routes + _register_routes(app) + + return app + + +def _register_routes(app: FastAPI) -> None: + """Register all API routes.""" + + @app.get("/api/health") + async def health_check(): + """Health check endpoint.""" + return {"status": "ok", "service": "SyGra Workflow API"} + + @app.get("/api/config") + async def get_config(): + """Get server configuration including tasks directory.""" + tasks_path = Path(app.state.tasks_dir) + tasks_dir = str(tasks_path.resolve()) if tasks_path.exists() else str(tasks_path) + + # Get list of subdirectories for directory picker + subdirs = [] + if tasks_path.exists(): + subdirs = [str(tasks_path / d.name) for d in tasks_path.iterdir() if d.is_dir()] + + return { + "tasks_dir": tasks_dir, + "subdirectories": sorted(subdirs), + "version": "1.0.0" + } + + # ==================== Environment Variables API ==================== + + @app.get("/api/settings/env") + async def get_env_vars(): + """Get all environment variables.""" + # Return vars with masked sensitive values + SENSITIVE_PATTERNS = ['key', 'secret', 'token', 'password', 'api_key', 'apikey', 'auth', 'credential'] + + result = [] + for key, value in sorted(_env_vars.items()): + is_sensitive = any(pattern in key.lower() for pattern in SENSITIVE_PATTERNS) + result.append({ + "key": key, + "value": value, + "masked_value": "*" * min(len(value), 20) if is_sensitive else value, + "is_sensitive": is_sensitive + }) + + return { + "variables": result, + "count": len(result), + "env_file": str(_ENV_FILE) + } + + @app.post("/api/settings/env") + async def set_env_var(data: dict): + """Add or update an environment variable.""" + key = data.get("key", "").strip() + value = data.get("value", "") + + if not key: + raise HTTPException(status_code=400, detail="Key is required") + + # Validate key format (alphanumeric + underscore) + if not key.replace("_", "").isalnum(): + raise HTTPException(status_code=400, detail="Key must be alphanumeric with underscores only") + + _env_vars[key] = value + _save_env_vars() + + # Also set in os.environ for immediate use + os.environ[key] = value + + return {"success": True, "key": key, "message": f"Variable '{key}' saved"} + + @app.delete("/api/settings/env/{key}") + async def delete_env_var(key: str): + """Delete an environment variable.""" + if key not in _env_vars: + raise HTTPException(status_code=404, detail=f"Variable '{key}' not found") + + del _env_vars[key] + _save_env_vars() + + # Also remove from os.environ + if key in os.environ: + del os.environ[key] + + return {"success": True, "key": key, "message": f"Variable '{key}' deleted"} + + @app.post("/api/settings/env/reload") + async def 
reload_env_vars(): + """Reload environment variables from .env files.""" + global _env_vars + _env_vars = {} + _load_env_vars() + + # Update os.environ with loaded vars + for key, value in _env_vars.items(): + os.environ[key] = value + + return {"success": True, "count": len(_env_vars), "message": "Environment variables reloaded"} + + # ==================== Models API (using module-level functions) ==================== + + @app.get("/api/models/types") + async def get_model_types(): + """Get all supported model types.""" + return {"types": _MODEL_TYPES, "count": len(_MODEL_TYPES)} + + @app.get("/api/models") + async def list_models(): + """List all configured models (fast, no pinging).""" + config = _load_models_config_sync() + builtin_names = _get_builtin_model_names() + models = [] + + for name, model_config in config.items(): + model_type = model_config.get("model_type", "unknown") + creds = _get_model_credentials_fast(name, model_config) + status_info = _models_status.get(name, {"status": "unknown", "last_checked": None}) + + type_info = _MODEL_TYPES.get(model_type, {"env_vars": ["URL", "TOKEN"]}) + all_creds_configured = all( + creds.get(f"{ev.lower()}_configured", False) + for ev in type_info.get("env_vars", []) + ) + + models.append({ + "name": name, + "model_type": model_type, + "model_type_label": type_info.get("label", model_type), + "model": model_config.get("model", name), + "description": type_info.get("description", ""), + "parameters": model_config.get("parameters", {}), + "api_version": model_config.get("api_version"), + "credentials": creds, + "credentials_configured": all_creds_configured, + "status": status_info.get("status", "unknown"), + "status_code": status_info.get("status_code"), + "latency_ms": status_info.get("latency_ms"), + "last_checked": status_info.get("last_checked"), + "error": status_info.get("error"), + "is_builtin": name in builtin_names, # Flag for SyGra core models (read-only) + }) + + return {"models": models, "count": len(models), "config_path": str(_MODELS_CONFIG_PATH)} + + @app.get("/api/models/{model_name}") + async def get_model(model_name: str): + """Get a specific model's configuration.""" + config = _load_models_config_sync() + if model_name not in config: + raise HTTPException(status_code=404, detail=f"Model '{model_name}' not found") + + model_config = config[model_name] + model_type = model_config.get("model_type", "unknown") + + return { + "name": model_name, + "model_type": model_type, + "model_type_info": _MODEL_TYPES.get(model_type, {}), + "config": model_config, + "credentials": _get_model_credentials_fast(model_name, model_config), + "status": _models_status.get(model_name, {"status": "unknown"}), + } + + @app.post("/api/models/{model_name}/ping") + async def ping_model(model_name: str): + """Ping a single model (lightweight HTTP check).""" + config = _load_models_config_sync() + if model_name not in config: + raise HTTPException(status_code=404, detail=f"Model '{model_name}' not found") + + result = await _ping_model_http(model_name, config[model_name]) + _models_status[model_name] = result + return {"model": model_name, **result} + + @app.post("/api/models/ping-all") + async def ping_all_models(): + """Ping all models in parallel (lightweight HTTP checks).""" + config = _load_models_config_sync() + if not config: + return {"results": {}, "total": 0, "online": 0, "offline": 0} + + # Run all pings concurrently + tasks = [_ping_model_http(name, cfg) for name, cfg in config.items()] + names = list(config.keys()) + + try: + results_list 
= await asyncio.wait_for(asyncio.gather(*tasks, return_exceptions=True), timeout=20) + except asyncio.TimeoutError: + results_list = [{"status": "timeout", "error": "Overall timeout"} for _ in names] + + results = {} + for name, res in zip(names, results_list): + if isinstance(res, Exception): + res = {"status": "error", "error": str(res)[:50]} + results[name] = res + _models_status[name] = res + + online = sum(1 for r in results.values() if r.get("status") == "online") + return {"results": results, "total": len(results), "online": online, "offline": len(results) - online} + + @app.post("/api/models") + async def create_or_update_model(data: dict): + """Create or update a model configuration (custom models only).""" + name = data.get("name", "").strip() + model_type = data.get("model_type", "").strip() + + if not name: + raise HTTPException(status_code=400, detail="Model name is required") + if not model_type or model_type not in _MODEL_TYPES: + raise HTTPException(status_code=400, detail=f"Invalid model type: {model_type}") + + # Check if trying to edit a builtin model + builtin_names = _get_builtin_model_names() + if name in builtin_names: + raise HTTPException( + status_code=403, + detail=f"Cannot modify builtin SyGra model '{name}'. Create a custom model with a different name instead." + ) + + # Load custom models config (not builtin) + custom_config = _load_custom_models_config() + is_new = name not in custom_config and name not in _load_models_config_sync() + + model_config = {"model_type": model_type} + if data.get("model"): model_config["model"] = data["model"] + if data.get("api_version"): model_config["api_version"] = data["api_version"] + if data.get("parameters"): model_config["parameters"] = data["parameters"] + + # Add additional optional fields + for field in ["hf_chat_template_model_id", "model_serving_name", "post_process", "input_type", "output_type"]: + if data.get(field): model_config[field] = data[field] + + custom_config[name] = model_config + + # Save to custom models config (NOT the builtin SyGra models.yaml) + try: + _save_custom_models_config(custom_config) + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to save: {e}") + + # Save credentials if provided + credentials = data.get("credentials", {}) + if credentials: + prefix = f"SYGRA_{name.upper()}" + for key, value in credentials.items(): + if value: + env_key = f"{prefix}_{key.upper()}" + _env_vars[env_key] = value + os.environ[env_key] = value + + return {"success": True, "model": name, "is_new": is_new, "is_builtin": False} + + @app.delete("/api/models/{model_name}") + async def delete_model(model_name: str): + """Delete a model configuration (custom models only).""" + # Check if it's a builtin model + builtin_names = _get_builtin_model_names() + if model_name in builtin_names: + raise HTTPException( + status_code=403, + detail=f"Cannot delete builtin SyGra model '{model_name}'. Only custom models can be deleted." 
+ ) + + # Check if model exists in custom config + custom_config = _load_custom_models_config() + if model_name not in custom_config: + raise HTTPException(status_code=404, detail=f"Custom model '{model_name}' not found") + + del custom_config[model_name] + + try: + _save_custom_models_config(custom_config) + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to save: {e}") + + _models_status.pop(model_name, None) + return {"success": True, "model": model_name} + + # ================================================================== + + @app.get("/api/workflows", response_model=List[WorkflowListItem]) + async def list_workflows(): + """ + List all available SyGra workflows. + + Recursively scans the tasks directory for graph_config.yaml files, + supporting nested folder structures. + """ + workflows = [] + tasks_dir = Path(app.state.tasks_dir) + + if not tasks_dir.exists(): + return workflows + + # Recursively find all graph_config.yaml files + for config_path in tasks_dir.rglob("graph_config.yaml"): + if not config_path.is_file(): + continue + + try: + graph = app.state.graph_builder.build_from_yaml(str(config_path)) + _workflows[graph.id] = graph + + workflows.append(WorkflowListItem( + id=graph.id, + name=graph.name, + description=graph.description, + source_path=str(config_path), + node_count=len(graph.nodes), + edge_count=len(graph.edges), + last_modified=graph.last_modified, + )) + except Exception as e: + # Skip invalid workflows + print(f"Error loading workflow from {config_path}: {e}") + continue + + return workflows + + @app.get("/api/workflows/{workflow_id}", response_model=WorkflowGraph) + async def get_workflow(workflow_id: str): + """ + Get detailed workflow graph for visualization. + + Args: + workflow_id: The workflow ID to retrieve. + """ + # Check cache first + if workflow_id in _workflows: + return _workflows[workflow_id] + + # Try to find by scanning + await list_workflows() + + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail=f"Workflow {workflow_id} not found") + + return _workflows[workflow_id] + + @app.get("/api/workflows/{workflow_id}/openflow") + async def get_workflow_openflow(workflow_id: str): + """ + Get workflow in Studio OpenFlow format. + + Args: + workflow_id: The workflow ID to convert. + """ + if workflow_id not in _workflows: + await list_workflows() + + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail=f"Workflow {workflow_id} not found") + + workflow = _workflows[workflow_id] + openflow = app.state.converter.convert_workflow(workflow) + + return openflow + + @app.get("/api/workflows/{workflow_id}/sample-data") + async def get_workflow_sample_data(workflow_id: str, limit: int = 3, source_index: int = 0): + """ + Get sample data records from a workflow's data source. + + Args: + workflow_id: The workflow ID. + limit: Maximum number of records to return (default: 3). + source_index: Index of the source to preview (default: 0, first source). + + Returns: + Sample data records and metadata. 
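+
+        For example, a local JSON source with 120 rows and limit=3 returns
+        something like (counts are illustrative):
+
+            {"records": [{...}, {...}, {...}], "total": 120,
+             "source_type": "json", "message": None}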
+ """ + if workflow_id not in _workflows: + await list_workflows() + + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail=f"Workflow {workflow_id} not found") + + workflow = _workflows[workflow_id] + data_config = workflow.data_config + + if not data_config or not data_config.get("source"): + return { + "records": [], + "total": 0, + "message": "No data source configured for this workflow" + } + + source = data_config.get("source", {}) + # Handle array or single source + if isinstance(source, list): + if source_index < 0 or source_index >= len(source): + return { + "records": [], + "total": 0, + "message": f"Invalid source index {source_index}. Available: 0-{len(source)-1}" if source else "No sources configured" + } + source = source[source_index] if source else {} + elif source_index > 0: + return { + "records": [], + "total": 0, + "message": f"Only one source configured (index 0)" + } + + source_type = (source.get("type") or "").lower() + + try: + records = [] + total_count = None + + if source_type in ("disk", "local_file", "local", "json", "jsonl", "csv"): + # Local file source + file_path = source.get("file_path") or source.get("path") + file_format = source.get("file_format") or source.get("format") or "json" + + if file_path and os.path.exists(file_path): + import json as json_lib + if file_format in ("json",): + with open(file_path, 'r') as f: + data = json_lib.load(f) + if isinstance(data, list): + total_count = len(data) + records = data[:limit] + else: + records = [data] + total_count = 1 + elif file_format in ("jsonl",): + with open(file_path, 'r') as f: + lines = f.readlines() + total_count = len(lines) + for line in lines[:limit]: + if line.strip(): + records.append(json_lib.loads(line)) + elif file_format in ("csv",): + import csv + with open(file_path, 'r') as f: + reader = csv.DictReader(f) + all_rows = list(reader) + total_count = len(all_rows) + records = all_rows[:limit] + else: + return { + "records": [], + "total": 0, + "message": f"File not found: {file_path}" + } + + elif source_type in ("hf", "huggingface"): + # HuggingFace source - use HF Hub API directly (much faster than datasets library) + repo_id = source.get("repo_id") + config_name = source.get("config_name") or "default" + split = source.get("split", "train") + + if not repo_id: + return { + "records": [], + "total": 0, + "message": "No repo_id specified for HuggingFace dataset" + } + + try: + import httpx + # Use HuggingFace datasets-server API for fast row access + url = f"https://datasets-server.huggingface.co/first-rows?dataset={repo_id}&config={config_name}&split={split}" + + async with httpx.AsyncClient(timeout=15.0) as client: + response = await client.get(url) + + if response.status_code == 200: + data = response.json() + rows = data.get("rows", []) + records = [row.get("row", row) for row in rows[:limit]] + total_count = data.get("num_rows_total", "unknown") + else: + # Fallback: return info without records + return { + "records": [], + "total": "unknown", + "source_type": "huggingface", + "message": f"Could not fetch preview (API returned {response.status_code}). 
View dataset at HuggingFace.", + "dataset_info": {"repo_id": repo_id, "config_name": config_name, "split": split} + } + except Exception as hf_err: + return { + "records": [], + "total": 0, + "source_type": "huggingface", + "message": f"Failed to fetch preview: {str(hf_err)[:150]}", + "dataset_info": {"repo_id": repo_id, "config_name": config_name, "split": split} + } + + elif source_type in ("servicenow", "snow"): + # ServiceNow - would need actual connection + return { + "records": [], + "total": 0, + "message": "ServiceNow preview requires authentication. Configure SNOW credentials to preview data." + } + else: + return { + "records": [], + "total": 0, + "message": f"Preview not supported for source type: {source_type}" + } + + return { + "records": records, + "total": total_count, + "source_type": source_type, + "message": None + } + + except Exception as e: + return { + "records": [], + "total": 0, + "message": f"Error loading sample data: {str(e)}" + } + + @app.get("/api/workflows/{workflow_id}/data-columns") + async def get_workflow_data_columns(workflow_id: str, source_index: int = 0): + """ + Get column names from a workflow's data source. + + This endpoint fetches a sample record and extracts column names. + Supports all data source types: HuggingFace, local files, memory, etc. + + Args: + workflow_id: The workflow ID. + source_index: Index of the source to get columns from (default: 0). + + Returns: + List of column names and source metadata. + """ + # Use existing sample data logic to get columns + sample_result = await get_workflow_sample_data(workflow_id, limit=1, source_index=source_index) + + columns = [] + if sample_result.get("records") and len(sample_result["records"]) > 0: + # Extract column names from first record + first_record = sample_result["records"][0] + if isinstance(first_record, dict): + columns = list(first_record.keys()) + + return { + "columns": columns, + "source_type": sample_result.get("source_type"), + "source_index": source_index, + "message": sample_result.get("message") if not columns else None + } + + @app.post("/api/preview-source") + async def preview_source_data(request: Request, limit: int = 5): + """ + Preview data from a source configuration directly (without requiring a saved workflow). + + Accepts source configuration in the POST body and returns sample data. + This allows previewing data before saving a workflow. + + Args: + request: Request containing source configuration in body. + limit: Maximum number of records to return (default: 5). + + Returns: + Sample data records and metadata. 
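+
+        Illustrative request bodies (placeholders, not real datasets):
+
+            {"type": "hf", "repo_id": "<org>/<dataset>", "split": "train"}
+            {"type": "json", "file_path": "./data/sample.json"}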
+ """ + try: + source = await request.json() + except Exception as e: + return { + "records": [], + "total": 0, + "message": f"Invalid JSON body: {str(e)}" + } + + if not source: + return { + "records": [], + "total": 0, + "message": "No source configuration provided" + } + + source_type = (source.get("type") or "").lower() + + try: + records = [] + total_count = None + + if source_type in ("disk", "local_file", "local", "json", "jsonl", "csv"): + # Local file source + file_path = source.get("file_path") or source.get("path") + file_format = source.get("file_format") or source.get("format") or "json" + + if not file_path: + return { + "records": [], + "total": 0, + "message": "No file_path specified for local file source" + } + + if file_path and os.path.exists(file_path): + import json as json_lib + if file_format in ("json",): + with open(file_path, 'r') as f: + data = json_lib.load(f) + if isinstance(data, list): + total_count = len(data) + records = data[:limit] + else: + records = [data] + total_count = 1 + elif file_format in ("jsonl",): + with open(file_path, 'r') as f: + lines = f.readlines() + total_count = len(lines) + for line in lines[:limit]: + if line.strip(): + records.append(json_lib.loads(line)) + elif file_format in ("csv",): + import csv + with open(file_path, 'r') as f: + reader = csv.DictReader(f) + all_rows = list(reader) + total_count = len(all_rows) + records = all_rows[:limit] + elif file_format in ("parquet",): + try: + import pandas as pd + df = pd.read_parquet(file_path) + total_count = len(df) + records = df.head(limit).to_dict('records') + except ImportError: + return { + "records": [], + "total": 0, + "message": "pandas and pyarrow required for parquet files" + } + else: + return { + "records": [], + "total": 0, + "message": f"File not found: {file_path}" + } + + elif source_type in ("hf", "huggingface"): + # HuggingFace source - use HF Hub API directly + repo_id = source.get("repo_id") + config_name = source.get("config_name") # Don't default - auto-detect if needed + split = source.get("split", "train") + + if not repo_id: + return { + "records": [], + "total": 0, + "message": "No repo_id specified for HuggingFace dataset" + } + + try: + import requests as hf_requests + + # If no config specified, try to auto-detect from available configs + if not config_name: + # Get available configs from the dataset info API + info_url = f"https://datasets-server.huggingface.co/info?dataset={repo_id}" + info_response = hf_requests.get(info_url, timeout=10) + if info_response.status_code == 200: + info_data = info_response.json() + available_configs = list(info_data.get("dataset_info", {}).keys()) + if available_configs: + # Use "default" if available, otherwise use the first config + config_name = "default" if "default" in available_configs else available_configs[0] + else: + config_name = "default" + else: + config_name = "default" + + # Try the dataset viewer API first (much faster) + viewer_url = f"https://datasets-server.huggingface.co/rows?dataset={repo_id}&config={config_name}&split={split}&offset=0&length={limit}" + response = hf_requests.get(viewer_url, timeout=10) + + if response.status_code == 200: + data = response.json() + records = [row.get("row", row) for row in data.get("rows", [])] + total_count = data.get("num_rows_total", len(records)) + else: + # Fall back to datasets library + from datasets import load_dataset + ds = load_dataset(repo_id, config_name, split=split, streaming=True) + records = list(ds.take(limit)) + total_count = "streaming" + except 
Exception as hf_error: + return { + "records": [], + "total": 0, + "message": f"HuggingFace error: {str(hf_error)}" + } + + elif source_type == "servicenow": + # ServiceNow source - would need credentials + return { + "records": [], + "total": 0, + "message": "ServiceNow preview requires saved workflow with credentials" + } + + else: + return { + "records": [], + "total": 0, + "message": f"Unknown source type: {source_type}" + } + + return { + "records": records, + "total": total_count, + "source_type": source_type + } + + except Exception as e: + return { + "records": [], + "total": 0, + "message": f"Error loading sample data: {str(e)}" + } + + @app.post("/api/workflows/{workflow_id}/execute", response_model=ExecutionResponse) + async def execute_workflow( + workflow_id: str, + request: ExecutionRequest, + ): + """ + Start workflow execution. + + Jobs are queued and executed one at a time in order. + + Args: + workflow_id: The workflow ID to execute. + request: Execution request with input data. + """ + global _execution_queue, _queue_processor_task + + if workflow_id not in _workflows: + await list_workflows() + + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail=f"Workflow {workflow_id} not found") + + workflow = _workflows[workflow_id] + + # Normalize input_data - if it's a list, use first element for single run + input_data = request.input_data + if isinstance(input_data, list): + input_data = input_data[0] if input_data else {} + + # Create execution record + execution_id = str(uuid.uuid4()) + execution = WorkflowExecution( + id=execution_id, + workflow_id=workflow_id, + workflow_name=workflow.name, + status=ExecutionStatus.PENDING, + input_data=input_data, + started_at=datetime.now(), + ) + + # Initialize node states + for node in workflow.nodes: + execution.node_states[node.id] = NodeExecutionState( + node_id=node.id, + status=ExecutionStatus.PENDING, + ) + + _executions[execution_id] = execution + + # Initialize queue if needed + if _execution_queue is None: + _execution_queue = asyncio_module.Queue() + + # Start queue processor if not running + if _queue_processor_task is None or _queue_processor_task.done(): + _queue_processor_task = asyncio_module.create_task(_process_execution_queue()) + + # Add to execution queue + await _execution_queue.put((execution_id, workflow, request)) + + # Determine position in queue + queue_position = _execution_queue.qsize() + is_running = _current_running_execution is not None + + if is_running: + message = f"Workflow execution queued: {execution_id} (position {queue_position} in queue)" + else: + message = f"Workflow execution started: {execution_id}" + + return ExecutionResponse( + execution_id=execution_id, + status=ExecutionStatus.PENDING, + message=message, + ) + + @app.get("/api/executions/queue/status") + async def get_queue_status(): + """ + Get the current execution queue status. + + Returns: + Queue status including running execution and queue size. + """ + return { + "current_running": _current_running_execution, + "queue_size": _execution_queue.qsize() if _execution_queue else 0, + "is_processing": _queue_processor_task is not None and not _queue_processor_task.done() + } + + @app.get("/api/executions/{execution_id}", response_model=WorkflowExecution) + async def get_execution(execution_id: str): + """ + Get execution status and details. + + First checks in-memory cache (for running executions), then storage (for completed). + + Args: + execution_id: The execution ID to retrieve. 
+ """ + # First check in-memory cache (for running/pending executions) + if execution_id in _executions: + return _executions[execution_id] + + # Then check scalable storage (for completed/failed/cancelled) + storage = _get_execution_storage() + execution = storage.get_execution(execution_id) + if execution: + return execution + + raise HTTPException(status_code=404, detail=f"Execution {execution_id} not found") + + @app.post("/api/executions/{execution_id}/cancel") + async def cancel_execution(execution_id: str): + """ + Cancel a running execution. + + Args: + execution_id: The execution ID to cancel. + """ + if execution_id not in _executions: + raise HTTPException(status_code=404, detail=f"Execution {execution_id} not found") + + execution = _executions[execution_id] + + if execution.status not in (ExecutionStatus.PENDING, ExecutionStatus.RUNNING): + raise HTTPException( + status_code=400, + detail=f"Cannot cancel execution in {execution.status} state" + ) + + # Signal the background task to stop + _cancelled_executions.add(execution_id) + + # Actually terminate the running process if it exists + if execution_id in _running_processes: + process = _running_processes[execution_id] + if process.is_alive(): + process.terminate() + process.join(timeout=5) # Wait up to 5 seconds for graceful termination + if process.is_alive(): + process.kill() # Force kill if still running + del _running_processes[execution_id] + + execution.status = ExecutionStatus.CANCELLED + execution.completed_at = datetime.now() + + # Save to persistence immediately + _save_executions() + + return {"status": "cancelled", "execution_id": execution_id} + + @app.get("/api/executions") + async def list_executions( + workflow_id: Optional[str] = None, + status: Optional[ExecutionStatus] = None, + limit: int = 50, + offset: int = 0, + ): + """ + List workflow executions with optional filtering and pagination. + + Uses scalable storage with per-run files for efficient handling of large datasets. + + Args: + workflow_id: Filter by workflow ID. + status: Filter by execution status. + limit: Maximum number of results (default 50). + offset: Number of results to skip for pagination (default 0). + + Returns: + Dict with executions list, total count, pagination info. 
+ """ + storage = _get_execution_storage() + status_str = status.value if status else None + + # Get paginated executions from storage + executions, total = storage.list_executions_full( + workflow_id=workflow_id, + status=status_str, + limit=limit, + offset=offset, + ) + + # Also include currently running executions from in-memory cache + # (they may not be persisted to storage yet) + running_executions = [] + for exec_id, exec in _executions.items(): + if exec.status in (ExecutionStatus.RUNNING, ExecutionStatus.PENDING): + # Apply filters + if workflow_id and exec.workflow_id != workflow_id: + continue + if status_str and exec.status.value != status_str: + continue + # Check if not already in list (avoid duplicates) + if not any(e.id == exec_id for e in executions): + running_executions.append(exec) + + # Merge running executions at the top if on first page + if offset == 0 and running_executions: + # Sort running executions by start time + running_executions.sort(key=lambda e: e.started_at or datetime.min, reverse=True) + executions = running_executions + executions + total += len(running_executions) + + return { + "executions": executions, + "total": total, + "limit": limit, + "offset": offset, + "has_more": (offset + len(executions)) < total, + } + + @app.post("/api/executions/storage/refresh") + async def refresh_execution_storage(): + """ + Refresh the execution storage index. + + Detects files deleted or added externally and updates the index. + Call this after manually modifying files on disk. + + Returns: + Number of changes detected. + """ + storage = _get_execution_storage() + changes = storage.refresh_index() + + return { + "status": "refreshed", + "changes_detected": changes, + "total_executions": len(storage._index_cache) + } + + @app.get("/api/executions/storage/stats") + async def get_execution_storage_stats(): + """ + Get execution storage statistics. + + Useful for monitoring and debugging storage health. + """ + storage = _get_execution_storage() + stats = storage.get_stats() + + # Add info about in-memory executions + stats["in_memory_executions"] = len(_executions) + stats["in_memory_running"] = sum( + 1 for e in _executions.values() + if e.status in (ExecutionStatus.RUNNING, ExecutionStatus.PENDING) + ) + + return stats + + @app.delete("/api/executions/{execution_id}") + async def delete_execution(execution_id: str): + """ + Delete an execution from storage. + + Removes both the per-run file and index entry. + + Args: + execution_id: The execution ID to delete. + + Returns: + Success status and message. + """ + # Remove from in-memory cache if present + if execution_id in _executions: + del _executions[execution_id] + + # Remove from storage + storage = _get_execution_storage() + success = storage.delete_execution(execution_id) + + if not success: + raise HTTPException(status_code=404, detail=f"Execution {execution_id} not found") + + return {"status": "deleted", "execution_id": execution_id} + + @app.delete("/api/executions") + async def delete_multiple_executions(execution_ids: List[str]): + """ + Delete multiple executions from storage. + + Args: + execution_ids: List of execution IDs to delete. + + Returns: + Success status with count of deleted executions. 
+ """ + storage = _get_execution_storage() + deleted_count = 0 + failed_ids = [] + + for exec_id in execution_ids: + # Remove from in-memory cache if present + if exec_id in _executions: + del _executions[exec_id] + + # Remove from storage + if storage.delete_execution(exec_id): + deleted_count += 1 + else: + failed_ids.append(exec_id) + + return { + "status": "deleted", + "deleted_count": deleted_count, + "total_requested": len(execution_ids), + "failed_ids": failed_ids + } + + @app.get("/api/tasks") + async def list_task_directories(): + """ + List available task directories for workflow discovery. + + Recursively scans the tasks directory for folders containing + graph_config.yaml files, supporting nested folder structures. + """ + tasks_dir = Path(app.state.tasks_dir) + + if not tasks_dir.exists(): + return [] + + tasks = [] + # Recursively find all graph_config.yaml files and get their parent directories + for config_path in tasks_dir.rglob("graph_config.yaml"): + if not config_path.is_file(): + continue + + task_dir = config_path.parent + # Create a relative name from the tasks_dir for better display + relative_path = task_dir.relative_to(tasks_dir) + + tasks.append({ + "name": str(relative_path), # e.g., "structured_output_with_multi_llm/dpo_samples" + "path": str(task_dir), + "has_workflow": True, # We only include dirs with graph_config.yaml + }) + + return tasks + + # NOTE: The /api/models endpoint is defined earlier in the file (around line 589) + # with full status checking and is_builtin flag support. Do not duplicate here. + + def _extract_class_or_function(content: str, name: str) -> Optional[str]: + """ + Extract a specific class or function definition from Python source code. + + Args: + content: Full Python file content + name: Name of the class or function to extract + + Returns: + The extracted code block or None if not found + """ + import ast + + try: + tree = ast.parse(content) + except SyntaxError: + return None + + lines = content.splitlines(keepends=True) + + for node in ast.walk(tree): + if isinstance(node, (ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)): + if node.name == name: + # Get the start line (1-indexed in AST) + start_line = node.lineno - 1 + + # Find the end line by looking at the last line of the node + end_line = node.end_lineno if hasattr(node, 'end_lineno') and node.end_lineno else start_line + 1 + + # Extract the code block + extracted_lines = lines[start_line:end_line] + + # Also extract any decorators above the class/function + decorator_lines = [] + for decorator in getattr(node, 'decorator_list', []): + dec_start = decorator.lineno - 1 + dec_end = decorator.end_lineno if hasattr(decorator, 'end_lineno') else dec_start + 1 + # Only include if it's above the function/class definition + if dec_start < start_line: + decorator_lines.extend(lines[dec_start:dec_end]) + + # Combine decorators and the main code + result = ''.join(decorator_lines + extracted_lines) + return result.rstrip() + '\n' + + return None + + @app.get("/api/file-content") + async def get_file_content(file_path: str, workflow_id: Optional[str] = None, extract_only: bool = True): + """ + Get the content of a Python file referenced by a node. + + Args: + file_path: Module path (e.g., 'tasks.examples.image_to_qna.task_executor.QuestionExtractProcessor') + or relative/absolute file path + workflow_id: Optional workflow ID to resolve relative paths + extract_only: If True, extract only the specific class/function. If False, return full file. 
+ """ + try: + # If it looks like a module path (contains dots but no slashes) + if '.' in file_path and '/' not in file_path and '\\' not in file_path: + parts = file_path.split('.') + class_or_func_name = parts[-1] if len(parts) >= 2 else None + + # Get workflow directory if available + workflow_dir = None + if workflow_id and workflow_id in _workflows: + workflow = _workflows[workflow_id] + workflow_dir = Path(workflow.source_path).parent + + possible_paths = [] + + # Strategy 1: Check if it's a simple local module (e.g., 'task_executor.ClassName') + if len(parts) == 2: + module_name = parts[0] + if workflow_dir: + possible_paths.extend([ + workflow_dir / f"{module_name}.py", + workflow_dir / "functions" / f"{module_name}.py", + workflow_dir / "processors" / f"{module_name}.py", + ]) + + # Strategy 2: Full module path (e.g., 'tasks.examples.image_to_qna.task_executor.ClassName') + # The last part is likely the class name, second-to-last is the module file + if len(parts) >= 3: + # Try treating last part as class name + module_parts = parts[:-1] # Everything except the class name + module_file = '/'.join(module_parts) + '.py' + + # Try relative to tasks_dir root + tasks_root = Path(app.state.tasks_dir).parent # Go up from tasks_dir + possible_paths.append(tasks_root / module_file) + + # Try relative to current working directory + possible_paths.append(Path(module_file)) + + # Try relative to workflow directory + if workflow_dir: + possible_paths.append(workflow_dir / module_file) + # Also try just the last module file name in workflow dir + possible_paths.append(workflow_dir / f"{module_parts[-1]}.py") + + # Strategy 3: Check workflow directory for common names like task_executor.py + if workflow_dir: + possible_paths.extend([ + workflow_dir / "task_executor.py", + workflow_dir / "processors.py", + workflow_dir / "functions.py", + ]) + + # Try all possible paths + for possible_path in possible_paths: + if possible_path.exists(): + with open(possible_path, 'r') as f: + full_content = f.read() + + # Try to extract just the specific class or function + extracted_content = None + if extract_only and class_or_func_name: + extracted_content = _extract_class_or_function(full_content, class_or_func_name) + + # If extraction was requested but failed, return empty string (not the full file) + # This prevents showing the entire file when a specific class doesn't exist + if extract_only and class_or_func_name and not extracted_content: + return { + "content": "", # Class/function not found - return empty + "full_content": full_content, + "path": str(possible_path.resolve()), + "module_path": file_path, + "class_name": class_or_func_name, + "extracted": False, + "not_found": True + } + + return { + "content": extracted_content if extracted_content else full_content, + "full_content": full_content, + "path": str(possible_path.resolve()), + "module_path": file_path, + "class_name": class_or_func_name, + "extracted": extracted_content is not None + } + + return { + "error": f"Could not find module file for {file_path}. 
Tried: {[str(p) for p in possible_paths[:5]]}", + "content": None + } + else: + # It's a file path + full_path = Path(file_path) + + # If relative, try to resolve against workflow directory + if not full_path.is_absolute() and workflow_id and workflow_id in _workflows: + workflow = _workflows[workflow_id] + workflow_dir = Path(workflow.source_path).parent + full_path = workflow_dir / file_path + + if full_path.exists(): + with open(full_path, 'r') as f: + content = f.read() + return {"content": content, "path": str(full_path)} + else: + return {"error": f"File not found: {full_path}", "content": None} + + except Exception as e: + return {"error": str(e), "content": None} + + @app.get("/api/media/file") + async def serve_media_file(path: str, workflow_id: Optional[str] = None): + """ + Serve a media file (audio, image, etc.) from the filesystem. + + This endpoint allows the frontend to access media files generated by workflows, + such as audio outputs stored in workflow output directories. + + Args: + path: Absolute path to the media file + workflow_id: Optional workflow ID for validation + + Returns: + FileResponse with the media file content + """ + import mimetypes + + try: + file_path = Path(path) + + # Security: Only allow serving files from allowed directories + # 1. Files in tasks/ directory (workflow outputs) + # 2. Files in output/ directory + allowed_prefixes = [ + str(Path.cwd() / "tasks"), + str(Path.cwd() / "output"), + "/tmp", + ] + + # Also allow paths that are under a workflow's directory + if workflow_id and workflow_id in _workflows: + workflow = _workflows[workflow_id] + workflow_dir = str(Path(workflow.source_path).parent) + allowed_prefixes.append(workflow_dir) + + # Check if path is allowed + abs_path = str(file_path.resolve()) + is_allowed = any(abs_path.startswith(prefix) for prefix in allowed_prefixes) + + if not is_allowed: + raise HTTPException( + status_code=403, + detail=f"Access denied: Path not in allowed directories" + ) + + if not file_path.exists(): + raise HTTPException(status_code=404, detail=f"File not found: {path}") + + if not file_path.is_file(): + raise HTTPException(status_code=400, detail=f"Not a file: {path}") + + # Determine MIME type + mime_type, _ = mimetypes.guess_type(str(file_path)) + if mime_type is None: + mime_type = "application/octet-stream" + + return FileResponse( + path=str(file_path), + media_type=mime_type, + filename=file_path.name + ) + + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + @app.get("/api/workflows/{workflow_id}/yaml") + async def get_workflow_yaml(workflow_id: str): + """ + Get the raw YAML content for a workflow. + + Returns the full graph_config.yaml file content. + """ + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail="Workflow not found") + + workflow = _workflows[workflow_id] + yaml_path = Path(workflow.source_path) + + if not yaml_path.exists(): + return {"error": "YAML file not found", "content": None, "path": None} + + try: + with open(yaml_path, 'r') as f: + content = f.read() + return { + "content": content, + "path": str(yaml_path.resolve()), + "filename": yaml_path.name + } + except Exception as e: + return {"error": str(e), "content": None, "path": None} + + @app.get("/api/workflows/{workflow_id}/code") + async def get_workflow_code(workflow_id: str): + """ + Get the task_executor.py content for a workflow. + + Looks for task_executor.py in the workflow's directory. 
+ Also returns a list of all Python files in the directory. + """ + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail="Workflow not found") + + workflow = _workflows[workflow_id] + workflow_dir = Path(workflow.source_path).parent + + # Look for common Python files + python_files = [] + code_files = {} + + common_names = ['task_executor.py', 'processors.py', 'functions.py', 'utils.py'] + + # First add common named files if they exist + for name in common_names: + py_path = workflow_dir / name + if py_path.exists(): + try: + with open(py_path, 'r') as f: + content = f.read() + code_files[name] = { + "content": content, + "path": str(py_path.resolve()), + "filename": name + } + python_files.append(name) + except Exception: + pass + + # Then add any other .py files + for py_path in workflow_dir.glob('*.py'): + if py_path.name not in python_files and not py_path.name.startswith('__'): + try: + with open(py_path, 'r') as f: + content = f.read() + code_files[py_path.name] = { + "content": content, + "path": str(py_path.resolve()), + "filename": py_path.name + } + python_files.append(py_path.name) + except Exception: + pass + + # Return primary task_executor.py content plus list of all files + primary_file = code_files.get('task_executor.py') + + return { + "primary": primary_file, + "files": code_files, + "file_list": python_files, + "workflow_dir": str(workflow_dir.resolve()) + } + + @app.get("/api/workflows/{workflow_id}/node/{node_id}/code/{code_type}") + async def get_node_code(workflow_id: str, node_id: str, code_type: str): + """ + Get the code for a specific node from task_executor.py. + + Uses AST-based detection to find code blocks by checking base class inheritance. + This is the single source of truth - no markers or metadata copies. + + Args: + workflow_id: The workflow ID + node_id: The node ID + code_type: Type of code ('pre_process', 'post_process', 'lambda', 'branch_condition', 'output_generator', 'data_transform') + + Returns: + { "code": "...", "found": true/false } + """ + import ast + + if workflow_id not in _workflows: + await list_workflows() + + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail=f"Workflow {workflow_id} not found") + + workflow = _workflows[workflow_id] + workflow_dir = Path(workflow.source_path).parent + task_executor_path = workflow_dir / "task_executor.py" + + valid_types = {'pre_process', 'post_process', 'lambda', 'branch_condition', 'output_generator', 'data_transform'} + if code_type not in valid_types: + raise HTTPException(status_code=400, detail=f"Invalid code_type: {code_type}") + + if not task_executor_path.exists(): + return {"code": "", "found": False, "path": None} + + try: + with open(task_executor_path, 'r') as f: + content = f.read() + except Exception as e: + return {"code": "", "found": False, "error": str(e)} + + # Find the code block using AST + code = _get_node_code_from_file(content, node_id, code_type) + + return { + "code": code if code else "", + "found": code is not None, + "path": str(task_executor_path.resolve()) + } + + @app.put("/api/workflows/{workflow_id}/yaml") + async def save_workflow_yaml(workflow_id: str, data: Dict[str, Any]): + """ + Save edited YAML content to the workflow's graph_config.yaml file. 
+ + Body: { "content": "yaml content string" } + """ + if workflow_id not in _workflows: + await list_workflows() + + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail=f"Workflow {workflow_id} not found") + + workflow = _workflows[workflow_id] + config_path = Path(workflow.source_path) + + if not config_path.exists(): + raise HTTPException(status_code=404, detail="Workflow config file not found") + + content = data.get("content", "") + if not content: + raise HTTPException(status_code=400, detail="Content is required") + + try: + # Validate YAML syntax + import yaml + yaml.safe_load(content) + + # Write to file + with open(config_path, 'w') as f: + f.write(content) + + # Reload the workflow from disk to update in-memory cache + try: + graph = app.state.graph_builder.build_from_yaml(str(config_path)) + _workflows[workflow_id] = graph + except Exception as reload_error: + print(f"Warning: Could not reload workflow after YAML save: {reload_error}") + # Still return success since file was saved + + return {"status": "saved", "path": str(config_path)} + except yaml.YAMLError as e: + raise HTTPException(status_code=400, detail=f"Invalid YAML: {str(e)}") + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to save: {str(e)}") + + @app.put("/api/workflows/{workflow_id}/code/{filename}") + async def save_workflow_code(workflow_id: str, filename: str, data: Dict[str, Any]): + """ + Save edited Python code to a file in the workflow directory. + + Body: { "content": "python code string" } + """ + if workflow_id not in _workflows: + await list_workflows() + + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail=f"Workflow {workflow_id} not found") + + workflow = _workflows[workflow_id] + workflow_dir = Path(workflow.source_path).parent + + # Security: only allow .py files in the workflow directory + if not filename.endswith('.py'): + raise HTTPException(status_code=400, detail="Only Python files can be saved") + + # Prevent path traversal + safe_filename = Path(filename).name + file_path = workflow_dir / safe_filename + + content = data.get("content", "") + if not content: + raise HTTPException(status_code=400, detail="Content is required") + + try: + # Basic Python syntax check + compile(content, safe_filename, 'exec') + + # Write to file + with open(file_path, 'w') as f: + f.write(content) + + return {"status": "saved", "path": str(file_path)} + except SyntaxError as e: + raise HTTPException(status_code=400, detail=f"Python syntax error: {str(e)}") + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to save: {str(e)}") + + @app.post("/api/workflows", response_model=WorkflowSaveResponse) + async def create_workflow(request: WorkflowCreateRequest): + """ + Create a new workflow in the tasks directory. + + Creates: + - A new directory with the workflow name (sanitized) + - graph_config.yaml with the workflow configuration + - task_executor.py with any custom processors + """ + return await _save_workflow_to_disk(app, request, is_new=True) + + @app.put("/api/workflows/{workflow_id}", response_model=WorkflowSaveResponse) + async def update_workflow(workflow_id: str, request: WorkflowCreateRequest): + """ + Update an existing workflow. 
+ + Updates: + - graph_config.yaml with the workflow configuration + - task_executor.py with any custom processors + """ + request.id = workflow_id + return await _save_workflow_to_disk(app, request, is_new=False) + + @app.delete("/api/workflows/{workflow_id}") + async def delete_workflow(workflow_id: str): + """ + Delete a workflow. + + If the workflow has a source_path (file-based), deletes the entire workflow directory. + Also removes the workflow from the cache. + """ + import shutil + + # Check if workflow exists + if workflow_id not in _workflows: + await list_workflows() + + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail=f"Workflow '{workflow_id}' not found") + + workflow = _workflows[workflow_id] + + # If workflow has a source path, delete the directory + if workflow.source_path: + workflow_dir = Path(workflow.source_path).parent + if workflow_dir.exists(): + try: + shutil.rmtree(workflow_dir) + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to delete workflow directory: {str(e)}") + + # Remove from cache + del _workflows[workflow_id] + + return {"success": True, "message": f"Workflow '{workflow_id}' deleted successfully"} + + @app.patch("/api/workflows/{workflow_id}/rename") + async def rename_workflow(workflow_id: str, data: Dict[str, Any]): + """ + Rename a workflow. + + Updates the name in the graph_config.yaml file. + """ + import yaml + + new_name = data.get("name", "").strip() + if not new_name: + raise HTTPException(status_code=400, detail="Name is required") + + # Check if workflow exists + if workflow_id not in _workflows: + await list_workflows() + + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail=f"Workflow '{workflow_id}' not found") + + workflow = _workflows[workflow_id] + + # Update the YAML file if it exists + if workflow.source_path: + yaml_path = Path(workflow.source_path) + if yaml_path.exists(): + try: + with open(yaml_path, 'r') as f: + config = yaml.safe_load(f) + + config['name'] = new_name + + with open(yaml_path, 'w') as f: + yaml.dump(config, f, default_flow_style=False, sort_keys=False) + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to update workflow file: {str(e)}") + + # Update in cache + workflow.name = new_name + + return {"success": True, "name": new_name, "message": f"Workflow renamed to '{new_name}'"} + + @app.put("/api/workflows/{workflow_id}/nodes/{node_id}") + async def update_node(workflow_id: str, node_id: str, node_data: Dict[str, Any]): + """ + Update a node's configuration in the workflow. + + Args: + workflow_id: The workflow ID. + node_id: The node ID to update. + node_data: The updated node data. 
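+
+        Example (a minimal client sketch, not part of this module; it assumes the
+        Studio server is reachable at http://localhost:8000, that the `requests`
+        package is available, and uses hypothetical workflow/node IDs):
+
+            import requests
+
+            resp = requests.put(
+                "http://localhost:8000/api/workflows/my_workflow/nodes/generate_answer",
+                json={
+                    "summary": "Generate answer",
+                    "description": "LLM node that answers the user's question",
+                },
+            )
+            print(resp.json()["status"])  # "updated"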
+ """ + import yaml + + if workflow_id not in _workflows: + await list_workflows() + + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail=f"Workflow {workflow_id} not found") + + workflow = _workflows[workflow_id] + + # Find the node + node = next((n for n in workflow.nodes if n.id == node_id), None) + if not node: + raise HTTPException(status_code=404, detail=f"Node {node_id} not found in workflow") + + # Update in-memory node fields + if "summary" in node_data: + node.summary = node_data["summary"] + if "description" in node_data: + node.description = node_data["description"] + if "prompt" in node_data: + node.prompt = node_data["prompt"] + if "model" in node_data: + if node.model: + node.model.name = node_data["model"].get("name", node.model.name) + node.model.parameters = node_data["model"].get("parameters", node.model.parameters) + # Handle structured_output - can be set, updated, or removed (None) + if "structured_output" in node_data["model"]: + node.model.structured_output = node_data["model"]["structured_output"] + else: + from studio.models import ModelConfig + node.model = ModelConfig(**node_data["model"]) + if "pre_process" in node_data: + node.pre_process = node_data["pre_process"] + if "post_process" in node_data: + node.post_process = node_data["post_process"] + if "function_path" in node_data: + node.function_path = node_data["function_path"] + if "output_config" in node_data: + node.output_config = node_data["output_config"] + if "data_config" in node_data: + node.data_config = node_data["data_config"] + if "output_keys" in node_data: + node.output_keys = node_data["output_keys"] + if "metadata" in node_data: + node.metadata.update(node_data["metadata"]) + + # Handle inline code fields - save directly to task_executor.py (single source of truth) + # Empty string signals deletion of existing code + workflow_dir = Path(workflow.source_path).parent if workflow.source_path else None + + if workflow_dir: + # Pre-processor code (empty string = delete) + if "_pre_process_code" in node_data: + code_content = node_data["_pre_process_code"] + # Write to task_executor.py (single source of truth - no metadata copy) + _update_task_executor_code( + workflow_dir=workflow_dir, + node_id=node_id, + code_type='pre_process', + code_content=code_content, + node_summary=node.summary + ) + # Auto-generate path only if code is not a stub + clean_code = _extract_class_or_function_body(code_content) if code_content else "" + is_stub = _is_stub_code(clean_code, 'pre_process') if clean_code else True + if code_content and code_content.strip() and not is_stub: + task_name = _get_task_name_from_path(workflow_dir) + safe_node_id = re.sub(r'[^a-zA-Z0-9_]', '', node_id.replace('-', '_').replace(' ', '_')) + node.pre_process = f"tasks.{task_name}.task_executor.{safe_node_id}PreProcessor" + else: + node.pre_process = None + + # Post-processor code (empty string = delete) + if "_post_process_code" in node_data: + code_content = node_data["_post_process_code"] + _update_task_executor_code( + workflow_dir=workflow_dir, + node_id=node_id, + code_type='post_process', + code_content=code_content, + node_summary=node.summary + ) + # Auto-generate path only if code is not a stub + clean_code = _extract_class_or_function_body(code_content) if code_content else "" + is_stub = _is_stub_code(clean_code, 'post_process') if clean_code else True + if code_content and code_content.strip() and not is_stub: + task_name = _get_task_name_from_path(workflow_dir) + safe_node_id = 
re.sub(r'[^a-zA-Z0-9_]', '', node_id.replace('-', '_').replace(' ', '_')) + node.post_process = f"tasks.{task_name}.task_executor.{safe_node_id}PostProcessor" + else: + node.post_process = None + + # Lambda function code (empty string = delete) + if "_lambda_code" in node_data: + code_content = node_data["_lambda_code"] + _update_task_executor_code( + workflow_dir=workflow_dir, + node_id=node_id, + code_type='lambda', + code_content=code_content, + node_summary=node.summary + ) + # Auto-generate path only if code is not a stub + clean_code = _extract_class_or_function_body(code_content) if code_content else "" + is_stub = _is_stub_code(clean_code, 'lambda') if clean_code else True + if code_content and code_content.strip() and not is_stub: + task_name = _get_task_name_from_path(workflow_dir) + safe_node_id = re.sub(r'[^a-zA-Z0-9_]', '', node_id.replace('-', '_').replace(' ', '_')) + # Use class-based pattern (LambdaFunction) for new code + node.function_path = f"tasks.{task_name}.task_executor.{safe_node_id}Lambda" + else: + node.function_path = None + + # Branch condition code (empty string = delete) + if "_branch_condition_code" in node_data: + code_content = node_data["_branch_condition_code"] + _update_task_executor_code( + workflow_dir=workflow_dir, + node_id=node_id, + code_type='branch_condition', + code_content=code_content, + node_summary=node.summary + ) + + # Output generator code (from top-level _output_generator_code or output_config._generator_code) + # Empty string signals deletion + generator_code = node_data.get("_output_generator_code") + if generator_code is None and "output_config" in node_data: + generator_code = node_data["output_config"].get("_generator_code") + if generator_code is not None: # Process if key exists (even if empty) + _update_task_executor_code( + workflow_dir=workflow_dir, + node_id=node_id, + code_type='output_generator', + code_content=generator_code, + node_summary=node.summary + ) + # Auto-generate the generator path only if code is not a stub + # (stubs are not actually saved to task_executor.py) + clean_code = _extract_class_or_function_body(generator_code) if generator_code else "" + is_stub = _is_stub_code(clean_code, 'output_generator') if clean_code else True + + if generator_code and generator_code.strip() and not is_stub: + task_name = _get_task_name_from_path(workflow_dir) + safe_node_id = re.sub(r'[^a-zA-Z0-9_]', '', node_id.replace('-', '_').replace(' ', '_')) + generator_path = f"tasks.{task_name}.task_executor.{safe_node_id}Generator" + # Update output_config with the generator path + if not hasattr(node, 'output_config') or node.output_config is None: + node.output_config = {} + node.output_config['generator'] = generator_path + else: + # Clear generator path when code is deleted or is just a stub + if hasattr(node, 'output_config') and node.output_config: + node.output_config['generator'] = None + + # Data transform code (from top-level _data_transform_code or data_config._transform_code) + # Empty string signals deletion + transform_code = node_data.get("_data_transform_code") + if transform_code is None and "data_config" in node_data: + transform_code = node_data["data_config"].get("_transform_code") + if transform_code is not None: # Process if key exists (even if empty) + _update_task_executor_code( + workflow_dir=workflow_dir, + node_id=node_id, + code_type='data_transform', + code_content=transform_code, + node_summary=node.summary + ) + # Auto-generate the transform path only if code is not a stub + clean_code = 
_extract_class_or_function_body(transform_code) if transform_code else "" + is_stub = _is_stub_code(clean_code, 'data_transform') if clean_code else True + if transform_code and transform_code.strip() and not is_stub: + task_name = _get_task_name_from_path(workflow_dir) + safe_node_id = re.sub(r'[^a-zA-Z0-9_]', '', node_id.replace('-', '_').replace(' ', '_')) + transform_path = f"tasks.{task_name}.task_executor.{safe_node_id}Transform" + # Update data_config with the transform path + if not hasattr(node, 'data_config') or node.data_config is None: + node.data_config = {} + if 'source' not in node.data_config: + node.data_config['source'] = {} + # Set transform class on the source (or first source if array) + source = node.data_config['source'] + if isinstance(source, list) and len(source) > 0: + source[0]['transform_class'] = transform_path + elif isinstance(source, dict): + source['transform_class'] = transform_path + else: + # Clear transform class when code is deleted or is just a stub + if hasattr(node, 'data_config') and node.data_config and 'source' in node.data_config: + source = node.data_config['source'] + if isinstance(source, list) and len(source) > 0: + source[0].pop('transform_class', None) + elif isinstance(source, dict): + source.pop('transform_class', None) + + # Persist changes to the YAML file + if workflow.source_path: + yaml_path = Path(workflow.source_path) + if yaml_path.exists(): + try: + with open(yaml_path, 'r') as f: + config = yaml.safe_load(f) + + # Handle output nodes specially - they store config at workflow level + if node.node_type == 'output': + if "output_config" in node_data: + output_config = node_data["output_config"] + # Clean output_config - remove internal fields starting with '_' + clean_output_config = {k: v for k, v in output_config.items() if not k.startswith('_')} + if clean_output_config: + if 'output_config' not in config: + config['output_config'] = {} + config['output_config'].update(clean_output_config) + elif node.node_type == 'data': + # Handle data nodes - they store config at workflow level + if "data_config" in node_data: + # Clean data_config - remove internal fields starting with '_' + clean_data_config = {k: v for k, v in node_data["data_config"].items() if not k.startswith('_')} + config['data_config'] = clean_data_config + else: + # Update regular nodes in graph_config.nodes + if 'graph_config' in config and 'nodes' in config['graph_config']: + yaml_node = config['graph_config']['nodes'].get(node_id) + if yaml_node is not None: + # Update fields that were changed + if "summary" in node_data: + yaml_node['node_name'] = node_data["summary"] + if "description" in node_data: + yaml_node['description'] = node_data["description"] + if "prompt" in node_data: + yaml_node['prompt'] = _convert_prompts_to_yaml_format(node_data["prompt"]) + if "model" in node_data: + yaml_node['model'] = { + 'name': node_data["model"].get("name"), + 'parameters': node_data["model"].get("parameters", {}) + } + # Remove empty parameters + if not yaml_node['model']['parameters']: + del yaml_node['model']['parameters'] + # Include structured_output if present and enabled + so = node_data["model"].get("structured_output") + if so and so.get("enabled", True): + # Convert to YAML format (without 'enabled' field) + yaml_so = {} + if so.get("schema"): + yaml_so["schema"] = so["schema"] + # Only include non-default options + if so.get("fallback_strategy") and so["fallback_strategy"] != "instruction": + yaml_so["fallback_strategy"] = so["fallback_strategy"] + if 
so.get("retry_on_parse_error") is False: + yaml_so["retry_on_parse_error"] = False + if so.get("max_parse_retries") and so["max_parse_retries"] != 2: + yaml_so["max_parse_retries"] = so["max_parse_retries"] + if yaml_so: + yaml_node['model']['structured_output'] = yaml_so + elif 'structured_output' in yaml_node.get('model', {}): + # Remove structured_output if it was disabled + del yaml_node['model']['structured_output'] + if "pre_process" in node_data: + if node_data["pre_process"]: + yaml_node['pre_process'] = node_data["pre_process"] + elif 'pre_process' in yaml_node: + del yaml_node['pre_process'] + if "post_process" in node_data: + if node_data["post_process"]: + yaml_node['post_process'] = node_data["post_process"] + elif 'post_process' in yaml_node: + del yaml_node['post_process'] + if "function_path" in node_data: + if node_data["function_path"]: + yaml_node['function_path'] = node_data["function_path"] + elif 'function_path' in yaml_node: + del yaml_node['function_path'] + if "output_keys" in node_data: + if node_data["output_keys"]: + yaml_node['output_keys'] = node_data["output_keys"] + elif 'output_keys' in yaml_node: + del yaml_node['output_keys'] + + with open(yaml_path, 'w') as f: + yaml.dump(config, f, default_flow_style=False, sort_keys=False) + + # Reload workflow from disk to ensure in-memory cache is consistent + try: + graph = app.state.graph_builder.build_from_yaml(str(yaml_path)) + _workflows[workflow_id] = graph + # Update node reference to the reloaded node + node = next((n for n in graph.nodes if n.id == node_id), node) + except Exception as reload_error: + print(f"Warning: Could not reload workflow after node update: {reload_error}") + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to persist node update to file: {str(e)}") + + # Return updated node data for frontend sync + model_data = None + if node.model: + model_data = { + "name": node.model.name, + "parameters": node.model.parameters + } + if node.model.structured_output: + model_data["structured_output"] = node.model.structured_output + return { + "status": "updated", + "node_id": node_id, + "node": { + "id": node.id, + "summary": node.summary, + "description": node.description, + "model": model_data, + "prompt": node.prompt, + "pre_process": node.pre_process, + "post_process": node.post_process, + "function_path": node.function_path, + } + } + + @app.delete("/api/workflows/{workflow_id}/nodes/{node_id}") + async def delete_node(workflow_id: str, node_id: str): + """ + Delete a node from the workflow. + + Args: + workflow_id: The workflow ID. + node_id: The node ID to delete. 
+ """ + import yaml + + if workflow_id not in _workflows: + await list_workflows() + + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail=f"Workflow {workflow_id} not found") + + workflow = _workflows[workflow_id] + + # Find the node + node = next((n for n in workflow.nodes if n.id == node_id), None) + if not node: + raise HTTPException(status_code=404, detail=f"Node {node_id} not found in workflow") + + # Persist changes to the YAML file + if workflow.source_path: + yaml_path = Path(workflow.source_path) + if yaml_path.exists(): + try: + with open(yaml_path, 'r') as f: + config = yaml.safe_load(f) + + # Delete the node from config + if 'graph_config' in config and 'nodes' in config['graph_config']: + if node_id in config['graph_config']['nodes']: + del config['graph_config']['nodes'][node_id] + + # Delete edges connected to this node + if 'graph_config' in config and 'edges' in config['graph_config']: + config['graph_config']['edges'] = [ + e for e in config['graph_config']['edges'] + if e.get('from') != node_id and e.get('to') != node_id + ] + + with open(yaml_path, 'w') as f: + yaml.dump(config, f, default_flow_style=False, sort_keys=False) + + # Reload workflow from disk to update cache + try: + graph = app.state.graph_builder.build_from_yaml(str(yaml_path)) + _workflows[workflow_id] = graph + except Exception as reload_error: + print(f"Warning: Could not reload workflow after node delete: {reload_error}") + + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to delete node from file: {str(e)}") + + return {"status": "deleted", "node_id": node_id} + + @app.post("/api/workflows/{workflow_id}/nodes") + async def add_node(workflow_id: str, node_data: Dict[str, Any]): + """ + Add a new node to the workflow. + + Args: + workflow_id: The workflow ID. + node_data: The node configuration including id, node_type, etc. 
+ """ + import yaml + + if workflow_id not in _workflows: + await list_workflows() + + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail=f"Workflow {workflow_id} not found") + + workflow = _workflows[workflow_id] + + node_id = node_data.get("id") + if not node_id: + raise HTTPException(status_code=400, detail="Node ID is required") + + # Check if node already exists + existing = next((n for n in workflow.nodes if n.id == node_id), None) + if existing: + raise HTTPException(status_code=400, detail=f"Node {node_id} already exists") + + # Persist changes to the YAML file + if workflow.source_path: + yaml_path = Path(workflow.source_path) + if yaml_path.exists(): + try: + with open(yaml_path, 'r') as f: + config = yaml.safe_load(f) + + # Initialize graph_config.nodes if not present + if 'graph_config' not in config: + config['graph_config'] = {} + if 'nodes' not in config['graph_config']: + config['graph_config']['nodes'] = {} + + node_type = node_data.get('node_type', 'llm') + + # Handle output nodes specially - store config at workflow level + if node_type == 'output': + if 'output_config' in node_data: + output_config = node_data['output_config'] + # Clean output_config - remove internal fields starting with '_' + clean_output_config = {k: v for k, v in output_config.items() if not k.startswith('_')} + if clean_output_config: + if 'output_config' not in config: + config['output_config'] = {} + config['output_config'].update(clean_output_config) + # Output nodes are not added to graph_config.nodes + elif node_type == 'data': + # Data nodes store config at workflow level as data_config + if 'data_config' in node_data: + # Clean data_config - remove internal fields starting with '_' + clean_data_config = {k: v for k, v in node_data['data_config'].items() if not k.startswith('_')} + config['data_config'] = clean_data_config + # Data nodes are not added to graph_config.nodes + else: + # Create the node config for regular nodes + yaml_node = { + 'node_type': node_type, + } + if 'summary' in node_data: + yaml_node['node_name'] = node_data['summary'] + if 'description' in node_data: + yaml_node['description'] = node_data['description'] + if 'model' in node_data and node_data['model']: + yaml_node['model'] = { + 'name': node_data['model'].get('name'), + } + if node_data['model'].get('parameters'): + yaml_node['model']['parameters'] = node_data['model']['parameters'] + if 'prompt' in node_data: + yaml_node['prompt'] = _convert_prompts_to_yaml_format(node_data['prompt']) + if 'pre_process' in node_data: + yaml_node['pre_process'] = node_data['pre_process'] + if 'post_process' in node_data: + yaml_node['post_process'] = node_data['post_process'] + if 'function_path' in node_data: + yaml_node['function_path'] = node_data['function_path'] + if 'output_keys' in node_data: + yaml_node['output_keys'] = node_data['output_keys'] + if 'tools' in node_data: + yaml_node['tools'] = node_data['tools'] + if 'tool_choice' in node_data: + yaml_node['tool_choice'] = node_data['tool_choice'] + + config['graph_config']['nodes'][node_id] = yaml_node + + with open(yaml_path, 'w') as f: + yaml.dump(config, f, default_flow_style=False, sort_keys=False) + + # Reload workflow from disk to update cache + try: + graph = app.state.graph_builder.build_from_yaml(str(yaml_path)) + _workflows[workflow_id] = graph + except Exception as reload_error: + print(f"Warning: Could not reload workflow after node add: {reload_error}") + + except Exception as e: + raise HTTPException(status_code=500, 
detail=f"Failed to add node to file: {str(e)}") + + return {"status": "added", "node_id": node_id} + + @app.post("/api/workflows/{workflow_id}/edges") + async def add_edge(workflow_id: str, edge_data: Dict[str, Any]): + """ + Add a new edge to the workflow. + + Args: + workflow_id: The workflow ID. + edge_data: The edge configuration including source, target, etc. + """ + import yaml + + if workflow_id not in _workflows: + await list_workflows() + + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail=f"Workflow {workflow_id} not found") + + workflow = _workflows[workflow_id] + + source = edge_data.get("source") or edge_data.get("from") + target = edge_data.get("target") or edge_data.get("to") + if not source or not target: + raise HTTPException(status_code=400, detail="Source and target are required") + + # Persist changes to the YAML file + if workflow.source_path: + yaml_path = Path(workflow.source_path) + if yaml_path.exists(): + try: + with open(yaml_path, 'r') as f: + config = yaml.safe_load(f) + + # Initialize graph_config.edges if not present + if 'graph_config' not in config: + config['graph_config'] = {} + if 'edges' not in config['graph_config']: + config['graph_config']['edges'] = [] + + # Create the edge config + yaml_edge = { + 'from': source, + 'to': target, + } + if 'label' in edge_data: + yaml_edge['label'] = edge_data['label'] + if edge_data.get('is_conditional') or edge_data.get('condition'): + if 'condition' in edge_data: + yaml_edge['condition'] = edge_data['condition'].get('condition_path') + if 'path_map' in edge_data.get('condition', {}): + yaml_edge['path_map'] = edge_data['condition']['path_map'] + + config['graph_config']['edges'].append(yaml_edge) + + with open(yaml_path, 'w') as f: + yaml.dump(config, f, default_flow_style=False, sort_keys=False) + + # Reload workflow from disk to update cache + try: + graph = app.state.graph_builder.build_from_yaml(str(yaml_path)) + _workflows[workflow_id] = graph + except Exception as reload_error: + print(f"Warning: Could not reload workflow after edge add: {reload_error}") + + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to add edge to file: {str(e)}") + + return {"status": "added", "source": source, "target": target} + + @app.delete("/api/workflows/{workflow_id}/edges/{edge_id}") + async def delete_edge(workflow_id: str, edge_id: str): + """ + Delete an edge from the workflow. + + Args: + workflow_id: The workflow ID. + edge_id: The edge ID to delete (format: source-target or the edge's id). 
+ """ + import yaml + + if workflow_id not in _workflows: + await list_workflows() + + if workflow_id not in _workflows: + raise HTTPException(status_code=404, detail=f"Workflow {workflow_id} not found") + + workflow = _workflows[workflow_id] + + # Persist changes to the YAML file + if workflow.source_path: + yaml_path = Path(workflow.source_path) + if yaml_path.exists(): + try: + with open(yaml_path, 'r') as f: + config = yaml.safe_load(f) + + # Delete matching edges + if 'graph_config' in config and 'edges' in config['graph_config']: + original_count = len(config['graph_config']['edges']) + # Try to match by edge_id (source-target pattern) or exact id + config['graph_config']['edges'] = [ + e for e in config['graph_config']['edges'] + if not ( + f"{e.get('from')}-{e.get('to')}" == edge_id or + e.get('id') == edge_id + ) + ] + deleted_count = original_count - len(config['graph_config']['edges']) + + if deleted_count == 0: + raise HTTPException(status_code=404, detail=f"Edge {edge_id} not found") + + with open(yaml_path, 'w') as f: + yaml.dump(config, f, default_flow_style=False, sort_keys=False) + + # Reload workflow from disk to update cache + try: + graph = app.state.graph_builder.build_from_yaml(str(yaml_path)) + _workflows[workflow_id] = graph + except Exception as reload_error: + print(f"Warning: Could not reload workflow after edge delete: {reload_error}") + + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to delete edge from file: {str(e)}") + + return {"status": "deleted", "edge_id": edge_id} + + # ==================== Code Execution & Debug Endpoints ==================== + + @app.post("/api/code/execute") + async def execute_code(request: CodeExecutionRequest): + """ + Execute a Python file or function. + + Returns an execution ID for tracking. Use the WebSocket endpoint + /ws/code/{execution_id} to stream output in real-time. 
+ """ + execution_id = str(uuid.uuid4())[:8] + + # Resolve file path + file_path = Path(request.file_path) + if not file_path.is_absolute(): + # Try to resolve relative to workflow directory + if request.workflow_id and request.workflow_id in _workflows: + workflow = _workflows[request.workflow_id] + workflow_dir = Path(workflow.source_path).parent + file_path = workflow_dir / request.file_path + else: + file_path = Path(app.state.tasks_dir) / request.file_path + + if not file_path.exists(): + raise HTTPException(status_code=404, detail=f"File not found: {file_path}") + + # Prepare execution + _code_executions[execution_id] = { + "id": execution_id, + "file_path": str(file_path), + "function_name": request.function_name, + "args": request.args or [], + "workflow_id": request.workflow_id, + "debug": request.debug, + "breakpoints": request.breakpoints or [], + "status": "pending", + "output": [], + "error": None, + "started_at": None, + "completed_at": None, + "process": None, + "debug_port": None + } + + # Start execution in background + asyncio.create_task(_run_code_execution(execution_id, file_path, request)) + + return { + "execution_id": execution_id, + "status": "started", + "websocket_url": f"/ws/code/{execution_id}" + } + + @app.get("/api/code/executions/{execution_id}") + async def get_code_execution(execution_id: str): + """Get the status and output of a code execution.""" + if execution_id not in _code_executions: + raise HTTPException(status_code=404, detail="Execution not found") + + execution = _code_executions[execution_id] + return { + "id": execution["id"], + "status": execution["status"], + "output": execution["output"], + "error": execution["error"], + "debug": execution["debug"], + "debug_port": execution.get("debug_port"), + "started_at": execution["started_at"], + "completed_at": execution["completed_at"] + } + + @app.post("/api/code/executions/{execution_id}/stop") + async def stop_code_execution(execution_id: str): + """Stop a running code execution.""" + if execution_id not in _code_executions: + raise HTTPException(status_code=404, detail="Execution not found") + + execution = _code_executions[execution_id] + if execution["status"] != "running": + return {"status": execution["status"], "message": "Execution not running"} + + # Kill the process + if execution.get("process"): + try: + execution["process"].terminate() + execution["process"].wait(timeout=5) + except Exception: + execution["process"].kill() + + execution["status"] = "cancelled" + execution["completed_at"] = datetime.now().isoformat() + + return {"status": "cancelled"} + + @app.post("/api/debug/action") + async def debug_action(action: DebugAction): + """ + Send a debug action to a running debug session. 
+ + Actions: continue, step_over, step_into, step_out, stop + """ + if action.session_id not in _code_executions: + raise HTTPException(status_code=404, detail="Debug session not found") + + execution = _code_executions[action.session_id] + if not execution.get("debug"): + raise HTTPException(status_code=400, detail="Not a debug session") + + # Handle stop action + if action.action == "stop": + if execution.get("process"): + execution["process"].terminate() + execution["status"] = "cancelled" + return {"status": "stopped"} + + # Get the DAP send function stored during connection + dap_send = execution.get("dap_send_command") + if not dap_send: + raise HTTPException(status_code=400, detail="Debug session not ready - DAP not connected") + + # Map actions to DAP commands + command_map = { + "continue": "continue", + "step_over": "next", + "step_into": "stepIn", + "step_out": "stepOut", + "pause": "pause" + } + + dap_command = command_map.get(action.action) + if not dap_command: + raise HTTPException(status_code=400, detail=f"Unknown debug action: {action.action}") + + try: + # Send the DAP command (fire and forget - response comes via event listener) + thread_id = execution.get("thread_id", 1) + await dap_send(dap_command, {"threadId": thread_id}) + + await _send_to_websocket(action.session_id, { + "type": "debug", + "content": f"[DEBUG] Sent {action.action} command" + }) + + return {"status": "sent", "action": action.action} + + except Exception as e: + raise HTTPException(status_code=500, detail=f"Debug action failed: {str(e)}") + + class VariablesRequest(BaseModel): + session_id: str + variables_reference: int + + @app.post("/api/debug/variables") + async def get_debug_variables(request: VariablesRequest): + """Fetch child variables for a given variablesReference.""" + execution = _code_executions.get(request.session_id) + if not execution: + raise HTTPException(status_code=404, detail="Execution not found") + + fetch_vars = execution.get("dap_fetch_variables") + if not fetch_vars: + raise HTTPException(status_code=400, detail="Debug session not ready") + + try: + variables = await fetch_vars(request.variables_reference) + return {"variables": variables} + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to fetch variables: {str(e)}") + + @app.websocket("/ws/code/{execution_id}") + async def websocket_code_output(websocket: WebSocket, execution_id: str): + """ + WebSocket endpoint for streaming code execution output. 
+ + Sends JSON messages with: + - type: 'stdout' | 'stderr' | 'status' | 'debug' + - content: The actual content + - timestamp: ISO timestamp + """ + await websocket.accept() + _websocket_connections[execution_id] = websocket + + try: + if execution_id not in _code_executions: + await websocket.send_json({"type": "error", "content": "Execution not found"}) + await websocket.close() + return + + execution = _code_executions[execution_id] + + # Send any existing output + for line in execution["output"]: + await websocket.send_json(line) + + # Keep connection alive until execution completes + while execution["status"] in ("pending", "running"): + try: + # Check for incoming messages (like breakpoint updates) + try: + data = await asyncio.wait_for(websocket.receive_json(), timeout=0.5) + # Handle incoming debug commands + if data.get("type") == "breakpoint": + execution["breakpoints"] = data.get("lines", []) + except asyncio.TimeoutError: + pass + + # Send heartbeat + await websocket.send_json({"type": "heartbeat", "status": execution["status"]}) + await asyncio.sleep(0.5) + except WebSocketDisconnect: + break + + # Send final status + await websocket.send_json({ + "type": "status", + "status": execution["status"], + "error": execution.get("error") + }) + + except WebSocketDisconnect: + pass + finally: + _websocket_connections.pop(execution_id, None) + + +def _get_task_name_from_path(workflow_dir: Path) -> str: + """ + Extract SyGra task name from workflow directory path. + + Examples: + /path/to/tasks/examples/image_to_qna -> examples.image_to_qna + /path/to/tasks/my_task -> my_task + """ + # Find the 'tasks' directory in the path + parts = workflow_dir.parts + task_parts = [] + found_tasks = False + + for part in parts: + if found_tasks: + task_parts.append(part) + elif part == "tasks": + found_tasks = True + + if task_parts: + return ".".join(task_parts) + else: + # Fallback: use just the directory name + return workflow_dir.name + + +async def _run_code_execution(execution_id: str, file_path: Path, request: CodeExecutionRequest): + """Background task to run Python code and stream output.""" + execution = _code_executions[execution_id] + + # Wait a moment for WebSocket to connect + for _ in range(20): # Wait up to 2 seconds + if execution_id in _websocket_connections: + break + await asyncio.sleep(0.1) + + execution["status"] = "running" + execution["started_at"] = datetime.now().isoformat() + + try: + # Prepare the command + python_executable = sys.executable + + # Store original file path for debug display + original_file_path = file_path + + if request.debug and request.breakpoints: + # Run with debugpy for debugging using DAP protocol + debug_port = 5678 + hash(execution_id) % 1000 # Generate unique port + execution["debug_port"] = debug_port + execution["debug"] = True + execution["breakpoints"] = request.breakpoints + execution["target_file"] = str(file_path) # Store for display + + # Create a wrapper script that sets up debugpy and runs the target + # Uses wait_for_client() which waits for FULL DAP handshake including configurationDone + import textwrap + + # Check if this is a SyGra workflow - if so, run via main.py + workflow_dir = file_path.parent + graph_config = workflow_dir / "graph_config.yaml" + project_root = Path(__file__).parent.parent # studio -> project root + main_py = project_root / "main.py" + + if graph_config.exists() and main_py.exists(): + # SyGra workflow - run main.py with task argument (same as normal execution) + task_name = 
_get_task_name_from_path(workflow_dir) + run_target = str(main_py) + run_args = ["--task", task_name, "--num_records", "2", "--batch_size", "1", "--debug", "True"] + else: + # Regular Python file - run directly + run_target = str(file_path) + run_args = request.args or [] + + wrapper_code = textwrap.dedent(f''' +import sys +import os + +# Set up paths +project_root = r"{str(project_root)}" +sys.path.insert(0, project_root) +os.chdir(project_root) + +import debugpy + +# Configure debugpy to listen - this starts the DAP server +debugpy.listen(("127.0.0.1", {debug_port})) + +# Signal that we're ready +print("[DEBUGPY_READY]", flush=True) + +# Wait for debugger to FULLY attach (including configurationDone) +# This ensures breakpoints are set before code runs +try: + debugpy.wait_for_client() + print("[DEBUG] Debugger attached, starting execution...", flush=True) +except Exception as e: + print(f"[DEBUG] Wait for client failed: {{e}}", flush=True) + sys.exit(1) + +# Run the target script using runpy for proper module execution +import runpy +run_target = r"{run_target}" +sys.argv = [run_target] + {run_args} +runpy.run_path(run_target, run_name="__main__") + ''').strip() + + cmd = [python_executable, "-c", wrapper_code] + + # Notify client about debug port + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Starting debug session on port {debug_port}...", + "debug_port": debug_port + }) + + # Start DAP client connection in background + asyncio.create_task(_connect_dap_client(execution_id, debug_port, str(file_path), request.breakpoints)) + else: + # Normal execution + if request.function_name: + # Run specific function + cmd = [ + python_executable, "-c", + f"import sys; sys.path.insert(0, '{file_path.parent}'); " + f"from {file_path.stem} import {request.function_name}; " + f"{request.function_name}()" + ] + else: + # Check if this is a SyGra workflow (has graph_config.yaml in same directory) + workflow_dir = file_path.parent + graph_config = workflow_dir / "graph_config.yaml" + + if graph_config.exists(): + # This is a SyGra workflow - run via main.py + # Determine task name from directory path + # e.g., /path/to/tasks/examples/image_to_qna -> examples.image_to_qna + task_name = _get_task_name_from_path(workflow_dir) + + # Find main.py in project root + project_root = Path(__file__).parent.parent # studio -> project root + main_py = project_root / "main.py" + + if main_py.exists(): + # Default args for quick test execution + default_args = [ + "--task", task_name, + "--num_records", "2", + "--batch_size", "1", + "--debug", "True" + ] + # Allow user to override with custom args + user_args = request.args or [] + cmd = [python_executable, str(main_py)] + default_args + user_args + # Set working directory to project root for main.py + file_path = main_py + else: + # Fallback: run the file directly + cmd = [python_executable, str(file_path)] + (request.args or []) + else: + # Not a SyGra workflow - run the file directly + cmd = [python_executable, str(file_path)] + (request.args or []) + + # Log the command being executed (simplified for debug mode) + if execution.get("debug"): + display_cmd = f"[DEBUG MODE] {python_executable} {execution.get('target_file', original_file_path)}" + # For SyGra workflows, use project root as working dir (same as Run mode) + workflow_dir = original_file_path.parent + graph_config = workflow_dir / "graph_config.yaml" + if graph_config.exists(): + # This is a SyGra workflow - run from project root + project_root = 
Path(__file__).parent.parent # studio -> project root + working_dir = project_root + else: + working_dir = original_file_path.parent + else: + display_cmd = ' '.join(cmd) + working_dir = file_path.parent + + start_msg = { + "type": "stdout", + "content": f"$ {display_cmd}", + "timestamp": datetime.now().isoformat() + } + execution["output"].append(start_msg) + await _send_to_websocket(execution_id, start_msg) + + cwd_msg = { + "type": "stdout", + "content": f"Working directory: {working_dir}", + "timestamp": datetime.now().isoformat() + } + execution["output"].append(cwd_msg) + await _send_to_websocket(execution_id, cwd_msg) + + separator_msg = { + "type": "stdout", + "content": "โ”€" * 50, + "timestamp": datetime.now().isoformat() + } + execution["output"].append(separator_msg) + await _send_to_websocket(execution_id, separator_msg) + + # Start the process + process = subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=str(working_dir), + text=True, + bufsize=1, + env={**os.environ, "PYTHONUNBUFFERED": "1"} + ) + execution["process"] = process + + # Stream output using threads + output_queue = queue.Queue() + + def read_stream(stream, stream_type): + for line in iter(stream.readline, ''): + if line: + output_queue.put((stream_type, line.rstrip('\n'))) + stream.close() + + stdout_thread = threading.Thread(target=read_stream, args=(process.stdout, "stdout")) + stderr_thread = threading.Thread(target=read_stream, args=(process.stderr, "stderr")) + stdout_thread.daemon = True + stderr_thread.daemon = True + stdout_thread.start() + stderr_thread.start() + + # Process output + while process.poll() is None or not output_queue.empty(): + try: + stream_type, line = output_queue.get(timeout=0.1) + output_entry = { + "type": stream_type, + "content": line, + "timestamp": datetime.now().isoformat() + } + execution["output"].append(output_entry) + await _send_to_websocket(execution_id, output_entry) + except queue.Empty: + await asyncio.sleep(0.05) + + stdout_thread.join(timeout=1) + stderr_thread.join(timeout=1) + + # Set final status + return_code = process.returncode + + # Send completion separator and status + end_separator = { + "type": "stdout", + "content": "โ”€" * 50, + "timestamp": datetime.now().isoformat() + } + execution["output"].append(end_separator) + await _send_to_websocket(execution_id, end_separator) + + if return_code == 0: + execution["status"] = "completed" + exit_msg = { + "type": "stdout", + "content": f"โœ“ Process completed successfully (exit code: {return_code})", + "timestamp": datetime.now().isoformat() + } + else: + execution["status"] = "failed" + execution["error"] = f"Process exited with code {return_code}" + exit_msg = { + "type": "stderr", + "content": f"โœ— Process failed (exit code: {return_code})", + "timestamp": datetime.now().isoformat() + } + + execution["output"].append(exit_msg) + await _send_to_websocket(execution_id, exit_msg) + + except Exception as e: + execution["status"] = "failed" + execution["error"] = str(e) + error_msg = { + "type": "stderr", + "content": f"Execution error: {str(e)}", + "timestamp": datetime.now().isoformat() + } + execution["output"].append(error_msg) + await _send_to_websocket(execution_id, error_msg) + finally: + execution["completed_at"] = datetime.now().isoformat() + execution["process"] = None + + +async def _send_to_websocket(execution_id: str, data: dict): + """Send data to the WebSocket connection for this execution. + + Also stores messages in execution's output array for polling fallback. 
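+
+    Example message shape (a sketch using a hypothetical execution ID; the dict
+    mirrors the entries produced elsewhere in this module):
+
+        await _send_to_websocket(
+            "a1b2c3d4",  # hypothetical execution ID
+            {"type": "stdout", "content": "hello", "timestamp": "2024-01-01T00:00:00"},
+        )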
+ """ + # Add timestamp if not present + if "timestamp" not in data: + data["timestamp"] = datetime.now().isoformat() + + # Store in execution's output array for polling fallback + execution = _code_executions.get(execution_id) + if execution and "output" in execution: + execution["output"].append(data) + + # Try to send via WebSocket + ws = _websocket_connections.get(execution_id) + if ws: + try: + await ws.send_json(data) + except Exception: + pass + + +async def _connect_dap_client(execution_id: str, debug_port: int, file_path: str, breakpoints: list): + """ + Connect to debugpy via Debug Adapter Protocol (DAP) and manage debug session. + """ + import json + + execution = _code_executions.get(execution_id) + if not execution: + return + + reader = None + writer = None + seq = 1 + buffer = b"" # Shared buffer for reading + dap_lock = asyncio.Lock() # Lock for coordinating DAP reads + reading_in_progress = False # Flag to track if a read is happening + + async def read_dap_message(timeout: float = 10.0, use_lock: bool = True): + """Read a single DAP message.""" + nonlocal buffer, reading_in_progress + + # Check if another read is in progress + if reading_in_progress: + await asyncio.sleep(0.1) + return None + + try: + if use_lock: + # Try to acquire lock with timeout + try: + await asyncio.wait_for(dap_lock.acquire(), timeout=0.5) + except asyncio.TimeoutError: + return None + + reading_in_progress = True + deadline = asyncio.get_event_loop().time() + timeout + read_attempts = 0 + + # Read until we have headers + while b"\r\n\r\n" not in buffer: + remaining = deadline - asyncio.get_event_loop().time() + if remaining <= 0: + # Don't clear buffer - keep partial data for next read + # Only log if there's significant data stuck + if len(buffer) > 50: + buffer_preview = buffer[:80].decode('utf-8', errors='replace') + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Read timeout with {len(buffer)} bytes in buffer" + }) + return None + try: + read_attempts += 1 + chunk = await asyncio.wait_for(reader.read(4096), timeout=min(remaining, 2.0)) + if not chunk: + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Read got empty chunk after {read_attempts} attempts" + }) + return None + buffer += chunk + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Read {len(chunk)} bytes, total buffer: {len(buffer)}" + }) + except asyncio.TimeoutError: + if buffer: + continue # Keep trying if we have partial data + return None + + # Split headers from potential body data + header_end = buffer.index(b"\r\n\r\n") + 4 + header_data = buffer[:header_end].decode() + buffer = buffer[header_end:] + + # Parse Content-Length + content_length = 0 + for line in header_data.split("\r\n"): + if line.lower().startswith("content-length:"): + content_length = int(line.split(":", 1)[1].strip()) + break + + if content_length == 0: + return None + + # Read body (might already be in buffer) + while len(buffer) < content_length: + remaining = deadline - asyncio.get_event_loop().time() + if remaining <= 0: + return None + try: + chunk = await asyncio.wait_for(reader.read(content_length - len(buffer)), timeout=min(remaining, 2.0)) + if not chunk: + return None + buffer += chunk + except asyncio.TimeoutError: + return None + + # Extract body from buffer + body_data = buffer[:content_length] + buffer = buffer[content_length:] + + return json.loads(body_data.decode()) + + except Exception as e: + await _send_to_websocket(execution_id, { + 
"type": "debug", + "content": f"[DEBUG] Read error: {str(e)}" + }) + return None + finally: + reading_in_progress = False + if use_lock and dap_lock.locked(): + dap_lock.release() + + async def send_request_and_wait(command: str, arguments: dict = None, timeout: float = 10.0) -> dict: + """Send a DAP request and wait for the response, handling any events in between.""" + nonlocal seq + + request = { + "seq": seq, + "type": "request", + "command": command + } + if arguments: + request["arguments"] = arguments + + request_seq = seq + seq += 1 + + # Send request + body = json.dumps(request) + header = f"Content-Length: {len(body)}\r\n\r\n" + writer.write((header + body).encode()) + await writer.drain() + + # Wait for response, handling events in between + start_time = asyncio.get_event_loop().time() + messages_received = 0 + while (asyncio.get_event_loop().time() - start_time) < timeout: + remaining = timeout - (asyncio.get_event_loop().time() - start_time) + message = await read_dap_message(timeout=min(3.0, remaining), use_lock=False) # No lock during init + if message is None: + continue + + messages_received += 1 + msg_type = message.get("type") + + # Log what we receive for debugging + if msg_type == "response": + resp_cmd = message.get("command", "unknown") + resp_seq = message.get("request_seq", -1) + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Got response for {resp_cmd} (seq={resp_seq}, expecting={request_seq})" + }) + elif msg_type == "event": + event_name = message.get("event", "unknown") + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Got event: {event_name}" + }) + + if msg_type == "response" and message.get("request_seq") == request_seq: + return message + elif msg_type == "event": + # Handle event asynchronously + asyncio.create_task(handle_event(message)) + + raise asyncio.TimeoutError(f"Timeout waiting for {command} response (received {messages_received} messages)") + + # Message queue for coordinating reads + pending_responses = {} # seq -> asyncio.Future + + async def send_command(command: str, arguments: dict = None) -> None: + """Send a DAP command without waiting for response (fire and forget).""" + nonlocal seq + request = { + "seq": seq, + "type": "request", + "command": command + } + if arguments: + request["arguments"] = arguments + seq += 1 + + body = json.dumps(request) + header = f"Content-Length: {len(body)}\r\n\r\n" + writer.write((header + body).encode()) + await writer.drain() + + async def handle_event(message: dict): + """Handle DAP events.""" + event_name = message.get("event") + body = message.get("body", {}) + + # Log all events for debugging + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] handle_event: {event_name}" + }) + + if event_name == "stopped": + reason = body.get("reason", "unknown") + thread_id = body.get("threadId", 1) + execution["thread_id"] = thread_id # Store for debug actions + + await _send_to_websocket(execution_id, { + "type": "debug_stopped", + "reason": reason, + "thread_id": thread_id, + "content": f"[DEBUG] Paused: {reason}" + }) + + # Queue request for debug state - will be processed by message loop + execution["pending_debug_state"] = thread_id + + elif event_name == "continued": + await _send_to_websocket(execution_id, { + "type": "debug_continued", + "content": "[DEBUG] Execution continued" + }) + + elif event_name in ("terminated", "exited"): + await _send_to_websocket(execution_id, { + "type": "debug_terminated", + 
"content": "[DEBUG] Debug session ended" + }) + + async def fetch_debug_state(thread_id: int): + """Fetch and send debug state (stack trace + variables).""" + nonlocal seq, buffer + try: + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] fetch_debug_state: sending stackTrace for thread {thread_id}" + }) + + # Send stackTrace request + await send_command("stackTrace", { + "threadId": thread_id, + "startFrame": 0, + "levels": 20 + }) + + # Wait for stackTrace response (read_dap_message handles locking) + stack_response = None + for i in range(30): + msg = await read_dap_message(timeout=0.5) + if msg: + msg_type = msg.get("type") + msg_cmd = msg.get("command", "") + if msg_type == "response" and msg_cmd == "stackTrace": + stack_response = msg + break + elif msg_type == "event": + await handle_event(msg) + else: + await asyncio.sleep(0.05) + + if not stack_response or not stack_response.get("body"): + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] fetch_debug_state: no stack response received" + }) + return + + frames = stack_response["body"].get("stackFrames", []) + if not frames: + return + + frame = frames[0] + line_num = frame.get("line", 0) + file_path = frame.get("source", {}).get("path") + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] SENDING debug_location: line={line_num}" + }) + await _send_to_websocket(execution_id, { + "type": "debug_location", + "line": line_num, + "file": file_path, + "frame_id": frame.get("id"), + "frames": frames + }) + + # Get scopes + await send_command("scopes", {"frameId": frame.get("id")}) + + scopes_response = None + for _ in range(20): + msg = await read_dap_message(timeout=0.5) + if msg: + if msg.get("type") == "response" and msg.get("command") == "scopes": + scopes_response = msg + break + elif msg.get("type") == "event": + await handle_event(msg) + else: + await asyncio.sleep(0.05) + + if not scopes_response or not scopes_response.get("body"): + await _send_to_websocket(execution_id, { + "type": "debug", + "content": "[DEBUG] No scopes response" + }) + return + + # Get variables for each scope + all_vars = [] + for scope in scopes_response["body"].get("scopes", [])[:2]: + var_ref = scope.get("variablesReference") + if not var_ref: + continue + + await send_command("variables", {"variablesReference": var_ref}) + + vars_response = None + for _ in range(20): + msg = await read_dap_message(timeout=0.5) + if msg: + if msg.get("type") == "response" and msg.get("command") == "variables": + vars_response = msg + break + elif msg.get("type") == "event": + await handle_event(msg) + else: + await asyncio.sleep(0.05) + + if vars_response and vars_response.get("body"): + for v in vars_response["body"].get("variables", [])[:30]: + all_vars.append({ + "scope": scope.get("name"), + "name": v.get("name"), + "value": str(v.get("value", ""))[:200], + "type": v.get("type"), + "variablesReference": v.get("variablesReference", 0) + }) + + if all_vars: + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] SENDING debug_variables: {len(all_vars)} vars" + }) + await _send_to_websocket(execution_id, { + "type": "debug_variables", + "variables": all_vars + }) + else: + await _send_to_websocket(execution_id, { + "type": "debug", + "content": "[DEBUG] No variables found" + }) + + except Exception as e: + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Error getting state: {str(e)}" + }) + + async def 
message_loop(): + """Main message loop - reads all DAP messages and dispatches them.""" + await _send_to_websocket(execution_id, { + "type": "debug", + "content": "[DEBUG] Message loop started" + }) + + loop_count = 0 + while execution.get("status") == "running": + try: + loop_count += 1 + + # Check if we need to fetch debug state + pending_thread = execution.pop("pending_debug_state", None) + if pending_thread: + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Fetching debug state for thread {pending_thread}" + }) + await fetch_debug_state(pending_thread) + continue + + # read_dap_message handles locking internally + message = await read_dap_message(timeout=0.5) + if message: + msg_type = message.get("type") + event_name = message.get("event", message.get("command", "")) + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Loop received: {msg_type} - {event_name}" + }) + + if msg_type == "event": + await handle_event(message) + elif msg_type == "response": + # Check if anyone is waiting for this response + req_seq = message.get("request_seq") + if req_seq in pending_responses: + pending_responses[req_seq].set_result(message) + except Exception as e: + if loop_count < 5: # Only log first few errors + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Loop error: {str(e)}" + }) + await asyncio.sleep(0.05) + + # Wait for debugpy to be ready by checking for [DEBUGPY_READY] in output + # Give it some time to start + await asyncio.sleep(2.0) + + # Connect with retries - debugpy might take a moment to start listening + connected = False + for attempt in range(15): # Up to ~15 seconds of retries + try: + reader, writer = await asyncio.wait_for( + asyncio.open_connection('127.0.0.1', debug_port), + timeout=2.0 + ) + connected = True + await _send_to_websocket(execution_id, { + "type": "debug", + "content": "[DEBUG] Connected to debug adapter" + }) + break + except (ConnectionRefusedError, asyncio.TimeoutError, OSError) as e: + if attempt < 14: + await asyncio.sleep(0.5) + continue + else: + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Failed to connect to debug server after {attempt+1} attempts" + }) + return + + if not connected or not reader or not writer: + return + + event_task = None + attach_seq = None # Track attach request seq to get response later + + try: + # Test if we can read from connection + await _send_to_websocket(execution_id, { + "type": "debug", + "content": "[DEBUG] Testing connection read..." + }) + + # Try to read any initial data (some debuggers send events on connect) + try: + test_data = await asyncio.wait_for(reader.read(100), timeout=0.5) + if test_data: + buffer = test_data # Put it in buffer for later processing + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Initial data received: {len(test_data)} bytes" + }) + except asyncio.TimeoutError: + await _send_to_websocket(execution_id, { + "type": "debug", + "content": "[DEBUG] No initial data (normal)" + }) + + # DAP handshake - Step 1: Initialize + await _send_to_websocket(execution_id, { + "type": "debug", + "content": "[DEBUG] Sending initialize..." 
+ }) + + init_resp = await send_request_and_wait("initialize", { + "clientID": "sygra", + "adapterID": "debugpy", + "pathFormat": "path", + "linesStartAt1": True, + "columnsStartAt1": True, + "supportsVariableType": True, + "supportsRunInTerminalRequest": False, + "supportsProgressReporting": False + }, timeout=15.0) + + await _send_to_websocket(execution_id, { + "type": "debug", + "content": "[DEBUG] Initialize OK" + }) + + # Step 2: Send attach request (but don't wait for response yet - it comes after configurationDone) + attach_request = { + "seq": seq, + "type": "request", + "command": "attach", + "arguments": { + "justMyCode": False, + "subProcess": False + } + } + attach_seq = seq + seq += 1 + + body = json.dumps(attach_request) + header = f"Content-Length: {len(body)}\r\n\r\n" + writer.write((header + body).encode()) + await writer.drain() + + await _send_to_websocket(execution_id, { + "type": "debug", + "content": "[DEBUG] Attach request sent" + }) + + # Step 3: Wait for 'initialized' event before setting breakpoints + # Read messages until we get initialized event + got_initialized = False + start_time = asyncio.get_event_loop().time() + while (asyncio.get_event_loop().time() - start_time) < 10.0: + message = await read_dap_message(timeout=2.0) + if message: + if message.get("type") == "event" and message.get("event") == "initialized": + got_initialized = True + await _send_to_websocket(execution_id, { + "type": "debug", + "content": "[DEBUG] Got initialized event" + }) + break + elif message.get("type") == "event": + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Event: {message.get('event')}" + }) + + if not got_initialized: + await _send_to_websocket(execution_id, { + "type": "debug", + "content": "[DEBUG] Warning: No initialized event received" + }) + + # Step 4: Set breakpoints + if breakpoints: + bp_resp = await send_request_and_wait("setBreakpoints", { + "source": {"path": file_path}, + "breakpoints": [{"line": ln} for ln in breakpoints] + }, timeout=10.0) + verified = sum(1 for bp in bp_resp.get("body", {}).get("breakpoints", []) if bp.get("verified")) + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Breakpoints: {verified}/{len(breakpoints)} at {breakpoints}" + }) + + # Step 5: Send configurationDone - this triggers attach response + await send_request_and_wait("configurationDone", timeout=10.0) + await _send_to_websocket(execution_id, { + "type": "debug", + "content": "[DEBUG] Configuration done" + }) + + # Step 6: Now wait for attach response + start_time = asyncio.get_event_loop().time() + attach_resp = None + while (asyncio.get_event_loop().time() - start_time) < 10.0: + message = await read_dap_message(timeout=2.0) + if message: + if message.get("type") == "response" and message.get("request_seq") == attach_seq: + attach_resp = message + break + elif message.get("type") == "event": + asyncio.create_task(handle_event(message)) + + if attach_resp: + if attach_resp.get("success", True): + await _send_to_websocket(execution_id, { + "type": "debug", + "content": "[DEBUG] Debugger attached - session active!" + }) + else: + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Attach failed: {attach_resp.get('message')}" + }) + return + else: + await _send_to_websocket(execution_id, { + "type": "debug", + "content": "[DEBUG] No attach response, but continuing..." 
+ }) + + # Function to fetch variables with proper coordination + async def fetch_variables(variables_reference: int) -> list: + """Fetch child variables for a given reference.""" + nonlocal seq + await send_command("variables", {"variablesReference": variables_reference}) + + # Read responses until we get our variables response (read_dap_message handles locking) + for _ in range(20): + msg = await read_dap_message(timeout=0.5) + if msg: + if msg.get("type") == "response" and msg.get("command") == "variables": + if msg.get("body"): + return [ + { + "name": v.get("name"), + "value": str(v.get("value", ""))[:200], + "type": v.get("type"), + "variablesReference": v.get("variablesReference", 0) + } + for v in msg["body"].get("variables", []) + ] + elif msg.get("type") == "event": + await handle_event(msg) + else: + await asyncio.sleep(0.05) + return [] + + # Store functions for debug actions API + execution["dap_send_command"] = send_command + execution["dap_read_message"] = read_dap_message + execution["dap_fetch_variables"] = fetch_variables + execution["dap_lock"] = dap_lock + + # Start message loop to handle events and fetch debug state + event_task = asyncio.create_task(message_loop()) + + while execution.get("status") == "running": + await asyncio.sleep(0.5) + + except asyncio.TimeoutError as e: + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Protocol timeout: {str(e)}" + }) + except Exception as e: + await _send_to_websocket(execution_id, { + "type": "debug", + "content": f"[DEBUG] Error: {str(e)}" + }) + finally: + if event_task: + event_task.cancel() + if writer: + writer.close() + + +def _represent_multiline_str(dumper, data): + """Custom YAML representer that uses literal block scalar style for multiline strings.""" + if '\n' in data: + # Use literal block scalar style (|) for multiline strings + return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|') + return dumper.represent_scalar('tag:yaml.org,2002:str', data) + + +def _get_yaml_dumper(): + """Get a custom YAML dumper that properly formats multiline strings.""" + class CustomDumper(yaml.SafeDumper): + pass + CustomDumper.add_representer(str, _represent_multiline_str) + return CustomDumper + + +async def _save_workflow_to_disk(app: FastAPI, request: WorkflowCreateRequest, is_new: bool) -> WorkflowSaveResponse: + """ + Save a workflow to disk as graph_config.yaml and task_executor.py. 
+ + Args: + app: FastAPI application instance + request: Workflow creation request + is_new: Whether this is a new workflow or an update + + Returns: + WorkflowSaveResponse with details about created files + """ + import re + import yaml + + tasks_dir = Path(app.state.tasks_dir) + + # Sanitize workflow name for directory + sanitized_name = re.sub(r'[^\w\-]', '_', request.name.lower().strip()) + sanitized_name = re.sub(r'_+', '_', sanitized_name).strip('_') + + if not sanitized_name: + raise HTTPException(status_code=400, detail="Invalid workflow name") + + # Determine target directory + if is_new: + workflow_dir = tasks_dir / sanitized_name + # Check if directory already exists + counter = 1 + original_name = sanitized_name + while workflow_dir.exists(): + sanitized_name = f"{original_name}_{counter}" + workflow_dir = tasks_dir / sanitized_name + counter += 1 + else: + # For updates, use existing path if available + if request.source_path: + config_path = Path(request.source_path) + if config_path.name == "graph_config.yaml": + workflow_dir = config_path.parent + else: + workflow_dir = config_path + else: + workflow_dir = tasks_dir / sanitized_name + + # Create directory if needed + workflow_dir.mkdir(parents=True, exist_ok=True) + + files_created = [] + + # Generate graph_config.yaml + graph_config = _generate_graph_config(request, sanitized_name) + config_path = workflow_dir / "graph_config.yaml" + + with open(config_path, 'w') as f: + yaml.dump(graph_config, f, Dumper=_get_yaml_dumper(), default_flow_style=False, sort_keys=False, allow_unicode=True) + files_created.append(str(config_path)) + + # Generate task_executor.py if there are custom processors + executor_code = _generate_task_executor(request, sanitized_name) + if executor_code: + executor_path = workflow_dir / "task_executor.py" + with open(executor_path, 'w') as f: + f.write(executor_code) + files_created.append(str(executor_path)) + + # Generate workflow ID + workflow_id = sanitized_name + + # Update cache + try: + graph = app.state.graph_builder.build_from_yaml(str(config_path)) + _workflows[graph.id] = graph + workflow_id = graph.id + except Exception as e: + print(f"Warning: Could not reload workflow after save: {e}") + + return WorkflowSaveResponse( + success=True, + workflow_id=workflow_id, + source_path=str(config_path), + message=f"Workflow '{request.name}' saved successfully", + files_created=files_created + ) + + +# Track used function/class names to ensure uniqueness +_used_names: set = set() + + +def _generate_readable_name(node, node_type: str, all_nodes: list, suffix: str) -> str: + """ + Generate a unique readable name for a node's class/function based on summary or position. 
+ + Args: + node: The node to generate a name for + node_type: Type of node (lambda, data, output) + all_nodes: All nodes in the workflow + suffix: Suffix to add (function, transform, generator) + + Returns: + A unique readable Python identifier like 'process_data_function' or 'lambda_1_function' + """ + import re + global _used_names + + base_name = None + + # Try to use node summary if available and meaningful + if node.summary and node.summary.strip(): + # Convert summary to valid Python identifier + name = re.sub(r'[^\w\s]', '', node.summary.lower().strip()) + name = re.sub(r'\s+', '_', name) + name = re.sub(r'_+', '_', name).strip('_') + # Skip if summary just equals the node type (not meaningful) + if name and name != node_type: + if name[0].isdigit(): + name = f"{node_type}_{name}" + base_name = name + + # Fall back to numbered name based on position + if not base_name: + same_type_nodes = [n for n in all_nodes if n.node_type == node_type] + try: + index = same_type_nodes.index(node) + 1 + except ValueError: + index = 1 + base_name = f"{node_type}_{index}" + + # Ensure uniqueness by adding counter if needed + full_name = f"{base_name}_{suffix}" + if full_name in _used_names: + counter = 2 + while f"{base_name}_{counter}_{suffix}" in _used_names: + counter += 1 + full_name = f"{base_name}_{counter}_{suffix}" + + _used_names.add(full_name) + return full_name + + +def _reset_used_names(): + """Reset the used names set - call at start of each workflow generation.""" + global _used_names + _used_names = set() + + +def _serialize_inner_graph(inner_graph) -> dict: + """ + Recursively serialize an inner_graph structure for YAML storage. + + Args: + inner_graph: InnerGraph model or dict + + Returns: + Dictionary ready for YAML serialization + """ + result = { + 'name': inner_graph.name if hasattr(inner_graph, 'name') else inner_graph.get('name', 'Subgraph'), + 'nodes': {}, + 'edges': [] + } + + inner_nodes = inner_graph.nodes if hasattr(inner_graph, 'nodes') else inner_graph.get('nodes', []) + for inner_node in inner_nodes: + inner_node_id = inner_node.id if hasattr(inner_node, 'id') else inner_node.get('id') + inner_node_type = inner_node.node_type if hasattr(inner_node, 'node_type') else inner_node.get('node_type') + inner_node_config = { + 'node_type': inner_node_type, + 'node_name': inner_node.summary if hasattr(inner_node, 'summary') else inner_node.get('summary'), + } + # Save position + inner_pos = inner_node.position if hasattr(inner_node, 'position') else inner_node.get('position', {}) + if inner_pos: + inner_node_config['position'] = { + 'x': inner_pos.x if hasattr(inner_pos, 'x') else inner_pos.get('x', 0), + 'y': inner_pos.y if hasattr(inner_pos, 'y') else inner_pos.get('y', 0) + } + # Save size + inner_size = inner_node.size if hasattr(inner_node, 'size') else inner_node.get('size') + if inner_size: + inner_node_config['size'] = { + 'width': inner_size.width if hasattr(inner_size, 'width') else inner_size.get('width', 150), + 'height': inner_size.height if hasattr(inner_size, 'height') else inner_size.get('height', 60) + } + # Recursively handle nested inner_graph + nested_inner_graph = inner_node.inner_graph if hasattr(inner_node, 'inner_graph') else inner_node.get('inner_graph') + if nested_inner_graph: + inner_node_config['inner_graph'] = _serialize_inner_graph(nested_inner_graph) + result['nodes'][inner_node_id] = inner_node_config + + inner_edges = inner_graph.edges if hasattr(inner_graph, 'edges') else inner_graph.get('edges', []) + for inner_edge in inner_edges: + 
edge_source = inner_edge.source if hasattr(inner_edge, 'source') else inner_edge.get('source') + edge_target = inner_edge.target if hasattr(inner_edge, 'target') else inner_edge.get('target') + result['edges'].append({ + 'from': edge_source, + 'to': edge_target + }) + + return result + + +def _generate_graph_config(request: WorkflowCreateRequest, task_name: str) -> dict: + """ + Generate graph_config.yaml content from workflow request. + + Args: + request: Workflow creation request + task_name: Sanitized task name for paths + + Returns: + Dictionary ready for YAML serialization + """ + # Reset used names for this workflow generation + _reset_used_names() + + config = {} + + # Data config - filter out internal fields (starting with _) + if request.data_config and request.data_config.get('source'): + config['data_config'] = {k: v for k, v in request.data_config.items() if not k.startswith('_')} + + # Graph config + graph_config = {'nodes': {}, 'edges': []} + + for node in request.nodes: + # Skip START and END nodes - they're implicit + if node.node_type in ('start', 'end'): + continue + + node_config: Dict[str, Any] = { + 'node_type': node.node_type + } + + # Add node-specific config + if node.node_type == 'llm': + if node.prompt: + # Convert prompt messages to YAML format + prompt_list = [] + for msg in node.prompt: + if hasattr(msg, 'role') and hasattr(msg, 'content'): + prompt_list.append({msg.role: msg.content}) + elif isinstance(msg, dict): + prompt_list.append({msg.get('role', 'user'): msg.get('content', '')}) + if prompt_list: + node_config['prompt'] = prompt_list + + if node.model: + model_config = {'name': node.model.name if hasattr(node.model, 'name') else node.model.get('name', 'gpt-4o')} + params = node.model.parameters if hasattr(node.model, 'parameters') else node.model.get('parameters', {}) + if params: + model_config['parameters'] = params + node_config['model'] = model_config + + # Include output_keys for LLM nodes + if node.output_keys: + node_config['output_keys'] = node.output_keys + + elif node.node_type == 'multi_llm': + if node.prompt: + # Convert prompt messages to YAML format + prompt_list = [] + for msg in node.prompt: + if hasattr(msg, 'role') and hasattr(msg, 'content'): + prompt_list.append({msg.role: msg.content}) + elif isinstance(msg, dict): + prompt_list.append({msg.get('role', 'user'): msg.get('content', '')}) + if prompt_list: + node_config['prompt'] = prompt_list + + # Include models config for multi_llm nodes + if node.models: + node_config['models'] = node.models + + # Include multi_llm_post_process + if node.multi_llm_post_process: + node_config['multi_llm_post_process'] = node.multi_llm_post_process + + # Include output_keys for multi_llm nodes + if node.output_keys: + node_config['output_keys'] = node.output_keys + + elif node.node_type == 'lambda': + if node.function_path: + node_config['function_path'] = node.function_path + else: + # Generate a readable function name from node summary or a simple counter + func_name = _generate_readable_name(node, 'lambda', request.nodes, 'function') + generated_path = f"tasks.{task_name}.task_executor.{func_name}" + node_config['function_path'] = generated_path + # Store back on node for task_executor generation + node.function_path = generated_path + + elif node.node_type == 'subgraph': + if node.subgraph_path: + node_config['subgraph'] = node.subgraph_path + elif node.inner_graph: + # Save inline subgraph data for grouped nodes + inner_graph_data = { + 'name': node.inner_graph.name if hasattr(node.inner_graph, 
'name') else node.inner_graph.get('name', 'Subgraph'), + 'nodes': {}, + 'edges': [] + } + # Convert inner nodes + inner_nodes = node.inner_graph.nodes if hasattr(node.inner_graph, 'nodes') else node.inner_graph.get('nodes', []) + for inner_node in inner_nodes: + inner_node_id = inner_node.id if hasattr(inner_node, 'id') else inner_node.get('id') + inner_node_type = inner_node.node_type if hasattr(inner_node, 'node_type') else inner_node.get('node_type') + inner_node_config = { + 'node_type': inner_node_type, + 'node_name': inner_node.summary if hasattr(inner_node, 'summary') else inner_node.get('summary'), + } + # Save position for inner nodes + inner_pos = inner_node.position if hasattr(inner_node, 'position') else inner_node.get('position', {}) + if inner_pos: + inner_node_config['position'] = { + 'x': inner_pos.x if hasattr(inner_pos, 'x') else inner_pos.get('x', 0), + 'y': inner_pos.y if hasattr(inner_pos, 'y') else inner_pos.get('y', 0) + } + # Save size for inner nodes + inner_size = inner_node.size if hasattr(inner_node, 'size') else inner_node.get('size') + if inner_size: + inner_node_config['size'] = { + 'width': inner_size.width if hasattr(inner_size, 'width') else inner_size.get('width', 150), + 'height': inner_size.height if hasattr(inner_size, 'height') else inner_size.get('height', 60) + } + # Handle nested subgraphs recursively + inner_inner_graph = inner_node.inner_graph if hasattr(inner_node, 'inner_graph') else inner_node.get('inner_graph') + if inner_inner_graph: + # Recursively store nested inner_graph (simplified - just store the raw data) + inner_node_config['inner_graph'] = _serialize_inner_graph(inner_inner_graph) + inner_graph_data['nodes'][inner_node_id] = inner_node_config + # Convert inner edges + inner_edges = node.inner_graph.edges if hasattr(node.inner_graph, 'edges') else node.inner_graph.get('edges', []) + for inner_edge in inner_edges: + edge_source = inner_edge.source if hasattr(inner_edge, 'source') else inner_edge.get('source') + edge_target = inner_edge.target if hasattr(inner_edge, 'target') else inner_edge.get('target') + inner_graph_data['edges'].append({ + 'from': edge_source, + 'to': edge_target + }) + node_config['inner_graph'] = inner_graph_data + + elif node.node_type == 'connector': + if node.metadata: + node_config.update(node.metadata) + + elif node.node_type == 'data': + # Data nodes are stored in data_config at workflow level, NOT in graph_config.nodes + if node.data_config: + # Copy data_config but strip out internal fields + data_config_clean = {k: v for k, v in node.data_config.items() if not k.startswith('_')} + if data_config_clean: + if 'data_config' not in config: + config['data_config'] = {} + config['data_config'].update(data_config_clean) + # Skip adding to graph_config.nodes - data nodes are implicit + continue + + elif node.node_type == 'output': + # Output nodes are stored in output_config at workflow level, NOT in graph_config.nodes + if node.output_config: + # Copy output_config but strip out internal fields + output_config_clean = {k: v for k, v in node.output_config.items() if not k.startswith('_')} + if output_config_clean: + if 'output_config' not in config: + config['output_config'] = {} + config['output_config'].update(output_config_clean) + # Skip adding to graph_config.nodes - output nodes are implicit + continue + + elif node.node_type == 'weighted_sampler': + node_config['node_type'] = 'weighted_sampler' + if node.sampler_config and node.sampler_config.get('attributes'): + node_config['attributes'] = 
node.sampler_config['attributes'] + + # Add pre/post processors + if node.pre_process: + node_config['pre_process'] = node.pre_process + if node.post_process: + node_config['post_process'] = node.post_process + + # Add node_name - displayed in the graph + if node.summary: + node_config['node_name'] = node.summary + + # Add description as comment (via metadata) + if node.description: + node_config['description'] = node.description + + graph_config['nodes'][node.id] = node_config + + # Build set of data and output node IDs to filter edges + excluded_node_ids = {node.id for node in request.nodes if node.node_type in ('data', 'output')} + + # Edges - exclude any edges involving data or output nodes + for edge in request.edges: + # Skip edges connected to data or output nodes + if edge.source in excluded_node_ids or edge.target in excluded_node_ids: + continue + + edge_config: Dict[str, Any] = { + 'from': edge.source if edge.source != 'START' else 'START', + 'to': edge.target if edge.target != 'END' else 'END' + } + + if edge.is_conditional and edge.condition: + edge_config['condition_path'] = edge.condition.condition_path + if edge.condition.path_map: + edge_config['path_map'] = edge.condition.path_map + + if edge.label: + edge_config['label'] = edge.label + + graph_config['edges'].append(edge_config) + + config['graph_config'] = graph_config + + # Output config - filter out data/output nodes from output_map and internal fields + if request.output_config: + # Clean the output_config by removing internal fields (starting with _) + # and data/output node references from output_map + output_config_clean = {k: v for k, v in request.output_config.items() if not k.startswith('_')} + if 'output_map' in output_config_clean: + output_config_clean['output_map'] = { + k: v for k, v in output_config_clean['output_map'].items() + if k not in excluded_node_ids + } + config['output_config'] = output_config_clean + else: + # Generate default output config with id and messages mapping + # Check if we have LLM nodes that need message conversion + has_llm_nodes = any(n.node_type == 'llm' for n in request.nodes) + + if has_llm_nodes: + # Use output generator with message transform for LLM outputs + config['output_config'] = { + 'generator': f'tasks.{task_name}.task_executor.DefaultOutputGenerator', + 'output_map': { + 'id': {'from': 'id'}, + 'response': {'from': 'messages', 'transform': 'build_response'} + } + } + else: + # Minimal output config for non-LLM workflows + config['output_config'] = {'output_map': {'id': {'from': 'id'}}} + + # Schema config + if request.schema_config: + config['schema_config'] = request.schema_config + + return config + + +def _generate_task_executor(request: WorkflowCreateRequest, task_name: str) -> Optional[str]: + """ + Generate task_executor.py content for custom processors. 
+ + Args: + request: Workflow creation request + task_name: Sanitized task name for paths + + Returns: + Python code as string, or None if no custom code needed + """ + imports = set() + classes = [] + functions = [] + + # Check for custom processors and lambdas + needs_executor = False + + for node in request.nodes: + # Check for pre/post processors that reference this task + if node.pre_process and task_name in node.pre_process: + needs_executor = True + imports.add("from sygra.core.graph.functions.node_processor import NodePreProcessorWithState") + imports.add("from sygra.core.graph.sygra_state import SygraState") + + # Generate placeholder class + class_name = node.pre_process.split('.')[-1] + classes.append(f''' +class {class_name}(NodePreProcessorWithState): + """Pre-processor for {node.id} node.""" + + def apply(self, state: SygraState) -> SygraState: + # TODO: Implement pre-processing logic + # Access state variables: state["variable_name"] + # Modify state as needed + return state +''') + + if node.post_process and task_name in node.post_process: + needs_executor = True + imports.add("from sygra.core.graph.functions.node_processor import NodePostProcessorWithState") + imports.add("from sygra.core.graph.sygra_message import SygraMessage") + imports.add("from sygra.core.graph.sygra_state import SygraState") + + # Generate placeholder class + class_name = node.post_process.split('.')[-1] + classes.append(f''' +class {class_name}(NodePostProcessorWithState): + """Post-processor for {node.id} node.""" + + def apply(self, resp: SygraMessage, state: SygraState) -> SygraState: + # TODO: Implement post-processing logic + # Access LLM response: resp.message.content + # Modify state as needed: state["output_key"] = processed_value + return state +''') + + # Check for lambda functions - always generate stub if function_path contains this task + if node.node_type == 'lambda' and node.function_path: + # Check if function should be defined in this task's executor + if task_name in node.function_path or 'task_executor' in node.function_path: + needs_executor = True + imports.add("from sygra.core.graph.sygra_state import SygraState") + imports.add("from typing import Any") + + func_name = node.function_path.split('.')[-1] + # Generate descriptive docstring + node_desc = node.summary if node.summary else f"Lambda node {node.id}" + functions.append(f''' +def {func_name}(state: SygraState) -> Any: + """ + Lambda function: {node_desc} + + This function is executed as part of the workflow pipeline. + Modify the state and return it, or return a value to be stored. 
+ + Args: + state: Current workflow state containing all variables + + Returns: + Modified state or a value to store in state["{node.id}"] + """ + # TODO: Implement your processing logic here + # + # Example - Access input data: + # input_data = state.get("previous_node_output") + # + # Example - Process and return: + # result = process(input_data) + # return result + # + # Example - Modify state directly: + # state["my_output"] = computed_value + # return state + + return state +''') + + # Check for data node transformations + if node.node_type == 'data' and node.data_config: + transform_code = node.data_config.get('_transform_code', '') + if transform_code and transform_code.strip(): + needs_executor = True + # The transform code already contains imports and class definition + classes.append(f''' +# === Data Transformation for {node.id} === +{transform_code} +''') + + # Check for output node generators + if node.node_type == 'output' and node.output_config: + generator_code = node.output_config.get('_generator_code', '') + if generator_code and generator_code.strip(): + needs_executor = True + # The generator code already contains imports and class definition + classes.append(f''' +# === Output Generator for {node.id} === +{generator_code} +''') + + # Check for conditional edges + for edge in request.edges: + if edge.is_conditional and edge.condition and edge.condition.condition_path: + if task_name in edge.condition.condition_path: + needs_executor = True + imports.add("from sygra.core.graph.sygra_state import SygraState") + + func_name = edge.condition.condition_path.split('.')[-1] + path_map_keys = list(edge.condition.path_map.keys()) if edge.condition.path_map else ['default'] + + functions.append(f''' +def {func_name}(state: SygraState) -> str: + """Conditional edge function from {edge.source} to determine next node.""" + # TODO: Implement condition logic + # Return one of: {path_map_keys} + # Access state variables: state["variable_name"] + return "{path_map_keys[0]}" +''') + + # Check if we have LLM nodes - always generate DefaultOutputGenerator for them + has_llm_nodes = any(n.node_type == 'llm' for n in request.nodes) + if has_llm_nodes: + needs_executor = True + imports.add("from typing import Any") + imports.add("from sygra.processors.output_record_generator import BaseOutputGenerator") + imports.add("from sygra.utils import utils") + + classes.append(''' +class DefaultOutputGenerator(BaseOutputGenerator): + """Output generator that converts LangChain messages to chat format.""" + + @staticmethod + def build_response(data: Any, state: dict) -> list: + """Convert LangChain AIMessage objects to serializable chat format.""" + return utils.convert_messages_from_langchain_to_chat_format(data) +''') + + if not needs_executor: + return None + + # Build the file content + code_parts = [ + '"""', + f'Task executor for {request.name} workflow.', + '', + 'This file contains custom processors, lambda functions, and conditional edge logic.', + '"""', + '', + ] + + # Add imports + for imp in sorted(imports): + code_parts.append(imp) + + code_parts.append('') + + # Add classes + for cls in classes: + code_parts.append(cls) + + # Add functions + for func in functions: + code_parts.append(func) + + return '\n'.join(code_parts) + + +def _extract_class_or_function_body(code_content: str) -> str: + """ + Extract just the class or function definition from code content, + removing any inline imports and docstrings that precede it. 
+ + Args: + code_content: Raw code that may include imports, docstrings, and class/function + + Returns: + Clean class or function definition without preceding imports + """ + lines = code_content.strip().split('\n') + result_lines = [] + in_class_or_func = False + class_indent = 0 + + for i, line in enumerate(lines): + stripped = line.strip() + + # Skip empty lines before class/function + if not in_class_or_func and not stripped: + continue + + # Skip import statements before class/function + if not in_class_or_func and (stripped.startswith('from ') or stripped.startswith('import ')): + continue + + # Skip module-level docstrings (before class/function) + if not in_class_or_func and (stripped.startswith('"""') or stripped.startswith("'''")): + # Skip until closing quote + if stripped.count('"""') == 1 or stripped.count("'''") == 1: + quote = '"""' if '"""' in stripped else "'''" + for j in range(i + 1, len(lines)): + if quote in lines[j]: + break + continue + + # Detect start of class or function + if stripped.startswith('class ') or stripped.startswith('def '): + in_class_or_func = True + class_indent = len(line) - len(line.lstrip()) + result_lines.append(line) + continue + + # Once in class/function, include everything + if in_class_or_func: + result_lines.append(line) + + return '\n'.join(result_lines) + + +def _detect_code_type_from_ast(code: str) -> Optional[Tuple[str, str, int, int]]: + """ + Use AST to detect the type of code based on base class inheritance. + + This is more robust than pattern matching on names because it actually + checks what the class inherits from. + + Args: + code: Python source code to analyze + + Returns: + Tuple of (code_type, name, start_line, end_line) or None if not recognized. + code_type is one of: 'pre_process', 'post_process', 'output_generator', + 'data_transform', 'lambda', 'branch_condition' + """ + import ast + + # Base class mappings - check these against actual inheritance + BASE_CLASS_TO_TYPE = { + # Pre-processors + 'NodePreProcessor': 'pre_process', + # Post-processors + 'NodePostProcessor': 'post_process', + 'NodePostProcessorWithState': 'post_process', + # Output generators + 'BaseOutputGenerator': 'output_generator', + # Data transforms + 'DataTransform': 'data_transform', + # Edge conditions (branch) + 'EdgeCondition': 'branch_condition', + # Lambda functions + 'LambdaFunction': 'lambda', + } + + try: + tree = ast.parse(code) + except SyntaxError: + return None + + for node in ast.walk(tree): + # Check class definitions + if isinstance(node, ast.ClassDef): + for base in node.bases: + # Handle simple base class names: class Foo(BaseClass) + if isinstance(base, ast.Name): + base_name = base.id + if base_name in BASE_CLASS_TO_TYPE: + end_line = node.end_lineno if hasattr(node, 'end_lineno') else node.lineno + return (BASE_CLASS_TO_TYPE[base_name], node.name, node.lineno, end_line) + + # Handle attribute access: class Foo(module.BaseClass) + elif isinstance(base, ast.Attribute): + base_name = base.attr + if base_name in BASE_CLASS_TO_TYPE: + end_line = node.end_lineno if hasattr(node, 'end_lineno') else node.lineno + return (BASE_CLASS_TO_TYPE[base_name], node.name, node.lineno, end_line) + + # Check function definitions (for lambda and branch_condition functions) + elif isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)): + # Lambda functions: typically named {node_id}_function + if node.name.endswith('_function'): + end_line = node.end_lineno if hasattr(node, 'end_lineno') else node.lineno + # Extract node_id from function name + 
node_id = node.name[:-9] # Remove '_function' suffix + return ('lambda', node.name, node.lineno, end_line) + + # Branch conditions: typically named {node_id}_condition + elif node.name.endswith('_condition'): + end_line = node.end_lineno if hasattr(node, 'end_lineno') else node.lineno + return ('branch_condition', node.name, node.lineno, end_line) + + return None + + +def _find_code_blocks_by_ast(content: str) -> List[Dict[str, Any]]: + """ + Parse file content using AST to find all code blocks and their types. + + Returns a list of dicts with: + - type: code type ('pre_process', 'post_process', etc.) + - name: class/function name + - node_id: extracted node ID from the name + - start_line: 1-indexed start line + - end_line: 1-indexed end line + - code: the extracted code string + """ + import ast + + BASE_CLASS_TO_TYPE = { + 'NodePreProcessor': 'pre_process', + 'NodePostProcessor': 'post_process', + 'NodePostProcessorWithState': 'post_process', + 'BaseOutputGenerator': 'output_generator', + 'DataTransform': 'data_transform', + 'EdgeCondition': 'branch_condition', + 'LambdaFunction': 'lambda', + } + + # Suffixes for extracting node_id from names + SUFFIX_PATTERNS = { + 'pre_process': ('PreProcessor',), + 'post_process': ('PostProcessor',), + 'output_generator': ('Generator',), + 'data_transform': ('Transform',), + 'lambda': ('Lambda', '_function'), # Class-based pattern first, function-based for backwards compat + 'branch_condition': ('Condition', '_condition'), # Class-based pattern first, function-based for backwards compat + } + + results = [] + lines = content.splitlines(keepends=True) + + try: + tree = ast.parse(content) + except SyntaxError: + return results + + for node in ast.walk(tree): + code_type = None + name = None + node_id = None + + # Check class definitions + if isinstance(node, ast.ClassDef): + for base in node.bases: + base_name = None + if isinstance(base, ast.Name): + base_name = base.id + elif isinstance(base, ast.Attribute): + base_name = base.attr + + if base_name and base_name in BASE_CLASS_TO_TYPE: + code_type = BASE_CLASS_TO_TYPE[base_name] + name = node.name + # Extract node_id from class name + for suffix in SUFFIX_PATTERNS.get(code_type, ()): + if name.endswith(suffix): + node_id = name[:-len(suffix)] + break + if not node_id: + node_id = name + break + + # Check function definitions + elif isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)): + if node.name.endswith('_function'): + code_type = 'lambda' + name = node.name + node_id = name[:-9] # Remove '_function' + elif node.name.endswith('_condition'): + code_type = 'branch_condition' + name = node.name + node_id = name[:-10] # Remove '_condition' + + if code_type and name: + start_line = node.lineno - 1 # Convert to 0-indexed + end_line = node.end_lineno if hasattr(node, 'end_lineno') else start_line + 1 + + # Extract the code + code_lines = lines[start_line:end_line] + code = ''.join(code_lines).rstrip() + + results.append({ + 'type': code_type, + 'name': name, + 'node_id': node_id, + 'start_line': start_line, + 'end_line': end_line, + 'code': code + }) + + return results + + +def _find_code_block_for_node(content: str, node_id: str, code_type: str) -> Optional[Tuple[int, int, str]]: + """ + Find a specific code block for a node ID and code type using AST. + + Args: + content: Full file content + node_id: The node ID to find + code_type: Type of code ('pre_process', 'post_process', etc.) 
+ + Returns: + Tuple of (start_pos, end_pos, code) for string replacement, or None if not found + """ + # First try marker-based detection + marker_map = { + 'pre_process': 'Pre-Processor', + 'post_process': 'Post-Processor', + 'lambda': 'Lambda Function', + 'branch_condition': 'Branch Condition', + 'output_generator': 'Output Generator', + 'data_transform': 'Data Transformation', + } + + marker_label = marker_map.get(code_type) + if marker_label: + # Pattern allows for blank lines between code blocks using \n\s* + marker_pattern = rf'# === {re.escape(marker_label)} for {re.escape(node_id)} ===\n.*?(?=\n\s*# ===|\n\s*class DefaultOutputGenerator|\Z)' + match = re.search(marker_pattern, content, re.DOTALL) + if match: + return (match.start(), match.end(), match.group(0)) + + # Fall back to AST-based detection + blocks = _find_code_blocks_by_ast(content) + lines = content.splitlines(keepends=True) + + # Normalize node_id for comparison + safe_node_id = re.sub(r'[^a-zA-Z0-9_]', '', node_id.replace('-', '_').replace(' ', '_')) + + for block in blocks: + if block['type'] != code_type: + continue + + # Compare normalized node IDs + block_safe_id = re.sub(r'[^a-zA-Z0-9_]', '', block['node_id'].replace('-', '_').replace(' ', '_')) + if block_safe_id == safe_node_id: + # Check if there's a marker comment preceding the class/function + # Look at the line(s) before the block start + actual_start_line = block['start_line'] + marker_prefix = f'# === {marker_label} for ' + + # Scan backwards to find marker comment (skip blank lines) + check_line = actual_start_line - 1 + while check_line >= 0: + line_content = lines[check_line].strip() + if line_content.startswith(marker_prefix): + # Found a marker, include it in the range + actual_start_line = check_line + break + elif line_content == '': + # Blank line, continue searching backwards + check_line -= 1 + else: + # Non-blank, non-marker line - stop searching + break + + # Calculate character positions from line numbers + start_pos = sum(len(lines[i]) for i in range(actual_start_line)) + end_pos = sum(len(lines[i]) for i in range(block['end_line'])) + return (start_pos, end_pos, block['code']) + + return None + + +def _is_stub_code(code: str, code_type: str) -> bool: + """ + Detect if code is just a stub/template without actual user modifications. 
+ + Stub patterns: + - Pre-processor: Only has "return state" with comments + - Post-processor: Only has "return {"response": resp.message.content}" with comments + - Data transform: Only has "return record" with comments + - Lambda/Branch: Only has "return" with no real logic + + Args: + code: The code to check + code_type: Type of code ('pre_process', 'post_process', 'data_transform', 'lambda', 'branch_condition', 'output_generator') + + Returns: + True if this is stub code that should not be saved + """ + if not code or not code.strip(): + return True + + # Extract just the meaningful lines (non-comment, non-empty, non-docstring) + lines = code.strip().split('\n') + meaningful_lines = [] + in_docstring = False + docstring_quote = None + + for line in lines: + stripped = line.strip() + + # Handle docstrings + if not in_docstring: + if stripped.startswith('"""') or stripped.startswith("'''"): + docstring_quote = '"""' if stripped.startswith('"""') else "'''" + # Check if docstring ends on same line + if stripped.count(docstring_quote) >= 2: + continue # Single-line docstring, skip + in_docstring = True + continue + else: + if docstring_quote in stripped: + in_docstring = False + continue + + # Skip empty lines + if not stripped: + continue + + # Skip comment lines + if stripped.startswith('#'): + continue + + # Skip class/def declarations + if stripped.startswith('class ') or stripped.startswith('def '): + continue + + # Skip decorator lines + if stripped.startswith('@'): + continue + + # This is a meaningful line + meaningful_lines.append(stripped) + + # Check for stub patterns based on code type + if code_type == 'pre_process': + # Stub pre-processor only has "return state" or assigns self.params + for line in meaningful_lines: + if line not in ('return state', 'self.params = params'): + return False + return True + + elif code_type == 'post_process': + # Stub post-processor only has return {"response": resp.message.content} + for line in meaningful_lines: + if line not in ('return {"response": resp.message.content}',): + return False + return True + + elif code_type == 'data_transform': + # Stub data transform only has "return data" or "return record" or assigns self.params + # Also allow the name property return (e.g., return "SomeTransform") + for line in meaningful_lines: + # Allow standard stub patterns + if line in ('return data', 'return record', 'self.params = params'): + continue + # Allow name property return (quoted string) + if line.startswith('return "') and line.endswith('"'): + continue + if line.startswith("return '") and line.endswith("'"): + continue + # Any other line means it's not a stub + return False + return True + + elif code_type == 'lambda': + # Stub lambda has no meaningful lines or just returns state + if not meaningful_lines: + return True + if len(meaningful_lines) == 1 and meaningful_lines[0].startswith('return'): + if meaningful_lines[0] in ('return state', 'return None', 'return'): + return True + return False + + elif code_type == 'branch_condition': + # Stub branch condition just returns a default string + if not meaningful_lines: + return True + if len(meaningful_lines) == 1 and meaningful_lines[0].startswith('return'): + # Check for stub returns like "return state", "return 'default'", etc. 
+ if meaningful_lines[0] in ('return state', 'return None', 'return', 'return "default"', "return 'default'"): + return True + return False + + elif code_type == 'output_generator': + # Stub output generator only has default return statements + stub_patterns = ( + 'return super()._build_record(state)', + 'return {}', + 'return', + ) + for line in meaningful_lines: + if line not in stub_patterns: + return False + return True + + return False + + +def _rebuild_task_executor(workflow_dir: Path) -> bool: + """ + Rebuild the entire task_executor.py file from stored node metadata. + + This creates a clean, well-organized file with: + - Consolidated imports at the top + - Organized sections for each code type + - No duplicate imports + + Args: + workflow_dir: Path to the workflow directory + + Returns: + True if successful, False otherwise + """ + task_executor_path = workflow_dir / "task_executor.py" + task_name = workflow_dir.name + + # Collect all imports needed + imports = set() + imports.add('from typing import Any, Dict') + + # Code sections + data_transforms = [] + pre_processors = [] + post_processors = [] + lambda_functions = [] + branch_conditions = [] + output_generators = [] + + # Read existing file to extract current code blocks + existing_content = "" + has_default_generator = False + + if task_executor_path.exists(): + try: + with open(task_executor_path, 'r') as f: + existing_content = f.read() + except Exception: + pass + + # Parse existing code blocks using AST-based detection (checks base class inheritance) + if existing_content: + # Track node_ids we've already extracted to avoid duplicates + extracted_ids = { + 'data_transform': set(), + 'pre_process': set(), + 'post_process': set(), + 'lambda': set(), + 'branch_condition': set(), + 'output_generator': set(), + } + + # First, extract marker-based blocks (these take precedence) + # Note: The lookahead allows for blank lines between code blocks using \n\s* + marker_patterns = { + 'data_transform': r'# === Data Transformation for ([^\n]+) ===\n(.*?)(?=\n\s*# ===|\n\s*class DefaultOutputGenerator|\Z)', + 'pre_process': r'# === Pre-Processor for ([^\n]+) ===\n(.*?)(?=\n\s*# ===|\n\s*class DefaultOutputGenerator|\Z)', + 'post_process': r'# === Post-Processor for ([^\n]+) ===\n(.*?)(?=\n\s*# ===|\n\s*class DefaultOutputGenerator|\Z)', + 'lambda': r'# === Lambda Function for ([^\n]+) ===\n(.*?)(?=\n\s*# ===|\n\s*class DefaultOutputGenerator|\Z)', + 'branch_condition': r'# === Branch Condition for ([^\n]+) ===\n(.*?)(?=\n\s*# ===|\n\s*class DefaultOutputGenerator|\Z)', + 'output_generator': r'# === Output Generator for ([^\n]+) ===\n(.*?)(?=\n\s*# ===|\n\s*class DefaultOutputGenerator|\Z)', + } + + code_lists = { + 'data_transform': data_transforms, + 'pre_process': pre_processors, + 'post_process': post_processors, + 'lambda': lambda_functions, + 'branch_condition': branch_conditions, + 'output_generator': output_generators, + } + + # Extract marker-based blocks first + for code_type, pattern in marker_patterns.items(): + for match in re.finditer(pattern, existing_content, re.DOTALL): + node_id = match.group(1).strip() + code = match.group(2).strip() + if code and node_id not in extracted_ids[code_type]: + clean_code = _extract_class_or_function_body(code) + if clean_code and not _is_stub_code(clean_code, code_type): + code_lists[code_type].append((node_id, clean_code)) + extracted_ids[code_type].add(node_id) + + # Now use AST-based detection for code without markers + # This checks actual base class inheritance for robust detection 
+ ast_blocks = _find_code_blocks_by_ast(existing_content) + + for block in ast_blocks: + code_type = block['type'] + node_id = block['node_id'] + code = block['code'] + + # Skip if already extracted via marker + if node_id in extracted_ids.get(code_type, set()): + continue + + # Skip DefaultOutputGenerator + if block['name'] == 'DefaultOutputGenerator': + continue + + # Skip stub code + if _is_stub_code(code, code_type): + continue + + # Add to appropriate list + if code_type in code_lists: + code_lists[code_type].append((node_id, code)) + extracted_ids[code_type].add(node_id) + + # Add required imports based on what was found + if pre_processors: + imports.add('from sygra.core.graph.functions.node_processor import NodePreProcessor') + imports.add('from sygra.core.graph.sygra_state import SygraState') + if post_processors: + imports.add('from sygra.core.graph.functions.node_processor import NodePostProcessor') + imports.add('from sygra.core.graph.functions.node_processor import NodePostProcessorWithState') + imports.add('from sygra.core.graph.sygra_message import SygraMessage') + imports.add('from sygra.core.graph.sygra_state import SygraState') + if lambda_functions: + imports.add('from sygra.core.graph.functions.lambda_function import LambdaFunction') + imports.add('from sygra.core.graph.sygra_state import SygraState') + if branch_conditions: + imports.add('from sygra.core.graph.functions.edge_condition import EdgeCondition') + imports.add('from sygra.core.graph.sygra_state import SygraState') + if output_generators: + imports.add('from sygra.processors.output_record_generator import BaseOutputGenerator') + if data_transforms: + imports.add('from sygra.processors.data_transform import DataTransform') + + # Check for DefaultOutputGenerator - only keep if explicitly present + has_default_generator = 'class DefaultOutputGenerator' in existing_content and not output_generators + + # Build the clean file content + content_parts = [ + '"""', + f'Task executor for {task_name} workflow.', + '', + 'This file contains custom processors, lambda functions, and conditional edge logic.', + '"""', + '', + ] + + # Add utility imports if needed for DefaultOutputGenerator + # Only add these imports if we're actually including the DefaultOutputGenerator + if has_default_generator: + imports.add('from sygra.processors.output_record_generator import BaseOutputGenerator') + imports.add('from sygra.utils import utils') + + # Add sorted imports (deduplicated via set) + sorted_imports = sorted(imports) + for imp in sorted_imports: + content_parts.append(imp) + + content_parts.append('') + content_parts.append('') + + # Add Data Transformations section + if data_transforms: + for node_id, code in data_transforms: + content_parts.append(f'# === Data Transformation for {node_id} ===') + content_parts.append(code) + content_parts.append('') + content_parts.append('') + + # Add Pre-Processors section + if pre_processors: + for node_id, code in pre_processors: + content_parts.append(f'# === Pre-Processor for {node_id} ===') + content_parts.append(code) + content_parts.append('') + content_parts.append('') + + # Add Post-Processors section + if post_processors: + for node_id, code in post_processors: + content_parts.append(f'# === Post-Processor for {node_id} ===') + content_parts.append(code) + content_parts.append('') + content_parts.append('') + + # Add Lambda Functions section + if lambda_functions: + for node_id, code in lambda_functions: + content_parts.append(f'# === Lambda Function for {node_id} ===') + 
content_parts.append(code) + content_parts.append('') + content_parts.append('') + + # Add Branch Conditions section + if branch_conditions: + for node_id, code in branch_conditions: + content_parts.append(f'# === Branch Condition for {node_id} ===') + content_parts.append(code) + content_parts.append('') + content_parts.append('') + + # Add Output Generators section + if output_generators: + for node_id, code in output_generators: + content_parts.append(f'# === Output Generator for {node_id} ===') + content_parts.append(code) + content_parts.append('') + content_parts.append('') + elif has_default_generator: + # Only add DefaultOutputGenerator if it was explicitly in the file before + # Don't add it automatically - user must explicitly want it + content_parts.append('class DefaultOutputGenerator(BaseOutputGenerator):') + content_parts.append(' """Output generator that converts LangChain messages to chat format."""') + content_parts.append('') + content_parts.append(' @staticmethod') + content_parts.append(' def build_response(data: Any, state: dict) -> list:') + content_parts.append(' """Convert LangChain AIMessage objects to serializable chat format."""') + content_parts.append(' return utils.convert_messages_from_langchain_to_chat_format(data)') + content_parts.append('') + + try: + with open(task_executor_path, 'w') as f: + f.write('\n'.join(content_parts)) + return True + except Exception as e: + print(f"Error writing task_executor.py: {e}") + return False + + +def _update_task_executor_code( + workflow_dir: Path, + node_id: str, + code_type: str, # 'pre_process', 'post_process', 'lambda', 'branch_condition', 'output_generator', 'data_transform' + code_content: str, + node_summary: Optional[str] = None +) -> bool: + """ + Update the task_executor.py file with new processor/function code. + + Uses AST-based detection to find and replace code blocks by checking + base class inheritance. No markers needed - single source of truth. 
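+
+ Class names are expected to follow the '{node_id}{Suffix}' convention used by
+ _find_code_block_by_ast (for example, the 'pre_process' class for node 'my_node'
+ is matched as 'my_nodePreProcessor').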
+ + Args: + workflow_dir: Path to the workflow directory + node_id: The node ID (used to generate class/function names) + code_type: Type of code ('pre_process', 'post_process', 'lambda', 'branch_condition', 'output_generator', 'data_transform') + code_content: The Python code content to save (can be empty to delete) + node_summary: Optional node summary for docstrings + + Returns: + True if successful, False otherwise + """ + import ast + + task_executor_path = workflow_dir / "task_executor.py" + + valid_types = {'pre_process', 'post_process', 'lambda', 'branch_condition', 'output_generator', 'data_transform'} + if code_type not in valid_types: + print(f"Unknown code_type: {code_type}") + return False + + # Read existing content + existing_content = "" + if task_executor_path.exists(): + try: + with open(task_executor_path, 'r') as f: + existing_content = f.read() + except Exception as e: + print(f"Error reading task_executor.py: {e}") + return False + + # Extract just the class/function body from the code content + clean_code = _extract_class_or_function_body(code_content) if code_content else "" + + # Skip saving if this is stub code (user hasn't added real logic) + if clean_code and _is_stub_code(clean_code, code_type): + # Don't save stub code - return success but don't write + return True + + # If file doesn't exist or is empty, create it with the code + if not existing_content.strip(): + if not clean_code: + return True # Nothing to write + + task_name = workflow_dir.name + content = _create_task_executor_file(task_name, code_type, clean_code) + try: + with open(task_executor_path, 'w') as f: + f.write(content) + return True + except Exception as e: + print(f"Error writing task_executor.py: {e}") + return False + + # File exists - find the code block using AST + found_block = _find_code_block_by_ast(existing_content, node_id, code_type) + + if clean_code: + # Ensure required imports exist + updated_content = _ensure_imports(existing_content, code_type) + + if found_block: + # Replace existing block + start_pos, end_pos = found_block + # Recalculate positions if imports were added + if updated_content != existing_content: + # Re-find the block in updated content + found_block = _find_code_block_by_ast(updated_content, node_id, code_type) + if found_block: + start_pos, end_pos = found_block + else: + # Block no longer found after import changes, add at end + updated_content = updated_content.rstrip() + '\n\n\n' + clean_code + '\n' + try: + with open(task_executor_path, 'w') as f: + f.write(updated_content) + return True + except Exception as e: + print(f"Error writing task_executor.py: {e}") + return False + + updated_content = updated_content[:start_pos] + clean_code + '\n' + updated_content[end_pos:] + else: + # Add new code at the end + updated_content = updated_content.rstrip() + '\n\n\n' + clean_code + '\n' + + # Clean up excessive newlines + updated_content = re.sub(r'\n{4,}', '\n\n\n', updated_content) + else: + # Delete the code block (empty code_content) + if found_block: + start_pos, end_pos = found_block + updated_content = existing_content[:start_pos] + existing_content[end_pos:] + updated_content = re.sub(r'\n{3,}', '\n\n', updated_content) + else: + updated_content = existing_content + + # Write the updated content + try: + with open(task_executor_path, 'w') as f: + f.write(updated_content) + return True + except Exception as e: + print(f"Error writing task_executor.py: {e}") + return False + + +def _create_task_executor_file(task_name: str, code_type: str, code: str) -> 
str: + """Create a new task_executor.py file with initial code.""" + imports = _get_imports_for_code_type(code_type) + + content_parts = [ + '"""', + f'Task executor for {task_name} workflow.', + '', + 'This file contains custom processors, lambda functions, and conditional edge logic.', + '"""', + '', + 'from typing import Any, Dict', + ] + content_parts.extend(imports) + content_parts.extend(['', '', code, '']) + + return '\n'.join(content_parts) + + +def _get_imports_for_code_type(code_type: str) -> List[str]: + """Get the required imports for a code type.""" + imports = [] + if code_type == 'pre_process': + imports.append('from sygra.core.graph.functions.node_processor import NodePreProcessor') + imports.append('from sygra.core.graph.sygra_state import SygraState') + elif code_type == 'post_process': + imports.append('from sygra.core.graph.functions.node_processor import NodePostProcessor') + imports.append('from sygra.core.graph.sygra_message import SygraMessage') + imports.append('from sygra.core.graph.sygra_state import SygraState') + elif code_type == 'lambda': + imports.append('from sygra.core.graph.functions.lambda_function import LambdaFunction') + imports.append('from sygra.core.graph.sygra_state import SygraState') + elif code_type == 'branch_condition': + imports.append('from sygra.core.graph.functions.edge_condition import EdgeCondition') + imports.append('from sygra.core.graph.sygra_state import SygraState') + elif code_type == 'output_generator': + imports.append('from sygra.processors.output_record_generator import BaseOutputGenerator') + imports.append('from sygra.core.graph.sygra_state import SygraState') + elif code_type == 'data_transform': + imports.append('from sygra.processors.data_transform import DataTransform') + return imports + + +def _ensure_imports(content: str, code_type: str) -> str: + """Ensure the required imports for a code type exist in the file.""" + required_imports = _get_imports_for_code_type(code_type) + + # Find where imports end (look for first class or function definition) + lines = content.split('\n') + import_end_idx = 0 + + for i, line in enumerate(lines): + stripped = line.strip() + if stripped.startswith('from ') or stripped.startswith('import '): + import_end_idx = i + 1 + elif stripped.startswith('class ') or stripped.startswith('def ') or stripped.startswith('@'): + break + + # Check which imports are missing + missing_imports = [] + for imp in required_imports: + if imp not in content: + missing_imports.append(imp) + + if not missing_imports: + return content + + # Insert missing imports + new_lines = lines[:import_end_idx] + missing_imports + lines[import_end_idx:] + return '\n'.join(new_lines) + + +def _find_code_block_by_ast(content: str, node_id: str, code_type: str) -> Optional[Tuple[int, int]]: + """ + Find a code block for a node using AST-based detection. + + Returns (start_pos, end_pos) character positions for replacement, or None if not found. 
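+
+ A block matches only when its base class maps to the given code_type and its
+ class name corresponds to the node_id.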
+ """ + import ast + + # Base class to code type mapping + BASE_CLASS_TO_TYPE = { + 'NodePreProcessor': 'pre_process', + 'NodePostProcessor': 'post_process', + 'NodePostProcessorWithState': 'post_process', + 'BaseOutputGenerator': 'output_generator', + 'DataTransform': 'data_transform', + 'EdgeCondition': 'branch_condition', + 'LambdaFunction': 'lambda', + } + + # Expected class name suffixes + SUFFIX_MAP = { + 'pre_process': 'PreProcessor', + 'post_process': 'PostProcessor', + 'output_generator': 'Generator', + 'data_transform': 'Transform', + 'lambda': 'Lambda', + 'branch_condition': 'Condition', + } + + # Normalize node_id for comparison + safe_node_id = re.sub(r'[^a-zA-Z0-9_]', '', node_id.replace('-', '_').replace(' ', '_')) + expected_suffix = SUFFIX_MAP.get(code_type, '') + expected_name = f"{safe_node_id}{expected_suffix}" + + try: + tree = ast.parse(content) + except SyntaxError: + return None + + lines = content.splitlines(keepends=True) + + for node in ast.walk(tree): + if isinstance(node, ast.ClassDef): + # Check if class name matches expected pattern + class_name = node.name + class_safe_id = class_name[:-len(expected_suffix)] if class_name.endswith(expected_suffix) else class_name + + # Check base class inheritance + detected_type = None + for base in node.bases: + base_name = None + if isinstance(base, ast.Name): + base_name = base.id + elif isinstance(base, ast.Attribute): + base_name = base.attr + + if base_name and base_name in BASE_CLASS_TO_TYPE: + detected_type = BASE_CLASS_TO_TYPE[base_name] + break + + # Match if type and node_id match + if detected_type == code_type: + # Check if the class name contains the node_id + normalized_class_id = re.sub(r'[^a-zA-Z0-9_]', '', class_safe_id.replace('-', '_')) + if normalized_class_id == safe_node_id or class_name == expected_name: + start_line = node.lineno - 1 # 0-indexed + end_line = node.end_lineno if hasattr(node, 'end_lineno') else start_line + 1 + + start_pos = sum(len(lines[i]) for i in range(start_line)) + end_pos = sum(len(lines[i]) for i in range(end_line)) + return (start_pos, end_pos) + + return None + + +def _get_node_code_from_file(content: str, node_id: str, code_type: str) -> Optional[str]: + """ + Extract the code for a specific node from file content using AST. + + Returns the code string if found, None otherwise. 
+ """ + import ast + + # Base class to code type mapping + BASE_CLASS_TO_TYPE = { + 'NodePreProcessor': 'pre_process', + 'NodePostProcessor': 'post_process', + 'NodePostProcessorWithState': 'post_process', + 'BaseOutputGenerator': 'output_generator', + 'DataTransform': 'data_transform', + 'EdgeCondition': 'branch_condition', + 'LambdaFunction': 'lambda', + } + + # Expected class name suffixes + SUFFIX_MAP = { + 'pre_process': 'PreProcessor', + 'post_process': 'PostProcessor', + 'output_generator': 'Generator', + 'data_transform': 'Transform', + 'lambda': 'Lambda', + 'branch_condition': 'Condition', + } + + # Normalize node_id for comparison + safe_node_id = re.sub(r'[^a-zA-Z0-9_]', '', node_id.replace('-', '_').replace(' ', '_')) + expected_suffix = SUFFIX_MAP.get(code_type, '') + expected_name = f"{safe_node_id}{expected_suffix}" + + try: + tree = ast.parse(content) + except SyntaxError: + return None + + lines = content.splitlines(keepends=True) + + for node in ast.walk(tree): + if isinstance(node, ast.ClassDef): + class_name = node.name + class_safe_id = class_name[:-len(expected_suffix)] if class_name.endswith(expected_suffix) else class_name + + # Check base class inheritance + detected_type = None + for base in node.bases: + base_name = None + if isinstance(base, ast.Name): + base_name = base.id + elif isinstance(base, ast.Attribute): + base_name = base.attr + + if base_name and base_name in BASE_CLASS_TO_TYPE: + detected_type = BASE_CLASS_TO_TYPE[base_name] + break + + # Match if type and node_id match + if detected_type == code_type: + normalized_class_id = re.sub(r'[^a-zA-Z0-9_]', '', class_safe_id.replace('-', '_')) + if normalized_class_id == safe_node_id or class_name == expected_name: + start_line = node.lineno - 1 # 0-indexed + end_line = node.end_lineno if hasattr(node, 'end_lineno') else start_line + 1 + + # Extract the code + code_lines = lines[start_line:end_line] + return ''.join(code_lines).rstrip() + + return None + + +def _execute_workflow_subprocess( + task_name: str, + args_dict: dict, + result_queue: multiprocessing.Queue, + log_queue: multiprocessing.Queue, + node_queue: multiprocessing.Queue = None, +): + """ + Function that runs in a subprocess to execute the workflow. + + This allows us to terminate the process mid-execution to save LLM costs. + Logs are sent back to the main process via log_queue. + Node execution events are sent via node_queue for real-time UI updates. 
+ """ + try: + from sygra.core.base_task_executor import DefaultTaskExecutor + from sygra.core.execution_callbacks import ExecutionCallbacks + from sygra.utils import utils + from sygra.logger.logger_config import set_external_logger + from argparse import Namespace + from datetime import datetime + + # Create a logger that sends logs to the queue + class QueueLogger: + def __init__(self, queue): + self.queue = queue + + def _send(self, level: str, msg: str): + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S,%f")[:-3] + self.queue.put(f"{timestamp} - {level} - {msg}") + + def debug(self, msg: str): self._send("DEBUG", msg) + def info(self, msg: str): self._send("INFO", msg) + def warn(self, msg: str): self._send("WARNING", msg) + def warning(self, msg: str): self._send("WARNING", msg) + def error(self, msg: str): self._send("ERROR", msg) + def exception(self, msg: str): self._send("ERROR", msg) + + # Set up log capture + queue_logger = QueueLogger(log_queue) + set_external_logger(queue_logger) + + # Reconstruct args namespace + args = Namespace(**args_dict) + + # Set current task for SyGra utils + utils.current_task = task_name + + # Create execution callbacks for real-time node tracking + execution_callbacks = None + if node_queue is not None: + def on_node_start(node_name: str, input_data: dict): + node_queue.put({ + "event": "node_start", + "node_name": node_name, + "timestamp": datetime.now().isoformat(), + }) + + def on_node_complete(node_name: str, output_data: dict, duration_ms: int): + node_queue.put({ + "event": "node_complete", + "node_name": node_name, + "duration_ms": duration_ms, + "timestamp": datetime.now().isoformat(), + }) + + def on_node_error(node_name: str, error_msg: str, context: dict): + node_queue.put({ + "event": "node_error", + "node_name": node_name, + "error": error_msg, + "timestamp": datetime.now().isoformat(), + }) + + execution_callbacks = ExecutionCallbacks( + on_node_start=on_node_start, + on_node_complete=on_node_complete, + on_node_error=on_node_error, + ) + + # Create and run task executor + executor = DefaultTaskExecutor(args) + executor.execute(execution_callbacks=execution_callbacks) + + result_queue.put({"status": "completed"}) + except Exception as e: + import traceback + from datetime import datetime as dt + log_queue.put(f"{dt.now().strftime('%Y-%m-%d %H:%M:%S')} - ERROR - {type(e).__name__}: {str(e)}") + result_queue.put({ + "status": "failed", + "error": f"{type(e).__name__}: {str(e)}", + "traceback": traceback.format_exc() + }) + + +class ExecutionLogCapture: + """ + Logger that captures SyGra logs to an execution's logs list. + + Implements SyGra's ExternalLoggerProtocol to intercept all SyGra logs. 
+ """ + + def __init__(self, execution: WorkflowExecution): + self.execution = execution + + def _format_msg(self, level: str, msg: str) -> str: + """Format log message with timestamp like SyGra's default format.""" + from datetime import datetime + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S,%f")[:-3] + return f"{timestamp} - {level} - {msg}" + + def debug(self, msg: str) -> None: + self.execution.logs.append(self._format_msg("DEBUG", msg)) + + def info(self, msg: str) -> None: + self.execution.logs.append(self._format_msg("INFO", msg)) + + def warn(self, msg: str) -> None: + self.execution.logs.append(self._format_msg("WARNING", msg)) + + def error(self, msg: str) -> None: + self.execution.logs.append(self._format_msg("ERROR", msg)) + + def exception(self, msg: str) -> None: + self.execution.logs.append(self._format_msg("ERROR", msg)) + + +async def _run_workflow( + execution_id: str, + workflow: WorkflowGraph, + request: ExecutionRequest, +) -> None: + """ + Background task to run a SyGra workflow in a subprocess. + + Using multiprocessing allows us to terminate the execution mid-flight + when cancellation is requested, saving LLM costs. + + Args: + execution_id: The execution ID. + workflow: The workflow graph to execute. + request: The execution request. + """ + import os + import glob + import json + + execution = _executions[execution_id] + + # Check if already cancelled before starting + if execution_id in _cancelled_executions: + execution.status = ExecutionStatus.CANCELLED + execution.completed_at = datetime.now() + _cancelled_executions.discard(execution_id) + return + + execution.status = ExecutionStatus.RUNNING + + try: + # Load the workflow configuration + if not workflow.source_path: + raise ValueError("Workflow source path not available") + + # Extract task name from source path + source_path = workflow.source_path + if source_path.endswith("/graph_config.yaml"): + task_dir = os.path.dirname(source_path) + else: + task_dir = source_path + + # Convert path to task module format + task_name = task_dir.replace("/", ".").replace("\\", ".") + if not task_name.startswith("tasks."): + task_name = f"tasks.{task_name}" if not task_name.startswith("tasks") else task_name + + # Determine output directory and run name + effective_output_dir = request.output_dir if request.output_dir else task_dir + effective_run_name = request.run_name if request.run_name else f"studio_{execution_id[:8]}" + + # Create args dict for subprocess (must be picklable) + args_dict = { + "task": task_name, + "start_index": request.start_index, + "num_records": request.num_records, + "batch_size": request.batch_size, + "checkpoint_interval": request.checkpoint_interval, + "debug": request.debug, + "clear_logs": False, + "output_with_ts": request.output_with_ts, + "run_name": effective_run_name, + "run_args": request.run_args or {}, + "resume": request.resume, + "output_dir": effective_output_dir, + "oasst": False, + "quality": request.quality, + "disable_metadata": request.disable_metadata, + } + + # Update node states + execution.current_node = "START" + if "START" in execution.node_states: + execution.node_states["START"].status = ExecutionStatus.COMPLETED + execution.node_states["START"].completed_at = datetime.now() + + # Create queues for subprocess communication + result_queue = multiprocessing.Queue() + log_queue = multiprocessing.Queue() + node_queue = multiprocessing.Queue() + + process = multiprocessing.Process( + target=_execute_workflow_subprocess, + args=(task_name, args_dict, 
result_queue, log_queue, node_queue) + ) + + # Store process for potential cancellation + _running_processes[execution_id] = process + + execution.logs.append(f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - INFO - Starting workflow execution in subprocess...") + process.start() + + # Helper to drain logs from queue + def drain_logs(): + while True: + try: + log_msg = log_queue.get_nowait() + execution.logs.append(log_msg) + except: + break + + # Helper to drain node events and update node states + def drain_node_events(): + while True: + try: + event = node_queue.get_nowait() + node_name = event.get("node_name") + event_type = event.get("event") + + if node_name and node_name in execution.node_states: + node_state = execution.node_states[node_name] + + if event_type == "node_start": + node_state.status = ExecutionStatus.RUNNING + node_state.started_at = datetime.fromisoformat(event.get("timestamp")) + execution.current_node = node_name + + elif event_type == "node_complete": + node_state.status = ExecutionStatus.COMPLETED + node_state.completed_at = datetime.fromisoformat(event.get("timestamp")) + node_state.duration_ms = event.get("duration_ms", 0) + + elif event_type == "node_error": + node_state.status = ExecutionStatus.FAILED + node_state.error = event.get("error") + node_state.completed_at = datetime.fromisoformat(event.get("timestamp")) + except: + break + + # Poll for completion while checking for cancellation and collecting logs + while process.is_alive(): + # Collect any pending logs and node events + drain_logs() + drain_node_events() + + if execution_id in _cancelled_executions: + execution.logs.append(f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - INFO - Cancellation requested, terminating execution...") + process.terminate() + process.join(timeout=5) + if process.is_alive(): + process.kill() + drain_logs() # Get any final logs + drain_node_events() # Get any final node events + execution.status = ExecutionStatus.CANCELLED + execution.completed_at = datetime.now() + _cancelled_executions.discard(execution_id) + if execution_id in _running_processes: + del _running_processes[execution_id] + # Close queues to prevent resource leaks + try: + log_queue.close() + result_queue.close() + node_queue.close() + except: + pass + # Persist execution history + _save_executions() + return + await asyncio.sleep(0.3) # Check every 300ms for more responsive log updates + + process.join() + + # Drain any remaining logs and node events + drain_logs() + drain_node_events() + + # Clean up process reference + if execution_id in _running_processes: + del _running_processes[execution_id] + + # Check if cancelled externally (e.g., via cancel endpoint terminating process) + # This handles the race condition where the process was killed externally + if execution_id in _cancelled_executions or execution.status == ExecutionStatus.CANCELLED: + execution.status = ExecutionStatus.CANCELLED + execution.completed_at = datetime.now() + _cancelled_executions.discard(execution_id) + # Close queues + try: + log_queue.close() + result_queue.close() + node_queue.close() + except: + pass + _save_executions() + return + + # Get result from queue + result = None + try: + result = result_queue.get_nowait() + except: + pass + + # Close queues to prevent resource leaks + try: + log_queue.close() + log_queue.join_thread() + result_queue.close() + result_queue.join_thread() + node_queue.close() + node_queue.join_thread() + except: + pass + + if result and result.get("status") == "failed": + raise 
Exception(result.get("error", "Unknown error")) + + # Final check: if cancelled during execution, don't mark as completed + if execution_id in _cancelled_executions or execution.status == ExecutionStatus.CANCELLED: + execution.status = ExecutionStatus.CANCELLED + if not execution.completed_at: + execution.completed_at = datetime.now() + _cancelled_executions.discard(execution_id) + _save_executions() + return + + execution.logs.append(f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - INFO - Workflow execution completed") + + # Mark all nodes as completed + for node_id, node_state in execution.node_states.items(): + if node_state.status == ExecutionStatus.PENDING: + node_state.status = ExecutionStatus.COMPLETED + node_state.completed_at = datetime.now() + + execution.status = ExecutionStatus.COMPLETED + execution.completed_at = datetime.now() + + # Find the output file and read its contents + import glob + import json + output_pattern = f"{effective_output_dir}/{effective_run_name}_output_*.json" + output_files = glob.glob(output_pattern) + if output_files: + # Get the most recent file + execution.output_file = max(output_files, key=os.path.getmtime) + + # Read and parse the output data + try: + with open(execution.output_file, 'r') as f: + output_content = f.read().strip() + if not output_content: + execution.output_data = None + else: + # Try parsing as JSON first (handles arrays and objects) + try: + execution.output_data = json.loads(output_content) + except json.JSONDecodeError: + # Fallback to JSONL format (one JSON object per line) + execution.output_data = [json.loads(line) for line in output_content.split('\n') if line.strip()] + except Exception as read_err: + execution.output_data = result # Fallback to executor result + else: + execution.output_data = result + + # Find and load metadata file + metadata_dir = f"{effective_output_dir}/metadata" + if os.path.exists(metadata_dir): + metadata_pattern = f"{metadata_dir}/metadata_*.json" + metadata_files = glob.glob(metadata_pattern) + if metadata_files: + # Get the most recent metadata file + execution.metadata_file = max(metadata_files, key=os.path.getmtime) + try: + with open(execution.metadata_file, 'r') as f: + execution.metadata = json.load(f) + except Exception as meta_err: + execution.logs.append(f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - WARNING - Failed to load metadata: {meta_err}") + + if execution.started_at: + duration = (execution.completed_at - execution.started_at).total_seconds() + execution.duration_ms = int(duration * 1000) + + # Persist execution history + _save_executions() + + except Exception as e: + import traceback + error_msg = f"{type(e).__name__}: {str(e)}" + execution.logs.append(f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - ERROR - Workflow execution failed: {error_msg}") + execution.logs.append(traceback.format_exc()) + + execution.status = ExecutionStatus.FAILED + execution.error = error_msg + execution.completed_at = datetime.now() + + # Mark current node as failed, and all pending nodes as cancelled + for node_id, node_state in execution.node_states.items(): + if node_id == execution.current_node: + # The node that failed + node_state.status = ExecutionStatus.FAILED + node_state.error = error_msg + node_state.completed_at = datetime.now() + elif node_state.status == ExecutionStatus.PENDING: + # Nodes that never ran due to the failure - mark as cancelled + node_state.status = ExecutionStatus.CANCELLED + elif node_state.status == ExecutionStatus.RUNNING: + # Any running node should also be 
marked as failed + node_state.status = ExecutionStatus.FAILED + node_state.error = "Execution aborted due to failure" + node_state.completed_at = datetime.now() + + # Clean up process if it exists + if execution_id in _running_processes: + del _running_processes[execution_id] + + # Persist execution history + _save_executions() + + finally: + # Clean up cancellation tracking + _cancelled_executions.discard(execution_id) + + +# Create default app instance +app = create_app() + + +if __name__ == "__main__": + import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8000) diff --git a/studio/config/.gitkeep b/studio/config/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/studio/converter.py b/studio/converter.py new file mode 100644 index 00000000..0ba68672 --- /dev/null +++ b/studio/converter.py @@ -0,0 +1,426 @@ +from typing import Any, Dict, List, Optional + +from studio.models import ( + NodeType, + WorkflowGraph, +) +from sygra.utils import utils + + +class SygraToStudioConverter: + """ + Converts SyGra workflow configurations to OpenFlow format. + + This enables SyGra workflows to be visualized and potentially execute. + """ + + # Map SyGra node types to Studio module types + MODULE_TYPE_MAP = { + NodeType.LLM: "rawscript", + NodeType.MULTI_LLM: "rawscript", + NodeType.AGENT: "rawscript", + NodeType.WEB_AGENT: "rawscript", + NodeType.LAMBDA: "rawscript", + NodeType.SUBGRAPH: "flow", + NodeType.WEIGHTED_SAMPLER: "rawscript", + NodeType.TOOL: "rawscript", + NodeType.START: "identity", + NodeType.END: "identity", + NodeType.BRANCH: "branchone", + NodeType.LOOP: "forloopflow", + } + + def convert_workflow(self, workflow: WorkflowGraph) -> Dict[str, Any]: + """ + Convert a SyGra WorkflowGraph to OpenFlow format. + + Args: + workflow: SyGra WorkflowGraph object. + + Returns: + Dictionary in OpenFlow format. + """ + modules = self._convert_nodes_to_modules(workflow) + + openflow = { + "summary": workflow.name, + "description": workflow.description or "", + "value": { + "modules": modules, + "failure_module": None, + "preprocessor_module": None, + "same_worker": False, + }, + "schema": self._build_input_schema(workflow), + } + + return openflow + + def convert_from_yaml(self, yaml_path: str) -> Dict[str, Any]: + """ + Convert a SyGra YAML config directly to OpenFlow format. + + Args: + yaml_path: Path to the graph_config.yaml file. + + Returns: + Dictionary in OpenFlow format. + """ + from studio.graph_builder import build_graph_from_yaml + + workflow = build_graph_from_yaml(yaml_path) + return self.convert_workflow(workflow) + + def convert_from_config(self, config: Dict[str, Any]) -> Dict[str, Any]: + """ + Convert a SyGra config dictionary to OpenFlow format. + + Args: + config: SyGra configuration dictionary. + + Returns: + Dictionary in OpenFlow format. 
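+
+ Equivalent to calling build_graph_from_config() followed by convert_workflow().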
+ """ + from studio.graph_builder import build_graph_from_config + + workflow = build_graph_from_config(config) + return self.convert_workflow(workflow) + + def _convert_nodes_to_modules(self, workflow: WorkflowGraph) -> List[Dict[str, Any]]: + """Convert SyGra nodes to flow modules.""" + modules = [] + + # Build edge map for determining flow structure + edge_map = self._build_edge_map(workflow) + + # Find the execution order using topological sort + execution_order = self._topological_sort(workflow) + + for node_id in execution_order: + # Skip START and END - they're implicit in Studio + if node_id in ("START", "END"): + continue + + node = next((n for n in workflow.nodes if n.id == node_id), None) + if node is None: + continue + + module = self._convert_node_to_module(node, edge_map, workflow) + if module: + modules.append(module) + + return modules + + def _convert_node_to_module( + self, + node: Any, # WorkflowNode + edge_map: Dict[str, List[Dict[str, Any]]], + workflow: WorkflowGraph + ) -> Optional[Dict[str, Any]]: + """Convert a single SyGra node to a Studio module.""" + + # Check if this node has conditional edges (branching) + outgoing_edges = edge_map.get(node.id, []) + has_conditional = any(e.get("is_conditional") for e in outgoing_edges) + + if has_conditional: + return self._create_branch_module(node, outgoing_edges, workflow) + + # Standard module based on node type + if node.node_type == NodeType.LLM: + return self._create_llm_module(node) + elif node.node_type == NodeType.LAMBDA: + return self._create_lambda_module(node) + elif node.node_type == NodeType.SUBGRAPH: + return self._create_subflow_module(node) + elif node.node_type == NodeType.WEIGHTED_SAMPLER: + return self._create_sampler_module(node) + else: + return self._create_identity_module(node) + + def _create_llm_module(self, node: Any) -> Dict[str, Any]: + """Create a Studio module for an LLM node.""" + # Build the Python script for LLM call + script_content = self._generate_llm_script(node) + + return { + "id": node.id, + "summary": node.summary or node.id, + "value": { + "type": "rawscript", + "content": script_content, + "language": "python3", + "input_transforms": self._build_input_transforms(node), + }, + } + + def _create_lambda_module(self, node: Any) -> Dict[str, Any]: + """Create a Studio module for a Lambda node.""" + function_path = node.function_path or "" + + script_content = f''' +# Lambda Node: {node.id} +# Function: {function_path} + +def main(**kwargs): + """Execute the lambda function.""" + from sygra.utils import utils + + func = utils.import_class_or_function("{function_path}") + return func(**kwargs) +''' + + return { + "id": node.id, + "summary": node.summary or node.id, + "value": { + "type": "rawscript", + "content": script_content, + "language": "python3", + "input_transforms": {}, + }, + } + + def _create_subflow_module(self, node: Any) -> Dict[str, Any]: + """Create a Studio module for a Subgraph node.""" + return { + "id": node.id, + "summary": node.summary or node.id, + "value": { + "type": "flow", + "path": node.subgraph_path or "", + "input_transforms": {}, + }, + } + + def _create_identity_module(self, node: Any) -> Dict[str, Any]: + """Create a Studio identity (pass-through) module.""" + return { + "id": node.id, + "summary": node.summary or node.id, + "value": { + "type": "identity", + }, + } + + def _create_sampler_module(self, node: Any) -> Dict[str, Any]: + """Create a Studio module for a Weighted Sampler node.""" + # Build attributes info for the script + attributes_str = "{}" + 
if hasattr(node, 'sampler_config') and node.sampler_config: + import json + attributes_str = json.dumps(node.sampler_config.get('attributes', {})) + + script_content = f''' +# Weighted Sampler Node: {node.id} +# Randomly samples attribute values for workflow variables + +import random +from typing import Dict, Any + +def main(**kwargs) -> Dict[str, Any]: + """Sample random values from configured attributes.""" + attributes = {attributes_str} + + result = {{}} + for attr_name, attr_config in attributes.items(): + values = attr_config.get('values', []) + weights = attr_config.get('weights') + + if values: + if weights and len(weights) == len(values): + result[attr_name] = random.choices(values, weights=weights, k=1)[0] + else: + result[attr_name] = random.choice(values) + + return result +''' + + return { + "id": node.id, + "summary": node.summary or node.id, + "value": { + "type": "rawscript", + "content": script_content, + "language": "python3", + "input_transforms": {}, + }, + } + + def _create_branch_module( + self, + node: Any, + outgoing_edges: List[Dict[str, Any]], + workflow: WorkflowGraph + ) -> Dict[str, Any]: + """Create a Studio branching module.""" + branches = [] + default_modules = [] + + for edge in outgoing_edges: + if edge.get("is_conditional"): + condition = edge.get("condition", {}) + path_map = condition.get("path_map", {}) + + for result_key, target_node in path_map.items(): + if target_node == "END": + continue + + branch = { + "summary": f"Branch: {result_key}", + "expr": f"result == '{result_key}'", + "modules": [], # Would contain nested modules + } + branches.append(branch) + + return { + "id": node.id, + "summary": node.summary or node.id, + "value": { + "type": "branchone", + "branches": branches, + "default": default_modules, + }, + } + + def _generate_llm_script(self, node: Any) -> str: + """Generate Python script for LLM execution.""" + model_name = node.model.name if node.model else "gpt-4o" + model_params = node.model.parameters if node.model else {} + + # Build prompt template + prompts = [] + if node.prompt: + for msg in node.prompt: + prompts.append(f'{{"role": "{msg.role}", "content": """{msg.content}"""}}') + + prompt_list = ",\n ".join(prompts) + + script = f''' +# LLM Node: {node.id} +# Model: {model_name} + +def main(**state): + """Execute the LLM node.""" + from langchain_openai import ChatOpenAI + from langchain_core.messages import HumanMessage, SystemMessage + + # Initialize model + model = ChatOpenAI( + model="{model_name}", + temperature={model_params.get("temperature", 0.7)}, + ) + + # Build messages from prompts + messages = [ + {prompt_list} + ] + + # Format messages with state variables + formatted_messages = [] + for msg in messages: + content = msg["content"].format(**state) + if msg["role"] == "system": + formatted_messages.append(SystemMessage(content=content)) + else: + formatted_messages.append(HumanMessage(content=content)) + + # Execute LLM call + response = model.invoke(formatted_messages) + + return response.content +''' + return script.strip() + + def _build_input_transforms(self, node: Any) -> Dict[str, Any]: + """Build input transforms for a node.""" + transforms = {} + + # Extract variable references from prompts + if node.prompt: + import re + pattern = r"(? 
Dict[str, Any]: + """Build JSON schema for workflow inputs.""" + properties = {} + + for var in workflow.state_variables: + properties[var] = { + "type": "string", + "description": f"Input variable: {var}", + } + + return { + "type": "object", + "properties": properties, + "required": workflow.state_variables, + } + + def _build_edge_map(self, workflow: WorkflowGraph) -> Dict[str, List[Dict[str, Any]]]: + """Build a map of node ID to outgoing edges.""" + edge_map: Dict[str, List[Dict[str, Any]]] = {} + + for edge in workflow.edges: + if edge.source not in edge_map: + edge_map[edge.source] = [] + + edge_map[edge.source].append({ + "target": edge.target, + "is_conditional": edge.is_conditional, + "condition": edge.condition.model_dump() if edge.condition else None, + "label": edge.label, + }) + + return edge_map + + def _topological_sort(self, workflow: WorkflowGraph) -> List[str]: + """Perform topological sort on workflow nodes.""" + # Build adjacency list + adjacency: Dict[str, List[str]] = {node.id: [] for node in workflow.nodes} + in_degree: Dict[str, int] = {node.id: 0 for node in workflow.nodes} + + for edge in workflow.edges: + if edge.source in adjacency: + adjacency[edge.source].append(edge.target) + if edge.target in in_degree: + in_degree[edge.target] += 1 + + # Kahn's algorithm + result = [] + queue = [node_id for node_id, degree in in_degree.items() if degree == 0] + + while queue: + current = queue.pop(0) + result.append(current) + + for neighbor in adjacency.get(current, []): + in_degree[neighbor] -= 1 + if in_degree[neighbor] == 0: + queue.append(neighbor) + + return result + + +def convert_sygra_to_openflow(yaml_path: str) -> Dict[str, Any]: + """ + Convenience function to convert SyGra YAML to OpenFlow format. + + Args: + yaml_path: Path to the graph_config.yaml file. + + Returns: + Dictionary in Studio OpenFlow format. + """ + converter = SygraToStudioConverter() + return converter.convert_from_yaml(yaml_path) diff --git a/studio/execution_manager.py b/studio/execution_manager.py new file mode 100644 index 00000000..1521e231 --- /dev/null +++ b/studio/execution_manager.py @@ -0,0 +1,444 @@ +""" +Execution Manager for SyGra Studio Integration. + +Manages workflow execution with real-time progress tracking, +providing updates for UI visualization. +""" +import argparse +import asyncio +import logging +import threading +import uuid +from datetime import datetime +from typing import Any, Callable, Dict, List, Optional + +from studio.models import ( + ExecutionStatus, + NodeExecutionState, + WorkflowExecution, + WorkflowGraph, +) + + +logger = logging.getLogger(__name__) + + +class ExecutionCallback: + """Callbacks for execution events.""" + + def __init__( + self, + on_start: Optional[Callable[[str], None]] = None, + on_node_start: Optional[Callable[[str, str], None]] = None, + on_node_complete: Optional[Callable[[str, str, Any], None]] = None, + on_node_error: Optional[Callable[[str, str, str], None]] = None, + on_complete: Optional[Callable[[str, Any], None]] = None, + on_error: Optional[Callable[[str, str], None]] = None, + ): + """ + Initialize execution callbacks. + + Args: + on_start: Called when execution starts (execution_id). + on_node_start: Called when a node starts (execution_id, node_id). + on_node_complete: Called when a node completes (execution_id, node_id, result). + on_node_error: Called when a node fails (execution_id, node_id, error). + on_complete: Called when execution completes (execution_id, result). 
+ on_error: Called when execution fails (execution_id, error). + """ + self.on_start = on_start + self.on_node_start = on_node_start + self.on_node_complete = on_node_complete + self.on_node_error = on_node_error + self.on_complete = on_complete + self.on_error = on_error + + +class ExecutionManager: + """ + Manages SyGra workflow executions with real-time tracking. + + Provides execution lifecycle management and progress updates + for UI visualization. + """ + + def __init__(self): + """Initialize the execution manager.""" + self._executions: Dict[str, WorkflowExecution] = {} + self._callbacks: Dict[str, ExecutionCallback] = {} + self._lock = threading.Lock() + + def create_execution( + self, + workflow: WorkflowGraph, + input_data: Dict[str, Any], + callback: Optional[ExecutionCallback] = None, + ) -> WorkflowExecution: + """ + Create a new workflow execution. + + Args: + workflow: The workflow graph to execute. + input_data: Input data for the workflow. + callback: Optional callbacks for execution events. + + Returns: + New WorkflowExecution instance. + """ + execution_id = str(uuid.uuid4()) + + execution = WorkflowExecution( + id=execution_id, + workflow_id=workflow.id, + workflow_name=workflow.name, + status=ExecutionStatus.PENDING, + input_data=input_data, + ) + + # Initialize node states + for node in workflow.nodes: + execution.node_states[node.id] = NodeExecutionState( + node_id=node.id, + status=ExecutionStatus.PENDING, + ) + + with self._lock: + self._executions[execution_id] = execution + if callback: + self._callbacks[execution_id] = callback + + return execution + + def get_execution(self, execution_id: str) -> Optional[WorkflowExecution]: + """Get an execution by ID.""" + return self._executions.get(execution_id) + + def list_executions( + self, + workflow_id: Optional[str] = None, + status: Optional[ExecutionStatus] = None, + ) -> List[WorkflowExecution]: + """List executions with optional filtering.""" + executions = list(self._executions.values()) + + if workflow_id: + executions = [e for e in executions if e.workflow_id == workflow_id] + + if status: + executions = [e for e in executions if e.status == status] + + return executions + + def start_execution(self, execution_id: str) -> None: + """Mark an execution as started.""" + execution = self._executions.get(execution_id) + if not execution: + return + + execution.status = ExecutionStatus.RUNNING + execution.started_at = datetime.now() + execution.logs.append(f"[{datetime.now().isoformat()}] Execution started") + + callback = self._callbacks.get(execution_id) + if callback and callback.on_start: + try: + callback.on_start(execution_id) + except Exception as e: + logger.error(f"Error in on_start callback: {e}") + + def start_node(self, execution_id: str, node_id: str) -> None: + """Mark a node as started.""" + execution = self._executions.get(execution_id) + if not execution: + return + + node_state = execution.node_states.get(node_id) + if not node_state: + return + + node_state.status = ExecutionStatus.RUNNING + node_state.started_at = datetime.now() + execution.current_node = node_id + execution.logs.append(f"[{datetime.now().isoformat()}] Node '{node_id}' started") + + callback = self._callbacks.get(execution_id) + if callback and callback.on_node_start: + try: + callback.on_node_start(execution_id, node_id) + except Exception as e: + logger.error(f"Error in on_node_start callback: {e}") + + def complete_node( + self, + execution_id: str, + node_id: str, + result: Any = None, + ) -> None: + """Mark a node as 
completed.""" + execution = self._executions.get(execution_id) + if not execution: + return + + node_state = execution.node_states.get(node_id) + if not node_state: + return + + node_state.status = ExecutionStatus.COMPLETED + node_state.completed_at = datetime.now() + node_state.result = result + + if node_state.started_at: + duration = (node_state.completed_at - node_state.started_at).total_seconds() + node_state.duration_ms = int(duration * 1000) + + execution.logs.append( + f"[{datetime.now().isoformat()}] Node '{node_id}' completed " + f"(duration: {node_state.duration_ms}ms)" + ) + + callback = self._callbacks.get(execution_id) + if callback and callback.on_node_complete: + try: + callback.on_node_complete(execution_id, node_id, result) + except Exception as e: + logger.error(f"Error in on_node_complete callback: {e}") + + def fail_node( + self, + execution_id: str, + node_id: str, + error: str, + ) -> None: + """Mark a node as failed.""" + execution = self._executions.get(execution_id) + if not execution: + return + + node_state = execution.node_states.get(node_id) + if not node_state: + return + + node_state.status = ExecutionStatus.FAILED + node_state.completed_at = datetime.now() + node_state.error = error + + execution.error_node = node_id + execution.logs.append( + f"[{datetime.now().isoformat()}] Node '{node_id}' failed: {error}" + ) + + callback = self._callbacks.get(execution_id) + if callback and callback.on_node_error: + try: + callback.on_node_error(execution_id, node_id, error) + except Exception as e: + logger.error(f"Error in on_node_error callback: {e}") + + def complete_execution( + self, + execution_id: str, + result: Any = None, + ) -> None: + """Mark an execution as completed.""" + execution = self._executions.get(execution_id) + if not execution: + return + + execution.status = ExecutionStatus.COMPLETED + execution.completed_at = datetime.now() + execution.output_data = result + execution.current_node = None + + if execution.started_at: + duration = (execution.completed_at - execution.started_at).total_seconds() + execution.duration_ms = int(duration * 1000) + + execution.logs.append( + f"[{datetime.now().isoformat()}] Execution completed " + f"(total duration: {execution.duration_ms}ms)" + ) + + callback = self._callbacks.get(execution_id) + if callback and callback.on_complete: + try: + callback.on_complete(execution_id, result) + except Exception as e: + logger.error(f"Error in on_complete callback: {e}") + + def fail_execution( + self, + execution_id: str, + error: str, + ) -> None: + """Mark an execution as failed.""" + execution = self._executions.get(execution_id) + if not execution: + return + + execution.status = ExecutionStatus.FAILED + execution.completed_at = datetime.now() + execution.error = error + + if execution.started_at: + duration = (execution.completed_at - execution.started_at).total_seconds() + execution.duration_ms = int(duration * 1000) + + execution.logs.append( + f"[{datetime.now().isoformat()}] Execution failed: {error}" + ) + + callback = self._callbacks.get(execution_id) + if callback and callback.on_error: + try: + callback.on_error(execution_id, error) + except Exception as e: + logger.error(f"Error in on_error callback: {e}") + + def cancel_execution(self, execution_id: str) -> bool: + """ + Cancel a running execution. + + Returns: + True if cancellation was successful. 
+ """ + execution = self._executions.get(execution_id) + if not execution: + return False + + if execution.status not in (ExecutionStatus.PENDING, ExecutionStatus.RUNNING): + return False + + execution.status = ExecutionStatus.CANCELLED + execution.completed_at = datetime.now() + execution.logs.append( + f"[{datetime.now().isoformat()}] Execution cancelled by user" + ) + + return True + + def add_log(self, execution_id: str, message: str) -> None: + """Add a log message to an execution.""" + execution = self._executions.get(execution_id) + if execution: + execution.logs.append(f"[{datetime.now().isoformat()}] {message}") + + +class SygraExecutionRunner: + """ + Runs SyGra workflows with execution tracking. + + Integrates with SyGra's BaseTaskExecutor while providing + real-time progress updates. + """ + + def __init__(self, execution_manager: ExecutionManager): + """ + Initialize the execution runner. + + Args: + execution_manager: The execution manager for tracking. + """ + self.execution_manager = execution_manager + + async def run_workflow( + self, + execution_id: str, + workflow: WorkflowGraph, + input_data: Dict[str, Any], + max_samples: int = 1, + ) -> Any: + """ + Run a SyGra workflow with tracking. + + Args: + execution_id: The execution ID for tracking. + workflow: The workflow graph to execute. + input_data: Input data for the workflow. + max_samples: Maximum samples to process. + + Returns: + Workflow execution result. + """ + self.execution_manager.start_execution(execution_id) + + try: + # Import SyGra components + from sygra.core.base_task_executor import BaseTaskExecutor + + # Start tracking + self.execution_manager.start_node(execution_id, "START") + self.execution_manager.complete_node(execution_id, "START") + + if not workflow.source_path: + raise ValueError("Workflow source path not available") + + # Create and run executor + self.execution_manager.add_log( + execution_id, + f"Initializing workflow from {workflow.source_path}" + ) + + args = argparse.Namespace() + args.config_path = workflow.source_path + args.output_dir = "./output" + args.num_records = max_samples + executor = BaseTaskExecutor(args=args) + + # Track each node execution + # Note: This is a simplified approach - ideally we'd hook into + # LangGraph's execution events for real-time node tracking + for node in workflow.nodes: + if node.id in ("START", "END"): + continue + + self.execution_manager.start_node(execution_id, node.id) + + # Run the workflow + result = await asyncio.to_thread( + executor.run, + data=[input_data] if input_data else None, + ) + + # Mark all nodes as completed + for node in workflow.nodes: + if node.id == "END": + continue + + node_state = self.execution_manager.get_execution(execution_id) + if node_state: + ns = node_state.node_states.get(node.id) + if ns and ns.status == ExecutionStatus.RUNNING: + self.execution_manager.complete_node(execution_id, node.id) + + self.execution_manager.start_node(execution_id, "END") + self.execution_manager.complete_node(execution_id, "END") + self.execution_manager.complete_execution(execution_id, result) + + return result + + except Exception as e: + logger.error(f"Workflow execution failed: {e}") + + # Find and mark the current node as failed + execution = self.execution_manager.get_execution(execution_id) + if execution and execution.current_node: + self.execution_manager.fail_node( + execution_id, + execution.current_node, + str(e) + ) + + self.execution_manager.fail_execution(execution_id, str(e)) + raise + + +# Global execution manager 
instance +_execution_manager: Optional[ExecutionManager] = None + + +def get_execution_manager() -> ExecutionManager: + """Get the global execution manager instance.""" + global _execution_manager + if _execution_manager is None: + _execution_manager = ExecutionManager() + return _execution_manager diff --git a/studio/execution_storage.py b/studio/execution_storage.py new file mode 100644 index 00000000..50703a0d --- /dev/null +++ b/studio/execution_storage.py @@ -0,0 +1,622 @@ +""" +Scalable Execution Storage for SyGra Studio. + +This module provides a scalable, per-run storage architecture that: +- Stores each execution in its own file (not one monolithic JSON) +- Maintains a lightweight index for fast listing/filtering +- References output files instead of duplicating data +- Supports pagination for UI scalability +- Is designed to be extensible for future multi-user support + +Directory Structure: + studio/ + โ””โ”€โ”€ .executions/ + โ”œโ”€โ”€ index.json # Lightweight index (metadata only) + โ””โ”€โ”€ runs/ + โ””โ”€โ”€ {execution_id}.json # Full execution data per run + +The index file only contains essential metadata for listing: +- id, workflow_id, workflow_name, status, started_at, completed_at, duration_ms + +Full execution data (including logs, node_states, etc.) is stored per-run +and loaded on demand when viewing a specific execution. +""" + +import json +import os +import shutil +from datetime import datetime +from pathlib import Path +from typing import Any, Dict, List, Optional, Tuple +from threading import Lock +import fcntl + +from studio.models import ( + ExecutionStatus, + NodeExecutionState, + WorkflowExecution, +) + + +# Storage version for migration support +STORAGE_VERSION = "2.0" + +# Maximum runs to keep in index (for memory efficiency) +# Older runs are still accessible but not in the quick index +MAX_INDEX_ENTRIES = 10000 + + +class ExecutionIndex: + """Lightweight execution metadata for index file.""" + + def __init__( + self, + id: str, + workflow_id: str, + workflow_name: str, + status: str, + started_at: Optional[str] = None, + completed_at: Optional[str] = None, + duration_ms: Optional[int] = None, + error: Optional[str] = None, + ): + self.id = id + self.workflow_id = workflow_id + self.workflow_name = workflow_name + self.status = status + self.started_at = started_at + self.completed_at = completed_at + self.duration_ms = duration_ms + self.error = error + + def to_dict(self) -> Dict[str, Any]: + return { + "id": self.id, + "workflow_id": self.workflow_id, + "workflow_name": self.workflow_name, + "status": self.status, + "started_at": self.started_at, + "completed_at": self.completed_at, + "duration_ms": self.duration_ms, + "error": self.error, + } + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "ExecutionIndex": + return cls( + id=data["id"], + workflow_id=data["workflow_id"], + workflow_name=data["workflow_name"], + status=data["status"], + started_at=data.get("started_at"), + completed_at=data.get("completed_at"), + duration_ms=data.get("duration_ms"), + error=data.get("error"), + ) + + @classmethod + def from_execution(cls, execution: WorkflowExecution) -> "ExecutionIndex": + """Create index entry from full execution.""" + return cls( + id=execution.id, + workflow_id=execution.workflow_id, + workflow_name=execution.workflow_name, + status=execution.status.value if hasattr(execution.status, 'value') else execution.status, + started_at=execution.started_at.isoformat() if execution.started_at else None, + 
+            completed_at=execution.completed_at.isoformat() if execution.completed_at else None,
+            duration_ms=execution.duration_ms,
+            error=execution.error,
+        )
+
+
+class ExecutionStorage:
+    """
+    Scalable execution storage with per-run files and lightweight index.
+
+    Thread-safe with file locking for concurrent access.
+    Designed to be extensible for future multi-user support.
+    """
+
+    def __init__(self, base_dir: Optional[Path] = None):
+        """
+        Initialize execution storage.
+
+        Args:
+            base_dir: Base directory for storage. Defaults to studio/.executions/
+        """
+        if base_dir is None:
+            base_dir = Path(__file__).parent / ".executions"
+
+        self.base_dir = Path(base_dir)
+        self.runs_dir = self.base_dir / "runs"
+        self.index_file = self.base_dir / "index.json"
+        self.legacy_file = Path(__file__).parent / ".executions_history.json"
+
+        # In-memory cache for active/recent executions
+        self._cache: Dict[str, WorkflowExecution] = {}
+        self._index_cache: Dict[str, ExecutionIndex] = {}
+        self._lock = Lock()
+
+        # Ensure directories exist
+        self._ensure_directories()
+
+        # Migrate from legacy format if needed
+        self._migrate_if_needed()
+
+        # Load index into memory
+        self._load_index()
+
+    def _ensure_directories(self):
+        """Create storage directories if they don't exist."""
+        self.base_dir.mkdir(parents=True, exist_ok=True)
+        self.runs_dir.mkdir(parents=True, exist_ok=True)
+
+    def _get_run_file(self, execution_id: str) -> Path:
+        """Get the file path for a specific run."""
+        return self.runs_dir / f"{execution_id}.json"
+
+    def _migrate_if_needed(self):
+        """Migrate from legacy monolithic JSON if it exists."""
+        if not self.legacy_file.exists():
+            return
+
+        # Check if already migrated
+        if self.index_file.exists():
+            # Verify index has entries or legacy is empty
+            try:
+                with open(self.index_file, 'r') as f:
+                    index_data = json.load(f)
+                    if index_data.get("runs") and len(index_data["runs"]) > 0:
+                        # Already migrated, optionally remove legacy
+                        return
+            except Exception:
+                pass
+
+        print("[ExecutionStorage] Migrating from legacy format...")
+
+        try:
+            with open(self.legacy_file, 'r') as f:
+                legacy_data = json.load(f)
+
+            migrated_count = 0
+            index_entries = []
+
+            for exec_id, exec_data in legacy_data.items():
+                try:
+                    # Convert node_states if needed
+                    if 'node_states' in exec_data and exec_data['node_states']:
+                        exec_data['node_states'] = {
+                            k: NodeExecutionState(**v) if isinstance(v, dict) else v
+                            for k, v in exec_data['node_states'].items()
+                        }
+
+                    # Create WorkflowExecution object
+                    execution = WorkflowExecution(**exec_data)
+
+                    # Save per-run file
+                    run_file = self._get_run_file(exec_id)
+                    with open(run_file, 'w') as f:
+                        json.dump(execution.model_dump(mode='json'), f, default=str)
+
+                    # Create index entry
+                    index_entry = ExecutionIndex.from_execution(execution)
+                    index_entries.append(index_entry)
+
+                    migrated_count += 1
+                except Exception as e:
+                    print(f"[ExecutionStorage] Warning: Failed to migrate {exec_id}: {e}")
+
+            # Sort by started_at, newest first
+            index_entries.sort(
+                key=lambda e: e.started_at or "",
+                reverse=True
+            )
+
+            # Save index
+            index_data = {
+                "version": STORAGE_VERSION,
+                "total_runs": len(index_entries),
+                "last_updated": datetime.now().isoformat(),
+                "runs": [e.to_dict() for e in index_entries[:MAX_INDEX_ENTRIES]]
+            }
+
+            with open(self.index_file, 'w') as f:
+                json.dump(index_data, f, indent=2)
+
+            # Rename legacy file to backup
+            backup_file = self.legacy_file.with_suffix('.json.bak')
+            shutil.move(str(self.legacy_file), str(backup_file))
+
+            print(f"[ExecutionStorage] Migrated {migrated_count} executions. Legacy file backed up to {backup_file}")
+
+        except Exception as e:
+            print(f"[ExecutionStorage] Migration failed: {e}")
+
+    def _load_index(self):
+        """Load index into memory and verify files exist."""
+        needs_save = False
+
+        with self._lock:
+            self._index_cache.clear()
+
+            if not self.index_file.exists():
+                return
+
+            try:
+                with open(self.index_file, 'r') as f:
+                    data = json.load(f)
+
+                missing_files = []
+                for entry_data in data.get("runs", []):
+                    entry = ExecutionIndex.from_dict(entry_data)
+                    # Verify the run file still exists
+                    run_file = self._get_run_file(entry.id)
+                    if run_file.exists():
+                        self._index_cache[entry.id] = entry
+                    else:
+                        missing_files.append(entry.id)
+
+                # If files were deleted externally, mark for index update
+                if missing_files:
+                    print(f"[ExecutionStorage] Detected {len(missing_files)} missing run files, cleaning up index...")
+                    needs_save = True
+
+            except Exception as e:
+                print(f"[ExecutionStorage] Warning: Failed to load index: {e}")
+
+        # Save outside the lock to avoid deadlock
+        if needs_save:
+            self._save_index()
+
+    def _save_index(self):
+        """Save index to disk."""
+        with self._lock:
+            # Sort by started_at, newest first
+            entries = sorted(
+                self._index_cache.values(),
+                key=lambda e: e.started_at or "",
+                reverse=True
+            )
+
+            index_data = {
+                "version": STORAGE_VERSION,
+                "total_runs": len(entries),
+                "last_updated": datetime.now().isoformat(),
+                "runs": [e.to_dict() for e in entries[:MAX_INDEX_ENTRIES]]
+            }
+
+            # Write with file locking for safety
+            try:
+                with open(self.index_file, 'w') as f:
+                    # Try to get exclusive lock (non-blocking)
+                    try:
+                        fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+                    except (IOError, OSError):
+                        pass  # Continue without lock on Windows or if busy
+
+                    json.dump(index_data, f, indent=2)
+
+                    try:
+                        fcntl.flock(f.fileno(), fcntl.LOCK_UN)
+                    except (IOError, OSError):
+                        pass
+            except Exception as e:
+                print(f"[ExecutionStorage] Warning: Failed to save index: {e}")
+
+    def save_execution(self, execution: WorkflowExecution):
+        """
+        Save an execution (both to per-run file and update index).
+
+        Args:
+            execution: The execution to save.
+        """
+        exec_id = execution.id
+
+        # Save per-run file
+        run_file = self._get_run_file(exec_id)
+        try:
+            with open(run_file, 'w') as f:
+                json.dump(execution.model_dump(mode='json'), f, default=str)
+        except Exception as e:
+            print(f"[ExecutionStorage] Warning: Failed to save run file {exec_id}: {e}")
+            return
+
+        # Update index
+        with self._lock:
+            index_entry = ExecutionIndex.from_execution(execution)
+            self._index_cache[exec_id] = index_entry
+
+            # Update in-memory cache
+            self._cache[exec_id] = execution
+
+        # Save index (only for completed/failed/cancelled)
+        if execution.status in (ExecutionStatus.COMPLETED, ExecutionStatus.FAILED, ExecutionStatus.CANCELLED):
+            self._save_index()
+
+    def get_execution(self, execution_id: str) -> Optional[WorkflowExecution]:
+        """
+        Get full execution by ID.
+
+        Args:
+            execution_id: The execution ID.
+
+        Returns:
+            The full WorkflowExecution or None if not found.
+        """
+        # Check in-memory cache first
+        with self._lock:
+            if execution_id in self._cache:
+                return self._cache[execution_id]
+
+        # Load from per-run file
+        run_file = self._get_run_file(execution_id)
+        if not run_file.exists():
+            return None
+
+        try:
+            with open(run_file, 'r') as f:
+                data = json.load(f)
+
+            # Convert node_states if needed
+            if 'node_states' in data and data['node_states']:
+                data['node_states'] = {
+                    k: NodeExecutionState(**v) if isinstance(v, dict) else v
+                    for k, v in data['node_states'].items()
+                }
+
+            execution = WorkflowExecution(**data)
+
+            # Cache it
+            with self._lock:
+                self._cache[execution_id] = execution
+
+            return execution
+
+        except Exception as e:
+            print(f"[ExecutionStorage] Warning: Failed to load execution {execution_id}: {e}")
+            return None
+
+    def list_executions(
+        self,
+        workflow_id: Optional[str] = None,
+        status: Optional[str] = None,
+        limit: int = 50,
+        offset: int = 0,
+    ) -> Tuple[List[ExecutionIndex], int]:
+        """
+        List executions with filtering and pagination.
+
+        Args:
+            workflow_id: Filter by workflow ID.
+            status: Filter by status.
+            limit: Maximum results to return.
+            offset: Number of results to skip.
+
+        Returns:
+            Tuple of (list of ExecutionIndex entries, total count matching filters)
+        """
+        with self._lock:
+            entries = list(self._index_cache.values())
+
+        # Apply filters
+        if workflow_id:
+            entries = [e for e in entries if e.workflow_id == workflow_id]
+
+        if status:
+            entries = [e for e in entries if e.status == status]
+
+        # Sort by started_at, newest first
+        entries.sort(key=lambda e: e.started_at or "", reverse=True)
+
+        total = len(entries)
+
+        # Apply pagination
+        entries = entries[offset:offset + limit]
+
+        return entries, total
+
+    def list_executions_full(
+        self,
+        workflow_id: Optional[str] = None,
+        status: Optional[str] = None,
+        limit: int = 50,
+        offset: int = 0,
+    ) -> Tuple[List[WorkflowExecution], int]:
+        """
+        List full executions (for backward compatibility).
+
+        This loads full execution data for each entry.
+        Use list_executions() for lightweight listing.
+
+        Args:
+            workflow_id: Filter by workflow ID.
+            status: Filter by status.
+            limit: Maximum results to return.
+            offset: Number of results to skip.
+
+        Returns:
+            Tuple of (list of WorkflowExecution objects, total count)
+        """
+        index_entries, total = self.list_executions(
+            workflow_id=workflow_id,
+            status=status,
+            limit=limit,
+            offset=offset,
+        )
+
+        executions = []
+        for entry in index_entries:
+            execution = self.get_execution(entry.id)
+            if execution:
+                executions.append(execution)
+
+        return executions, total
+
+    def delete_execution(self, execution_id: str) -> bool:
+        """
+        Delete an execution.
+
+        Args:
+            execution_id: The execution ID to delete.
+
+        Returns:
+            True if deleted, False if not found.
+        """
+        # Remove from cache
+        with self._lock:
+            self._cache.pop(execution_id, None)
+            self._index_cache.pop(execution_id, None)
+
+        # Remove per-run file
+        run_file = self._get_run_file(execution_id)
+        if run_file.exists():
+            try:
+                run_file.unlink()
+            except Exception as e:
+                print(f"[ExecutionStorage] Warning: Failed to delete run file {execution_id}: {e}")
+                return False
+
+        # Save updated index
+        self._save_index()
+
+        return True
+
+    def update_execution_in_memory(self, execution: WorkflowExecution):
+        """
+        Update execution in memory only (for active executions).
+
+        Use this during execution progress updates to avoid disk I/O.
+        Call save_execution() when execution completes.
+
+        Args:
+            execution: The execution to update.
+        """
+        with self._lock:
+            self._cache[execution.id] = execution
+
+            # Also update index cache
+            index_entry = ExecutionIndex.from_execution(execution)
+            self._index_cache[execution.id] = index_entry
+
+    def refresh_index(self) -> int:
+        """
+        Refresh the index by re-scanning the runs directory.
+
+        Detects files deleted externally and orphaned files not in the index.
+
+        Returns:
+            Number of changes detected (removed or added entries).
+        """
+        changes = 0
+
+        with self._lock:
+            # Check for missing files (in index but not on disk)
+            missing = []
+            for exec_id in list(self._index_cache.keys()):
+                run_file = self._get_run_file(exec_id)
+                if not run_file.exists():
+                    missing.append(exec_id)
+
+            for exec_id in missing:
+                del self._index_cache[exec_id]
+                # Also remove from in-memory cache
+                self._cache.pop(exec_id, None)
+                changes += 1
+
+            if missing:
+                print(f"[ExecutionStorage] Removed {len(missing)} entries for missing files")
+
+            # Check for orphaned files (on disk but not in index)
+            if self.runs_dir.exists():
+                for run_file in self.runs_dir.glob("*.json"):
+                    exec_id = run_file.stem
+                    if exec_id not in self._index_cache:
+                        # Try to load and add to index
+                        try:
+                            with open(run_file, 'r') as f:
+                                data = json.load(f)
+                            execution = WorkflowExecution(**data)
+                            index_entry = ExecutionIndex.from_execution(execution)
+                            self._index_cache[exec_id] = index_entry
+                            changes += 1
+                            print(f"[ExecutionStorage] Added orphaned file to index: {exec_id}")
+                        except Exception as e:
+                            print(f"[ExecutionStorage] Warning: Could not index orphaned file {exec_id}: {e}")
+
+        # Save updated index if changes were made
+        if changes > 0:
+            self._save_index()
+
+        return changes
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get storage statistics."""
+        with self._lock:
+            total = len(self._index_cache)
+
+            status_counts = {}
+            workflow_counts = {}
+
+            for entry in self._index_cache.values():
+                status_counts[entry.status] = status_counts.get(entry.status, 0) + 1
+                workflow_counts[entry.workflow_id] = workflow_counts.get(entry.workflow_id, 0) + 1
+
+            return {
+                "total_executions": total,
+                "status_breakdown": status_counts,
+                "workflow_breakdown": workflow_counts,
+                "storage_version": STORAGE_VERSION,
+                "index_file": str(self.index_file),
+                "runs_directory": str(self.runs_dir),
+            }
+
+    def cleanup_old_runs(self, keep_days: int = 30, keep_min: int = 100):
+        """
+        Cleanup old execution data to manage disk space.
+
+        Args:
+            keep_days: Keep executions from the last N days.
+            keep_min: Always keep at least this many executions.
+        """
+        from datetime import timedelta
+
+        cutoff = datetime.now() - timedelta(days=keep_days)
+        cutoff_str = cutoff.isoformat()
+
+        with self._lock:
+            entries = sorted(
+                self._index_cache.values(),
+                key=lambda e: e.started_at or "",
+                reverse=True
+            )
+
+            # Keep recent and minimum count
+            to_delete = []
+            kept_count = 0
+
+            for entry in entries:
+                if kept_count < keep_min:
+                    kept_count += 1
+                    continue
+
+                if entry.started_at and entry.started_at < cutoff_str:
+                    to_delete.append(entry.id)
+
+        # Delete old executions
+        for exec_id in to_delete:
+            self.delete_execution(exec_id)
+
+        if to_delete:
+            print(f"[ExecutionStorage] Cleaned up {len(to_delete)} old executions")
+
+
+# Global singleton instance
+_storage_instance: Optional[ExecutionStorage] = None
+
+
+def get_storage() -> ExecutionStorage:
+    """Get the global ExecutionStorage instance."""
+    global _storage_instance
+    if _storage_instance is None:
+        _storage_instance = ExecutionStorage()
+    return _storage_instance
diff --git a/studio/frontend/package-lock.json b/studio/frontend/package-lock.json
new file mode 100644
index 00000000..22fcf02a
--- /dev/null
+++ b/studio/frontend/package-lock.json
@@ -0,0 +1,3791 @@
+{
+  "lockfileVersion": 3,
+  "name": "sygra-workflow-ui",
+  "packages": {
+    "": {
+      "dependencies": {
+        "@monaco-editor/loader": "^1.4.0",
+        "@xyflow/svelte": "^0.1.22",
+        "chart.js": "^4.4.7",
+        "d3-dag": "^1.1.0",
+        "fast-equals": "^5.2.2",
+        "html2canvas": "^1.4.1",
+        "jspdf": "^3.0.4",
+        "lucide-svelte": "^0.468.0",
+        "monaco-editor": "^0.52.2"
+      },
+      "devDependencies": {
+        "@sveltejs/adapter-static": "^3.0.8",
+        "@sveltejs/kit": "^2.16.0",
+        "@sveltejs/vite-plugin-svelte": "^4.0.0",
+        "@types/node": "^22.10.2",
+        "autoprefixer": "^10.4.20",
+        "eslint": "^9.17.0",
+        "postcss": "^8.4.49",
+        "svelte": "^5.16.0",
+        "svelte-check": "^4.1.1",
+        "tailwindcss": "^3.4.17",
+        "typescript": "^5.7.2",
+        "vite": "^5.4.0"
+      },
+      "name": "sygra-workflow-ui",
+      "version": "0.1.0"
+    },
+    "node_modules/@alloc/quick-lru": {
+      "dev": true,
+      "engines": {
+        "node": ">=10"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      },
+      "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==",
+      "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz",
+      "version": "5.2.0"
+    },
+    "node_modules/@babel/runtime": {
+      "engines": {
+        "node": ">=6.9.0"
+      },
+      "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==",
+      "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz",
+      "version": "7.28.4"
+    },
+    "node_modules/@esbuild/aix-ppc64": {
+      "cpu": [
+        "ppc64"
+      ],
+      "dev": true,
+      "engines": {
+        "node": ">=12"
+      },
+      "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==",
+      "optional": true,
+      "os": [
+        "aix"
+      ],
+      "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz",
+      "version": "0.21.5"
+    },
+    "node_modules/@esbuild/android-arm": {
+      "cpu": [
+        "arm"
+      ],
+      "dev": true,
+      "engines": {
+        "node": ">=12"
+      },
+      "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==",
+      "optional": true,
+      "os": [
+        "android"
+      ],
+      "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz",
+      "version": "0.21.5"
+    },
+    "node_modules/@esbuild/android-arm64": {
+      "cpu": [
+        "arm64"
+      ],
+      "dev": true,
+      "engines": {
+        "node": ">=12"
+      },
+ "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "optional": true, + "os": [ + "android" + ], + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/android-x64": { + "cpu": [ + "x64" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "optional": true, + "os": [ + "android" + ], + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/darwin-arm64": { + "cpu": [ + "arm64" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "optional": true, + "os": [ + "darwin" + ], + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/darwin-x64": { + "cpu": [ + "x64" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "optional": true, + "os": [ + "darwin" + ], + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/freebsd-arm64": { + "cpu": [ + "arm64" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "optional": true, + "os": [ + "freebsd" + ], + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/freebsd-x64": { + "cpu": [ + "x64" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "optional": true, + "os": [ + "freebsd" + ], + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/linux-arm": { + "cpu": [ + "arm" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/linux-arm64": { + "cpu": [ + "arm64" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/linux-ia32": { + "cpu": [ + "ia32" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/linux-loong64": { + "cpu": [ + "loong64" + ], + "dev": true, + "engines": { + "node": 
">=12" + }, + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/linux-mips64el": { + "cpu": [ + "mips64el" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/linux-ppc64": { + "cpu": [ + "ppc64" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/linux-riscv64": { + "cpu": [ + "riscv64" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/linux-s390x": { + "cpu": [ + "s390x" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/linux-x64": { + "cpu": [ + "x64" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/netbsd-x64": { + "cpu": [ + "x64" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "optional": true, + "os": [ + "netbsd" + ], + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/openbsd-x64": { + "cpu": [ + "x64" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "optional": true, + "os": [ + "openbsd" + ], + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/sunos-x64": { + "cpu": [ + "x64" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "optional": true, + "os": [ + "sunos" + ], + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/win32-arm64": { + "cpu": [ + "arm64" + ], + "dev": true, + "engines": { + 
"node": ">=12" + }, + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "optional": true, + "os": [ + "win32" + ], + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/win32-ia32": { + "cpu": [ + "ia32" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "optional": true, + "os": [ + "win32" + ], + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@esbuild/win32-x64": { + "cpu": [ + "x64" + ], + "dev": true, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "optional": true, + "os": [ + "win32" + ], + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/@eslint-community/eslint-utils": { + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==", + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + }, + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz", + "version": "4.9.0" + }, + "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "version": "3.4.3" + }, + "node_modules/@eslint-community/regexpp": { + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + }, + "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz", + "version": "4.12.2" + }, + "node_modules/@eslint/config-array": { + "dependencies": { + "@eslint/object-schema": "^2.1.7", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz", + "version": "0.21.1" + }, + "node_modules/@eslint/config-helpers": { + "dependencies": { + "@eslint/core": "^0.17.0" + }, + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz", + "version": "0.4.2" + }, + "node_modules/@eslint/core": { + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "integrity": 
"sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.17.0.tgz", + "version": "0.17.0" + }, + "node_modules/@eslint/eslintrc": { + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.1", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "integrity": "sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.3.tgz", + "version": "3.3.3" + }, + "node_modules/@eslint/js": { + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "integrity": "sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.2.tgz", + "version": "9.39.2" + }, + "node_modules/@eslint/object-schema": { + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz", + "version": "2.1.7" + }, + "node_modules/@eslint/plugin-kit": { + "dependencies": { + "@eslint/core": "^0.17.0", + "levn": "^0.4.1" + }, + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "integrity": "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz", + "version": "0.4.1" + }, + "node_modules/@humanfs/core": { + "dev": true, + "engines": { + "node": ">=18.18.0" + }, + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", + "version": "0.19.1" + }, + "node_modules/@humanfs/node": { + "dependencies": { + "@humanfs/core": "^0.19.1", + "@humanwhocodes/retry": "^0.4.0" + }, + "dev": true, + "engines": { + "node": ">=18.18.0" + }, + "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz", + "version": "0.16.7" + }, + "node_modules/@humanwhocodes/module-importer": { + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + }, + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "version": "1.0.1" + }, + "node_modules/@humanwhocodes/retry": { + "dev": true, + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + }, + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "version": "0.4.3" + }, + 
"node_modules/@jridgewell/gen-mapping": { + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "version": "0.3.13" + }, + "node_modules/@jridgewell/remapping": { + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "version": "2.3.5" + }, + "node_modules/@jridgewell/resolve-uri": { + "engines": { + "node": ">=6.0.0" + }, + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "version": "3.1.2" + }, + "node_modules/@jridgewell/sourcemap-codec": { + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "version": "1.5.5" + }, + "node_modules/@jridgewell/trace-mapping": { + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + }, + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "version": "0.3.31" + }, + "node_modules/@kurkle/color": { + "integrity": "sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==", + "license": "MIT", + "resolved": "https://registry.npmjs.org/@kurkle/color/-/color-0.3.4.tgz", + "version": "0.3.4" + }, + "node_modules/@monaco-editor/loader": { + "dependencies": { + "state-local": "^1.0.6" + }, + "integrity": "sha512-gIwR1HrJrrx+vfyOhYmCZ0/JcWqG5kbfG7+d3f/C1LXk2EvzAbHSg3MQ5lO2sMlo9izoAZ04shohfKLVT6crVA==", + "resolved": "https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.7.0.tgz", + "version": "1.7.0" + }, + "node_modules/@nodelib/fs.scandir": { + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "dev": true, + "engines": { + "node": ">= 8" + }, + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "version": "2.1.5" + }, + "node_modules/@nodelib/fs.stat": { + "dev": true, + "engines": { + "node": ">= 8" + }, + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "version": "2.0.5" + }, + "node_modules/@nodelib/fs.walk": { + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "dev": true, + "engines": { + "node": ">= 8" + }, + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "version": "1.2.8" + }, + "node_modules/@polka/url": { + "dev": true, + "integrity": 
"sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==", + "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.29.tgz", + "version": "1.0.0-next.29" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "cpu": [ + "arm" + ], + "dev": true, + "integrity": "sha512-iDGS/h7D8t7tvZ1t6+WPK04KD0MwzLZrG0se1hzBjSi5fyxlsiggoJHwh18PCFNn7tG43OWb6pdZ6Y+rMlmyNQ==", + "optional": true, + "os": [ + "android" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-android-arm64": { + "cpu": [ + "arm64" + ], + "dev": true, + "integrity": "sha512-wrSAViWvZHBMMlWk6EJhvg8/rjxzyEhEdgfMMjREHEq11EtJ6IP6yfcCH57YAEca2Oe3FNCE9DSTgU70EIGmVw==", + "optional": true, + "os": [ + "android" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "cpu": [ + "arm64" + ], + "dev": true, + "integrity": "sha512-S87zZPBmRO6u1YXQLwpveZm4JfPpAa6oHBX7/ghSiGH3rz/KDgAu1rKdGutV+WUI6tKDMbaBJomhnT30Y2t4VQ==", + "optional": true, + "os": [ + "darwin" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-darwin-x64": { + "cpu": [ + "x64" + ], + "dev": true, + "integrity": "sha512-YTbnsAaHo6VrAczISxgpTva8EkfQus0VPEVJCEaboHtZRIb6h6j0BNxRBOwnDciFTZLDPW5r+ZBmhL/+YpTZgA==", + "optional": true, + "os": [ + "darwin" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "cpu": [ + "arm64" + ], + "dev": true, + "integrity": "sha512-1T8eY2J8rKJWzaznV7zedfdhD1BqVs1iqILhmHDq/bqCUZsrMt+j8VCTHhP0vdfbHK3e1IQ7VYx3jlKqwlf+vw==", + "optional": true, + "os": [ + "freebsd" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "cpu": [ + "x64" + ], + "dev": true, + "integrity": "sha512-sHTiuXyBJApxRn+VFMaw1U+Qsz4kcNlxQ742snICYPrY+DDL8/ZbaC4DVIB7vgZmp3jiDaKA0WpBdP0aqPJoBQ==", + "optional": true, + "os": [ + "freebsd" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "cpu": [ + "arm" + ], + "dev": true, + "integrity": "sha512-dV3T9MyAf0w8zPVLVBptVlzaXxka6xg1f16VAQmjg+4KMSTWDvhimI/Y6mp8oHwNrmnmVl9XxJ/w/mO4uIQONA==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "cpu": [ + "arm" + ], + "dev": true, + "integrity": "sha512-wIGYC1x/hyjP+KAu9+ewDI+fi5XSNiUi9Bvg6KGAh2TsNMA3tSEs+Sh6jJ/r4BV/bx/CyWu2ue9kDnIdRyafcQ==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "cpu": [ + "arm64" + ], + "dev": true, + "integrity": "sha512-Y+qVA0D9d0y2FRNiG9oM3Hut/DgODZbU9I8pLLPwAsU0tUKZ49cyV1tzmB/qRbSzGvY8lpgGkJuMyuhH7Ma+Vg==", + "optional": true, + "os": [ + "linux" + 
], + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "cpu": [ + "arm64" + ], + "dev": true, + "integrity": "sha512-juaC4bEgJsyFVfqhtGLz8mbopaWD+WeSOYr5E16y+1of6KQjc0BpwZLuxkClqY1i8sco+MdyoXPNiCkQou09+g==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "cpu": [ + "loong64" + ], + "dev": true, + "integrity": "sha512-rIEC0hZ17A42iXtHX+EPJVL/CakHo+tT7W0pbzdAGuWOt2jxDFh7A/lRhsNHBcqL4T36+UiAgwO8pbmn3dE8wA==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "cpu": [ + "ppc64" + ], + "dev": true, + "integrity": "sha512-T7l409NhUE552RcAOcmJHj3xyZ2h7vMWzcwQI0hvn5tqHh3oSoclf9WgTl+0QqffWFG8MEVZZP1/OBglKZx52Q==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "cpu": [ + "riscv64" + ], + "dev": true, + "integrity": "sha512-7OK5/GhxbnrMcxIFoYfhV/TkknarkYC1hqUw1wU2xUN3TVRLNT5FmBv4KkheSG2xZ6IEbRAhTooTV2+R5Tk0lQ==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "cpu": [ + "riscv64" + ], + "dev": true, + "integrity": "sha512-GwuDBE/PsXaTa76lO5eLJTyr2k8QkPipAyOrs4V/KJufHCZBJ495VCGJol35grx9xryk4V+2zd3Ri+3v7NPh+w==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "cpu": [ + "s390x" + ], + "dev": true, + "integrity": "sha512-IAE1Ziyr1qNfnmiQLHBURAD+eh/zH1pIeJjeShleII7Vj8kyEm2PF77o+lf3WTHDpNJcu4IXJxNO0Zluro8bOw==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "cpu": [ + "x64" + ], + "dev": true, + "integrity": "sha512-Pg6E+oP7GvZ4XwgRJBuSXZjcqpIW3yCBhK4BcsANvb47qMvAbCjR6E+1a/U2WXz1JJxp9/4Dno3/iSJLcm5auw==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "cpu": [ + "x64" + ], + "dev": true, + "integrity": "sha512-txGtluxDKTxaMDzUduGP0wdfng24y1rygUMnmlUJ88fzCCULCLn7oE5kb2+tRB+MWq1QDZT6ObT5RrR8HFRKqg==", + "optional": true, + "os": [ + "linux" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "cpu": [ + "arm64" + ], + "dev": true, + "integrity": "sha512-3DFiLPnTxiOQV993fMc+KO8zXHTcIjgaInrqlG8zDp1TlhYl6WgrOHuJkJQ6M8zHEcntSJsUp1XFZSY8C1DYbg==", + "optional": true, + "os": [ + "openharmony" + ], + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "cpu": [ + "arm64" + ], + "dev": true, + "integrity": "sha512-nggc/wPpNTgjGg75hu+Q/3i32R00Lq1B6N1DO7MCU340MRKL3WZJMjA9U4K4gzy3dkZPXm9E1Nc81FItBVGRlA==", + "optional": true, + "os": [ + "win32" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "cpu": [ + "ia32" + ], + "dev": true, + "integrity": "sha512-U/54pTbdQpPLBdEzCT6NBCFAfSZMvmjr0twhnD9f4EIvlm9wy3jjQ38yQj1AGznrNO65EWQMgm/QUjuIVrYF9w==", + "optional": true, + "os": [ + "win32" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "cpu": [ + "x64" + ], + "dev": true, + "integrity": "sha512-2NqKgZSuLH9SXBBV2dWNRCZmocgSOx8OJSdpRaEcRlIfX8YrKxUT6z0F1NpvDVhOsl190UFTRh2F2WDWWCYp3A==", + "optional": true, + "os": [ + "win32" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "cpu": [ + "x64" + ], + "dev": true, + "integrity": "sha512-JRpZUhCfhZ4keB5v0fe02gQJy05GqboPOaxvjugW04RLSYYoB/9t2lx2u/tMs/Na/1NXfY8QYjgRljRpN+MjTQ==", + "optional": true, + "os": [ + "win32" + ], + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/@standard-schema/spec": { + "dev": true, + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "version": "1.1.0" + }, + "node_modules/@svelte-put/shortcut": { + "integrity": "sha512-3VFU6TX4DwXT7vrtKGqwWa5WqEsFWQVZMSffE0owCwKUIBAhjGtZGy69G4+2quGhl/r+qi4jLrXkE00xw38M7g==", + "peerDependencies": { + "svelte": "^3.55.0 || ^4.0.0 || ^5.0.0" + }, + "resolved": "https://registry.npmjs.org/@svelte-put/shortcut/-/shortcut-3.2.0.tgz", + "version": "3.2.0" + }, + "node_modules/@sveltejs/acorn-typescript": { + "integrity": "sha512-esgN+54+q0NjB0Y/4BomT9samII7jGwNy/2a3wNZbT2A2RpmXsXwUt24LvLhx6jUq2gVk4cWEvcRO6MFQbOfNA==", + "peerDependencies": { + "acorn": "^8.9.0" + }, + "resolved": "https://registry.npmjs.org/@sveltejs/acorn-typescript/-/acorn-typescript-1.0.8.tgz", + "version": "1.0.8" + }, + "node_modules/@sveltejs/adapter-static": { + "dev": true, + "integrity": "sha512-7D9lYFWJmB7zxZyTE/qxjksvMqzMuYrrsyh1f4AlZqeZeACPRySjbC3aFiY55wb1tWUaKOQG9PVbm74JcN2Iew==", + "peerDependencies": { + "@sveltejs/kit": "^2.0.0" + }, + "resolved": "https://registry.npmjs.org/@sveltejs/adapter-static/-/adapter-static-3.0.10.tgz", + "version": "3.0.10" + }, + "node_modules/@sveltejs/kit": { + "bin": { + "svelte-kit": "svelte-kit.js" + }, + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "@sveltejs/acorn-typescript": "^1.0.5", + "@types/cookie": "^0.6.0", + "acorn": "^8.14.1", + "cookie": "^0.6.0", + "devalue": "^5.3.2", + "esm-env": "^1.2.2", + "kleur": "^4.1.5", + "magic-string": "^0.30.5", + "mrmime": "^2.0.0", + "sade": "^1.8.1", + "set-cookie-parser": "^2.6.0", + "sirv": "^3.0.0" + }, + "dev": true, + "engines": { + "node": ">=18.13" + }, + "integrity": 
"sha512-Vp3zX/qlwerQmHMP6x0Ry1oY7eKKRcOWGc2P59srOp4zcqyn+etJyQpELgOi4+ZSUgteX8Y387NuwruLgGXLUQ==", + "peerDependencies": { + "@opentelemetry/api": "^1.0.0", + "@sveltejs/vite-plugin-svelte": "^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0", + "svelte": "^4.0.0 || ^5.0.0-next.0", + "vite": "^5.0.3 || ^6.0.0 || ^7.0.0-beta.0" + }, + "peerDependenciesMeta": { + "@opentelemetry/api": { + "optional": true + } + }, + "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.49.2.tgz", + "version": "2.49.2" + }, + "node_modules/@sveltejs/vite-plugin-svelte": { + "dependencies": { + "@sveltejs/vite-plugin-svelte-inspector": "^3.0.0-next.0||^3.0.0", + "debug": "^4.3.7", + "deepmerge": "^4.3.1", + "kleur": "^4.1.5", + "magic-string": "^0.30.12", + "vitefu": "^1.0.3" + }, + "dev": true, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22" + }, + "integrity": "sha512-0ba1RQ/PHen5FGpdSrW7Y3fAMQjrXantECALeOiOdBdzR5+5vPP6HVZRLmZaQL+W8m++o+haIAKq5qT+MiZ7VA==", + "peerDependencies": { + "svelte": "^5.0.0-next.96 || ^5.0.0", + "vite": "^5.0.0" + }, + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-4.0.4.tgz", + "version": "4.0.4" + }, + "node_modules/@sveltejs/vite-plugin-svelte-inspector": { + "dependencies": { + "debug": "^4.3.7" + }, + "dev": true, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22" + }, + "integrity": "sha512-2CKypmj1sM4GE7HjllT7UKmo4Q6L5xFRd7VMGEWhYnZ+wc6AUVU01IBd7yUi6WnFndEwWoMNOd6e8UjoN0nbvQ==", + "peerDependencies": { + "@sveltejs/vite-plugin-svelte": "^4.0.0-next.0||^4.0.0", + "svelte": "^5.0.0-next.96 || ^5.0.0", + "vite": "^5.0.0" + }, + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-3.0.1.tgz", + "version": "3.0.1" + }, + "node_modules/@types/cookie": { + "dev": true, + "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", + "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", + "version": "0.6.0" + }, + "node_modules/@types/d3-color": { + "integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==", + "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz", + "version": "3.1.3" + }, + "node_modules/@types/d3-drag": { + "dependencies": { + "@types/d3-selection": "*" + }, + "integrity": "sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==", + "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-3.0.7.tgz", + "version": "3.0.7" + }, + "node_modules/@types/d3-interpolate": { + "dependencies": { + "@types/d3-color": "*" + }, + "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==", + "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz", + "version": "3.0.4" + }, + "node_modules/@types/d3-selection": { + "integrity": "sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==", + "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.11.tgz", + "version": "3.0.11" + }, + "node_modules/@types/d3-transition": { + "dependencies": { + "@types/d3-selection": "*" + }, + "integrity": "sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==", + "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.9.tgz", + "version": "3.0.9" + }, + 
"node_modules/@types/d3-zoom": { + "dependencies": { + "@types/d3-interpolate": "*", + "@types/d3-selection": "*" + }, + "integrity": "sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==", + "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-3.0.8.tgz", + "version": "3.0.8" + }, + "node_modules/@types/estree": { + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "version": "1.0.8" + }, + "node_modules/@types/json-schema": { + "dev": true, + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "version": "7.0.15" + }, + "node_modules/@types/node": { + "dependencies": { + "undici-types": "~6.21.0" + }, + "dev": true, + "integrity": "sha512-1N9SBnWYOJTrNZCdh/yJE+t910Y128BoyY+zBLWhL3r0TYzlTmFdXrPwHL9DyFZmlEXNQQolTZh3KHV31QDhyA==", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.3.tgz", + "version": "22.19.3" + }, + "node_modules/@types/pako": { + "integrity": "sha512-VWDCbrLeVXJM9fihYodcLiIv0ku+AlOa/TQ1SvYOaBuyrSKgEcro95LJyIsJ4vSo6BXIxOKxiJAat04CmST9Fw==", + "resolved": "https://registry.npmjs.org/@types/pako/-/pako-2.0.4.tgz", + "version": "2.0.4" + }, + "node_modules/@types/raf": { + "integrity": "sha512-c4YAvMedbPZ5tEyxzQdMoOhhJ4RD3rngZIdwC2/qDN3d7JpEhB6fiBRKVY1lg5B7Wk+uPBjn5f39j1/2MY1oOw==", + "optional": true, + "resolved": "https://registry.npmjs.org/@types/raf/-/raf-3.4.3.tgz", + "version": "3.4.3" + }, + "node_modules/@types/trusted-types": { + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + "optional": true, + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "version": "2.0.7" + }, + "node_modules/@xyflow/svelte": { + "dependencies": { + "@svelte-put/shortcut": "^3.1.0", + "@xyflow/system": "0.0.44", + "classcat": "^5.0.4" + }, + "integrity": "sha512-qFPPYcQHoltdbwb0T6FioSSNzTrCzRkLePIXLGPWFgvy6yk1bL35ztCUYvxrqmIWG42JMDFmFHOP3AyswHTInw==", + "peerDependencies": { + "svelte": "^3.0.0 || ^4.0.0 || ^5.0.0" + }, + "resolved": "https://registry.npmjs.org/@xyflow/svelte/-/svelte-0.1.22.tgz", + "version": "0.1.22" + }, + "node_modules/@xyflow/system": { + "dependencies": { + "@types/d3-drag": "^3.0.7", + "@types/d3-selection": "^3.0.10", + "@types/d3-transition": "^3.0.8", + "@types/d3-zoom": "^3.0.8", + "d3-drag": "^3.0.0", + "d3-selection": "^3.0.0", + "d3-zoom": "^3.0.0" + }, + "integrity": "sha512-hKHtH8hUVKmCCXbTdEYUWNqRkcSBwYxdzZhIxpJst60AEnlobfphNu8eAOJArEJJl+MrjidvY5K/BOzYUcwCug==", + "resolved": "https://registry.npmjs.org/@xyflow/system/-/system-0.0.44.tgz", + "version": "0.0.44" + }, + "node_modules/acorn": { + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + }, + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "version": "8.15.0" + }, + "node_modules/acorn-jsx": { + "dev": true, + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + }, + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "version": "5.3.2" + 
}, + "node_modules/ajv": { + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + }, + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "version": "6.12.6" + }, + "node_modules/ansi-styles": { + "dependencies": { + "color-convert": "^2.0.1" + }, + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + }, + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "version": "4.3.0" + }, + "node_modules/any-promise": { + "dev": true, + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "version": "1.3.0" + }, + "node_modules/anymatch": { + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "dev": true, + "engines": { + "node": ">= 8" + }, + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "version": "3.1.3" + }, + "node_modules/anymatch/node_modules/picomatch": { + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + }, + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "version": "2.3.1" + }, + "node_modules/arg": { + "dev": true, + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "version": "5.0.2" + }, + "node_modules/argparse": { + "dev": true, + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "version": "2.0.1" + }, + "node_modules/aria-query": { + "engines": { + "node": ">= 0.4" + }, + "integrity": "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.2.tgz", + "version": "5.3.2" + }, + "node_modules/autoprefixer": { + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "dependencies": { + "browserslist": "^4.28.1", + "caniuse-lite": "^1.0.30001760", + "fraction.js": "^5.3.4", + "picocolors": "^1.1.1", + "postcss-value-parser": "^4.2.0" + }, + "dev": true, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "integrity": "sha512-YYTXSFulfwytnjAPlw8QHncHJmlvFKtczb8InXaAx9Q0LbfDnfEYDE55omerIJKihhmU61Ft+cAOSzQVaBUmeA==", + "peerDependencies": { + "postcss": "^8.1.0" + }, + "resolved": 
"https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.23.tgz", + "version": "10.4.23" + }, + "node_modules/axobject-query": { + "engines": { + "node": ">= 0.4" + }, + "integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz", + "version": "4.1.0" + }, + "node_modules/balanced-match": { + "dev": true, + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "version": "1.0.2" + }, + "node_modules/base64-arraybuffer": { + "engines": { + "node": ">= 0.6.0" + }, + "integrity": "sha512-I3yl4r9QB5ZRY3XuJVEPfc2XhZO6YweFPI+UovAzn+8/hb3oJ6lnysaFcjVpkCPfVWFUDvoZ8kmVDP7WyRtYtQ==", + "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz", + "version": "1.0.2" + }, + "node_modules/baseline-browser-mapping": { + "bin": { + "baseline-browser-mapping": "dist/cli.js" + }, + "dev": true, + "integrity": "sha512-Y1fOuNDowLfgKOypdc9SPABfoWXuZHBOyCS4cD52IeZBhr4Md6CLLs6atcxVrzRmQ06E7hSlm5bHHApPKR/byA==", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.8.tgz", + "version": "2.9.8" + }, + "node_modules/binary-extensions": { + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + }, + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "version": "2.3.0" + }, + "node_modules/brace-expansion": { + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + }, + "dev": true, + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "version": "1.1.12" + }, + "node_modules/braces": { + "dependencies": { + "fill-range": "^7.1.1" + }, + "dev": true, + "engines": { + "node": ">=8" + }, + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "version": "3.0.3" + }, + "node_modules/browserslist": { + "bin": { + "browserslist": "cli.js" + }, + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "dev": true, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + }, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "version": "4.28.1" + }, + "node_modules/callsites": { + "dev": true, + "engines": { + "node": ">=6" + }, + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "resolved": 
"https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "version": "3.1.0" + }, + "node_modules/camelcase-css": { + "dev": true, + "engines": { + "node": ">= 6" + }, + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "version": "2.0.1" + }, + "node_modules/caniuse-lite": { + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "integrity": "sha512-7AAMPcueWELt1p3mi13HR/LHH0TJLT11cnwDJEs3xA4+CK/PLKeO9Kl1oru24htkyUKtkGCvAx4ohB0Ttry8Dw==", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001760.tgz", + "version": "1.0.30001760" + }, + "node_modules/canvg": { + "dependencies": { + "@babel/runtime": "^7.12.5", + "@types/raf": "^3.4.0", + "core-js": "^3.8.3", + "raf": "^3.4.1", + "regenerator-runtime": "^0.13.7", + "rgbcolor": "^1.0.1", + "stackblur-canvas": "^2.0.0", + "svg-pathdata": "^6.0.3" + }, + "engines": { + "node": ">=10.0.0" + }, + "integrity": "sha512-5ON+q7jCTgMp9cjpu4Jo6XbvfYwSB2Ow3kzHKfIyJfaCAOHLbdKPQqGKgfED/R5B+3TFFfe8pegYA+b423SRyA==", + "optional": true, + "resolved": "https://registry.npmjs.org/canvg/-/canvg-3.0.11.tgz", + "version": "3.0.11" + }, + "node_modules/chalk": { + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + }, + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "version": "4.1.2" + }, + "node_modules/chart.js": { + "dependencies": { + "@kurkle/color": "^0.3.0" + }, + "engines": { + "pnpm": ">=8" + }, + "integrity": "sha512-GIjfiT9dbmHRiYi6Nl2yFCq7kkwdkp1W/lp2J99rX0yo9tgJGn3lKQATztIjb5tVtevcBtIdICNWqlq5+E8/Pw==", + "license": "MIT", + "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.5.1.tgz", + "version": "4.5.1" + }, + "node_modules/chokidar": { + "dependencies": { + "readdirp": "^4.0.1" + }, + "dev": true, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "version": "4.0.3" + }, + "node_modules/classcat": { + "integrity": "sha512-JhZUT7JFcQy/EzW605k/ktHtncoo9vnyW/2GspNYwFlN1C/WmjuV/xtS04e9SOkL2sTdw0VAZ2UGCcQ9lR6p6w==", + "resolved": "https://registry.npmjs.org/classcat/-/classcat-5.0.5.tgz", + "version": "5.0.5" + }, + "node_modules/clsx": { + "engines": { + "node": ">=6" + }, + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "version": "2.1.1" + }, + "node_modules/color-convert": { + "dependencies": { + "color-name": "~1.1.4" + }, + "dev": true, + "engines": { + "node": ">=7.0.0" + }, + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + 
"version": "2.0.1" + }, + "node_modules/color-name": { + "dev": true, + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "version": "1.1.4" + }, + "node_modules/commander": { + "dev": true, + "engines": { + "node": ">= 6" + }, + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "version": "4.1.1" + }, + "node_modules/concat-map": { + "dev": true, + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "version": "0.0.1" + }, + "node_modules/cookie": { + "dev": true, + "engines": { + "node": ">= 0.6" + }, + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "version": "0.6.0" + }, + "node_modules/core-js": { + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + }, + "hasInstallScript": true, + "integrity": "sha512-c3Q2VVkGAUyupsjRnaNX6u8Dq2vAdzm9iuPj5FW0fRxzlxgq9Q39MDq10IvmQSpLgHQNyQzQmOo6bgGHmH3NNg==", + "optional": true, + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.47.0.tgz", + "version": "3.47.0" + }, + "node_modules/cross-spawn": { + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "dev": true, + "engines": { + "node": ">= 8" + }, + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "version": "7.0.6" + }, + "node_modules/css-line-break": { + "dependencies": { + "utrie": "^1.0.2" + }, + "integrity": "sha512-FHcKFCZcAha3LwfVBhCQbW2nCNbkZXn7KVUJcsT5/P8YmfsVja0FMPJr0B903j/E69HUphKiV9iQArX8SDYA4w==", + "resolved": "https://registry.npmjs.org/css-line-break/-/css-line-break-2.1.0.tgz", + "version": "2.1.0" + }, + "node_modules/cssesc": { + "bin": { + "cssesc": "bin/cssesc" + }, + "dev": true, + "engines": { + "node": ">=4" + }, + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "version": "3.0.0" + }, + "node_modules/d3-array": { + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "version": "3.2.4" + }, + "node_modules/d3-color": { + "engines": { + "node": ">=12" + }, + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "version": "3.1.0" + }, + "node_modules/d3-dag": { + "dependencies": { + "d3-array": "^3.2.4", + "javascript-lp-solver": "0.4.24", + "quadprog": "^1.6.1", + "stringify-object": "^5.0.0" + }, + "integrity": "sha512-N8IxsIHcUaIxLrV3cElTC47kVJGFiY3blqSuJubQhyhYBJs0syfFPTnRSj2Cq0LBxxi4mzJmcqCvHIv9sPdILQ==", + "resolved": "https://registry.npmjs.org/d3-dag/-/d3-dag-1.1.0.tgz", + "version": "1.1.0" + }, + 
"node_modules/d3-dispatch": { + "engines": { + "node": ">=12" + }, + "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", + "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", + "version": "3.0.1" + }, + "node_modules/d3-drag": { + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-selection": "3" + }, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", + "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", + "version": "3.0.0" + }, + "node_modules/d3-ease": { + "engines": { + "node": ">=12" + }, + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "version": "3.0.1" + }, + "node_modules/d3-interpolate": { + "dependencies": { + "d3-color": "1 - 3" + }, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "version": "3.0.1" + }, + "node_modules/d3-selection": { + "engines": { + "node": ">=12" + }, + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "version": "3.0.0" + }, + "node_modules/d3-timer": { + "engines": { + "node": ">=12" + }, + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "version": "3.0.1" + }, + "node_modules/d3-transition": { + "dependencies": { + "d3-color": "1 - 3", + "d3-dispatch": "1 - 3", + "d3-ease": "1 - 3", + "d3-interpolate": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", + "peerDependencies": { + "d3-selection": "2 - 3" + }, + "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", + "version": "3.0.1" + }, + "node_modules/d3-zoom": { + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "2 - 3", + "d3-transition": "2 - 3" + }, + "engines": { + "node": ">=12" + }, + "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", + "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", + "version": "3.0.0" + }, + "node_modules/debug": { + "dependencies": { + "ms": "^2.1.3" + }, + "dev": true, + "engines": { + "node": ">=6.0" + }, + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + }, + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "version": "4.4.3" + }, + "node_modules/deep-is": { + "dev": true, + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "version": "0.1.4" + }, + "node_modules/deepmerge": { + "dev": true, + "engines": { + "node": ">=0.10.0" + }, + "integrity": 
"sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "version": "4.3.1" + }, + "node_modules/devalue": { + "integrity": "sha512-jDwizj+IlEZBunHcOuuFVBnIMPAEHvTsJj0BcIp94xYguLRVBcXO853px/MyIJvbVzWdsGvrRweIUWJw8hBP7A==", + "resolved": "https://registry.npmjs.org/devalue/-/devalue-5.6.1.tgz", + "version": "5.6.1" + }, + "node_modules/didyoumean": { + "dev": true, + "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", + "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", + "version": "1.2.2" + }, + "node_modules/dlv": { + "dev": true, + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "version": "1.1.3" + }, + "node_modules/dompurify": { + "integrity": "sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==", + "optional": true, + "optionalDependencies": { + "@types/trusted-types": "^2.0.7" + }, + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.1.tgz", + "version": "3.3.1" + }, + "node_modules/electron-to-chromium": { + "dev": true, + "integrity": "sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.267.tgz", + "version": "1.5.267" + }, + "node_modules/esbuild": { + "bin": { + "esbuild": "bin/esbuild" + }, + "dev": true, + "engines": { + "node": ">=12" + }, + "hasInstallScript": true, + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + }, + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "version": "0.21.5" + }, + "node_modules/escalade": { + "dev": true, + "engines": { + "node": ">=6" + }, + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "version": "3.2.0" + }, + "node_modules/escape-string-regexp": { + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + }, + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "version": "4.0.0" + }, + "node_modules/eslint": { + "bin": { + "eslint": 
"bin/eslint.js" + }, + "dependencies": { + "@eslint-community/eslint-utils": "^4.8.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.21.1", + "@eslint/config-helpers": "^0.4.2", + "@eslint/core": "^0.17.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.39.2", + "@eslint/plugin-kit": "^0.4.1", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==", + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + }, + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.2.tgz", + "version": "9.39.2" + }, + "node_modules/eslint-scope": { + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "version": "8.4.0" + }, + "node_modules/eslint-visitor-keys": { + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "version": "4.2.1" + }, + "node_modules/esm-env": { + "integrity": "sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA==", + "resolved": "https://registry.npmjs.org/esm-env/-/esm-env-1.2.2.tgz", + "version": "1.2.2" + }, + "node_modules/espree": { + "dependencies": { + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.1" + }, + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "version": "10.4.0" + }, + "node_modules/esquery": { + "dependencies": { + "estraverse": "^5.1.0" + }, + "dev": true, + "engines": { + "node": ">=0.10" + }, + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "version": "1.6.0" + }, + "node_modules/esrap": { + "dependencies": { + 
"@jridgewell/sourcemap-codec": "^1.4.15" + }, + "integrity": "sha512-GiYWG34AN/4CUyaWAgunGt0Rxvr1PTMlGC0vvEov/uOQYWne2bpN03Um+k8jT+q3op33mKouP2zeJ6OlM+qeUg==", + "resolved": "https://registry.npmjs.org/esrap/-/esrap-2.2.1.tgz", + "version": "2.2.1" + }, + "node_modules/esrecurse": { + "dependencies": { + "estraverse": "^5.2.0" + }, + "dev": true, + "engines": { + "node": ">=4.0" + }, + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "version": "4.3.0" + }, + "node_modules/estraverse": { + "dev": true, + "engines": { + "node": ">=4.0" + }, + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "version": "5.3.0" + }, + "node_modules/esutils": { + "dev": true, + "engines": { + "node": ">=0.10.0" + }, + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "version": "2.0.3" + }, + "node_modules/fast-deep-equal": { + "dev": true, + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "version": "3.1.3" + }, + "node_modules/fast-equals": { + "engines": { + "node": ">=6.0.0" + }, + "integrity": "sha512-jt2DW/aNFNwke7AUd+Z+e6pz39KO5rzdbbFCg2sGafS4mk13MI7Z8O5z9cADNn5lhGODIgLwug6TZO2ctf7kcw==", + "resolved": "https://registry.npmjs.org/fast-equals/-/fast-equals-5.4.0.tgz", + "version": "5.4.0" + }, + "node_modules/fast-glob": { + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "dev": true, + "engines": { + "node": ">=8.6.0" + }, + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "version": "3.3.3" + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "dependencies": { + "is-glob": "^4.0.1" + }, + "dev": true, + "engines": { + "node": ">= 6" + }, + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "version": "5.1.2" + }, + "node_modules/fast-json-stable-stringify": { + "dev": true, + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "version": "2.1.0" + }, + "node_modules/fast-levenshtein": { + "dev": true, + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "version": "2.0.6" + }, + "node_modules/fast-png": { + "dependencies": { + "@types/pako": "^2.0.3", + "iobuffer": "^5.3.2", + "pako": "^2.1.0" + }, + "integrity": "sha512-kAqZq1TlgBjZcLr5mcN6NP5Rv4V2f22z00c3g8vRrwkcqjerx7BEhPbOnWCPqaHUl2XWQBJQvOT/FQhdMT7X/Q==", + "resolved": "https://registry.npmjs.org/fast-png/-/fast-png-6.4.0.tgz", + "version": "6.4.0" + }, + 
"node_modules/fastq": { + "dependencies": { + "reusify": "^1.0.4" + }, + "dev": true, + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "version": "1.19.1" + }, + "node_modules/fdir": { + "dev": true, + "engines": { + "node": ">=12.0.0" + }, + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + }, + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "version": "6.5.0" + }, + "node_modules/fflate": { + "integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==", + "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz", + "version": "0.8.2" + }, + "node_modules/file-entry-cache": { + "dependencies": { + "flat-cache": "^4.0.0" + }, + "dev": true, + "engines": { + "node": ">=16.0.0" + }, + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "version": "8.0.0" + }, + "node_modules/fill-range": { + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "dev": true, + "engines": { + "node": ">=8" + }, + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "version": "7.1.1" + }, + "node_modules/find-up": { + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + }, + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "version": "5.0.0" + }, + "node_modules/flat-cache": { + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "dev": true, + "engines": { + "node": ">=16" + }, + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "version": "4.0.1" + }, + "node_modules/flatted": { + "dev": true, + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "version": "3.3.3" + }, + "node_modules/fraction.js": { + "dev": true, + "engines": { + "node": "*" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/rawify" + }, + "integrity": "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz", + "version": "5.3.4" + }, + "node_modules/fsevents": { + "dev": true, + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + }, + "hasInstallScript": true, + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "optional": true, + "os": [ + "darwin" + ], + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "version": "2.3.3" + }, + "node_modules/function-bind": 
{ + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "version": "1.1.2" + }, + "node_modules/get-own-enumerable-keys": { + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + }, + "integrity": "sha512-PKsK2FSrQCyxcGHsGrLDcK0lx+0Ke+6e8KFFozA9/fIQLhQzPaRvJFdcz7+Axg3jUH/Mq+NI4xa5u/UT2tQskA==", + "resolved": "https://registry.npmjs.org/get-own-enumerable-keys/-/get-own-enumerable-keys-1.0.0.tgz", + "version": "1.0.0" + }, + "node_modules/glob-parent": { + "dependencies": { + "is-glob": "^4.0.3" + }, + "dev": true, + "engines": { + "node": ">=10.13.0" + }, + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "version": "6.0.2" + }, + "node_modules/globals": { + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + }, + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "version": "14.0.0" + }, + "node_modules/has-flag": { + "dev": true, + "engines": { + "node": ">=8" + }, + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "version": "4.0.0" + }, + "node_modules/hasown": { + "dependencies": { + "function-bind": "^1.1.2" + }, + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "version": "2.0.2" + }, + "node_modules/html2canvas": { + "dependencies": { + "css-line-break": "^2.1.0", + "text-segmentation": "^1.0.3" + }, + "engines": { + "node": ">=8.0.0" + }, + "integrity": "sha512-fPU6BHNpsyIhr8yyMpTLLxAbkaK8ArIBcmZIRiBLiDhjeqvXolaEmDGmELFuX9I4xDcaKKcJl+TKZLqruBbmWA==", + "resolved": "https://registry.npmjs.org/html2canvas/-/html2canvas-1.4.1.tgz", + "version": "1.4.1" + }, + "node_modules/ignore": { + "dev": true, + "engines": { + "node": ">= 4" + }, + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "version": "5.3.2" + }, + "node_modules/import-fresh": { + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "dev": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + }, + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "version": "3.3.1" + }, + "node_modules/imurmurhash": { + "dev": true, + "engines": { + "node": ">=0.8.19" + }, + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "version": "0.1.4" + }, + "node_modules/internmap": { + "engines": { + 
"node": ">=12" + }, + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "version": "2.0.3" + }, + "node_modules/iobuffer": { + "integrity": "sha512-DRebOWuqDvxunfkNJAlc3IzWIPD5xVxwUNbHr7xKB8E6aLJxIPfNX3CoMJghcFjpv6RWQsrcJbghtEwSPoJqMA==", + "resolved": "https://registry.npmjs.org/iobuffer/-/iobuffer-5.4.0.tgz", + "version": "5.4.0" + }, + "node_modules/is-binary-path": { + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "dev": true, + "engines": { + "node": ">=8" + }, + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "version": "2.1.0" + }, + "node_modules/is-core-module": { + "dependencies": { + "hasown": "^2.0.2" + }, + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "version": "2.16.1" + }, + "node_modules/is-extglob": { + "dev": true, + "engines": { + "node": ">=0.10.0" + }, + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "version": "2.1.1" + }, + "node_modules/is-glob": { + "dependencies": { + "is-extglob": "^2.1.1" + }, + "dev": true, + "engines": { + "node": ">=0.10.0" + }, + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "version": "4.0.3" + }, + "node_modules/is-number": { + "dev": true, + "engines": { + "node": ">=0.12.0" + }, + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "version": "7.0.0" + }, + "node_modules/is-obj": { + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + }, + "integrity": "sha512-IlsXEHOjtKhpN8r/tRFj2nDyTmHvcfNeu/nrRIcXE17ROeatXchkojffa1SpdqW4cr/Fj6QkEf/Gn4zf6KKvEQ==", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-3.0.0.tgz", + "version": "3.0.0" + }, + "node_modules/is-reference": { + "dependencies": { + "@types/estree": "^1.0.6" + }, + "integrity": "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.3.tgz", + "version": "3.0.3" + }, + "node_modules/is-regexp": { + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + }, + "integrity": "sha512-rbku49cWloU5bSMI+zaRaXdQHXnthP6DZ/vLnfdSKyL4zUzuWnomtOEiZZOd+ioQ+avFo/qau3KPTc7Fjy1uPA==", + "resolved": "https://registry.npmjs.org/is-regexp/-/is-regexp-3.1.0.tgz", + "version": "3.1.0" + }, + "node_modules/isexe": { + "dev": true, + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "version": "2.0.0" + }, + "node_modules/javascript-lp-solver": { + "integrity": 
"sha512-5edoDKnMrt/u3M6GnZKDDIPxOyFOg+WrwDv8mjNiMC2DePhy2H9/FFQgf4ggywaXT1utvkxusJcjQUER72cZmA==", + "resolved": "https://registry.npmjs.org/javascript-lp-solver/-/javascript-lp-solver-0.4.24.tgz", + "version": "0.4.24" + }, + "node_modules/jiti": { + "bin": { + "jiti": "bin/jiti.js" + }, + "dev": true, + "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", + "version": "1.21.7" + }, + "node_modules/js-yaml": { + "bin": { + "js-yaml": "bin/js-yaml.js" + }, + "dependencies": { + "argparse": "^2.0.1" + }, + "dev": true, + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "version": "4.1.1" + }, + "node_modules/json-buffer": { + "dev": true, + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "version": "3.0.1" + }, + "node_modules/json-schema-traverse": { + "dev": true, + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "version": "0.4.1" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "dev": true, + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "version": "1.0.1" + }, + "node_modules/jspdf": { + "dependencies": { + "@babel/runtime": "^7.28.4", + "fast-png": "^6.2.0", + "fflate": "^0.8.1" + }, + "integrity": "sha512-dc6oQ8y37rRcHn316s4ngz/nOjayLF/FFxBF4V9zamQKRqXxyiH1zagkCdktdWhtoQId5K20xt1lB90XzkB+hQ==", + "optionalDependencies": { + "canvg": "^3.0.11", + "core-js": "^3.6.0", + "dompurify": "^3.2.4", + "html2canvas": "^1.0.0-rc.5" + }, + "resolved": "https://registry.npmjs.org/jspdf/-/jspdf-3.0.4.tgz", + "version": "3.0.4" + }, + "node_modules/keyv": { + "dependencies": { + "json-buffer": "3.0.1" + }, + "dev": true, + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "version": "4.5.4" + }, + "node_modules/kleur": { + "dev": true, + "engines": { + "node": ">=6" + }, + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "version": "4.1.5" + }, + "node_modules/levn": { + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "dev": true, + "engines": { + "node": ">= 0.8.0" + }, + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "version": "0.4.1" + }, + "node_modules/lilconfig": { + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + }, + "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", + "version": "3.1.3" + }, + 
"node_modules/lines-and-columns": { + "dev": true, + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "version": "1.2.4" + }, + "node_modules/locate-character": { + "integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==", + "resolved": "https://registry.npmjs.org/locate-character/-/locate-character-3.0.0.tgz", + "version": "3.0.0" + }, + "node_modules/locate-path": { + "dependencies": { + "p-locate": "^5.0.0" + }, + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + }, + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "version": "6.0.0" + }, + "node_modules/lodash.merge": { + "dev": true, + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "version": "4.6.2" + }, + "node_modules/lucide-svelte": { + "integrity": "sha512-n0ecAFtCY5LEeL+PJ1Xj4n3c2gzj8tMpak0KMGnvoSJEjCsCnRB0mekBtJZAo7beyynW9Qj5Um1KfMBAeTNplw==", + "peerDependencies": { + "svelte": "^3 || ^4 || ^5.0.0-next.42" + }, + "resolved": "https://registry.npmjs.org/lucide-svelte/-/lucide-svelte-0.468.0.tgz", + "version": "0.468.0" + }, + "node_modules/magic-string": { + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + }, + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "version": "0.30.21" + }, + "node_modules/merge2": { + "dev": true, + "engines": { + "node": ">= 8" + }, + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "version": "1.4.1" + }, + "node_modules/micromatch": { + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "dev": true, + "engines": { + "node": ">=8.6" + }, + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "version": "4.0.8" + }, + "node_modules/micromatch/node_modules/picomatch": { + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + }, + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "version": "2.3.1" + }, + "node_modules/minimatch": { + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "dev": true, + "engines": { + "node": "*" + }, + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "version": "3.1.2" + }, + "node_modules/monaco-editor": { + "integrity": "sha512-GEQWEZmfkOGLdd3XK8ryrfWz3AIP8YymVXiPHEdewrUq7mh0qrKrfHLNCXcbB6sTnMLnOZ3ztSiKcciFUkIJwQ==", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.52.2.tgz", + 
"version": "0.52.2" + }, + "node_modules/mri": { + "dev": true, + "engines": { + "node": ">=4" + }, + "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==", + "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", + "version": "1.2.0" + }, + "node_modules/mrmime": { + "dev": true, + "engines": { + "node": ">=10" + }, + "integrity": "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==", + "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz", + "version": "2.0.1" + }, + "node_modules/ms": { + "dev": true, + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "version": "2.1.3" + }, + "node_modules/mz": { + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + }, + "dev": true, + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "version": "2.7.0" + }, + "node_modules/nanoid": { + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "dev": true, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "version": "3.3.11" + }, + "node_modules/natural-compare": { + "dev": true, + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "version": "1.4.0" + }, + "node_modules/node-releases": { + "dev": true, + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "version": "2.0.27" + }, + "node_modules/normalize-path": { + "dev": true, + "engines": { + "node": ">=0.10.0" + }, + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "version": "3.0.0" + }, + "node_modules/object-assign": { + "dev": true, + "engines": { + "node": ">=0.10.0" + }, + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "version": "4.1.1" + }, + "node_modules/object-hash": { + "dev": true, + "engines": { + "node": ">= 6" + }, + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "version": "3.0.0" + }, + "node_modules/optionator": { + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "dev": true, + "engines": { + "node": ">= 0.8.0" + }, + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "resolved": 
"https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "version": "0.9.4" + }, + "node_modules/p-limit": { + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + }, + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "version": "3.1.0" + }, + "node_modules/p-locate": { + "dependencies": { + "p-limit": "^3.0.2" + }, + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + }, + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "version": "5.0.0" + }, + "node_modules/pako": { + "integrity": "sha512-w+eufiZ1WuJYgPXbV/PO3NCMEc3xqylkKHzp8bxp1uW4qaSNQUkwmLLEc3kKsfz8lpV1F8Ht3U1Cm+9Srog2ug==", + "resolved": "https://registry.npmjs.org/pako/-/pako-2.1.0.tgz", + "version": "2.1.0" + }, + "node_modules/parent-module": { + "dependencies": { + "callsites": "^3.0.0" + }, + "dev": true, + "engines": { + "node": ">=6" + }, + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "version": "1.0.1" + }, + "node_modules/path-exists": { + "dev": true, + "engines": { + "node": ">=8" + }, + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "version": "4.0.0" + }, + "node_modules/path-key": { + "dev": true, + "engines": { + "node": ">=8" + }, + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "version": "3.1.1" + }, + "node_modules/path-parse": { + "dev": true, + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "version": "1.0.7" + }, + "node_modules/performance-now": { + "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==", + "optional": true, + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "version": "2.1.0" + }, + "node_modules/picocolors": { + "dev": true, + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "version": "1.1.1" + }, + "node_modules/picomatch": { + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + }, + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "version": "4.0.3" + }, + "node_modules/pify": { + "dev": true, + "engines": { + "node": ">=0.10.0" + }, + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + 
"version": "2.3.0" + }, + "node_modules/pirates": { + "dev": true, + "engines": { + "node": ">= 6" + }, + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "version": "4.0.7" + }, + "node_modules/postcss": { + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "dev": true, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "version": "8.5.6" + }, + "node_modules/postcss-import": { + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "dev": true, + "engines": { + "node": ">=14.0.0" + }, + "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", + "peerDependencies": { + "postcss": "^8.0.0" + }, + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "version": "15.1.0" + }, + "node_modules/postcss-js": { + "dependencies": { + "camelcase-css": "^2.0.1" + }, + "dev": true, + "engines": { + "node": "^12 || ^14 || >= 16" + }, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "integrity": "sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw==", + "peerDependencies": { + "postcss": "^8.4.21" + }, + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.1.0.tgz", + "version": "4.1.0" + }, + "node_modules/postcss-load-config": { + "dependencies": { + "lilconfig": "^3.1.1" + }, + "dev": true, + "engines": { + "node": ">= 18" + }, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "integrity": "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==", + "peerDependencies": { + "jiti": ">=1.21.0", + "postcss": ">=8.0.9", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + }, + "postcss": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + }, + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-6.0.1.tgz", + "version": "6.0.1" + }, + "node_modules/postcss-nested": { + "dependencies": { + "postcss-selector-parser": "^6.1.1" + }, + "dev": true, + "engines": { + "node": ">=12.0" + }, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==", + "peerDependencies": { + "postcss": "^8.2.14" + }, + "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz", + "version": "6.2.0" + }, + "node_modules/postcss-selector-parser": { + 
"dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "dev": true, + "engines": { + "node": ">=4" + }, + "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", + "version": "6.1.2" + }, + "node_modules/postcss-value-parser": { + "dev": true, + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "version": "4.2.0" + }, + "node_modules/prelude-ls": { + "dev": true, + "engines": { + "node": ">= 0.8.0" + }, + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "version": "1.2.1" + }, + "node_modules/punycode": { + "dev": true, + "engines": { + "node": ">=6" + }, + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "version": "2.3.1" + }, + "node_modules/quadprog": { + "engines": { + "node": ">=8.x" + }, + "integrity": "sha512-fN5Jkcjlln/b3pJkseDKREf89JkKIyu6cKIVXisgL6ocKPQ0yTp9n6NZUAq3otEPPw78WZMG9K0o9WsfKyMWJw==", + "resolved": "https://registry.npmjs.org/quadprog/-/quadprog-1.6.1.tgz", + "version": "1.6.1" + }, + "node_modules/queue-microtask": { + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "version": "1.2.3" + }, + "node_modules/raf": { + "dependencies": { + "performance-now": "^2.1.0" + }, + "integrity": "sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA==", + "optional": true, + "resolved": "https://registry.npmjs.org/raf/-/raf-3.4.1.tgz", + "version": "3.4.1" + }, + "node_modules/read-cache": { + "dependencies": { + "pify": "^2.3.0" + }, + "dev": true, + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "version": "1.0.0" + }, + "node_modules/readdirp": { + "dev": true, + "engines": { + "node": ">= 14.18.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + }, + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "version": "4.1.2" + }, + "node_modules/regenerator-runtime": { + "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==", + "optional": true, + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz", + "version": "0.13.11" + }, + "node_modules/resolve": { + "bin": { + "resolve": "bin/resolve" + }, + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + 
}, + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "version": "1.22.11" + }, + "node_modules/resolve-from": { + "dev": true, + "engines": { + "node": ">=4" + }, + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "version": "4.0.0" + }, + "node_modules/reusify": { + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + }, + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "version": "1.1.0" + }, + "node_modules/rgbcolor": { + "engines": { + "node": ">= 0.8.15" + }, + "integrity": "sha512-9aZLIrhRaD97sgVhtJOW6ckOEh6/GnvQtdVNfdZ6s67+3/XwLS9lBcQYzEEhYVeUowN7pRzMLsyGhK2i/xvWbw==", + "optional": true, + "resolved": "https://registry.npmjs.org/rgbcolor/-/rgbcolor-1.0.1.tgz", + "version": "1.0.1" + }, + "node_modules/rollup": { + "bin": { + "rollup": "dist/bin/rollup" + }, + "dependencies": { + "@types/estree": "1.0.8" + }, + "dev": true, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "integrity": "sha512-iTNAbFSlRpcHeeWu73ywU/8KuU/LZmNCSxp6fjQkJBD3ivUb8tpDrXhIxEzA05HlYMEwmtaUnb3RP+YNv162OQ==", + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.53.5", + "@rollup/rollup-android-arm64": "4.53.5", + "@rollup/rollup-darwin-arm64": "4.53.5", + "@rollup/rollup-darwin-x64": "4.53.5", + "@rollup/rollup-freebsd-arm64": "4.53.5", + "@rollup/rollup-freebsd-x64": "4.53.5", + "@rollup/rollup-linux-arm-gnueabihf": "4.53.5", + "@rollup/rollup-linux-arm-musleabihf": "4.53.5", + "@rollup/rollup-linux-arm64-gnu": "4.53.5", + "@rollup/rollup-linux-arm64-musl": "4.53.5", + "@rollup/rollup-linux-loong64-gnu": "4.53.5", + "@rollup/rollup-linux-ppc64-gnu": "4.53.5", + "@rollup/rollup-linux-riscv64-gnu": "4.53.5", + "@rollup/rollup-linux-riscv64-musl": "4.53.5", + "@rollup/rollup-linux-s390x-gnu": "4.53.5", + "@rollup/rollup-linux-x64-gnu": "4.53.5", + "@rollup/rollup-linux-x64-musl": "4.53.5", + "@rollup/rollup-openharmony-arm64": "4.53.5", + "@rollup/rollup-win32-arm64-msvc": "4.53.5", + "@rollup/rollup-win32-ia32-msvc": "4.53.5", + "@rollup/rollup-win32-x64-gnu": "4.53.5", + "@rollup/rollup-win32-x64-msvc": "4.53.5", + "fsevents": "~2.3.2" + }, + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.53.5.tgz", + "version": "4.53.5" + }, + "node_modules/run-parallel": { + "dependencies": { + "queue-microtask": "^1.2.2" + }, + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "version": "1.2.0" + }, + "node_modules/sade": { + "dependencies": { + "mri": "^1.1.0" + }, + "dev": true, + "engines": { + "node": ">=6" + }, + "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==", + 
"resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz", + "version": "1.8.1" + }, + "node_modules/set-cookie-parser": { + "dev": true, + "integrity": "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==", + "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz", + "version": "2.7.2" + }, + "node_modules/shebang-command": { + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "dev": true, + "engines": { + "node": ">=8" + }, + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "version": "2.0.0" + }, + "node_modules/shebang-regex": { + "dev": true, + "engines": { + "node": ">=8" + }, + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "version": "3.0.0" + }, + "node_modules/sirv": { + "dependencies": { + "@polka/url": "^1.0.0-next.24", + "mrmime": "^2.0.0", + "totalist": "^3.0.0" + }, + "dev": true, + "engines": { + "node": ">=18" + }, + "integrity": "sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==", + "resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.2.tgz", + "version": "3.0.2" + }, + "node_modules/source-map-js": { + "dev": true, + "engines": { + "node": ">=0.10.0" + }, + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "version": "1.2.1" + }, + "node_modules/stackblur-canvas": { + "engines": { + "node": ">=0.1.14" + }, + "integrity": "sha512-yf7OENo23AGJhBriGx0QivY5JP6Y1HbrrDI6WLt6C5auYZXlQrheoY8hD4ibekFKz1HOfE48Ww8kMWMnJD/zcQ==", + "optional": true, + "resolved": "https://registry.npmjs.org/stackblur-canvas/-/stackblur-canvas-2.7.0.tgz", + "version": "2.7.0" + }, + "node_modules/state-local": { + "integrity": "sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==", + "resolved": "https://registry.npmjs.org/state-local/-/state-local-1.0.7.tgz", + "version": "1.0.7" + }, + "node_modules/stringify-object": { + "dependencies": { + "get-own-enumerable-keys": "^1.0.0", + "is-obj": "^3.0.0", + "is-regexp": "^3.1.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/yeoman/stringify-object?sponsor=1" + }, + "integrity": "sha512-zaJYxz2FtcMb4f+g60KsRNFOpVMUyuJgA51Zi5Z1DOTC3S59+OQiVOzE9GZt0x72uBGWKsQIuBKeF9iusmKFsg==", + "resolved": "https://registry.npmjs.org/stringify-object/-/stringify-object-5.0.0.tgz", + "version": "5.0.0" + }, + "node_modules/strip-json-comments": { + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + }, + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "version": "3.1.1" + }, + "node_modules/sucrase": { + "bin": { + "sucrase": "bin/sucrase", + "sucrase-node": "bin/sucrase-node" + }, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "tinyglobby": "^0.2.11", + "ts-interface-checker": 
"^0.1.9" + }, + "dev": true, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "integrity": "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.1.tgz", + "version": "3.35.1" + }, + "node_modules/supports-color": { + "dependencies": { + "has-flag": "^4.0.0" + }, + "dev": true, + "engines": { + "node": ">=8" + }, + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "version": "7.2.0" + }, + "node_modules/supports-preserve-symlinks-flag": { + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "version": "1.0.0" + }, + "node_modules/svelte": { + "dependencies": { + "@jridgewell/remapping": "^2.3.4", + "@jridgewell/sourcemap-codec": "^1.5.0", + "@sveltejs/acorn-typescript": "^1.0.5", + "@types/estree": "^1.0.5", + "acorn": "^8.12.1", + "aria-query": "^5.3.1", + "axobject-query": "^4.1.0", + "clsx": "^2.1.1", + "devalue": "^5.5.0", + "esm-env": "^1.2.1", + "esrap": "^2.2.1", + "is-reference": "^3.0.3", + "locate-character": "^3.0.0", + "magic-string": "^0.30.11", + "zimmerframe": "^1.1.2" + }, + "engines": { + "node": ">=18" + }, + "integrity": "sha512-ZhLtvroYxUxr+HQJfMZEDRsGsmU46x12RvAv/zi9584f5KOX7bUrEbhPJ7cKFmUvZTJXi/CFZUYwDC6M1FigPw==", + "resolved": "https://registry.npmjs.org/svelte/-/svelte-5.46.0.tgz", + "version": "5.46.0" + }, + "node_modules/svelte-check": { + "bin": { + "svelte-check": "bin/svelte-check" + }, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "chokidar": "^4.0.1", + "fdir": "^6.2.0", + "picocolors": "^1.0.0", + "sade": "^1.7.4" + }, + "dev": true, + "engines": { + "node": ">= 18.0.0" + }, + "integrity": "sha512-DVWvxhBrDsd+0hHWKfjP99lsSXASeOhHJYyuKOFYJcP7ThfSCKgjVarE8XfuMWpS5JV3AlDf+iK1YGGo2TACdw==", + "peerDependencies": { + "svelte": "^4.0.0 || ^5.0.0-next.0", + "typescript": ">=5.0.0" + }, + "resolved": "https://registry.npmjs.org/svelte-check/-/svelte-check-4.3.4.tgz", + "version": "4.3.4" + }, + "node_modules/svg-pathdata": { + "engines": { + "node": ">=12.0.0" + }, + "integrity": "sha512-qsjeeq5YjBZ5eMdFuUa4ZosMLxgr5RZ+F+Y1OrDhuOCEInRMA3x74XdBtggJcj9kOeInz0WE+LgCPDkZFlBYJw==", + "optional": true, + "resolved": "https://registry.npmjs.org/svg-pathdata/-/svg-pathdata-6.0.3.tgz", + "version": "6.0.3" + }, + "node_modules/tailwindcss": { + "bin": { + "tailwind": "lib/cli.js", + "tailwindcss": "lib/cli.js" + }, + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "arg": "^5.0.2", + "chokidar": "^3.6.0", + "didyoumean": "^1.2.2", + "dlv": "^1.1.3", + "fast-glob": "^3.3.2", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "jiti": "^1.21.7", + "lilconfig": "^3.1.3", + "micromatch": "^4.0.8", + "normalize-path": "^3.0.0", + "object-hash": "^3.0.0", + "picocolors": "^1.1.1", + "postcss": "^8.4.47", + "postcss-import": "^15.1.0", + "postcss-js": "^4.0.1", + "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0", + "postcss-nested": "^6.2.0", + "postcss-selector-parser": "^6.1.2", + "resolve": "^1.22.8", + "sucrase": "^3.35.0" + }, + "dev": true, + "engines": { + "node": ">=14.0.0" + }, + "integrity": 
"sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ==", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.19.tgz", + "version": "3.4.19" + }, + "node_modules/tailwindcss/node_modules/chokidar": { + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "dev": true, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "optionalDependencies": { + "fsevents": "~2.3.2" + }, + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "version": "3.6.0" + }, + "node_modules/tailwindcss/node_modules/chokidar/node_modules/glob-parent": { + "dependencies": { + "is-glob": "^4.0.1" + }, + "dev": true, + "engines": { + "node": ">= 6" + }, + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "version": "5.1.2" + }, + "node_modules/tailwindcss/node_modules/picomatch": { + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + }, + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "version": "2.3.1" + }, + "node_modules/tailwindcss/node_modules/readdirp": { + "dependencies": { + "picomatch": "^2.2.1" + }, + "dev": true, + "engines": { + "node": ">=8.10.0" + }, + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "version": "3.6.0" + }, + "node_modules/text-segmentation": { + "dependencies": { + "utrie": "^1.0.2" + }, + "integrity": "sha512-iOiPUo/BGnZ6+54OsWxZidGCsdU8YbE4PSpdPinp7DeMtUJNJBoJ/ouUSTJjHkh1KntHaltHl/gDs2FC4i5+Nw==", + "resolved": "https://registry.npmjs.org/text-segmentation/-/text-segmentation-1.0.3.tgz", + "version": "1.0.3" + }, + "node_modules/thenify": { + "dependencies": { + "any-promise": "^1.0.0" + }, + "dev": true, + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "version": "3.3.1" + }, + "node_modules/thenify-all": { + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "dev": true, + "engines": { + "node": ">=0.8" + }, + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "version": "1.6.0" + }, + "node_modules/tinyglobby": { + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "dev": true, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + }, + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "version": "0.2.15" + }, + "node_modules/to-regex-range": { + "dependencies": { + "is-number": "^7.0.0" + }, + "dev": true, + 
"engines": { + "node": ">=8.0" + }, + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "version": "5.0.1" + }, + "node_modules/totalist": { + "dev": true, + "engines": { + "node": ">=6" + }, + "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", + "resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", + "version": "3.0.1" + }, + "node_modules/ts-interface-checker": { + "dev": true, + "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "version": "0.1.13" + }, + "node_modules/type-check": { + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "dev": true, + "engines": { + "node": ">= 0.8.0" + }, + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "version": "0.4.0" + }, + "node_modules/typescript": { + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "dev": true, + "engines": { + "node": ">=14.17" + }, + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "version": "5.9.3" + }, + "node_modules/undici-types": { + "dev": true, + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "version": "6.21.0" + }, + "node_modules/update-browserslist-db": { + "bin": { + "update-browserslist-db": "cli.js" + }, + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "peerDependencies": { + "browserslist": ">= 4.21.0" + }, + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "version": "1.2.3" + }, + "node_modules/uri-js": { + "dependencies": { + "punycode": "^2.1.0" + }, + "dev": true, + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "version": "4.4.1" + }, + "node_modules/util-deprecate": { + "dev": true, + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "version": "1.0.2" + }, + "node_modules/utrie": { + "dependencies": { + "base64-arraybuffer": "^1.0.2" + }, + "integrity": "sha512-1MLa5ouZiOmQzUbjbu9VmjLzn1QLXBhwpUa7kdLUQK+KQ5KA9I1vk5U4YHe/X2Ch7PYnJfWuWT+VbuxbGwljhw==", + "resolved": "https://registry.npmjs.org/utrie/-/utrie-1.0.2.tgz", + "version": "1.0.2" + }, + "node_modules/vite": { + "bin": { + "vite": "bin/vite.js" + }, + 
"dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "dev": true, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + }, + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", + "version": "5.4.21" + }, + "node_modules/vitefu": { + "dev": true, + "integrity": "sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ==", + "peerDependencies": { + "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0-beta.0" + }, + "peerDependenciesMeta": { + "vite": { + "optional": true + } + }, + "resolved": "https://registry.npmjs.org/vitefu/-/vitefu-1.1.1.tgz", + "version": "1.1.1" + }, + "node_modules/which": { + "bin": { + "node-which": "bin/node-which" + }, + "dependencies": { + "isexe": "^2.0.0" + }, + "dev": true, + "engines": { + "node": ">= 8" + }, + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "version": "2.0.2" + }, + "node_modules/word-wrap": { + "dev": true, + "engines": { + "node": ">=0.10.0" + }, + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "version": "1.2.5" + }, + "node_modules/yocto-queue": { + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + }, + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "version": "0.1.0" + }, + "node_modules/zimmerframe": { + "integrity": "sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ==", + "resolved": "https://registry.npmjs.org/zimmerframe/-/zimmerframe-1.1.4.tgz", + "version": "1.1.4" + } + }, + "requires": true, + "version": "0.1.0" +} diff --git a/studio/frontend/package.json b/studio/frontend/package.json new file mode 100644 index 00000000..5cccbcb4 --- /dev/null +++ b/studio/frontend/package.json @@ -0,0 +1,39 @@ +{ + "dependencies": { + "@monaco-editor/loader": "^1.4.0", + "@xyflow/svelte": "^0.1.22", + "chart.js": "^4.4.7", + "d3-dag": "^1.1.0", + "fast-equals": "^5.2.2", + "html2canvas": "^1.4.1", + "jspdf": "^3.0.4", + "lucide-svelte": "^0.468.0", + "monaco-editor": "^0.52.2" + }, + "devDependencies": { + "@sveltejs/adapter-static": "^3.0.8", + "@sveltejs/kit": "^2.16.0", + "@sveltejs/vite-plugin-svelte": "^4.0.0", + "@types/node": "^22.10.2", + "autoprefixer": "^10.4.20", + "eslint": "^9.17.0", + "postcss": "^8.4.49", + "svelte": 
"^5.16.0", + "svelte-check": "^4.1.1", + "tailwindcss": "^3.4.17", + "typescript": "^5.7.2", + "vite": "^5.4.0" + }, + "name": "sygra-workflow-ui", + "private": true, + "scripts": { + "build": "vite build", + "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", + "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch", + "dev": "vite dev --port 5173", + "lint": "eslint .", + "preview": "vite preview" + }, + "type": "module", + "version": "0.1.0" +} diff --git a/studio/frontend/postcss.config.js b/studio/frontend/postcss.config.js new file mode 100644 index 00000000..0f772168 --- /dev/null +++ b/studio/frontend/postcss.config.js @@ -0,0 +1,6 @@ +export default { + plugins: { + tailwindcss: {}, + autoprefixer: {} + } +}; diff --git a/studio/frontend/src/app.d.ts b/studio/frontend/src/app.d.ts new file mode 100644 index 00000000..db1aa5db --- /dev/null +++ b/studio/frontend/src/app.d.ts @@ -0,0 +1,13 @@ +/// + +declare global { + namespace App { + // interface Error {} + // interface Locals {} + // interface PageData {} + // interface PageState {} + // interface Platform {} + } +} + +export {}; diff --git a/studio/frontend/src/app.html b/studio/frontend/src/app.html new file mode 100644 index 00000000..4e72cb26 --- /dev/null +++ b/studio/frontend/src/app.html @@ -0,0 +1,13 @@ + + + + + + + Sygra Studio + %sveltekit.head% + + +
%sveltekit.body%
+ +
diff --git a/studio/frontend/src/lib/components/builder/GroupSubgraphModal.svelte b/studio/frontend/src/lib/components/builder/GroupSubgraphModal.svelte
new file mode 100644
index 00000000..51d1f525
--- /dev/null
+++ b/studio/frontend/src/lib/components/builder/GroupSubgraphModal.svelte
@@ -0,0 +1,284 @@
+ + + + +
diff --git a/studio/frontend/src/lib/components/builder/RecipePickerModal.svelte b/studio/frontend/src/lib/components/builder/RecipePickerModal.svelte
new file mode 100644
index 00000000..7c9e275a
--- /dev/null
+++ b/studio/frontend/src/lib/components/builder/RecipePickerModal.svelte
@@ -0,0 +1,183 @@
+ + + + +
dispatch('cancel')}> +
e.stopPropagation()} + > + +
+
+
+ +
+
+

+ Add Subgraph +

+

+ Create from recipe or start empty +

+
+
+ +
+ + +
+ +
+ + + {#if recipeStore.recipes.length > 0} +
+
+ + +
+
+ + +
+ {#if filteredRecipes().length === 0} +
+

No recipes match your search

+
+ {:else} +
+ {#each filteredRecipes() as recipe (recipe.id)} + {@const Icon = categoryIcons[recipe.category]} + + {/each} +
+ {/if} +
+ {:else} + +
+
+ +

+ No recipes in library yet +

+

+ Save subgraphs as recipes to reuse them +

+
+
+ {/if} +
+
diff --git a/studio/frontend/src/lib/components/builder/SaveRecipeModal.svelte b/studio/frontend/src/lib/components/builder/SaveRecipeModal.svelte
new file mode 100644
index 00000000..bc7842ed
--- /dev/null
+++ b/studio/frontend/src/lib/components/builder/SaveRecipeModal.svelte
@@ -0,0 +1,269 @@
+ + + + +
dispatch('close')}> +
e.stopPropagation()} + > + +
+
+
+ +
+
+

+ Save as Recipe +

+

+ Save this subgraph for reuse in other workflows +

+
+
+ +
+ + +
+ +
+
+ Nodes: + {nodes.length} +
+
+
+ Edges: + {edges.length} +
+ {#if nodeTypeSummary()} +
+
+ {nodeTypeSummary()} +
+ {/if} +
+ + +
+ + +
+ + +
+ + +
+ + +
+ +
+ {#each RECIPE_CATEGORIES as cat} + {@const Icon = categoryIcons[cat.value]} + + {/each} +
+
+ + +
+ + + {#if tags().length > 0} +
+ {#each tags() as tag} + + {tag} + + {/each} +
+ {/if} +
+ + +
+ + +
+ + + {#if error} +
+

{error}

+
+ {/if} +
+ + +
+ + +
+
+
diff --git a/studio/frontend/src/lib/components/builder/SaveWorkflowModal.svelte b/studio/frontend/src/lib/components/builder/SaveWorkflowModal.svelte
new file mode 100644
index 00000000..34513905
--- /dev/null
+++ b/studio/frontend/src/lib/components/builder/SaveWorkflowModal.svelte
@@ -0,0 +1,255 @@
+ + + + +{#if isOpen} + + + +{/if}
diff --git a/studio/frontend/src/lib/components/builder/ToolPickerModal.svelte b/studio/frontend/src/lib/components/builder/ToolPickerModal.svelte
new file mode 100644
index 00000000..0b953146
--- /dev/null
+++ b/studio/frontend/src/lib/components/builder/ToolPickerModal.svelte
@@ -0,0 +1,256 @@
+ + + + +
dispatch('cancel')}> +
e.stopPropagation()} + > + +
+
+
+ +
+
+

+ Add Tool +

+

+ Select from library or enter path +

+
+
+ +
+ + +
+ + +
+ + + {#if activeTab === 'library'} + + {#if toolStore.tools.length > 0} + +
+
+ + +
+
+ + +
+ {#if filteredTools().length === 0} +
+

No tools match your search

+
+ {:else} +
+ {#each filteredTools() as tool (tool.id)} + {@const Icon = categoryIcons[tool.category]} + + {/each} +
+ {/if} +
+ {:else} + +
+
+ +

+ No tools in library yet +

+

+ Create tools in the Library to reuse them +

+ +
+
+ {/if} + {:else} + +
+
+
+ + +
+ +
+

Enter the Python import path to a tool:

+
    +
  • package.module.tool_function
  • +
  • package.module.ToolClass
  • +
  • package.module (all tools)
  • +
+

+ Functions must be decorated with @tool +

+
+ + +
+
+ {/if} +
+
diff --git a/studio/frontend/src/lib/components/builder/UnsavedChangesModal.svelte b/studio/frontend/src/lib/components/builder/UnsavedChangesModal.svelte
new file mode 100644
index 00000000..b50fdf2a
--- /dev/null
+++ b/studio/frontend/src/lib/components/builder/UnsavedChangesModal.svelte
@@ -0,0 +1,76 @@
+ + + + +
dispatch('cancel')} + role="dialog" + aria-modal="true" + aria-labelledby="unsaved-changes-title" +> +
e.stopPropagation()} + > + +
+
+ +
+
+

+ Unsaved Changes +

+

+ Your workflow has unsaved changes +

+
+
+ + +
+

+ Would you like to save your work as a draft before leaving? You can continue editing it later. +

+
+ + +
+ + + +
+
+
diff --git a/studio/frontend/src/lib/components/builder/WorkflowBuilder.svelte b/studio/frontend/src/lib/components/builder/WorkflowBuilder.svelte
new file mode 100644
index 00000000..4e9c46be
--- /dev/null
+++ b/studio/frontend/src/lib/components/builder/WorkflowBuilder.svelte
@@ -0,0 +1,1561 @@
+ + + + +
+ +
+
+
+
+ +
+ +
+ {#if hasChanges} + + Unsaved changes + + {/if} + {#if lastAutoSave} + + Draft saved {lastAutoSave.toLocaleTimeString()} + + {/if} +
+ +
+ + {#if workflow && (workflow.nodes.length > 2 || workflow.edges.length > 0)} + +
+ {/if} + + {#if selectedNodeIds.length >= 2} + +
+ {/if} + +
+ + +
+
+ +
+ +
+
+ +
+ + + {#if paletteSearch} + + {/if} +
+ + +
+ {#each NODE_CATEGORIES as category} + {@const categoryNodes = groupedNodeTypes()[category.id]} + {#if categoryNodes && categoryNodes.length > 0} +
+ + + + + {#if expandedCategories.has(category.id)} +
+ {#each categoryNodes as nodeType} + {@const Icon = nodeIcons[nodeType.type]} +
handleDragStart(e, nodeType.type)} + ondragend={handleDragEnd} + role="option" + aria-selected={draggedNodeType === nodeType.type} + tabindex="0" + class="flex items-center gap-2.5 p-2 rounded-lg border border-dashed border-gray-200 dark:border-gray-600 hover:border-[#52B8FF] dark:hover:border-[#52B8FF] cursor-grab active:cursor-grabbing transition-colors group {draggedNodeType === nodeType.type ? 'border-[#52B8FF] bg-sky-50 dark:bg-sky-900/20' : ''}" + > +
+ +
+
+
+ {nodeType.label} +
+
+ {nodeType.description} +
+
+ +
+ {/each} +
+ {/if} +
+ {/if} + {/each} +
+ + + {#if paletteSearch && filteredNodeTypes().length === 0} +
+ +

No nodes match "{paletteSearch}"

+
+ {/if} + + +
+

+ Tips +

+
+

โ€ข Connect by dragging handle to handle

+

โ€ข โŒ˜/Ctrl+Click to multi-select

+

โ€ข Delete/Backspace to delete selected

+

โ€ข โŒ˜G to group as subgraph

+

โ€ข โŒ˜Z to undo

+
+
+
+
+ + +
+ +
+ {#if workflow} + + + + + + + + +
+ + + + + {#if showMinimap} +
+ { + const nodeType = NODE_TYPES.find(t => t.type === node.type); + return nodeType?.color ?? '#6b7280'; + }} + bgColor={isDarkMode ? '#1e293b' : '#ffffff'} + maskColor={isDarkMode ? 'rgba(30, 41, 59, 0.6)' : 'rgba(240, 240, 240, 0.6)'} + maskStrokeColor={isDarkMode ? '#475569' : '#cbd5e1'} + maskStrokeWidth={1} + pannable={true} + zoomable={true} + class="!relative !m-0" + /> +
+ {/if} +
+
+
+ {/if} + + + {#if draggedNodeType} +
+
+ Drop to add {NODE_TYPES.find(t => t.type === draggedNodeType)?.label} node +
+
+ {/if} + + + {#if selectedNodeIds.length >= 2 || selectedEdgeIds.length > 0} + {@const totalSelected = selectedNodeIds.length + selectedEdgeIds.length} + {@const canDelete = selectedNodeIds.filter(id => id !== 'START' && id !== 'END').length > 0 || selectedEdgeIds.length > 0} +
+
+ + {#if selectedNodeIds.length > 0 && selectedEdgeIds.length > 0} + {selectedNodeIds.length} nodes, {selectedEdgeIds.length} edges + {:else if selectedNodeIds.length > 0} + {selectedNodeIds.length} nodes selected + {:else} + {selectedEdgeIds.length} edge{selectedEdgeIds.length > 1 ? 's' : ''} selected + {/if} +
+
+ {#if selectedNodeIds.length >= 2 && canGroupNodes()} + +
+ {/if} + {#if canDelete} + + {/if} + Esc to clear +
+ {/if} +
+ + + {#if bottom} + {@render bottom()} + {/if} +
+
+
+ + +{#if showGroupModal && workflow} + showGroupModal = false} + /> +{/if} + + +{#if showRecipeModal} + { + showRecipeModal = false; + recipeNodes = []; + recipeEdges = []; + recipeSuggestedName = ''; + }} + /> +{/if} + + +{#if showRecipePicker} + showRecipePicker = false} + /> +{/if} + + + showSaveModal = false} +/> + + +{#if showResetConfirm} + +{/if}
diff --git a/studio/frontend/src/lib/components/code/ExecutionOutputPanel.svelte b/studio/frontend/src/lib/components/code/ExecutionOutputPanel.svelte
new file mode 100644
index 00000000..a4e11ad2
--- /dev/null
+++ b/studio/frontend/src/lib/components/code/ExecutionOutputPanel.svelte
@@ -0,0 +1,573 @@
+ + +
+ +
+
+ + Output + {#if executionId} + ({executionId}) + {/if} +
+ +
+ + {#if isPaused} +
+ + + Paused + + + + + +
+ {/if} + + +
+ {#if status === 'running'} + + {:else if status === 'completed'} + + {:else if status === 'failed'} + + {:else if status === 'cancelled'} + + {:else} + + {/if} + {status} +
+ + {#if isRunning} + + {/if} + + + + +
+
+ + +
+ +
+ {#if output.length === 0} +
+ {#if status === 'idle'} + Click "Run" to execute the code + {:else if status === 'running'} + Waiting for output... + {:else} + No output + {/if} +
+ {:else} + {#each output as line} + {#if line.type === 'stdout'} +
{line.content}
+ {:else if line.type === 'stderr'} +
{line.content}
+ {:else if line.type === 'debug'} +
{line.content}
+ {:else if line.type === 'error'} +
+ [ERROR] {line.content} +
+ {:else if line.type === 'status'} +
+ [STATUS] {line.status} + {#if line.error} + - {line.error} + {/if} +
+ {/if} + {/each} + {/if} + + {#if error && status !== 'running'} +
+ {error} +
+ {/if} + + {#if isPaused && debugVariables.length > 0} +
+ + {debugVariables.length} variables available. Switch to the Code tab to view variables and the highlighted line. +
+ {/if} +
+ + +
+
diff --git a/studio/frontend/src/lib/components/code/WorkflowCodePanel.svelte b/studio/frontend/src/lib/components/code/WorkflowCodePanel.svelte
new file mode 100644
index 00000000..6499bc5c
--- /dev/null
+++ b/studio/frontend/src/lib/components/code/WorkflowCodePanel.svelte
@@ -0,0 +1,1516 @@
+ + +
+ + {#if !isCollapsed} + + {/if} + + +
+
+ +
+
+ + Code Panel +
+ +
+ + + {#if !isCollapsed} +
+ + + +
+ + + {#if activeTab === 'code' && codeFileList.length > 1} + + {/if} + + + {#if (activeTab === 'yaml' && yamlDirty) || (activeTab === 'code' && codeDirty)} + + {/if} + + + {#if activeTab === 'code' && codeFileList.length > 0} +
+ {#if isDebugPaused} + +
+ + + + +
+ +
+ {:else if isRunning} + + {:else} + + + {/if} + +
+ {/if} + {/if} +
+ + + {#if !isCollapsed} +
+ + {#if activeTab === 'yaml' && yamlPath} + + {yamlPath.split('/').slice(-2).join('/')} + + {:else if activeTab === 'code' && codeFiles[selectedCodeFile]?.path} + + {codeFiles[selectedCodeFile]?.path.split('/').slice(-2).join('/')} + + {/if} + + + + + + + + +
+ + + +
+ {/if} +
+ + + {#if !isCollapsed && showArgsInput && activeTab === 'code'} +
+ + + +
+ {/if} + + + {#if !isCollapsed} +
+ + {#if showFileSidebar && referencedFiles.length > 0 && (activeTab === 'yaml' || activeTab === 'code')} +
+ +
+ References + +
+ + +
+ {#each referencedFiles as ref} + + {/each} +
+
+ {:else if !showFileSidebar && referencedFiles.length > 0 && (activeTab === 'yaml' || activeTab === 'code')} + + + {/if} + + +
+ {#if activeTab === 'yaml'} + {#if yamlLoading} +
+
+ + Loading YAML... +
+
+ {:else if yamlError} +
+
+

Error loading YAML

+

{yamlError}

+
+
+ {:else} + saveYaml()} + /> + {/if} + {:else if activeTab === 'code'} + {#if codeLoading} +
+
+ + Loading code... +
+
+ {:else if codeError} +
+
+

Error loading code

+

{codeError}

+
+
+ {:else if codeFileList.length === 0} +
+
+ +

No Python files found

+

This workflow has no task_executor.py or other Python files

+
+
+ {:else} + +
+ + {#if isDebugPaused} +
+ +
+ Variables + {#if currentDebugLine} + + Line {currentDebugLine} + + {/if} +
+
+ {#if debugVariables.length === 0} +
+ {#if variablesLoaded} + + No variables in current scope + {:else} + + + Loading variables... + {/if} +
+ {:else} + {#each Object.entries(groupedVariables) as [scope, vars]} +
+ +
+ + {scope} +
+ + {#each vars as variable} + {@const varKey = `${scope}:${variable.name}`} + {@const hasChildren = (variable.variablesReference ?? 0) > 0} + {@const isExpanded = expandedVars.has(varKey)} + {@const isLoading = loadingVars.has(variable.variablesReference ?? 0)} +
+ +
hasChildren && toggleVarExpand(varKey, variable.variablesReference ?? 0)} + > + + + {#if hasChildren} + {#if isLoading} + + {:else if isExpanded} + + {:else} + + {/if} + {/if} + + + {variable.name} + = + + { e.stopPropagation(); showFullValue(variable.name, variable.value, variable.type); }} + > + {variable.value.length > 50 ? variable.value.slice(0, 50) + '...' : variable.value} + + {#if variable.type} + {variable.type} + {/if} +
+ + {#if isExpanded && hasChildren && childVariables[variable.variablesReference ?? 0]} + {#each childVariables[variable.variablesReference ?? 0] as child} + {@const childKey = `${variable.variablesReference}:${child.name}`} + {@const childHasChildren = (child.variablesReference ?? 0) > 0} + {@const childIsExpanded = expandedVars.has(childKey)} + {@const childIsLoading = loadingVars.has(child.variablesReference ?? 0)} +
childHasChildren && toggleVarExpand(childKey, child.variablesReference ?? 0)} + > + + {#if childHasChildren} + {#if childIsLoading} + + {:else if childIsExpanded} + + {:else} + + {/if} + {/if} + + {child.name} + = + { e.stopPropagation(); showFullValue(child.name, child.value, child.type); }} + > + {child.value.length > 40 ? child.value.slice(0, 40) + '...' : child.value} + + {#if child.type} + {child.type} + {/if} +
+ + {#if childIsExpanded && childHasChildren && childVariables[child.variablesReference ?? 0]} + {#each childVariables[child.variablesReference ?? 0] as grandchild} + {@const grandchildKey = `${child.variablesReference}:${grandchild.name}`} + {@const grandchildHasChildren = (grandchild.variablesReference ?? 0) > 0} + {@const grandchildIsExpanded = expandedVars.has(grandchildKey)} + {@const grandchildIsLoading = loadingVars.has(grandchild.variablesReference ?? 0)} +
grandchildHasChildren && toggleVarExpand(grandchildKey, grandchild.variablesReference ?? 0)} + > + + {#if grandchildHasChildren} + {#if grandchildIsLoading} + + {:else if grandchildIsExpanded} + + {:else} + + {/if} + {/if} + + {grandchild.name} + = + { e.stopPropagation(); showFullValue(grandchild.name, grandchild.value, grandchild.type); }} + > + {grandchild.value.length > 30 ? grandchild.value.slice(0, 30) + '...' : grandchild.value} + + {#if grandchild.type} + {grandchild.type} + {/if} +
+ + {#if grandchildIsExpanded && grandchildHasChildren && childVariables[grandchild.variablesReference ?? 0]} + {#each childVariables[grandchild.variablesReference ?? 0] as greatgrandchild} +
+ + {greatgrandchild.name} + = + { e.stopPropagation(); showFullValue(greatgrandchild.name, greatgrandchild.value, greatgrandchild.type); }} + > + {greatgrandchild.value.length > 25 ? greatgrandchild.value.slice(0, 25) + '...' : greatgrandchild.value} + +
+ {/each} + {/if} + {/each} + {/if} + {/each} + {/if} +
+ {/each} +
+ {/each} + {/if} +
+
+ {/if} + + +
+ { breakpoints = e.detail; }} + on:change={(e) => { + if (codeFiles[selectedCodeFile]) { + codeFiles[selectedCodeFile].content = e.detail; + } + }} + on:save={() => saveCode()} + /> +
+
+ {/if} + {/if} + + +
+ +
+
+
+ {/if} +
+ + +{#if showValueViewer} + +{/if} + +
diff --git a/studio/frontend/src/lib/components/common/AudioPlayer.svelte b/studio/frontend/src/lib/components/common/AudioPlayer.svelte
new file mode 100644
index 00000000..71cd5cfb
--- /dev/null
+++ b/studio/frontend/src/lib/components/common/AudioPlayer.svelte
@@ -0,0 +1,309 @@
+ + + +