diff --git a/.claude.md b/.claude.md new file mode 100644 index 0000000..08991bb --- /dev/null +++ b/.claude.md @@ -0,0 +1,152 @@ +# OPTIMAP - Claude Code Configuration + +This file contains project-specific guidance for Claude Code when working with the OPTIMAP codebase. + +## Project Overview + +OPTIMAP is a Django-based geospatial metadata portal for scientific publications. It provides interactive mapping, publication metadata management, and geocoding services. + +## Local Library Policy + +### Policy Statement + +**All external JavaScript and CSS libraries MUST be served locally from the `publications/static/` directory. CDN dependencies are NOT allowed in production.** + +### Rationale + +1. **Privacy & GDPR Compliance**: Serving libraries from CDNs may leak user IP addresses and browsing behavior to third parties +2. **Reliability**: Eliminates dependency on external CDN availability and potential network issues +3. **Performance**: Local libraries are served from the same origin, reducing DNS lookups and connection overhead +4. **Security**: Prevents supply chain attacks from compromised CDN resources +5. **Offline Development**: Enables development without internet connectivity + +### Implementation + +All external libraries are managed through the `publications/static/download_libraries.sh` script: + +```bash +cd publications/static/ +bash download_libraries.sh +``` + +This script downloads all required libraries to the appropriate directories: +- JavaScript files → `publications/static/js/` +- CSS files → `publications/static/css/` +- Images → `publications/static/css/images/` +- Fonts → `publications/static/css/fonts/` + +### Currently Managed Libraries + +#### Core Libraries +- jQuery 3.4.1 +- Bootstrap 4.4.1 (JS and CSS) +- Popper.js 2.x (for Bootstrap tooltips) +- Font Awesome 4.7.0 (CSS and fonts) + +#### Mapping Libraries +- Leaflet 1.9.4 (core mapping library) +- Leaflet Draw 1.0.4 (geometry drawing) +- Leaflet Fullscreen 3.0.2 (fullscreen control) +- Leaflet Control Geocoder 2.4.0 (location search/gazetteer) + +#### UI Components +- Bootstrap Datepicker 1.9.0 + +### Adding New External Libraries + +When adding a new external library to OPTIMAP: + +1. **Update `download_libraries.sh`**: + - Add wget commands to download the library files + - Include version numbers in echo statements + - Download source maps if available + - Download any required assets (images, fonts, etc.) + +2. **Download the library**: + ```bash + cd publications/static/ + bash download_libraries.sh + ``` + +3. **Update templates to reference local files**: + ```django + + + + + + + + ``` + +4. **Test thoroughly**: + - Verify the library loads correctly + - Check browser console for 404 errors + - Ensure all assets (images, fonts) load properly + - Test in both development and production environments + +5. **Commit all files**: + - Commit both the download script AND the downloaded library files + - Library files should be checked into version control + +### Version Control + +- All library files are committed to the repository +- This ensures reproducible builds and consistent deployments +- Update libraries deliberately and test thoroughly before committing new versions + +### Exception Policy + +CDN usage is only acceptable in these limited cases: +- Temporary development/testing (must be replaced before production) +- Services that cannot be self-hosted (e.g., Google reCAPTCHA) +- External APIs that require CDN delivery (must be documented) + +Any exceptions must be: +1. 
Documented in this file with justification +2. Reviewed by the project maintainer +3. Include a plan for eventual local hosting if possible + +## Development Guidelines + +### File Organization + +- Custom JavaScript: `publications/static/js/map-*.js` +- Custom CSS: `publications/static/css/*.css` +- Templates: `publications/templates/` +- Views: `publications/views*.py` + +### Map Architecture + +The interactive map consists of several modular components: + +- `map-popup.js` - Popup content generation +- `map-interaction.js` - Click handling and overlapping features +- `map-keyboard-navigation.js` - Keyboard accessibility +- `map-search.js` - Publication search/filtering +- `map-gazetteer.js` - Location search (geocoding) +- `map-zoom-to-all.js` - Zoom to all features control +- `main.js` - Map initialization and orchestration + +### Accessibility + +OPTIMAP follows WCAG 2.1 AA standards: + +- Focus indicators are only visible in high contrast mode (`body.high-contrast`) +- All interactive elements must have ARIA labels +- Screen reader announcements for dynamic content +- Keyboard navigation support for all map features + +### Testing + +Before committing changes: + +1. Test with high contrast mode enabled +2. Test keyboard navigation +3. Test with screen reader (if possible) +4. Verify no console errors +5. Check all CDN links have been replaced with local files + +## Contact + +For questions about this configuration, contact the OPTIMAP development team at login@optimap.science. diff --git a/.claude/settings.local.json b/.claude/settings.local.json index ef03b39..db6c163 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -16,6 +16,14 @@ "Bash(python manage.py:*)", "Bash(python -m py_compile:*)", "Bash(python:*)", + "Read(//home/daniel/git/geoextent/geoextent/lib/**)", + "Read(//home/daniel/git/geoextent/**)", + "Bash(docker compose run:*)", + "Bash(docker compose exec:*)", + "Bash(docker compose:*)", + "Bash(OPTIMAP_LOGGING_LEVEL=WARNING python manage.py test tests.test_geoextent)", + "Bash(OPTIMAP_LOGGING_LEVEL=WARNING python manage.py test tests.test_geoextent.GeoextentRemoteGetTest)", + "Bash(geoextent:*)" "Bash(node --check:*)", "Bash(find:*)", "Bash(OPTIMAP_LOGGING_LEVEL=WARNING python manage.py test:*)", @@ -25,7 +33,16 @@ "Bash(bash:*)", "Bash(./create_wikibase_property.sh:*)", "Bash(python3:*)", - "Bash(pkill:*)" + "Bash(pkill:*)", + "Bash(wget:*)" + "Bash(geoextent:*)", + "Bash(curl:*)", + "Bash(python3:*)", + "Read(//home/daniel/.cache/**)", + "Bash(gh pr list:*)", + "Bash(gh pr view:*)", + "Bash(OPTIMAP_LOGGING_LEVEL=WARNING python manage.py test tests)", + "Bash(export OPTIMAP_LOGGING_LEVEL=WARNING)" ], "deny": [], "ask": [] diff --git a/.gitignore b/.gitignore index fa01a3b..07d680e 100644 --- a/.gitignore +++ b/.gitignore @@ -149,3 +149,5 @@ certbot/www/ publications/management/commands/marine_regions_iho.geojson publications/management/commands/world_continents.geojson + +.claude/temp.md diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..f06ea86 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,414 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Project Overview + +OPTIMAP is a geospatial discovery portal for research articles based on open metadata. Built with Django/GeoDjango and PostgreSQL/PostGIS, it enables users to discover scientific publications through map-based search, temporal filtering, and spatial metadata. 
+ +Part of the KOMET project (), continuing from OPTIMETA (). + +## Core Architecture + +### Django Apps Structure + +- **optimap/** - Main Django project settings and URL routing + - `settings.py` - All configuration via environment variables prefixed with `OPTIMAP_` + - `.env` file for local config (see `.env.example` for all available parameters) + +- **publications/** - Main application containing all models, views, and business logic + - **Models** ([models.py](publications/models.py)): + - `Publication` - Core model with spatial (`GeometryCollectionField`) and temporal metadata + - `Source` - OAI-PMH harvesting sources + - `HarvestingEvent` - Tracks harvesting jobs + - `Subscription` - User subscriptions with spatial/temporal filters + - `CustomUser` - Extended Django user model + - `BlockedEmail`/`BlockedDomain` - Anti-spam mechanisms + - **Views** ([views.py](publications/views.py)) - Handles passwordless login, subscriptions, data downloads + - **Tasks** ([tasks.py](publications/tasks.py)) - Django-Q async tasks for harvesting and data export + - **API** ([api.py](publications/api.py), [viewsets.py](publications/viewsets.py), [serializers.py](publications/serializers.py)) - DRF REST API at `/api/v1/` + - **Feeds** ([feeds.py](publications/feeds.py), [feeds_geometry.py](publications/feeds_geometry.py)) - GeoRSS/GeoAtom feed generation + +### Key Technologies + +- **GeoDjango** with **PostGIS** for spatial data (SRID 4326) +- **Django REST Framework** with `rest_framework_gis` for geospatial API +- **Django-Q2** for background task scheduling (harvesting, email notifications, data dumps) +- **drf-spectacular** for OpenAPI schema + +### Data Flow + +1. **Harvesting**: OAI-PMH sources → `HarvestingEvent` → parse XML → create `Publication` records with spatial/temporal metadata +2. **API**: Publications exposed via REST API at `/api/v1/publications/` with spatial filtering +3. **Feeds**: Dynamic GeoRSS/GeoAtom feeds filtered by region or global +4. 
**Data Export**: Scheduled tasks generate cached GeoJSON/GeoPackage dumps in `/tmp/optimap_cache/` + +## Development Commands + +### Docker Development + +```bash +# Start all services (app, db, webserver) +docker compose up + +# Load test data +docker compose run --entrypoint python app manage.py loaddata fixtures/test_data.json + +# Create superuser +docker compose run --entrypoint python app manage.py createsuperuser + +# Run migrations manually (normally auto-applied via etc/manage-and-run.sh) +docker compose run --entrypoint python app manage.py migrate + +# Collect static files +docker compose run --entrypoint python app manage.py collectstatic --noinput +``` + +Access at (note: use `localhost` not `127.0.0.1` to avoid CSRF issues) + +### Local Development + +```bash +# Setup (once) +python -m venv .venv +source .venv/bin/activate +pip install gdal=="$(gdal-config --version).*" +pip install -r requirements.txt + +# Start local PostGIS container +docker run --name optimapDB -p 5432:5432 \ + -e POSTGRES_USER=optimap -e POSTGRES_PASSWORD=optimap \ + -e POSTGRES_DB=optimap -d postgis/postgis:14-3.3 + +# Apply migrations +python manage.py migrate +python manage.py createcachetable + +# Load global regions (required for predefined feeds) +python manage.py load_global_regions + +# Start Django-Q cluster (separate terminal, required for harvesting/tasks) +python manage.py qcluster + +# Run server (debug mode) +OPTIMAP_DEBUG=True OPTIMAP_CACHE=dummy python manage.py runserver +``` + +Access at http://127.0.0.1:8000/ + +### Testing + +```bash +# Install test dependencies +pip install -r requirements-dev.txt + +# Run unit tests +python manage.py test tests + +# Run UI tests (requires docker compose up or runserver) +python -Wa manage.py test tests-ui + +# Test with clean output +OPTIMAP_LOGGING_LEVEL=WARNING python manage.py test tests + +# Coverage +coverage run --source='publications' --omit='*/migrations/**' manage.py test tests +coverage report --show-missing --fail-under=70 +coverage html # generates htmlcov/ +``` + +### Django Management Commands + +#### Standard Django Commands + +```bash +# Database operations +python manage.py makemigrations # Create new migrations (should detect no changes normally) +python manage.py migrate # Apply database migrations +python manage.py showmigrations # List all migrations and their status +python manage.py sqlmigrate publications 0001 # Show SQL for a specific migration + +# User management +python manage.py createsuperuser # Create admin user interactively +python manage.py createsuperuser --username=optimap --email=admin@optimap.science +python manage.py changepassword # Change user password + +# Static files +python manage.py collectstatic --noinput # Collect static files to STATIC_ROOT +python manage.py findstatic # Find location of static file + +# Cache +python manage.py createcachetable # Create database cache table (required on setup) + +# Data management +python manage.py dumpdata # Export data as JSON +python manage.py loaddata # Import data from JSON fixture +python manage.py flush # Clear all data from database (careful!) 
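# Example (sketch, not part of the commands above): round-trip one app's data through a fixture.
# dumpdata/loaddata accept an app label; the file path is a placeholder.
python manage.py dumpdata publications --indent 2 > fixtures/publications_backup.json
python manage.py loaddata fixtures/publications_backup.json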
+ +# Shell access +python manage.py shell # Django shell with models loaded +python manage.py shell -c "from publications.tasks import regenerate_geojson_cache; regenerate_geojson_cache()" +python manage.py dbshell # Direct PostgreSQL shell + +# Development server +python manage.py runserver # Start dev server on 127.0.0.1:8000 +python manage.py runserver 0.0.0.0:8000 # Start on all interfaces (Docker) +OPTIMAP_DEBUG=True python manage.py runserver # With debug mode + +# Testing +python manage.py test # Run all tests +python manage.py test tests # Run unit tests only +python manage.py test tests-ui # Run UI tests only +python manage.py test tests.test_geo_data # Run specific test module +python manage.py test tests.test_geoextent # Run geoextent API integration tests +python -Wa manage.py test # Show deprecation warnings +``` + +#### Custom OPTIMAP Commands + +Located in [publications/management/commands/](publications/management/commands/) + +```bash +# Global regions setup +python manage.py load_global_regions +# Loads predefined continent and ocean geometries into GlobalRegion model +# Required for global feeds functionality - run once after initial setup + +# Data export scheduling +python manage.py schedule_geojson +# Adds GeoJSON/GeoPackage regeneration task to Django-Q schedule +# Creates recurring task to refresh data dumps every 6 hours + +# Source synchronization +python manage.py sync_source_metadata +# Syncs metadata from configured OAI-PMH sources +# Updates Source model with latest information from endpoints + +# OpenAlex journal updates +python manage.py update_openalex_journals +# Fetches and updates journal metadata from OpenAlex API +# Enriches Source records with additional journal information +``` + +#### Django-Q Task Management + +```bash +# Start task worker (required for async operations) +python manage.py qcluster +# Runs background worker to process harvesting jobs, email sending, data exports +# Keep running in separate terminal during development + +# Monitor tasks +python manage.py qmonitor # Live dashboard of task queue +python manage.py qinfo # Show cluster statistics and status + +# Manual task management via Django shell +python manage.py shell +>>> from django_q.models import Schedule +>>> Schedule.objects.all() # List scheduled tasks +>>> from django_q.tasks import async_task +>>> async_task('publications.tasks.regenerate_geojson_cache') # Queue a task +``` + +### Manual Data Operations + +```bash +# Create test data dump +python manage.py dumpdata --exclude=auth --exclude=contenttypes | jq > fixtures/test_data.json + +# Load fixtures +python manage.py loaddata fixtures/test_data_optimap.json +python manage.py loaddata fixtures/test_data_partners.json +python manage.py loaddata fixtures/test_data_global_feeds.json + +# Manually regenerate GeoJSON/GeoPackage cache (without Django-Q) +python manage.py shell -c "from publications.tasks import regenerate_geojson_cache; regenerate_geojson_cache()" +``` + +## Important Patterns + +### Configuration + +All deployment-specific config uses `OPTIMAP_*` environment variables loaded from `.env` or environment. See [optimap/.env.example](optimap/.env.example). + +### Spatial Data + +- All geometries use `GeometryCollectionField` with SRID 4326 +- WKT format for manual geometry input (use https://wktmap.com/ for creation) +- Spatial metadata extracted from HTML `` tags during harvesting + +### Harvesting Flow + +1. Create/configure `Source` in admin with OAI-PMH URL +2. Django-Q task creates `HarvestingEvent` +3. 
Fetch XML → parse → extract DOI, spatial, temporal metadata → save `Publication` records +4. Track status in `HarvestingEvent.status` (pending/in_progress/completed/failed) + +### Authentication + +- Passwordless "magic link" system based on own implementation +- Users receive login token via email (10-minute expiration) +- Email confirmation for account changes +- CSRF tokens required - use `localhost` domain during development (not 127.0.0.1) + +### Testing Notes + +- UI tests use Helium/Selenium (set `headless=False` for debugging) +- Test data fixtures in `fixtures/` directory +- Use `-Wa` flag to show deprecation warnings + +## Common Gotchas + +- **CSRF errors during login**: Switch to `localhost:8000` instead of `127.0.0.1:8000` +- **Migrations on startup**: Applied automatically via `etc/manage-and-run.sh` in Docker +- **Debug mode**: Default is `OPTIMAP_DEBUG=False` - set explicitly for development +- **Email debugging**: Set `EMAIL_BACKEND=django.core.mail.backends.console.EmailBackend` in `.env` +- **Django-Q cluster**: Must be running separately for harvesting/scheduled tasks to execute +- **Data dumps retention**: Controlled by `OPTIMAP_DATA_DUMP_RETENTION` (default: 3) + +## File Structure Highlights + +``` +optimap/ +├── optimap/ # Django project settings +├── publications/ # Main app (models, views, tasks, API) +│ ├── management/commands/ # Custom Django commands +│ ├── static/ # Frontend assets, logos +│ └── templates/ # Django templates +├── tests/ # Unit tests +├── tests-ui/ # Selenium UI tests +├── fixtures/ # Test data JSON +├── etc/ # Deployment scripts (manage-and-run.sh) +├── static/ # Collected static files (generated) +└── docker-compose.yml / docker-compose.deploy.yml +``` + +## API & Endpoints + +- `/api/v1/` - REST API root (see `/api/schema/ui/` for OpenAPI docs) +- `/admin/` - Django admin interface +- `/download/geojson/` - Download full publication dataset as GeoJSON +- `/download/geopackage/` - Download as GeoPackage +- `/feed/georss/` - Global GeoRSS feed +- `/feeds/georss//` - Region-filtered GeoRSS feed +- `/geoextent/` - Geoextent extraction web UI (interactive tool for file upload and remote resource extraction) + +### Geoextent API Endpoints + +#### Public API - No authentication required + +All geoextent endpoints return valid GeoJSON FeatureCollections by default, matching the geoextent CLI output format. 
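
As a quick orientation, a minimal client sketch for the file-upload endpoint is shown here (the base URL `localhost:8000`, the file name, and the `requests` dependency are assumptions for illustration; the endpoint path and parameters are the documented ones). The individual endpoints and their parameters are listed below.

```python
# Sketch: upload a file to the extract endpoint and read the returned FeatureCollection.
# Assumes a locally running OPTIMAP dev server and the `requests` package; file name is a placeholder.
import requests

with open("test.geojson", "rb") as fh:
    resp = requests.post(
        "http://localhost:8000/api/v1/geoextent/extract/",
        files={"file": fh},
        data={"bbox": "true", "tbox": "true"},  # documented parameters
        timeout=120,
    )
resp.raise_for_status()
collection = resp.json()  # GeoJSON FeatureCollection with `geoextent_extraction` metadata
print(collection["geoextent_extraction"]["extent_type"])
print(collection["features"][0]["geometry"]["type"])
```
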
+ +- `/api/v1/geoextent/extract/` - Extract spatial/temporal extent from uploaded file + - Method: POST with multipart/form-data + - Parameters: file, bbox, tbox, convex_hull, response_format, placename, gazetteer + - Returns: GeoJSON FeatureCollection with `geoextent_extraction` metadata + +- `/api/v1/geoextent/extract-remote/` - Extract extent from remote repositories + - Methods: GET or POST (same URL) + - POST: JSON body with `identifiers` array + - GET: URL parameters with comma-separated `identifiers` + - Supports: Zenodo, PANGAEA, OSF, Figshare, Dryad, GFZ Data Services, Dataverse + - Parameters: identifiers, bbox, tbox, convex_hull, response_format, placename, gazetteer, file_limit, size_limit_mb + - Uses geoextent's native multi-identifier support with automatic extent merging + - Parallel downloads controlled by `GEOEXTENT_DOWNLOAD_WORKERS` setting + - Example GET: `/api/v1/geoextent/extract-remote/?identifiers=10.5281/zenodo.4593540&bbox=true&tbox=true` + - Example POST: `{"identifiers": ["10.5281/zenodo.4593540"], "bbox": true, "tbox": true}` + +- `/api/v1/geoextent/extract-batch/` - Batch processing of multiple files + - Method: POST with multipart/form-data (multiple files) + - Parameters: files[], bbox, tbox, convex_hull, response_format, placename, gazetteer, size_limit_mb + - Uses geoextent's `fromDirectory` for native extent combination + - Returns: GeoJSON FeatureCollection with combined extent and individual features + +**Response Formats** (`response_format` parameter): + +- `geojson` (default) - Valid GeoJSON FeatureCollection matching CLI output + - Structure: `{"type": "FeatureCollection", "features": [...], "geoextent_extraction": {...}}` + - Temporal extent in feature properties as `tbox` (not `temporal_extent`) +- `wkt` - WKT (Well-Known Text) string with metadata + - Structure: `{"wkt": "POLYGON(...)", "crs": "EPSG:4326", "tbox": [...], "geoextent_extraction": {...}}` +- `wkb` - WKB (Well-Known Binary) hex string with metadata + - Structure: `{"wkb": "0103...", "crs": "EPSG:4326", "tbox": [...], "geoextent_extraction": {...}}` + +See [docs/geoextent_response_formats.md](docs/geoextent_response_formats.md) for detailed examples. + +**Metadata Structure** (`geoextent_extraction`): + +Property names match geoextent CLI output to avoid confusion: + +- `version` - Geoextent library version +- `inputs` - List of input identifiers/filenames +- `statistics.files_processed` - Number of files processed +- `statistics.files_with_extent` - Number of files with valid extent +- `statistics.total_size` - Total size (e.g., "2.71 MiB") +- `format` - Source format (e.g., "remote", "geojson") +- `crs` - Coordinate reference system +- `extent_type` - "bounding_box" or "convex_hull" + +**HTTP Status Codes:** + +- `200 OK` - Successful extraction +- `400 Bad Request` - Invalid parameters +- `413 Request Entity Too Large` - File too large +- `500 Internal Server Error` - Processing error + +Error responses: `{"error": "message"}` (no `success: false` property) + +**Supported Input Formats:** +GeoJSON, GeoTIFF, Shapefile, GeoPackage, KML, GML, GPX, FlatGeobuf, CSV (with lat/lon) + +**Gazetteers:** Nominatim (default), GeoNames (requires username), Photon + +**Known Issues:** + +- **Coordinate order bug in geoextent.fromRemote()**: The geoextent library's `fromRemote()` function returns bounding boxes in `[minLat, minLon, maxLat, maxLon]` format instead of the GeoJSON standard `[minLon, minLat, maxLon, maxLat]`. This affects remote extractions only (not file uploads). 
This needs to be fixed upstream in the geoextent library. Until fixed, remote extraction coordinates will be in the wrong order. + +### Geoextent Web UI + +Interactive web interface at [/geoextent](publications/templates/geoextent.html) for extracting geospatial/temporal extents from data files. + +**Features:** + +- File upload (single or batch) with size validation +- Remote resource extraction via DOI/URL (comma-separated) +- Interactive Leaflet map preview with clickable features +- Parameter customization (bbox, tbox, convex_hull, placename, gazetteer) +- Response format selection (GeoJSON, WKT, WKB) +- Download results in selected format +- Client-side file size validation against server limits +- Error handling with informative messages +- Documentation section with supported formats and providers +- Use *sentence case* for all headlines and fields + +**Implementation:** + +- View: [publications/views.py](publications/views.py) - `geoextent()` function + - Uses `geoextent.lib.features.get_supported_features()` to dynamically load supported formats and providers + - No hardcoded format lists - always reflects current geoextent capabilities +- Template: [publications/templates/geoextent.html](publications/templates/geoextent.html) + - Uses Fetch API for AJAX requests (jQuery slim doesn't include $.ajax) + - Interactive file management with add/remove functionality + - Multiple file selection from different locations + - CSRF token handling for secure POST requests +- Uses existing jQuery (slim) and Bootstrap (no additional libraries) +- Map integration via existing Leaflet setup +- API calls to `/api/v1/geoextent/` endpoints +- UI tests: [tests-ui/test_geoextent.py](tests-ui/test_geoextent.py) + +**Configuration:** + +Size limits passed from Django settings: + +- `GEOEXTENT_MAX_FILE_SIZE_MB` - Single file upload limit +- `GEOEXTENT_MAX_BATCH_SIZE_MB` - Total batch upload limit +- `GEOEXTENT_MAX_DOWNLOAD_SIZE_MB` - Remote resource download limit + +**Navigation:** + +- Footer link added to [publications/templates/footer.html](publications/templates/footer.html) +- URL route: `path("geoextent/", views.geoextent, name="geoextent")` in [publications/urls.py](publications/urls.py) + +## Version Management + +Version is maintained in [optimap/\_\_init\_\_.py](optimap/__init__.py). Follow [Semantic Versioning](https://semver.org/spec/v2.0.0.html). Update [CHANGELOG.md](CHANGELOG.md) following [Keep a Changelog](https://keepachangelog.com/en/1.1.0/) format. diff --git a/Dockerfile b/Dockerfile index fcf08cf..8ed07d2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -10,10 +10,10 @@ ENV OPTIMAP_ALLOWED_HOST=* ENV DEBIAN_FRONTEND="noninteractive" TZ="Europe/Berlin" -# install Python +# install Python and git RUN apt-get update && \ apt-get install -y -qq python-is-python3 && \ - apt-get install -y -qq python3-pip tzdata + apt-get install -y -qq python3-pip tzdata git # install GDAL from UbuntuGIS RUN apt-get update && \ diff --git a/docs/geoextent_api.md b/docs/geoextent_api.md new file mode 100644 index 0000000..9aa2823 --- /dev/null +++ b/docs/geoextent_api.md @@ -0,0 +1,281 @@ +# Geoextent API + +## Overview + +A implementation of REST API endpoints that expose the functionality of the [geoextent library](https://github.com/nuest/geoextent/) for extracting geospatial and temporal extents from various file formats and remote repositories. +Response formats are close to the geoextent library output, with additional structured formats for easier consumption of WKT and WKB outputs. 
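
To give a feel for the API before the endpoint-by-endpoint details, here is a minimal Python sketch against the remote-extraction endpoint. The endpoint path, parameters, and response fields are the documented ones; the local base URL and the `requests` dependency are assumptions.

```python
# Sketch: extract the extent of a Zenodo record via the remote endpoint and inspect the result.
# Assumes a locally running OPTIMAP instance and the `requests` package.
import requests

payload = {
    "identifiers": ["10.5281/zenodo.4593540"],  # DOI reused from the usage examples below
    "bbox": True,
    "tbox": True,
    "response_format": "geojson",
}
resp = requests.post(
    "http://localhost:8000/api/v1/geoextent/extract-remote/",
    json=payload,
    timeout=300,
)
resp.raise_for_status()
result = resp.json()
print(result["geoextent_extraction"]["statistics"])      # files processed, total size
print(result["features"][0]["properties"].get("tbox"))   # temporal extent, if requested
```

The same request can also be issued as a GET with comma-separated `identifiers`, as shown in the usage examples below.
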
## Endpoints
+
+1. **`/api/v1/geoextent/extract/`** (POST)
+   - File upload via multipart/form-data
+   - File size validation
+   - Temporary file handling with cleanup
+   - Optional placename lookup
+
+1. **`/api/v1/geoextent/extract-remote/`** (POST, GET)
+   - Remote repository extraction (Zenodo, PANGAEA, etc.) via multipart/form-data or via URL parameter
+   - Download workers configuration
+   - Size and file limits
+   - Optional placename lookup
+
+1. **`/api/v1/geoextent/extract-batch/`** (POST)
+   - Multiple file upload
+   - Total size validation
+   - Per-file error handling
+   - Combined extent calculation
+   - Optional placename for combined result
+
+## Parameter Summary
+
+| Parameter | Type | Default | Description |
+|-----------|------|---------|-------------|
+| `response_format` | string | `geojson` | Output format: `geojson`, `wkt`, or `wkb` |
+| `bbox` | boolean | `true` | Extract spatial bounding box |
+| `tbox` | boolean | `true` | Extract temporal extent |
+| `convex_hull` | boolean | `false` | Use convex hull instead of bounding box |
+
+## Property Names
+
+The API uses the same property names as the geoextent CLI tool to avoid confusion:
+
+- `tbox`: Temporal extent (not `temporal_extent`)
+- `geoextent_extraction`: Top-level metadata object
+- `inputs`: Input identifiers (not `identifiers_processed`)
+- `files_processed`, `files_with_extent`, `total_size`: Statistics fields
+
+## HTTP Status Codes
+
+- `200 OK`: Successful extraction
+- `400 Bad Request`: Invalid parameters
+- `413 Request Entity Too Large`: File too large
+- `500 Internal Server Error`: Processing error
+
+Error responses contain only an `error` field with the error message (no `success: false` property).
+
+## Usage Examples
+
+1. **Single file extraction**:
+
+   ```bash
+   curl -X POST http://127.0.0.1:8000/api/v1/geoextent/extract/ \
+     -F "file=@test.geojson" \
+     -F "bbox=true" \
+     -F "tbox=true" \
+     -F "placename=true" \
+     -F "gazetteer=nominatim"
+   ```
+
+1. **Remote extraction**:
+
+   ```bash
+   curl -X POST http://127.0.0.1:8000/api/v1/geoextent/extract-remote/ \
+     -H "Content-Type: application/json" \
+     -d '{
+       "identifiers": ["10.5281/zenodo.4593540"],
+       "bbox": true,
+       "tbox": true,
+       "placename": true,
+       "file_limit": 5
+     }'
+   ```
+
+1. **Multiple identifiers with GeoJSON format**:
+
+   ```bash
+   curl -X POST http://localhost:8000/api/v1/geoextent/extract-remote/ \
+     -H "Content-Type: application/json" \
+     -d '{
+       "identifiers": ["10.5281/zenodo.4593540", "10.5281/zenodo.1234567"],
+       "bbox": true,
+       "tbox": true,
+       "response_format": "geojson"
+     }'
+   ```
+
+1. **Batch extraction**:
+
+   ```bash
+   curl -X POST http://127.0.0.1:8000/api/v1/geoextent/extract-batch/ \
+     -F "files=@file1.geojson" \
+     -F "files=@file2.tif" \
+     -F "bbox=true" \
+     -F "combine_extents=true" \
+     -F "placename=true"
+   ```
+
+**OpenAPI docs**:
+
+
+## Available Formats
+
+Switchable via the `response_format` parameter.
+
+### `geojson`
+
+Returns spatial extent as a GeoJSON FeatureCollection with temporal data and metadata in properties.
+ +**Example:** + +```bash +curl -X POST http://localhost:8000/api/v1/geoextent/extract-remote/ \ + -H "Content-Type: application/json" \ + -d '{"identifiers": ["10.5281/zenodo.4593540"], "bbox": true, "tbox": true}' +``` + +Or via GET: + +```bash +curl "http://localhost:8000/api/v1/geoextent/extract-remote/?identifiers=10.5281/zenodo.4593540&bbox=true&tbox=true" +``` + +```json +{ + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [39.642802545572735, -80.71456319678893], + [42.256308231814586, -80.71456319678893], + [42.256308231814586, -74.78657735361809], + [39.642802545572735, -74.78657735361809], + [39.642802545572735, -80.71456319678893] + ] + ] + }, + "properties": { + "tbox": ["2006-02-02", "2018-08-27"] + } + } + ], + "geoextent_extraction": { + "version": "0.9.1.dev3+g42ab7cff2.d20251006", + "inputs": ["10.5281/zenodo.4593540"], + "statistics": { + "files_processed": 1, + "files_with_extent": 1, + "total_size": "2.71 MiB" + }, + "format": "remote", + "crs": "4326", + "extent_type": "bounding_box" + } +} +``` + +#### GeoJSON Structure + +- `type`: Always "FeatureCollection" +- `features`: Array of GeoJSON Feature objects + - `geometry`: Polygon geometry representing the spatial extent + - `properties.tbox`: Temporal extent (if requested with `tbox=true`) +- `geoextent_extraction`: Extraction metadata + - `version`: Geoextent library version + - `inputs`: List of input files/identifiers + - `statistics`: Files processed, files with extent, total size + - `format`: Source format (e.g., "remote", "geojson", "geotiff") + - `crs`: Coordinate reference system + - `extent_type`: "bounding_box" or "convex_hull" + +### `wkt` + +Returns spatial extent as Well-Known Text (WKT) string with CRS information. + +**Example:** + +```bash +curl -X POST http://localhost:8000/api/v1/geoextent/extract-remote/ \ + -H "Content-Type: application/json" \ + -d '{"identifiers": ["10.5281/zenodo.4593540"], "bbox": true, "tbox": true, "response_format": "wkt"}' +``` + +```json +{ + "wkt": "POLYGON ((39.642802545572735 -80.71456319678893, 39.642802545572735 -74.78657735361809, 42.256308231814586 -74.78657735361809, 42.256308231814586 -80.71456319678893, 39.642802545572735 -80.71456319678893))", + "crs": "EPSG:4326", + "tbox": ["2006-02-02", "2018-08-27"], + "geoextent_extraction": { + "version": "0.9.1.dev3+g42ab7cff2.d20251006", + "inputs": ["10.5281/zenodo.4593540"], + "format": "remote", + "crs": "4326", + "extent_type": "bounding_box" + } +} +``` + +### `wkb` + +Returns spatial extent as Well-Known Binary (WKB) hex string with CRS information. 
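
Since OPTIMAP is built on GeoDjango, both the `wkt` and `wkb` payloads can be rehydrated into geometries with the GEOS bindings; a minimal sketch using the WKT string from the `wkt` example above (the same constructor also accepts the hex-encoded `wkb` string shown in the example below):

```python
# Sketch: rebuild a geometry from a `wkt` (or hex `wkb`) response using GeoDjango's GEOS bindings.
# The WKT string is copied from the example response above; no running server is required.
from django.contrib.gis.geos import GEOSGeometry

wkt = (
    "POLYGON ((39.642802545572735 -80.71456319678893, "
    "39.642802545572735 -74.78657735361809, "
    "42.256308231814586 -74.78657735361809, "
    "42.256308231814586 -80.71456319678893, "
    "39.642802545572735 -80.71456319678893))"
)
geom = GEOSGeometry(wkt, srid=4326)
print(geom.extent)  # (xmin, ymin, xmax, ymax) of the returned polygon
```
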
+ +**Example:** + +```bash +curl -X POST http://localhost:8000/api/v1/geoextent/extract-remote/ \ + -H "Content-Type: application/json" \ + -d '{"identifiers": ["10.5281/zenodo.4593540"], "bbox": true, "tbox": true, "response_format": "wkb"}' +``` + +```json +{ + "wkb": "0103000000010000000500000054e3a59bc4f2434054e3a59bc4f2434054e3a59bc4f2434054e3a59bc4f2434054e3a59bc4f24340", + "crs": "EPSG:4326", + "tbox": ["2006-02-02", "2018-08-27"], + "geoextent_extraction": { + "version": "0.9.1.dev3+g42ab7cff2.d20251006", + "inputs": ["10.5281/zenodo.4593540"], + "format": "remote", + "crs": "4326", + "extent_type": "bounding_box" + } +} +``` + +## Error Handling + +If a spatial extent cannot be converted to the requested format (e.g., no bbox available), the API returns a JSON error message with HTTP status code 400: + +```json +{ + "success": false, + "error": "Cannot convert to geojson: no spatial extent available" +} +``` + +## Configuration Examples + +### Development Configuration + +In `.env`: + +```env +OPTIMAP_DEBUG=True +OPTIMAP_GEOEXTENT_MAX_FILE_SIZE_MB=50 +OPTIMAP_GEOEXTENT_TIMEOUT=60 +OPTIMAP_GEOEXTENT_DOWNLOAD_WORKERS=2 +``` + +### Production Configuration + +In `.env`: + +```env +OPTIMAP_DEBUG=False +OPTIMAP_GEOEXTENT_MAX_FILE_SIZE_MB=100 +OPTIMAP_GEOEXTENT_MAX_DOWNLOAD_SIZE_MB=500 +OPTIMAP_GEOEXTENT_MAX_BATCH_SIZE_MB=250 +OPTIMAP_GEOEXTENT_TIMEOUT=30 +OPTIMAP_GEOEXTENT_DOWNLOAD_WORKERS=4 +OPTIMAP_GEOEXTENT_GEONAMES_USERNAME=your_username_here +``` + +## References + +- **Geoextent Library**: +- **Geoextent Documentation**: diff --git a/optimap/.env.example b/optimap/.env.example index 4b7e29e..f2299b8 100644 --- a/optimap/.env.example +++ b/optimap/.env.example @@ -28,6 +28,16 @@ DJANGO_LOGGING_LEVEL=ERROR OPTIMAP_DATA_DUMP_RETENTION=3 +# Geoextent API Configuration +OPTIMAP_GEOEXTENT_MAX_FILE_SIZE_MB=100 +OPTIMAP_GEOEXTENT_TIMEOUT=30 +OPTIMAP_GEOEXTENT_MAX_DOWNLOAD_SIZE_MB=1000 +OPTIMAP_GEOEXTENT_MAX_BATCH_SIZE_MB=500 +OPTIMAP_GEOEXTENT_DOWNLOAD_WORKERS=4 +OPTIMAP_GEOEXTENT_DEFAULT_GAZETTEER=nominatim +OPTIMAP_GEOEXTENT_GAZETTEER_TIMEOUT_SECONDS=5 +OPTIMAP_GEOEXTENT_GEONAMES_USERNAME= + WIKIBASE_CONSUMER_TOKEN=your_consumer_token WIKIBASE_CONSUMER_SECRET=your_consumer_secret WIKIBASE_ACCESS_TOKEN=your_access_token diff --git a/optimap/__init__.py b/optimap/__init__.py index a471784..18ec413 100644 --- a/optimap/__init__.py +++ b/optimap/__init__.py @@ -1,2 +1,2 @@ -__version__ = "0.5.0" +__version__ = "0.8.0" VERSION = __version__ \ No newline at end of file diff --git a/optimap/context_processors.py b/optimap/context_processors.py index 2125cdd..b1240cb 100644 --- a/optimap/context_processors.py +++ b/optimap/context_processors.py @@ -1,7 +1,18 @@ import optimap +from django.conf import settings def get_version(request): """ Return package version as listed in `__version__` in `init.py`. """ return {"optimap_version": optimap.__version__} + +def gazetteer_settings(request): + """ + Return gazetteer/geocoding settings for use in templates. 
+ """ + return { + "gazetteer_provider": getattr(settings, 'GAZETTEER_PROVIDER', 'nominatim'), + "gazetteer_placeholder": getattr(settings, 'GAZETTEER_PLACEHOLDER', 'Search for a location...'), + "gazetteer_api_key": getattr(settings, 'GAZETTEER_API_KEY', ''), + } diff --git a/optimap/settings.py b/optimap/settings.py index beffc06..63d19d2 100644 --- a/optimap/settings.py +++ b/optimap/settings.py @@ -54,7 +54,6 @@ AUTHENTICATION_BACKENDS = [ 'django.contrib.auth.backends.ModelBackend', - "sesame.backends.ModelBackend", ] # Login/Logout URLs for @login_required decorator @@ -240,6 +239,25 @@ WIKIBASE_USER_AGENT = f"OPTIMAP/{optimap.__version__} (https://optimap.science; {CONTACT_EMAIL})" +# Geoextent API settings +GEOEXTENT_MAX_FILE_SIZE_MB = int(os.getenv("OPTIMAP_GEOEXTENT_MAX_FILE_SIZE_MB", 100)) +GEOEXTENT_PROCESSING_TIMEOUT_SECONDS = int(os.getenv("OPTIMAP_GEOEXTENT_TIMEOUT", 30)) +GEOEXTENT_TEMP_DIR = os.getenv("OPTIMAP_GEOEXTENT_TEMP_DIR", "/tmp/optimap_geoextent") + +# Download limits - server-enforced maximums that cap user requests +GEOEXTENT_MAX_DOWNLOAD_SIZE_MB = int(os.getenv("OPTIMAP_GEOEXTENT_MAX_DOWNLOAD_SIZE_MB", 1000)) +GEOEXTENT_MAX_BATCH_SIZE_MB = int(os.getenv("OPTIMAP_GEOEXTENT_MAX_BATCH_SIZE_MB", 500)) + +# Download workers for parallel processing (remote and batch operations) +GEOEXTENT_DOWNLOAD_WORKERS = int(os.getenv("OPTIMAP_GEOEXTENT_DOWNLOAD_WORKERS", 4)) + +# Placename/gazetteer settings +GEOEXTENT_DEFAULT_GAZETTEER = os.getenv("OPTIMAP_GEOEXTENT_DEFAULT_GAZETTEER", "nominatim") +GEOEXTENT_GAZETTEER_TIMEOUT_SECONDS = int(os.getenv("OPTIMAP_GEOEXTENT_GAZETTEER_TIMEOUT_SECONDS", 5)) + +# GeoNames API configuration (if using geonames gazetteer) +GEOEXTENT_GEONAMES_USERNAME = os.getenv("OPTIMAP_GEOEXTENT_GEONAMES_USERNAME", "") + MIDDLEWARE = [ 'django.middleware.cache.UpdateCacheMiddleware', 'django.middleware.common.CommonMiddleware', @@ -256,7 +274,6 @@ 'django.middleware.clickjacking.XFrameOptionsMiddleware', "django.contrib.auth.middleware.AuthenticationMiddleware", "django.contrib.sites.middleware.CurrentSiteMiddleware", - "sesame.middleware.AuthenticationMiddleware", "django_currentuser.middleware.ThreadLocalUserMiddleware", "django.middleware.gzip.GZipMiddleware", @@ -277,6 +294,7 @@ 'django.contrib.messages.context_processors.messages', 'optimap.urls.site', 'optimap.context_processors.get_version', + 'optimap.context_processors.gazetteer_settings', ], }, }, @@ -376,3 +394,33 @@ ADMINS = [('OPTIMAP', 'login@optimap.science')] FEED_MAX_ITEMS = 20 + +# Gazetteer / Geocoding Settings +# Configures the location search (gazetteer) feature on the map +GAZETTEER_PROVIDER = env('OPTIMAP_GAZETTEER_PROVIDER', default='nominatim') +GAZETTEER_PLACEHOLDER = env('OPTIMAP_GAZETTEER_PLACEHOLDER', default='Search for a location...') +# Optional API key for commercial providers (not required for Nominatim) +GAZETTEER_API_KEY = env('OPTIMAP_GAZETTEER_API_KEY', default='') + +# Works List Pagination Settings +# Default number of works to display per page +WORKS_PAGE_SIZE_DEFAULT = int(env('OPTIMAP_WORKS_PAGE_SIZE_DEFAULT', default=50)) +# Minimum page size users can select +WORKS_PAGE_SIZE_MIN = int(env('OPTIMAP_WORKS_PAGE_SIZE_MIN', default=10)) +# Maximum page size users can select +WORKS_PAGE_SIZE_MAX = int(env('OPTIMAP_WORKS_PAGE_SIZE_MAX', default=200)) + +# Calculate available page size options by doubling from MIN to MAX +# Always includes MIN and MAX values +def _calculate_page_size_options(min_size, max_size): + """Calculate page size options by doubling from min to 
max""" + options = [min_size] + current = min_size + while current * 2 < max_size: + current = current * 2 + options.append(current) + if options[-1] != max_size: + options.append(max_size) + return options + +WORKS_PAGE_SIZE_OPTIONS = _calculate_page_size_options(WORKS_PAGE_SIZE_MIN, WORKS_PAGE_SIZE_MAX) diff --git a/publications/api.py b/publications/api.py index 0509b7f..d78c34a 100644 --- a/publications/api.py +++ b/publications/api.py @@ -1,14 +1,17 @@ """Publications API URL Configuration.""" from rest_framework import routers -from publications.viewsets import ( SourceViewSet, +from publications.viewsets import ( + SourceViewSet, PublicationViewSet, SubscriptionViewSet, + GeoextentViewSet, ) router = routers.DefaultRouter() router.register(r"sources", SourceViewSet, basename="source") router.register(r"publications", PublicationViewSet, basename="publication") router.register(r"subscriptions", SubscriptionViewSet, basename="subscription") +router.register(r"geoextent", GeoextentViewSet, basename="geoextent") urlpatterns = router.urls diff --git a/publications/management/commands/update_statistics.py b/publications/management/commands/update_statistics.py new file mode 100644 index 0000000..2411d5a --- /dev/null +++ b/publications/management/commands/update_statistics.py @@ -0,0 +1,30 @@ +# publications/management/commands/update_statistics.py +""" +Management command to update cached statistics. +Run this command nightly via cron job: + 0 2 * * * /path/to/manage.py update_statistics +""" + +from django.core.management.base import BaseCommand +from publications.utils.statistics import update_statistics_cache + + +class Command(BaseCommand): + help = 'Update cached publication statistics' + + def handle(self, *args, **options): + self.stdout.write('Updating publication statistics...') + + try: + stats = update_statistics_cache() + + self.stdout.write(self.style.SUCCESS('✓ Statistics updated successfully')) + self.stdout.write(f' Total works: {stats["total_works"]}') + self.stdout.write(f' Published works: {stats["published_works"]}') + self.stdout.write(f' With complete metadata: {stats["with_complete_metadata"]} ({stats["complete_percentage"]}%)') + + except Exception as e: + self.stdout.write( + self.style.ERROR(f'✗ Failed to update statistics: {str(e)}') + ) + raise diff --git a/publications/serializers.py b/publications/serializers.py index 54313f9..bad6178 100644 --- a/publications/serializers.py +++ b/publications/serializers.py @@ -5,6 +5,7 @@ from rest_framework import serializers as drf_serializers from .models import Publication, Subscription, Source from django.contrib.auth import get_user_model +from django.conf import settings User = get_user_model() @@ -94,3 +95,113 @@ class UserSerializer(drf_serializers.ModelSerializer): class Meta: model = User fields = ["id", "username", "email"] + + +# Geoextent API Serializers + +# Shared field definitions +RESPONSE_FORMAT_CHOICES = ['geojson', 'wkt', 'wkb'] +RESPONSE_FORMAT_DEFAULT = 'geojson' +RESPONSE_FORMAT_HELP = "Response format: 'geojson' (default - GeoJSON FeatureCollection), 'wkt' (WKT string with metadata), 'wkb' (WKB hex string with metadata)" + +GAZETTEER_CHOICES = ['nominatim', 'geonames', 'photon'] +GAZETTEER_DEFAULT = 'nominatim' + + +class GeoextentBaseSerializer(serializers.Serializer): + """Base serializer with common geoextent parameters.""" + bbox = serializers.BooleanField(default=True) + tbox = serializers.BooleanField(default=True) + convex_hull = serializers.BooleanField(default=False) + response_format = 
serializers.ChoiceField( + choices=RESPONSE_FORMAT_CHOICES, + default=RESPONSE_FORMAT_DEFAULT, + help_text=RESPONSE_FORMAT_HELP + ) + placename = serializers.BooleanField(default=False) + gazetteer = serializers.ChoiceField( + choices=GAZETTEER_CHOICES, + default=GAZETTEER_DEFAULT + ) + external_metadata = serializers.BooleanField( + default=True, + help_text="Retrieve external metadata from CrossRef/DataCite for DOIs (only applies to remote resources)" + ) + external_metadata_method = serializers.ChoiceField( + choices=['auto', 'all', 'crossref', 'datacite'], + default='auto', + help_text="Method for retrieving metadata: 'auto' (default), 'all', 'crossref', or 'datacite'" + ) + + def validate_gazetteer(self, value): + """Only validate gazetteer if placename is requested.""" + if self.initial_data.get('placename', False) and not value: + raise serializers.ValidationError("Gazetteer must be specified when placename=true") + return value + + +class GeoextentExtractSerializer(GeoextentBaseSerializer): + """Serializer for extracting extent from uploaded file.""" + file = serializers.FileField(required=True) + + +class GeoextentRemoteSerializer(GeoextentBaseSerializer): + """Serializer for extracting extent from remote repository.""" + identifiers = serializers.ListField( + child=serializers.CharField(), + required=True, + min_length=1, + help_text="List of DOIs or repository URLs" + ) + file_limit = serializers.IntegerField(default=10, min_value=1, max_value=100) + size_limit_mb = serializers.IntegerField(default=100, min_value=1) + + def validate_size_limit_mb(self, value): + """Ensure requested size doesn't exceed server maximum.""" + max_allowed = getattr(settings, 'GEOEXTENT_MAX_DOWNLOAD_SIZE_MB', 1000) + if value > max_allowed: + raise serializers.ValidationError( + f"Requested size limit ({value}MB) exceeds server maximum ({max_allowed}MB)" + ) + return value + + +class GeoextentRemoteGetSerializer(GeoextentBaseSerializer): + """Serializer for GET endpoint with URL parameters.""" + identifiers = serializers.CharField( + required=True, + help_text="Comma-separated DOIs or repository URLs" + ) + file_limit = serializers.IntegerField(default=10, min_value=1, max_value=100) + size_limit_mb = serializers.IntegerField(default=100, min_value=1) + + def validate_identifiers(self, value): + """Parse comma-separated identifiers and validate.""" + identifiers = [i.strip() for i in value.split(',') if i.strip()] + if not identifiers: + raise serializers.ValidationError("At least one identifier must be provided") + return identifiers + + def validate_size_limit_mb(self, value): + """Ensure requested size doesn't exceed server maximum.""" + max_allowed = getattr(settings, 'GEOEXTENT_MAX_DOWNLOAD_SIZE_MB', 1000) + if value > max_allowed: + raise serializers.ValidationError( + f"Requested size limit ({value}MB) exceeds server maximum ({max_allowed}MB)" + ) + return value + + +class GeoextentBatchSerializer(GeoextentBaseSerializer): + """Serializer for extracting extent from multiple files.""" + # files handled separately in view + size_limit_mb = serializers.IntegerField(default=100, min_value=1) + + def validate_size_limit_mb(self, value): + """Ensure total batch size doesn't exceed server maximum.""" + max_allowed = getattr(settings, 'GEOEXTENT_MAX_BATCH_SIZE_MB', 500) + if value > max_allowed: + raise serializers.ValidationError( + f"Requested batch size ({value}MB) exceeds server maximum ({max_allowed}MB)" + ) + return value diff --git a/publications/sitemaps.py b/publications/sitemaps.py index 
92ecece..5b8afde 100644 --- a/publications/sitemaps.py +++ b/publications/sitemaps.py @@ -39,6 +39,7 @@ def items(self): "contribute", # Contribute page (/contribute/) "data", # Data download page (/data/) "feeds", # RSS/Atom feeds listing (/feeds/) + "geoextent", # Geoextent extraction tool (/geoextent/) "privacy", # Privacy policy (/privacy/) "redoc", # API schema UI (/api/schema/ui/) "sitemap-page", # Human-readable sitemap (/pages/) diff --git a/publications/static/css/accessibility.css b/publications/static/css/accessibility.css new file mode 100644 index 0000000..e065fc8 --- /dev/null +++ b/publications/static/css/accessibility.css @@ -0,0 +1,457 @@ +/* + * Accessibility Enhancements for OPTIMAP + * Implements WCAG 2.1 AA compliance features + * - High contrast theme + * - Focus indicators + * - Screen reader utilities + */ + +/* ========================================================================== + Screen Reader Only Utility Class + ========================================================================== */ + +.sr-only { + position: absolute; + width: 1px; + height: 1px; + padding: 0; + margin: -1px; + overflow: hidden; + clip: rect(0, 0, 0, 0); + white-space: nowrap; + border-width: 0; +} + +.sr-only-focusable:active, +.sr-only-focusable:focus { + position: static; + width: auto; + height: auto; + overflow: visible; + clip: auto; + white-space: normal; +} + +/* ========================================================================== + Focus Indicators (WCAG 2.4.7) + High contrast, visible focus indicators for all interactive elements + Only shown when high contrast mode is enabled + ========================================================================== */ + +/* Focus indicators only in high contrast mode */ +body.high-contrast *:focus { + outline: 3px solid #FF6B35 !important; + outline-offset: 2px; +} + +body.high-contrast a:focus, +body.high-contrast button:focus, +body.high-contrast input:focus, +body.high-contrast select:focus, +body.high-contrast textarea:focus, +body.high-contrast [tabindex]:focus { + outline: 3px solid #FF6B35 !important; + outline-offset: 2px; +} + +/* Focus indicators for dark backgrounds (navbar, footer) in high contrast mode */ +body.high-contrast .navbar *:focus, +body.high-contrast .footer *:focus, +body.high-contrast .bg-primary *:focus { + outline: 3px solid #FFD700 !important; /* Gold for better visibility on dark */ + outline-offset: 2px; +} + +/* Focus for Bootstrap buttons in high contrast mode */ +body.high-contrast .btn:focus, +body.high-contrast .btn:active:focus { + outline: 3px solid #FF6B35 !important; + outline-offset: 2px; + box-shadow: 0 0 0 0.2rem rgba(255, 107, 53, 0.25) !important; +} + +body.high-contrast .btn-primary:focus, +body.high-contrast .btn-primary:active:focus { + box-shadow: 0 0 0 0.2rem rgba(21, 143, 155, 0.5) !important; +} + +/* Focus for dropdown items in high contrast mode */ +body.high-contrast .dropdown-item:focus { + outline: 2px solid #FF6B35 !important; + outline-offset: -2px; + background-color: #f8f9fa; +} + +/* Focus for form controls in high contrast mode */ +body.high-contrast .form-control:focus { + border-color: #FF6B35; + box-shadow: 0 0 0 0.2rem rgba(255, 107, 53, 0.25); + outline: 2px solid #FF6B35; +} + +/* ========================================================================== + Skip Link (WCAG 2.4.1) + ========================================================================== */ + +.skip-link { + position: absolute; + top: -40px; + left: 0; + background: #000; + color: #fff; + 
padding: 8px 12px; + z-index: 10000; + text-decoration: none; + font-weight: bold; +} + +.skip-link:focus { + top: 0; +} + +body.high-contrast .skip-link:focus { + outline: 3px solid #FFD700; + outline-offset: 0; +} + +/* ========================================================================== + High Contrast Theme + Activated via JavaScript toggle, saved in localStorage + ========================================================================== */ + +body.high-contrast { + /* Enhanced contrast colors */ + background-color: #000000 !important; + color: #FFFFFF !important; +} + +body.high-contrast .navbar { + background-color: #000000 !important; + border-bottom: 3px solid #FFFFFF; +} + +body.high-contrast .footer-copyright { + background-color: #000000 !important; + border-top: 3px solid #FFFFFF; +} + +body.high-contrast .bg-primary { + background-color: #000000 !important; + color: #FFFF00 !important; /* Yellow for maximum contrast */ +} + +body.high-contrast a { + color: #00FFFF !important; /* Cyan for links */ + text-decoration: underline; + font-weight: bold; +} + +body.high-contrast a:hover, +body.high-contrast a:focus { + color: #FFFF00 !important; /* Yellow on hover */ + text-decoration: underline; + background-color: #000080; /* Dark blue background */ +} + +body.high-contrast .btn-primary { + background-color: #FFFF00 !important; + color: #000000 !important; + border: 3px solid #FFFFFF !important; + font-weight: bold; +} + +body.high-contrast .btn-primary:hover, +body.high-contrast .btn-primary:focus { + background-color: #FFFFFF !important; + color: #000000 !important; + border: 3px solid #FFFF00 !important; +} + +body.high-contrast .btn-secondary { + background-color: #808080 !important; + color: #FFFFFF !important; + border: 3px solid #FFFFFF !important; +} + +body.high-contrast .btn-danger { + background-color: #FF0000 !important; + color: #FFFFFF !important; + border: 3px solid #FFFFFF !important; +} + +body.high-contrast .card { + background-color: #1A1A1A !important; + border: 2px solid #FFFFFF !important; + color: #FFFFFF !important; +} + +body.high-contrast .card-header { + background-color: #000000 !important; + border-bottom: 2px solid #FFFFFF !important; + color: #FFFF00 !important; +} + +body.high-contrast .alert { + border: 3px solid #FFFFFF !important; + font-weight: bold; +} + +body.high-contrast .alert-primary { + background-color: #000080 !important; + color: #FFFF00 !important; +} + +body.high-contrast .alert-success { + background-color: #006400 !important; + color: #00FF00 !important; +} + +body.high-contrast .alert-danger { + background-color: #8B0000 !important; + color: #FF6347 !important; +} + +body.high-contrast .form-control { + background-color: #FFFFFF !important; + color: #000000 !important; + border: 2px solid #000000 !important; +} + +body.high-contrast .form-control:focus { + outline: 3px solid #FFFF00 !important; + border-color: #FFFF00 !important; +} + +body.high-contrast .dropdown-menu { + background-color: #1A1A1A !important; + border: 2px solid #FFFFFF !important; +} + +body.high-contrast .dropdown-item { + color: #FFFFFF !important; +} + +body.high-contrast .dropdown-item:hover, +body.high-contrast .dropdown-item:focus { + background-color: #000080 !important; + color: #FFFF00 !important; +} + +body.high-contrast .dropdown-divider { + border-top: 2px solid #FFFFFF !important; +} + +/* High contrast for map elements */ +body.high-contrast #map { + border: 3px solid #FFFFFF !important; +} + +body.high-contrast .leaflet-popup-content-wrapper 
{ + background-color: #1A1A1A !important; + color: #FFFFFF !important; + border: 3px solid #FFFFFF !important; +} + +body.high-contrast .leaflet-popup-content-wrapper a { + color: #00FFFF !important; +} + +body.high-contrast .leaflet-popup-tip { + background-color: #1A1A1A !important; + border: 2px solid #FFFFFF !important; +} + +/* High contrast focus indicators for Leaflet map controls */ +body.high-contrast .leaflet-bar a:focus, +body.high-contrast .leaflet-control a:focus, +body.high-contrast .leaflet-control button:focus { + outline: 3px solid #FFD700 !important; + outline-offset: 2px; +} + +body.high-contrast .leaflet-container a.leaflet-popup-close-button:focus { + outline: 3px solid #FFD700 !important; + outline-offset: 2px; +} + +body.high-contrast .leaflet-control-attribution a:focus { + outline: 3px solid #FFD700 !important; + outline-offset: 2px; +} + +/* High contrast for tables */ +body.high-contrast table { + border: 2px solid #FFFFFF !important; +} + +body.high-contrast th { + background-color: #000000 !important; + color: #FFFF00 !important; + border: 2px solid #FFFFFF !important; +} + +body.high-contrast td { + border: 1px solid #FFFFFF !important; +} + +/* ========================================================================== + Accessibility Toggle Button + ========================================================================== */ + +#accessibility-toggle { + position: fixed; + bottom: 15px; + right: 15px; + z-index: 9999; + background-color: #158F9B; + color: #FFFFFF; + border: 2px solid #FFFFFF; + border-radius: 50%; + width: 42px; + height: 42px; + font-size: 18px; + cursor: pointer; + box-shadow: 0 2px 6px rgba(0, 0, 0, 0.3); + display: flex; + align-items: center; + justify-content: center; + transition: all 0.3s ease; +} + +#accessibility-toggle:hover { + background-color: #0A6870; + transform: scale(1.05); +} + +body.high-contrast #accessibility-toggle:focus { + outline: 3px solid #FFD700; + outline-offset: 3px; +} + +body.high-contrast #accessibility-toggle { + background-color: #FFFF00; + color: #000000; + border: 3px solid #FFFFFF; +} + +body.high-contrast #accessibility-toggle:hover { + background-color: #FFFFFF; + color: #000000; +} + +/* Tooltip for accessibility toggle */ +#accessibility-toggle[data-tooltip]:before { + content: attr(data-tooltip); + position: absolute; + right: 100%; + margin-right: 10px; + padding: 8px 12px; + background-color: #000; + color: #fff; + white-space: nowrap; + border-radius: 4px; + opacity: 0; + pointer-events: none; + transition: opacity 0.3s; +} + +#accessibility-toggle:hover[data-tooltip]:before, +#accessibility-toggle:focus[data-tooltip]:before { + opacity: 1; +} + +/* ========================================================================== + ARIA Live Regions + ========================================================================== */ + +#announcer, +#map-announcer { + position: absolute; + width: 1px; + height: 1px; + padding: 0; + margin: -1px; + overflow: hidden; + clip: rect(0, 0, 0, 0); + white-space: nowrap; + border-width: 0; +} + +/* ========================================================================== + Improved Link Visibility + ========================================================================== */ + +/* External link indicators should be more visible */ +a[target="_blank"]::after, +a[rel*="external"]::after { + content: " \f35d"; /* FontAwesome external-link-alt icon */ + font-family: "Font Awesome 5 Free"; + font-weight: 900; + font-size: 0.8em; + margin-left: 0.2em; + opacity: 0.7; +} 
+ +/* High contrast external link indicators */ +body.high-contrast a[target="_blank"]::after, +body.high-contrast a[rel*="external"]::after { + opacity: 1; + color: inherit; +} + +/* ========================================================================== + Improved Button States + ========================================================================== */ + +/* Make disabled state more obvious */ +button:disabled, +.btn:disabled, +input:disabled, +select:disabled, +textarea:disabled { + opacity: 0.5; + cursor: not-allowed; + outline: 2px dashed #999; +} + +body.high-contrast button:disabled, +body.high-contrast .btn:disabled, +body.high-contrast input:disabled { + background-color: #4D4D4D !important; + color: #999999 !important; + border: 2px dashed #FFFFFF !important; +} + +/* ========================================================================== + Reduced Motion (prefers-reduced-motion) + ========================================================================== */ + +@media (prefers-reduced-motion: reduce) { + *, + *::before, + *::after { + animation-duration: 0.01ms !important; + animation-iteration-count: 1 !important; + transition-duration: 0.01ms !important; + scroll-behavior: auto !important; + } +} + +/* ========================================================================== + Print Styles + ========================================================================== */ + +@media print { + .skip-link, + #accessibility-toggle, + .navbar, + .footer, + #map-announcer, + #announcer { + display: none !important; + } + + a[href]::after { + content: " (" attr(href) ")"; + } +} diff --git a/publications/static/css/leaflet.control.geocoder.css b/publications/static/css/leaflet.control.geocoder.css new file mode 100644 index 0000000..4d30016 --- /dev/null +++ b/publications/static/css/leaflet.control.geocoder.css @@ -0,0 +1,126 @@ +.leaflet-control-geocoder { + border-radius: 4px; + background: white; + min-width: 26px; + min-height: 26px; +} + +.leaflet-touch .leaflet-control-geocoder { + min-width: 30px; + min-height: 30px; +} + +.leaflet-control-geocoder a, +.leaflet-control-geocoder .leaflet-control-geocoder-icon { + border-bottom: none; + display: inline-block; +} + +.leaflet-control-geocoder .leaflet-control-geocoder-alternatives a { + width: inherit; + height: inherit; + line-height: inherit; +} + +.leaflet-control-geocoder a:hover, +.leaflet-control-geocoder .leaflet-control-geocoder-icon:hover { + border-bottom: none; + display: inline-block; +} + +.leaflet-control-geocoder-form { + display: none; + vertical-align: middle; +} +.leaflet-control-geocoder-expanded .leaflet-control-geocoder-form { + display: inline-block; +} +.leaflet-control-geocoder-form input { + font-size: 120%; + border: 0; + background-color: transparent; + width: 246px; +} + +.leaflet-control-geocoder-icon { + border-radius: 4px; + width: 26px; + height: 26px; + border: none; + background-color: white; + background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'%3E%3Cpath d='M12.2 13l3.4 6.6c.6 1.1 2.5-.4 2-1.2l-4-6.2z'/%3E%3Ccircle cx='10.8' cy='8.9' r='3.9' fill='none' stroke='%23000' stroke-width='1.5'/%3E%3C/svg%3E"); + background-repeat: no-repeat; + background-position: center; + cursor: pointer; +} + +.leaflet-touch .leaflet-control-geocoder-icon { + width: 30px; + height: 30px; +} + +.leaflet-control-geocoder-throbber .leaflet-control-geocoder-icon { + background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' 
stroke='%23000' stroke-linecap='round' stroke-width='1.6' viewBox='0 0 24 24'%3E%3Cdefs/%3E%3Cg%3E%3Cpath stroke-opacity='.1' d='M14 8.4l3-5'/%3E%3Cpath stroke-opacity='.2' d='M15.6 10l5-3'/%3E%3Cpath stroke-opacity='.3' d='M16.2 12H22'/%3E%3Cpath stroke-opacity='.4' d='M15.6 14l5 3m-6.5-1.4l2.9 5'/%3E%3Cpath stroke-opacity='.5' d='M12 16.2V22m-2-6.4l-3 5'/%3E%3Cpath stroke-opacity='.6' d='M8.4 14l-5 3'/%3E%3Cpath stroke-opacity='.7' d='M7.8 12H2'/%3E%3Cpath stroke-opacity='.8' d='M8.4 10l-5-3'/%3E%3Cpath stroke-opacity='.9' d='M10 8.4l-3-5'/%3E%3Cpath d='M12 7.8V2'/%3E%3CanimateTransform attributeName='transform' calcMode='discrete' dur='1s' repeatCount='indefinite' type='rotate' values='0 12 12;30 12 12;60 12 12;90 12 12;120 12 12;150 12 12;180 12 12;210 12 12;240 12 12;270 12 12;300 12 12;330 12 12'/%3E%3C/g%3E%3C/svg%3E"); +} + +.leaflet-control-geocoder-form-no-error { + display: none; +} + +.leaflet-control-geocoder-form input:focus { + outline: none; +} + +.leaflet-control-geocoder-form button { + display: none; +} +.leaflet-control-geocoder-error { + margin-top: 8px; + margin-left: 8px; + display: block; + color: #444; +} +.leaflet-control-geocoder-alternatives { + display: block; + width: 272px; + list-style: none; + padding: 0; + margin: 0; +} + +.leaflet-control-geocoder-alternatives-minimized { + display: none; + height: 0; +} +.leaflet-control-geocoder-alternatives li { + white-space: nowrap; + display: block; + overflow: hidden; + padding: 5px 8px; + text-overflow: ellipsis; + border-bottom: 1px solid #ccc; + cursor: pointer; +} + +.leaflet-control-geocoder-alternatives li a, +.leaflet-control-geocoder-alternatives li a:hover { + width: inherit; + height: inherit; + line-height: inherit; + background: inherit; + border-radius: inherit; + text-align: left; +} + +.leaflet-control-geocoder-alternatives li:last-child { + border-bottom: none; +} +.leaflet-control-geocoder-alternatives li:hover, +.leaflet-control-geocoder-selected { + background-color: #f5f5f5; +} +.leaflet-control-geocoder-address-detail { +} +.leaflet-control-geocoder-address-context { + color: #666; +} diff --git a/publications/static/css/main.css b/publications/static/css/main.css index 8d4709e..36edb74 100644 --- a/publications/static/css/main.css +++ b/publications/static/css/main.css @@ -253,21 +253,45 @@ h1.page-title { } /* Custom zoom to all features button */ -.leaflet-control-zoom-all { +.leaflet-control-zoom-to-all { + margin-top: 10px; +} + +.leaflet-control-zoom-to-all-button { background-color: white; border: 2px solid rgba(0,0,0,0.2); border-radius: 4px; - width: 26px; - height: 26px; - line-height: 26px; + width: 30px; + height: 30px; + line-height: 30px; text-align: center; cursor: pointer; - font-size: 18px; - font-weight: bold; + font-size: 14px; + display: flex; + align-items: center; + justify-content: center; + color: #333; + text-decoration: none; + transition: background-color 0.2s ease; +} + +.leaflet-control-zoom-to-all-button i { + font-size: 14px; } -.leaflet-control-zoom-all:hover { +.leaflet-control-zoom-to-all-button:hover { background-color: #f4f4f4; + color: #000; +} + +.leaflet-control-zoom-to-all-button:active { + background-color: #e0e0e0; +} + +/* Focus outline only in high contrast mode */ +body.high-contrast .leaflet-control-zoom-to-all-button:focus { + outline: 3px solid #FFD700; + outline-offset: 2px; } /* Works page styles */ diff --git a/publications/static/css/map-search.css b/publications/static/css/map-search.css new file mode 100644 index 0000000..1a715c7 --- 
/dev/null +++ b/publications/static/css/map-search.css @@ -0,0 +1,316 @@ +/* + * Map Search Component Styles + * Search bar integrated into navbar for filtering map publications + */ + +/* ========================================================================== + Navbar Search Container + ========================================================================== */ + +.navbar-search-container { + display: flex; + align-items: center; + margin: 0 1rem; + flex-grow: 1; + max-width: 500px; +} + +.navbar-search-form { + width: 100%; + margin: 0; +} + +.search-input-wrapper { + position: relative; + display: flex; + align-items: center; + width: 100%; +} + +/* ========================================================================== + Search Input + ========================================================================== */ + +.navbar-search-input { + width: 100%; + padding: 0.5rem 2.5rem 0.5rem 2.5rem; + border: 2px solid rgba(255, 255, 255, 0.3); + border-radius: 25px; + background-color: rgba(255, 255, 255, 0.15); + color: #FFFFFF; + font-size: 14px; + transition: all 0.3s ease; + outline: none; +} + +.navbar-search-input::placeholder { + color: rgba(255, 255, 255, 0.7); +} + +.navbar-search-input:hover { + background-color: rgba(255, 255, 255, 0.25); + border-color: rgba(255, 255, 255, 0.5); +} + +.navbar-search-input:focus { + background-color: rgba(255, 255, 255, 0.95); + color: #000000; +} + +.navbar-search-input:focus::placeholder { + color: rgba(0, 0, 0, 0.5); +} + +/* Focus outline only in high contrast mode */ +body.high-contrast .navbar-search-input:focus { + border-color: #FFD700; + box-shadow: 0 0 0 3px rgba(255, 215, 0, 0.3); +} + +/* Search icon */ +.search-input-wrapper .search-icon { + position: absolute; + left: 12px; + color: rgba(255, 255, 255, 0.7); + pointer-events: none; + z-index: 1; + transition: color 0.3s ease; +} + +.navbar-search-input:focus ~ .search-icon, +.navbar-search-input:focus + .search-icon { + color: #158F9B; +} + +/* Ensure icon is before input in DOM but appears correctly */ +.search-input-wrapper { + display: flex; + flex-direction: row-reverse; + justify-content: flex-end; +} + +.search-input-wrapper .navbar-search-input { + order: 2; +} + +.search-input-wrapper .search-icon { + order: 1; +} + +/* ========================================================================== + Search Submit Button (Magnifying Glass) + ========================================================================== */ + +.navbar-search-submit-btn { + position: absolute; + right: 40px; + background: transparent; + border: none; + color: rgba(255, 255, 255, 0.8); + padding: 0; + cursor: pointer; + border-radius: 50%; + transition: all 0.2s ease; + display: flex; + align-items: center; + justify-content: center; + width: 32px; + height: 32px; + z-index: 2; + font-size: 16px; +} + +.navbar-search-submit-btn:hover { + background-color: rgba(255, 255, 255, 0.2); + color: #FFFFFF; +} + +.navbar-search-submit-btn:focus { + background-color: rgba(255, 255, 255, 0.3); + color: #FFFFFF; +} + +/* Focus outline only in high contrast mode */ +body.high-contrast .navbar-search-submit-btn:focus { + outline: 2px solid #FFD700; + outline-offset: 2px; +} + +.navbar-search-input:focus ~ .navbar-search-submit-btn { + color: #158F9B; +} + +.navbar-search-input:focus ~ .navbar-search-submit-btn:hover { + background-color: rgba(21, 143, 155, 0.1); + color: #0A6870; +} + +/* High contrast mode */ +body.high-contrast .navbar-search-submit-btn { + color: #FFFF00; +} + +body.high-contrast 
.navbar-search-submit-btn:hover { + color: #FFFFFF; +} + +/* ========================================================================== + Clear Button + ========================================================================== */ + +.navbar-clear-search-btn { + position: absolute; + right: 4px; + background: transparent; + border: none; + color: rgba(255, 255, 255, 0.7); + padding: 0; + cursor: pointer; + border-radius: 50%; + transition: all 0.2s ease; + display: flex; + align-items: center; + justify-content: center; + width: 32px; + height: 32px; + z-index: 2; + font-size: 16px; +} + +.navbar-clear-search-btn:hover { + background-color: rgba(255, 255, 255, 0.2); + color: #FFFFFF; +} + +.navbar-clear-search-btn:focus { + background-color: rgba(255, 255, 255, 0.3); + color: #FFFFFF; +} + +/* Focus outline only in high contrast mode */ +body.high-contrast .navbar-clear-search-btn:focus { + outline: 2px solid #FFD700; + outline-offset: 2px; +} + +.navbar-search-input:focus ~ .navbar-clear-search-btn { + color: #158F9B; +} + +.navbar-search-input:focus ~ .navbar-clear-search-btn:hover { + background-color: rgba(21, 143, 155, 0.1); + color: #0A6870; +} + +/* ========================================================================== + Responsive Design + ========================================================================== */ + +@media (max-width: 991px) { + .navbar-search-container { + max-width: 300px; + margin: 0 0.5rem; + } + + .navbar-search-input { + font-size: 13px; + padding: 0.4rem 2.25rem 0.4rem 2.25rem; + } +} + +@media (max-width: 767px) { + .navbar-search-container { + display: none; /* Hide on very small screens, could be replaced with a toggle */ + } + + /* Alternative: Make it full width on mobile */ + /* .navbar-search-container { + position: absolute; + top: 50px; + left: 0; + right: 0; + max-width: none; + padding: 0.5rem; + background-color: #158F9B; + border-bottom: 1px solid rgba(255, 255, 255, 0.3); + } */ +} + +@media (min-width: 768px) and (max-width: 1199px) { + .tagline { + font-size: 1.2em !important; + } +} + +/* ========================================================================== + High Contrast Mode Adjustments + ========================================================================== */ + +body.high-contrast .navbar-search-input { + background-color: #FFFFFF; + color: #000000; + border-color: #FFFF00; +} + +body.high-contrast .navbar-search-input::placeholder { + color: #666666; +} + +body.high-contrast .navbar-search-input:focus { + background-color: #FFFF00; + color: #000000; + border-color: #FFFFFF; + box-shadow: 0 0 0 3px #FFFFFF; +} + +body.high-contrast .search-input-wrapper .search-icon { + color: #000000; +} + +body.high-contrast .navbar-clear-search-btn { + color: #000000; + background-color: rgba(255, 255, 0, 0.2); +} + +body.high-contrast .navbar-clear-search-btn:hover { + background-color: #FFFF00; + color: #000000; + border: 2px solid #000000; +} + +/* ========================================================================== + Loading State + ========================================================================== */ + +.navbar-search-input.searching { + background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='20' height='20' viewBox='0 0 20 20'%3E%3Cpath fill='%23158F9B' d='M10 2a8 8 0 1 0 0 16 8 8 0 0 0 0-16zm0 14a6 6 0 1 1 0-12 6 6 0 0 1 0 12z'/%3E%3C/svg%3E"); + background-repeat: no-repeat; + background-position: calc(100% - 40px) center; + background-size: 16px 16px; +} + +/* 
========================================================================== + Search Results Badge (optional - for showing count) + ========================================================================== */ + +.search-results-badge { + position: absolute; + top: -8px; + right: -8px; + background-color: #FF6B35; + color: #FFFFFF; + border-radius: 12px; + padding: 2px 8px; + font-size: 11px; + font-weight: bold; + min-width: 20px; + text-align: center; + box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2); + pointer-events: none; +} + +body.high-contrast .search-results-badge { + background-color: #FFFF00; + color: #000000; + border: 2px solid #FFFFFF; +} diff --git a/publications/static/download_libraries.sh b/publications/static/download_libraries.sh index 78b1c80..62076c2 100755 --- a/publications/static/download_libraries.sh +++ b/publications/static/download_libraries.sh @@ -32,6 +32,9 @@ wget -q https://unpkg.com/leaflet-draw@1.0.4/dist/leaflet.draw.js -O js/leaflet. echo " - Leaflet Fullscreen 3.0.2" wget -q https://unpkg.com/leaflet.fullscreen@3.0.2/Control.FullScreen.js -O js/leaflet.fullscreen.js +echo " - Leaflet Control Geocoder 2.4.0" +wget -q https://unpkg.com/leaflet-control-geocoder@2.4.0/dist/Control.Geocoder.js -O js/leaflet.control.geocoder.js + # Bootstrap Datepicker echo " - Bootstrap Datepicker 1.9.0" wget -q https://cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.9.0/js/bootstrap-datepicker.min.js -O js/bootstrap-datepicker.min.js @@ -70,12 +73,21 @@ wget -q https://unpkg.com/leaflet-draw@1.0.4/dist/images/spritesheet.svg -O css/ echo " - Leaflet Fullscreen 3.0.2 CSS" wget -q https://unpkg.com/leaflet.fullscreen@3.0.2/Control.FullScreen.css -O css/leaflet.fullscreen.css +# Leaflet Control Geocoder CSS +echo " - Leaflet Control Geocoder 2.4.0 CSS" +wget -q https://unpkg.com/leaflet-control-geocoder@2.4.0/dist/Control.Geocoder.css -O css/leaflet.control.geocoder.css + # Leaflet Fullscreen images echo " - Leaflet Fullscreen images" mkdir -p css/images/fullscreen wget -q https://unpkg.com/leaflet.fullscreen@3.0.2/icon-fullscreen.png -O css/images/fullscreen/icon-fullscreen.png wget -q https://unpkg.com/leaflet.fullscreen@3.0.2/icon-fullscreen-2x.png -O css/images/fullscreen/icon-fullscreen-2x.png +# Leaflet Control Geocoder images +echo " - Leaflet Control Geocoder images" +wget -q https://unpkg.com/leaflet-control-geocoder@2.4.0/dist/images/geocoder.png -O css/images/geocoder.png 2>/dev/null || true +wget -q https://unpkg.com/leaflet-control-geocoder@2.4.0/dist/images/throbber.gif -O css/images/throbber.gif 2>/dev/null || true + # Bootstrap Datepicker CSS echo " - Bootstrap Datepicker 1.9.0 CSS" wget -q https://cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.9.0/css/bootstrap-datepicker.min.css -O css/bootstrap-datepicker.min.css diff --git a/publications/static/js/accessibility-toggle.js b/publications/static/js/accessibility-toggle.js new file mode 100644 index 0000000..07e901a --- /dev/null +++ b/publications/static/js/accessibility-toggle.js @@ -0,0 +1,158 @@ +// publications/static/js/accessibility-toggle.js +// High contrast theme toggle with localStorage persistence + +(function() { + 'use strict'; + + /** + * Accessibility Toggle Manager + * Handles high contrast mode toggle and persistence + */ + class AccessibilityToggle { + constructor() { + this.storageKey = 'optimap-high-contrast'; + this.bodyElement = document.body; + this.toggleButton = null; + + this.init(); + } + + init() { + // Load saved preference + this.loadPreference(); + + // Create toggle button 
+ this.createToggleButton(); + + // Add event listeners + this.setupEventListeners(); + + // Announce current state to screen readers + this.announceState(); + } + + /** + * Load user preference from localStorage + */ + loadPreference() { + const saved = localStorage.getItem(this.storageKey); + if (saved === 'true') { + this.enable(); + } + } + + /** + * Save user preference to localStorage + */ + savePreference(enabled) { + localStorage.setItem(this.storageKey, enabled.toString()); + } + + /** + * Create the floating toggle button + */ + createToggleButton() { + this.toggleButton = document.createElement('button'); + this.toggleButton.id = 'accessibility-toggle'; + this.toggleButton.setAttribute('aria-label', 'Toggle high contrast mode'); + this.toggleButton.setAttribute('title', 'Toggle High Contrast Mode'); + this.toggleButton.setAttribute('data-tooltip', 'Toggle High Contrast'); + this.toggleButton.innerHTML = ''; + + document.body.appendChild(this.toggleButton); + } + + /** + * Setup event listeners + */ + setupEventListeners() { + if (!this.toggleButton) return; + + this.toggleButton.addEventListener('click', () => { + this.toggle(); + }); + + // Keyboard shortcut: Ctrl+Alt+H + document.addEventListener('keydown', (e) => { + if (e.ctrlKey && e.altKey && e.key === 'h') { + e.preventDefault(); + this.toggle(); + } + }); + } + + /** + * Toggle high contrast mode + */ + toggle() { + if (this.isEnabled()) { + this.disable(); + } else { + this.enable(); + } + } + + /** + * Enable high contrast mode + */ + enable() { + this.bodyElement.classList.add('high-contrast'); + this.savePreference(true); + this.announceState(); + console.log('High contrast mode enabled'); + } + + /** + * Disable high contrast mode + */ + disable() { + this.bodyElement.classList.remove('high-contrast'); + this.savePreference(false); + this.announceState(); + console.log('High contrast mode disabled'); + } + + /** + * Check if high contrast mode is enabled + */ + isEnabled() { + return this.bodyElement.classList.contains('high-contrast'); + } + + /** + * Announce state change to screen readers + */ + announceState() { + let announcer = document.getElementById('announcer'); + if (!announcer) { + announcer = document.createElement('div'); + announcer.id = 'announcer'; + announcer.className = 'sr-only'; + announcer.setAttribute('role', 'status'); + announcer.setAttribute('aria-live', 'polite'); + announcer.setAttribute('aria-atomic', 'true'); + document.body.appendChild(announcer); + } + + const state = this.isEnabled() ? 'enabled' : 'disabled'; + announcer.textContent = `High contrast mode ${state}`; + + // Update button label + if (this.toggleButton) { + this.toggleButton.setAttribute( + 'aria-label', + `Toggle high contrast mode (currently ${state})` + ); + } + } + } + + // Initialize when DOM is ready + if (document.readyState === 'loading') { + document.addEventListener('DOMContentLoaded', () => { + new AccessibilityToggle(); + }); + } else { + new AccessibilityToggle(); + } +})(); diff --git a/publications/static/js/jquery-3.4.1.min.js b/publications/static/js/jquery-3.4.1.min.js new file mode 100644 index 0000000..a1c07fd --- /dev/null +++ b/publications/static/js/jquery-3.4.1.min.js @@ -0,0 +1,2 @@ +/*! 
jQuery v3.4.1 | (c) JS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],E=C.document,r=Object.getPrototypeOf,s=t.slice,g=t.concat,u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType},x=function(e){return null!=e&&e===e.window},c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.4.1",k=function(e,t){return new k.fn.init(e,t)},p=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g;function d(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp($),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+$),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),ne=function(e,t,n){var r="0x"+t-65536;return r!=r||n?t:r<0?String.fromCharCode(r+65536):String.fromCharCode(r>>10|55296,1023&r|56320)},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(m.childNodes),m.childNodes),t[m.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&((e?e.ownerDocument||e:m)!==C&&T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!A[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&U.test(t)){(s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=k),o=(l=h(t)).length;while(o--)l[o]="#"+s+" "+xe(l[o]);c=l.join(","),f=ee.test(t)&&ye(e.parentNode)||e}try{return H.apply(n,f.querySelectorAll(c)),n}catch(e){A(t,!0)}finally{s===k&&e.removeAttribute("id")}}}return 
g(t.replace(B,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[k]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e.namespaceURI,n=(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:m;return r!==C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),m!==C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=k,!C.getElementsByName||!C.getElementsByName(k).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return 
t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){a.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+k+"-]").length||v.push("~="),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+k+"+*").length||v.push(".#.+[+~]")}),ce(function(e){e.innerHTML="";var t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",$)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},D=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)===(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e===C||e.ownerDocument===m&&y(m,e)?-1:t===C||t.ownerDocument===m&&y(m,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e===C?-1:t===C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]===m?-1:s[r]===m?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if((e.ownerDocument||e)!==C&&T(e),d.matchesSelector&&E&&!A[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){A(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=p[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&p(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof 
e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?k.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?k.grep(e,function(e){return e===n!==r}):"string"!=typeof n?k.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(k.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||q,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:L.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof k?t[0]:t,k.merge(this,k.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),D.test(r[1])&&k.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(k):k.makeArray(e,this)}).prototype=k.fn,q=k(E);var H=/^(?:parents|prev(?:Until|All))/,O={children:!0,contents:!0,next:!0,prev:!0};function P(e,t){while((e=e[t])&&1!==e.nodeType);return e}k.fn.extend({has:function(e){var t=k(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i,ge={option:[1,""],thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?k.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;nx",y.noCloneChecked=!!me.cloneNode(!0).lastChild.defaultValue;var Te=/^key/,Ce=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,Ee=/^([^.]*)(?:\.(.+)|)/;function ke(){return!0}function Se(){return!1}function Ne(e,t){return e===function(){try{return E.activeElement}catch(e){}}()==("focus"===t)}function Ae(e,t,n,r,i,o){var a,s;if("object"==typeof t){for(s in"string"!=typeof n&&(r=r||n,n=void 0),t)Ae(e,s,n,r,t[s],o);return e}if(null==r&&null==i?(i=n,r=n=void 0):null==i&&("string"==typeof n?(i=r,r=void 0):(i=r,r=n,n=void 0)),!1===i)i=Se;else if(!i)return e;return 1===o&&(a=i,(i=function(e){return k().off(e),a.apply(this,arguments)}).guid=a.guid||(a.guid=k.guid++)),e.each(function(){k.event.add(this,t,i,r,n)})}function De(e,i,o){o?(Q.set(e,i,!1),k.event.add(e,i,{namespace:!1,handler:function(e){var t,n,r=Q.get(this,i);if(1&e.isTrigger&&this[i]){if(r.length)(k.event.special[i]||{}).delegateType&&e.stopPropagation();else if(r=s.call(arguments),Q.set(this,i,r),t=o(this,i),this[i](),r!==(n=Q.get(this,i))||t?Q.set(this,i,!1):n={},r!==n)return e.stopImmediatePropagation(),e.preventDefault(),n.value}else r.length&&(Q.set(this,i,{value:k.event.trigger(k.extend(r[0],k.Event.prototype),r.slice(1),this)}),e.stopImmediatePropagation())}})):void 0===Q.get(e,i)&&k.event.add(e,i,ke)}k.event={global:{},add:function(t,e,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=Q.get(t);if(v){n.handler&&(n=(o=n).handler,i=o.selector),i&&k.find.matchesSelector(ie,i),n.guid||(n.guid=k.guid++),(u=v.events)||(u=v.events={}),(a=v.handle)||(a=v.handle=function(e){return"undefined"!=typeof k&&k.event.triggered!==e.type?k.event.dispatch.apply(t,arguments):void 0}),l=(e=(e||"").match(R)||[""]).length;while(l--)d=g=(s=Ee.exec(e[l])||[])[1],h=(s[2]||"").split(".").sort(),d&&(f=k.event.special[d]||{},d=(i?f.delegateType:f.bindType)||d,f=k.event.special[d]||{},c=k.extend({type:d,origType:g,data:r,handler:n,guid:n.guid,selector:i,needsContext:i&&k.expr.match.needsContext.test(i),namespace:h.join(".")},o),(p=u[d])||((p=u[d]=[]).delegateCount=0,f.setup&&!1!==f.setup.call(t,r,h,a)||t.addEventListener&&t.addEventListener(d,a)),f.add&&(f.add.call(t,c),c.handler.guid||(c.handler.guid=n.guid)),i?p.splice(p.delegateCount++,0,c):p.push(c),k.event.global[d]=!0)}},remove:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=Q.hasData(e)&&Q.get(e);if(v&&(u=v.events)){l=(t=(t||"").match(R)||[""]).length;while(l--)if(d=g=(s=Ee.exec(t[l])||[])[1],h=(s[2]||"").split(".").sort(),d){f=k.event.special[d]||{},p=u[d=(r?f.delegateType:f.bindType)||d]||[],s=s[2]&&new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),a=o=p.length;while(o--)c=p[o],!i&&g!==c.origType||n&&n.guid!==c.guid||s&&!s.test(c.namespace)||r&&r!==c.selector&&("**"!==r||!c.selector)||(p.splice(o,1),c.selector&&p.delegateCount--,f.remove&&f.remove.call(e,c));a&&!p.length&&(f.teardown&&!1!==f.teardown.call(e,h,v.handle)||k.removeEvent(e,d,v.handle),delete u[d])}else for(d in u)k.event.remove(e,d+t[l],n,r,!0);k.isEmptyObject(u)&&Q.remove(e,"handle events")}},dispatch:function(e){var t,n,r,i,o,a,s=k.event.fix(e),u=new Array(arguments.length),l=(Q.get(this,"events")||{})[s.type]||[],c=k.event.special[s.type]||{};for(u[0]=s,t=1;t\x20\t\r\n\f]*)[^>]*)\/>/gi,qe=/\s*$/g;function Oe(e,t){return 
A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&k(e).children("tbody")[0]||e}function Pe(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function Re(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Me(e,t){var n,r,i,o,a,s,u,l;if(1===t.nodeType){if(Q.hasData(e)&&(o=Q.access(e),a=Q.set(t,o),l=o.events))for(i in delete a.handle,a.events={},l)for(n=0,r=l[i].length;n")},clone:function(e,t,n){var r,i,o,a,s,u,l,c=e.cloneNode(!0),f=oe(e);if(!(y.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||k.isXMLDoc(e)))for(a=ve(c),r=0,i=(o=ve(e)).length;r").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var Vt,Gt=[],Yt=/(=)\?(?=&|$)|\?\?/;k.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Gt.pop()||k.expando+"_"+kt++;return this[e]=!0,e}}),k.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Yt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Yt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Yt,"$1"+r):!1!==e.jsonp&&(e.url+=(St.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||k.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?k(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Gt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((Vt=E.implementation.createHTMLDocument("").body).innerHTML="
",2===Vt.childNodes.length),k.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=D.exec(e))?[t.createElement(i[1])]:(i=we([e],t,o),o&&o.length&&k(o).remove(),k.merge([],i.childNodes)));var r,i,o},k.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(k.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},k.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){k.fn[t]=function(e){return this.on(t,e)}}),k.expr.pseudos.animated=function(t){return k.grep(k.timers,function(e){return t===e.elem}).length},k.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=k.css(e,"position"),c=k(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=k.css(e,"top"),u=k.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,k.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},k.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){k.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===k.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===k.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=k(e).offset()).top+=k.css(e,"borderTopWidth",!0),i.left+=k.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-k.css(r,"marginTop",!0),left:t.left-i.left-k.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===k.css(e,"position"))e=e.offsetParent;return e||ie})}}),k.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;k.fn[t]=function(e){return _(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),k.each(["top","left"],function(e,n){k.cssHooks[n]=ze(y.pixelPosition,function(e,t){if(t)return t=_e(e,n),$e.test(t)?k(e).position()[n]+"px":t})}),k.each({Height:"height",Width:"width"},function(a,s){k.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){k.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return _(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?k.css(e,t,i):k.style(e,t,n,i)},s,n?e:void 0,n)}})}),k.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){k.fn[n]=function(e,t){return 0"'`]/g; + /** + * @internal + */ + + var possible 
= /[&<>"'`]/; + /** + * @internal + */ + + var escape = { + '&': '&amp;', + '<': '&lt;', + '>': '&gt;', + '"': '&quot;', + "'": '&#x27;', + '`': '&#x60;' + }; + /** + * @internal + */ + + function escapeChar(chr) { + return escape[chr]; + } + /** + * @internal + */ + + + function htmlEscape(string) { + if (string == null) { + return ''; + } else if (!string) { + return string + ''; + } // Force a string conversion as this will be done by the append regardless and + // the regex test will do this transparently behind the scenes, causing issues if + // an object's to string has escaped characters in it. + + + string = '' + string; + + if (!possible.test(string)) { + return string; + } + + return string.replace(badChars, escapeChar); + } + /** + * @internal + */ + + function jsonp(url, params, callback, context, jsonpParam) { + var callbackId = '_l_geocoder_' + lastCallbackId++; + params[jsonpParam || 'callback'] = callbackId; + window[callbackId] = L__namespace.Util.bind(callback, context); + var script = document.createElement('script'); + script.type = 'text/javascript'; + script.src = url + getParamString(params); + script.id = callbackId; + document.getElementsByTagName('head')[0].appendChild(script); + } + /** + * @internal + */ + + function getJSON(url, params, callback) { + var xmlHttp = new XMLHttpRequest(); + + xmlHttp.onreadystatechange = function () { + if (xmlHttp.readyState !== 4) { + return; + } + + var message; + + if (xmlHttp.status !== 200 && xmlHttp.status !== 304) { + message = ''; + } else if (typeof xmlHttp.response === 'string') { + // IE doesn't parse JSON responses even with responseType: 'json'. + try { + message = JSON.parse(xmlHttp.response); + } catch (e) { + // Not a JSON response + message = xmlHttp.response; + } + } else { + message = xmlHttp.response; + } + + callback(message); + }; + + xmlHttp.open('GET', url + getParamString(params), true); + xmlHttp.responseType = 'json'; + xmlHttp.setRequestHeader('Accept', 'application/json'); + xmlHttp.send(null); + } + /** + * @internal + */ + + function template(str, data) { + return str.replace(/\{ *([\w_]+) *\}/g, function (str, key) { + var value = data[key]; + + if (value === undefined) { + value = ''; + } else if (typeof value === 'function') { + value = value(data); + } + + return htmlEscape(value); + }); + } + /** + * @internal + */ + + function getParamString(obj, existingUrl, uppercase) { + var params = []; + + for (var i in obj) { + var key = encodeURIComponent(uppercase ? i.toUpperCase() : i); + var value = obj[i]; + + if (!Array.isArray(value)) { + params.push(key + '=' + encodeURIComponent(String(value))); + } else { + for (var j = 0; j < value.length; j++) { + params.push(key + '=' + encodeURIComponent(value[j])); + } + } + } + + return (!existingUrl || existingUrl.indexOf('?') === -1 ? '?' 
: '&') + params.join('&'); + } + + /** + * Implementation of the [ArcGIS geocoder](https://developers.arcgis.com/features/geocoding/) + */ + + var ArcGis = /*#__PURE__*/function () { + function ArcGis(options) { + this.options = { + serviceUrl: 'https://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer', + apiKey: '' + }; + L__namespace.Util.setOptions(this, options); + } + + var _proto = ArcGis.prototype; + + _proto.geocode = function geocode(query, cb, context) { + var params = geocodingParams(this.options, { + token: this.options.apiKey, + SingleLine: query, + outFields: 'Addr_Type', + forStorage: false, + maxLocations: 10, + f: 'json' + }); + getJSON(this.options.serviceUrl + '/findAddressCandidates', params, function (data) { + var results = []; + + if (data.candidates && data.candidates.length) { + for (var i = 0; i <= data.candidates.length - 1; i++) { + var loc = data.candidates[i]; + var latLng = L__namespace.latLng(loc.location.y, loc.location.x); + var latLngBounds = L__namespace.latLngBounds(L__namespace.latLng(loc.extent.ymax, loc.extent.xmax), L__namespace.latLng(loc.extent.ymin, loc.extent.xmin)); + results[i] = { + name: loc.address, + bbox: latLngBounds, + center: latLng + }; + } + } + + cb.call(context, results); + }); + }; + + _proto.suggest = function suggest(query, cb, context) { + return this.geocode(query, cb, context); + }; + + _proto.reverse = function reverse(location, scale, cb, context) { + var params = reverseParams(this.options, { + location: location.lng + ',' + location.lat, + distance: 100, + f: 'json' + }); + getJSON(this.options.serviceUrl + '/reverseGeocode', params, function (data) { + var result = []; + + if (data && !data.error) { + var center = L__namespace.latLng(data.location.y, data.location.x); + var bbox = L__namespace.latLngBounds(center, center); + result.push({ + name: data.address.Match_addr, + center: center, + bbox: bbox + }); + } + + cb.call(context, result); + }); + }; + + return ArcGis; + }(); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link ArcGis} + * @param options the options + */ + + function arcgis(options) { + return new ArcGis(options); + } + + /** + * Implementation of the [Bing Locations API](https://docs.microsoft.com/en-us/bingmaps/rest-services/locations/) + */ + + var Bing = /*#__PURE__*/function () { + function Bing(options) { + this.options = { + serviceUrl: 'https://dev.virtualearth.net/REST/v1/Locations' + }; + L__namespace.Util.setOptions(this, options); + } + + var _proto = Bing.prototype; + + _proto.geocode = function geocode(query, cb, context) { + var params = geocodingParams(this.options, { + query: query, + key: this.options.apiKey + }); + jsonp(this.options.apiKey, params, function (data) { + var results = []; + + if (data.resourceSets.length > 0) { + for (var i = data.resourceSets[0].resources.length - 1; i >= 0; i--) { + var resource = data.resourceSets[0].resources[i], + bbox = resource.bbox; + results[i] = { + name: resource.name, + bbox: L__namespace.latLngBounds([bbox[0], bbox[1]], [bbox[2], bbox[3]]), + center: L__namespace.latLng(resource.point.coordinates) + }; + } + } + + cb.call(context, results); + }, this, 'jsonp'); + }; + + _proto.reverse = function reverse(location, scale, cb, context) { + var params = reverseParams(this.options, { + key: this.options.apiKey + }); + jsonp(this.options.serviceUrl + location.lat + ',' + location.lng, params, function (data) { + var results = []; + + for (var i = data.resourceSets[0].resources.length - 
1; i >= 0; i--) { + var resource = data.resourceSets[0].resources[i], + bbox = resource.bbox; + results[i] = { + name: resource.name, + bbox: L__namespace.latLngBounds([bbox[0], bbox[1]], [bbox[2], bbox[3]]), + center: L__namespace.latLng(resource.point.coordinates) + }; + } + + cb.call(context, results); + }, this, 'jsonp'); + }; + + return Bing; + }(); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link Bing} + * @param options the options + */ + + function bing(options) { + return new Bing(options); + } + + var Google = /*#__PURE__*/function () { + function Google(options) { + this.options = { + serviceUrl: 'https://maps.googleapis.com/maps/api/geocode/json' + }; + L__namespace.Util.setOptions(this, options); + } + + var _proto = Google.prototype; + + _proto.geocode = function geocode(query, cb, context) { + var params = geocodingParams(this.options, { + key: this.options.apiKey, + address: query + }); + getJSON(this.options.serviceUrl, params, function (data) { + var results = []; + + if (data.results && data.results.length) { + for (var i = 0; i <= data.results.length - 1; i++) { + var loc = data.results[i]; + var latLng = L__namespace.latLng(loc.geometry.location); + var latLngBounds = L__namespace.latLngBounds(L__namespace.latLng(loc.geometry.viewport.northeast), L__namespace.latLng(loc.geometry.viewport.southwest)); + results[i] = { + name: loc.formatted_address, + bbox: latLngBounds, + center: latLng, + properties: loc.address_components + }; + } + } + + cb.call(context, results); + }); + }; + + _proto.reverse = function reverse(location, scale, cb, context) { + var params = reverseParams(this.options, { + key: this.options.apiKey, + latlng: location.lat + ',' + location.lng + }); + getJSON(this.options.serviceUrl, params, function (data) { + var results = []; + + if (data.results && data.results.length) { + for (var i = 0; i <= data.results.length - 1; i++) { + var loc = data.results[i]; + var center = L__namespace.latLng(loc.geometry.location); + var bbox = L__namespace.latLngBounds(L__namespace.latLng(loc.geometry.viewport.northeast), L__namespace.latLng(loc.geometry.viewport.southwest)); + results[i] = { + name: loc.formatted_address, + bbox: bbox, + center: center, + properties: loc.address_components + }; + } + } + + cb.call(context, results); + }); + }; + + return Google; + }(); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link Google} + * @param options the options + */ + + function google(options) { + return new Google(options); + } + + /** + * Implementation of the [HERE Geocoder API](https://developer.here.com/documentation/geocoder/topics/introduction.html) + */ + + var HERE = /*#__PURE__*/function () { + function HERE(options) { + this.options = { + serviceUrl: 'https://geocoder.api.here.com/6.2/', + app_id: '', + app_code: '', + apiKey: '', + maxResults: 5 + }; + L__namespace.Util.setOptions(this, options); + if (options.apiKey) throw Error('apiKey is not supported, use app_id/app_code instead!'); + } + + var _proto = HERE.prototype; + + _proto.geocode = function geocode(query, cb, context) { + var params = geocodingParams(this.options, { + searchtext: query, + gen: 9, + app_id: this.options.app_id, + app_code: this.options.app_code, + jsonattributes: 1, + maxresults: this.options.maxResults + }); + this.getJSON(this.options.serviceUrl + 'geocode.json', params, cb, context); + }; + + _proto.reverse = function reverse(location, scale, cb, context) { + var prox = 
location.lat + ',' + location.lng; + + if (this.options.reverseGeocodeProxRadius) { + prox += ',' + this.options.reverseGeocodeProxRadius; + } + + var params = reverseParams(this.options, { + prox: prox, + mode: 'retrieveAddresses', + app_id: this.options.app_id, + app_code: this.options.app_code, + gen: 9, + jsonattributes: 1, + maxresults: this.options.maxResults + }); + this.getJSON(this.options.serviceUrl + 'reversegeocode.json', params, cb, context); + }; + + _proto.getJSON = function getJSON$1(url, params, cb, context) { + getJSON(url, params, function (data) { + var results = []; + + if (data.response.view && data.response.view.length) { + for (var i = 0; i <= data.response.view[0].result.length - 1; i++) { + var loc = data.response.view[0].result[i].location; + var center = L__namespace.latLng(loc.displayPosition.latitude, loc.displayPosition.longitude); + var bbox = L__namespace.latLngBounds(L__namespace.latLng(loc.mapView.topLeft.latitude, loc.mapView.topLeft.longitude), L__namespace.latLng(loc.mapView.bottomRight.latitude, loc.mapView.bottomRight.longitude)); + results[i] = { + name: loc.address.label, + properties: loc.address, + bbox: bbox, + center: center + }; + } + } + + cb.call(context, results); + }); + }; + + return HERE; + }(); + /** + * Implementation of the new [HERE Geocoder API](https://developer.here.com/documentation/geocoding-search-api/api-reference-swagger.html) + */ + + var HEREv2 = /*#__PURE__*/function () { + function HEREv2(options) { + this.options = { + serviceUrl: 'https://geocode.search.hereapi.com/v1', + apiKey: '', + app_id: '', + app_code: '', + maxResults: 10 + }; + L__namespace.Util.setOptions(this, options); + } + + var _proto2 = HEREv2.prototype; + + _proto2.geocode = function geocode(query, cb, context) { + var params = geocodingParams(this.options, { + q: query, + apiKey: this.options.apiKey, + limit: this.options.maxResults + }); + + if (!params.at && !params["in"]) { + throw Error('at / in parameters not found. 
Please define coordinates (at=latitude,longitude) or other (in) in your geocodingQueryParams.'); + } + + this.getJSON(this.options.serviceUrl + '/discover', params, cb, context); + }; + + _proto2.reverse = function reverse(location, scale, cb, context) { + var params = reverseParams(this.options, { + at: location.lat + ',' + location.lng, + limit: this.options.reverseGeocodeProxRadius, + apiKey: this.options.apiKey + }); + this.getJSON(this.options.serviceUrl + '/revgeocode', params, cb, context); + }; + + _proto2.getJSON = function getJSON$1(url, params, cb, context) { + getJSON(url, params, function (data) { + var results = []; + + if (data.items && data.items.length) { + for (var i = 0; i <= data.items.length - 1; i++) { + var item = data.items[i]; + var latLng = L__namespace.latLng(item.position.lat, item.position.lng); + var bbox = void 0; + + if (item.mapView) { + bbox = L__namespace.latLngBounds(L__namespace.latLng(item.mapView.south, item.mapView.west), L__namespace.latLng(item.mapView.north, item.mapView.east)); + } else { + // Using only position when not provided + bbox = L__namespace.latLngBounds(L__namespace.latLng(item.position.lat, item.position.lng), L__namespace.latLng(item.position.lat, item.position.lng)); + } + + results[i] = { + name: item.address.label, + properties: item.address, + bbox: bbox, + center: latLng + }; + } + } + + cb.call(context, results); + }); + }; + + return HEREv2; + }(); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link HERE} + * @param options the options + */ + + function here(options) { + if (options.apiKey) { + return new HEREv2(options); + } else { + return new HERE(options); + } + } + + /** + * Parses basic latitude/longitude strings such as `'50.06773 14.37742'`, `'N50.06773 W14.37742'`, `'S 50° 04.064 E 014° 22.645'`, or `'S 50° 4′ 03.828″, W 14° 22′ 38.712″'` + * @param query the latitude/longitude string to parse + * @returns the parsed latitude/longitude + */ + + function parseLatLng(query) { + var match; // regex from https://github.com/openstreetmap/openstreetmap-website/blob/master/app/controllers/geocoder_controller.rb + + if (match = query.match(/^([NS])\s*(\d{1,3}(?:\.\d*)?)\W*([EW])\s*(\d{1,3}(?:\.\d*)?)$/)) { + // [NSEW] decimal degrees + return L__namespace.latLng((/N/i.test(match[1]) ? 1 : -1) * +match[2], (/E/i.test(match[3]) ? 1 : -1) * +match[4]); + } else if (match = query.match(/^(\d{1,3}(?:\.\d*)?)\s*([NS])\W*(\d{1,3}(?:\.\d*)?)\s*([EW])$/)) { + // decimal degrees [NSEW] + return L__namespace.latLng((/N/i.test(match[2]) ? 1 : -1) * +match[1], (/E/i.test(match[4]) ? 1 : -1) * +match[3]); + } else if (match = query.match(/^([NS])\s*(\d{1,3})°?\s*(\d{1,3}(?:\.\d*)?)?['′]?\W*([EW])\s*(\d{1,3})°?\s*(\d{1,3}(?:\.\d*)?)?['′]?$/)) { + // [NSEW] degrees, decimal minutes + return L__namespace.latLng((/N/i.test(match[1]) ? 1 : -1) * (+match[2] + +match[3] / 60), (/E/i.test(match[4]) ? 1 : -1) * (+match[5] + +match[6] / 60)); + } else if (match = query.match(/^(\d{1,3})°?\s*(\d{1,3}(?:\.\d*)?)?['′]?\s*([NS])\W*(\d{1,3})°?\s*(\d{1,3}(?:\.\d*)?)?['′]?\s*([EW])$/)) { + // degrees, decimal minutes [NSEW] + return L__namespace.latLng((/N/i.test(match[3]) ? 1 : -1) * (+match[1] + +match[2] / 60), (/E/i.test(match[6]) ? 
1 : -1) * (+match[4] + +match[5] / 60)); + } else if (match = query.match(/^([NS])\s*(\d{1,3})°?\s*(\d{1,2})['′]?\s*(\d{1,3}(?:\.\d*)?)?["″]?\W*([EW])\s*(\d{1,3})°?\s*(\d{1,2})['′]?\s*(\d{1,3}(?:\.\d*)?)?["″]?$/)) { + // [NSEW] degrees, minutes, decimal seconds + return L__namespace.latLng((/N/i.test(match[1]) ? 1 : -1) * (+match[2] + +match[3] / 60 + +match[4] / 3600), (/E/i.test(match[5]) ? 1 : -1) * (+match[6] + +match[7] / 60 + +match[8] / 3600)); + } else if (match = query.match(/^(\d{1,3})°?\s*(\d{1,2})['′]?\s*(\d{1,3}(?:\.\d*)?)?["″]\s*([NS])\W*(\d{1,3})°?\s*(\d{1,2})['′]?\s*(\d{1,3}(?:\.\d*)?)?["″]?\s*([EW])$/)) { + // degrees, minutes, decimal seconds [NSEW] + return L__namespace.latLng((/N/i.test(match[4]) ? 1 : -1) * (+match[1] + +match[2] / 60 + +match[3] / 3600), (/E/i.test(match[8]) ? 1 : -1) * (+match[5] + +match[6] / 60 + +match[7] / 3600)); + } else if (match = query.match(/^\s*([+-]?\d+(?:\.\d*)?)\s*[\s,]\s*([+-]?\d+(?:\.\d*)?)\s*$/)) { + return L__namespace.latLng(+match[1], +match[2]); + } + } + /** + * Parses basic latitude/longitude strings such as `'50.06773 14.37742'`, `'N50.06773 W14.37742'`, `'S 50° 04.064 E 014° 22.645'`, or `'S 50° 4′ 03.828″, W 14° 22′ 38.712″'` + */ + + var LatLng = /*#__PURE__*/function () { + function LatLng(options) { + this.options = { + next: undefined, + sizeInMeters: 10000 + }; + L__namespace.Util.setOptions(this, options); + } + + var _proto = LatLng.prototype; + + _proto.geocode = function geocode(query, cb, context) { + var center = parseLatLng(query); + + if (center) { + var results = [{ + name: query, + center: center, + bbox: center.toBounds(this.options.sizeInMeters) + }]; + cb.call(context, results); + } else if (this.options.next) { + this.options.next.geocode(query, cb, context); + } + }; + + return LatLng; + }(); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link LatLng} + * @param options the options + */ + + function latLng(options) { + return new LatLng(options); + } + + /** + * Implementation of the [Mapbox Geocoding](https://www.mapbox.com/api-documentation/#geocoding) + */ + + var Mapbox = /*#__PURE__*/function () { + function Mapbox(options) { + this.options = { + serviceUrl: 'https://api.mapbox.com/geocoding/v5/mapbox.places/' + }; + L__namespace.Util.setOptions(this, options); + } + + var _proto = Mapbox.prototype; + + _proto._getProperties = function _getProperties(loc) { + var properties = { + text: loc.text, + address: loc.address + }; + + for (var j = 0; j < (loc.context || []).length; j++) { + var id = loc.context[j].id.split('.')[0]; + properties[id] = loc.context[j].text; // Get country code when available + + if (loc.context[j].short_code) { + properties['countryShortCode'] = loc.context[j].short_code; + } + } + + return properties; + }; + + _proto.geocode = function geocode(query, cb, context) { + var _this = this; + + var params = geocodingParams(this.options, { + access_token: this.options.apiKey + }); + + if (params.proximity !== undefined && params.proximity.lat !== undefined && params.proximity.lng !== undefined) { + params.proximity = params.proximity.lng + ',' + params.proximity.lat; + } + + getJSON(this.options.serviceUrl + encodeURIComponent(query) + '.json', params, function (data) { + var results = []; + + if (data.features && data.features.length) { + for (var i = 0; i <= data.features.length - 1; i++) { + var loc = data.features[i]; + var center = L__namespace.latLng(loc.center.reverse()); + var bbox = void 0; + + if (loc.bbox) { + bbox = 
L__namespace.latLngBounds(L__namespace.latLng(loc.bbox.slice(0, 2).reverse()), L__namespace.latLng(loc.bbox.slice(2, 4).reverse())); + } else { + bbox = L__namespace.latLngBounds(center, center); + } + + results[i] = { + name: loc.place_name, + bbox: bbox, + center: center, + properties: _this._getProperties(loc) + }; + } + } + + cb.call(context, results); + }); + }; + + _proto.suggest = function suggest(query, cb, context) { + return this.geocode(query, cb, context); + }; + + _proto.reverse = function reverse(location, scale, cb, context) { + var _this2 = this; + + var url = this.options.serviceUrl + location.lng + ',' + location.lat + '.json'; + var param = reverseParams(this.options, { + access_token: this.options.apiKey + }); + getJSON(url, param, function (data) { + var results = []; + + if (data.features && data.features.length) { + for (var i = 0; i <= data.features.length - 1; i++) { + var loc = data.features[i]; + var center = L__namespace.latLng(loc.center.reverse()); + var bbox = void 0; + + if (loc.bbox) { + bbox = L__namespace.latLngBounds(L__namespace.latLng(loc.bbox.slice(0, 2).reverse()), L__namespace.latLng(loc.bbox.slice(2, 4).reverse())); + } else { + bbox = L__namespace.latLngBounds(center, center); + } + + results[i] = { + name: loc.place_name, + bbox: bbox, + center: center, + properties: _this2._getProperties(loc) + }; + } + } + + cb.call(context, results); + }); + }; + + return Mapbox; + }(); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link Mapbox} + * @param options the options + */ + + function mapbox(options) { + return new Mapbox(options); + } + + /** + * Implementation of the [MapQuest Geocoding API](http://developer.mapquest.com/web/products/dev-services/geocoding-ws) + */ + + var MapQuest = /*#__PURE__*/function () { + function MapQuest(options) { + this.options = { + serviceUrl: 'https://www.mapquestapi.com/geocoding/v1' + }; + L__namespace.Util.setOptions(this, options); // MapQuest seems to provide URI encoded API keys, + // so to avoid encoding them twice, we decode them here + + this.options.apiKey = decodeURIComponent(this.options.apiKey); + } + + var _proto = MapQuest.prototype; + + _proto._formatName = function _formatName() { + return [].slice.call(arguments).filter(function (s) { + return !!s; + }).join(', '); + }; + + _proto.geocode = function geocode(query, cb, context) { + var params = geocodingParams(this.options, { + key: this.options.apiKey, + location: query, + limit: 5, + outFormat: 'json' + }); + getJSON(this.options.serviceUrl + '/address', params, L__namespace.Util.bind(function (data) { + var results = []; + + if (data.results && data.results[0].locations) { + for (var i = data.results[0].locations.length - 1; i >= 0; i--) { + var loc = data.results[0].locations[i]; + var center = L__namespace.latLng(loc.latLng); + results[i] = { + name: this._formatName(loc.street, loc.adminArea4, loc.adminArea3, loc.adminArea1), + bbox: L__namespace.latLngBounds(center, center), + center: center + }; + } + } + + cb.call(context, results); + }, this)); + }; + + _proto.reverse = function reverse(location, scale, cb, context) { + var params = reverseParams(this.options, { + key: this.options.apiKey, + location: location.lat + ',' + location.lng, + outputFormat: 'json' + }); + getJSON(this.options.serviceUrl + '/reverse', params, L__namespace.Util.bind(function (data) { + var results = []; + + if (data.results && data.results[0].locations) { + for (var i = data.results[0].locations.length - 1; i >= 0; 
i--) { + var loc = data.results[0].locations[i]; + var center = L__namespace.latLng(loc.latLng); + results[i] = { + name: this._formatName(loc.street, loc.adminArea4, loc.adminArea3, loc.adminArea1), + bbox: L__namespace.latLngBounds(center, center), + center: center + }; + } + } + + cb.call(context, results); + }, this)); + }; + + return MapQuest; + }(); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link MapQuest} + * @param options the options + */ + + function mapQuest(options) { + return new MapQuest(options); + } + + /** + * Implementation of the [Neutrino API](https://www.neutrinoapi.com/api/geocode-address/) + */ + + var Neutrino = /*#__PURE__*/function () { + function Neutrino(options) { + this.options = { + userId: undefined, + apiKey: undefined, + serviceUrl: 'https://neutrinoapi.com/' + }; + L__namespace.Util.setOptions(this, options); + } // https://www.neutrinoapi.com/api/geocode-address/ + + + var _proto = Neutrino.prototype; + + _proto.geocode = function geocode(query, cb, context) { + var params = geocodingParams(this.options, { + apiKey: this.options.apiKey, + userId: this.options.userId, + //get three words and make a dot based string + address: query.split(/\s+/).join('.') + }); + getJSON(this.options.serviceUrl + 'geocode-address', params, function (data) { + var results = []; + + if (data.locations) { + data.geometry = data.locations[0]; + var center = L__namespace.latLng(data.geometry['latitude'], data.geometry['longitude']); + var bbox = L__namespace.latLngBounds(center, center); + results[0] = { + name: data.geometry.address, + bbox: bbox, + center: center + }; + } + + cb.call(context, results); + }); + }; + + _proto.suggest = function suggest(query, cb, context) { + return this.geocode(query, cb, context); + } // https://www.neutrinoapi.com/api/geocode-reverse/ + ; + + _proto.reverse = function reverse(location, scale, cb, context) { + var params = reverseParams(this.options, { + apiKey: this.options.apiKey, + userId: this.options.userId, + latitude: location.lat, + longitude: location.lng + }); + getJSON(this.options.serviceUrl + 'geocode-reverse', params, function (data) { + var results = []; + + if (data.status.status == 200 && data.found) { + var center = L__namespace.latLng(location.lat, location.lng); + var bbox = L__namespace.latLngBounds(center, center); + results[0] = { + name: data.address, + bbox: bbox, + center: center + }; + } + + cb.call(context, results); + }); + }; + + return Neutrino; + }(); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link Neutrino} + * @param options the options + */ + + function neutrino(options) { + return new Neutrino(options); + } + + /** + * Implementation of the [Nominatim](https://wiki.openstreetmap.org/wiki/Nominatim) geocoder. + * + * This is the default geocoding service used by the control, unless otherwise specified in the options. + * + * Unless using your own Nominatim installation, please refer to the [Nominatim usage policy](https://operations.osmfoundation.org/policies/nominatim/). 
+ */ + + var Nominatim = /*#__PURE__*/function () { + function Nominatim(options) { + this.options = { + serviceUrl: 'https://nominatim.openstreetmap.org/', + htmlTemplate: function htmlTemplate(r) { + var address = r.address; + var className; + var parts = []; + + if (address.road || address.building) { + parts.push('{building} {road} {house_number}'); + } + + if (address.city || address.town || address.village || address.hamlet) { + className = parts.length > 0 ? 'leaflet-control-geocoder-address-detail' : ''; + parts.push('{postcode} {city} {town} {village} {hamlet}'); + } + + if (address.state || address.country) { + className = parts.length > 0 ? 'leaflet-control-geocoder-address-context' : ''; + parts.push('{state} {country}'); + } + + return template(parts.join('
'), address); + } + }; + L__namespace.Util.setOptions(this, options || {}); + } + + var _proto = Nominatim.prototype; + + _proto.geocode = function geocode(query, cb, context) { + var _this = this; + + var params = geocodingParams(this.options, { + q: query, + limit: 5, + format: 'json', + addressdetails: 1 + }); + getJSON(this.options.serviceUrl + 'search', params, function (data) { + var results = []; + + for (var i = data.length - 1; i >= 0; i--) { + var bbox = data[i].boundingbox; + + for (var j = 0; j < 4; j++) { + bbox[j] = +bbox[j]; + } + + results[i] = { + icon: data[i].icon, + name: data[i].display_name, + html: _this.options.htmlTemplate ? _this.options.htmlTemplate(data[i]) : undefined, + bbox: L__namespace.latLngBounds([bbox[0], bbox[2]], [bbox[1], bbox[3]]), + center: L__namespace.latLng(data[i].lat, data[i].lon), + properties: data[i] + }; + } + + cb.call(context, results); + }); + }; + + _proto.reverse = function reverse(location, scale, cb, context) { + var _this2 = this; + + var params = reverseParams(this.options, { + lat: location.lat, + lon: location.lng, + zoom: Math.round(Math.log(scale / 256) / Math.log(2)), + addressdetails: 1, + format: 'json' + }); + getJSON(this.options.serviceUrl + 'reverse', params, function (data) { + var result = []; + + if (data && data.lat && data.lon) { + var center = L__namespace.latLng(data.lat, data.lon); + var bbox = L__namespace.latLngBounds(center, center); + result.push({ + name: data.display_name, + html: _this2.options.htmlTemplate ? _this2.options.htmlTemplate(data) : undefined, + center: center, + bbox: bbox, + properties: data + }); + } + + cb.call(context, result); + }); + }; + + return Nominatim; + }(); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link Nominatim} + * @param options the options + */ + + function nominatim(options) { + return new Nominatim(options); + } + + /** + * Implementation of the [Plus codes](https://plus.codes/) (formerly OpenLocationCode) (requires [open-location-code](https://www.npmjs.com/package/open-location-code)) + */ + + var OpenLocationCode = /*#__PURE__*/function () { + function OpenLocationCode(options) { + L__namespace.Util.setOptions(this, options); + } + + var _proto = OpenLocationCode.prototype; + + _proto.geocode = function geocode(query, cb, context) { + try { + var decoded = this.options.OpenLocationCode.decode(query); + var result = { + name: query, + center: L__namespace.latLng(decoded.latitudeCenter, decoded.longitudeCenter), + bbox: L__namespace.latLngBounds(L__namespace.latLng(decoded.latitudeLo, decoded.longitudeLo), L__namespace.latLng(decoded.latitudeHi, decoded.longitudeHi)) + }; + cb.call(context, [result]); + } catch (e) { + console.warn(e); // eslint-disable-line no-console + + cb.call(context, []); + } + }; + + _proto.reverse = function reverse(location, scale, cb, context) { + try { + var code = this.options.OpenLocationCode.encode(location.lat, location.lng, this.options.codeLength); + var result = { + name: code, + center: L__namespace.latLng(location.lat, location.lng), + bbox: L__namespace.latLngBounds(L__namespace.latLng(location.lat, location.lng), L__namespace.latLng(location.lat, location.lng)) + }; + cb.call(context, [result]); + } catch (e) { + console.warn(e); // eslint-disable-line no-console + + cb.call(context, []); + } + }; + + return OpenLocationCode; + }(); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link OpenLocationCode} + * @param options the 
options + */ + + function openLocationCode(options) { + return new OpenLocationCode(options); + } + + /** + * Implementation of the [OpenCage Data API](https://opencagedata.com/) + */ + + var OpenCage = /*#__PURE__*/function () { + function OpenCage(options) { + this.options = { + serviceUrl: 'https://api.opencagedata.com/geocode/v1/json' + }; + L__namespace.Util.setOptions(this, options); + } + + var _proto = OpenCage.prototype; + + _proto.geocode = function geocode(query, cb, context) { + var params = geocodingParams(this.options, { + key: this.options.apiKey, + q: query + }); + getJSON(this.options.serviceUrl, params, function (data) { + var results = []; + + if (data.results && data.results.length) { + for (var i = 0; i < data.results.length; i++) { + var loc = data.results[i]; + var center = L__namespace.latLng(loc.geometry); + var bbox = void 0; + + if (loc.annotations && loc.annotations.bounds) { + bbox = L__namespace.latLngBounds(L__namespace.latLng(loc.annotations.bounds.northeast), L__namespace.latLng(loc.annotations.bounds.southwest)); + } else { + bbox = L__namespace.latLngBounds(center, center); + } + + results.push({ + name: loc.formatted, + bbox: bbox, + center: center + }); + } + } + + cb.call(context, results); + }); + }; + + _proto.suggest = function suggest(query, cb, context) { + return this.geocode(query, cb, context); + }; + + _proto.reverse = function reverse(location, scale, cb, context) { + var params = reverseParams(this.options, { + key: this.options.apiKey, + q: [location.lat, location.lng].join(',') + }); + getJSON(this.options.serviceUrl, params, function (data) { + var results = []; + + if (data.results && data.results.length) { + for (var i = 0; i < data.results.length; i++) { + var loc = data.results[i]; + var center = L__namespace.latLng(loc.geometry); + var bbox = void 0; + + if (loc.annotations && loc.annotations.bounds) { + bbox = L__namespace.latLngBounds(L__namespace.latLng(loc.annotations.bounds.northeast), L__namespace.latLng(loc.annotations.bounds.southwest)); + } else { + bbox = L__namespace.latLngBounds(center, center); + } + + results.push({ + name: loc.formatted, + bbox: bbox, + center: center + }); + } + } + + cb.call(context, results); + }); + }; + + return OpenCage; + }(); + function opencage(options) { + return new OpenCage(options); + } + + /** + * Implementation of the [Pelias](https://pelias.io/), [geocode.earth](https://geocode.earth/) geocoder (formerly Mapzen Search) + */ + + var Pelias = /*#__PURE__*/function () { + function Pelias(options) { + this.options = { + serviceUrl: 'https://api.geocode.earth/v1' + }; + this._lastSuggest = 0; + L__namespace.Util.setOptions(this, options); + } + + var _proto = Pelias.prototype; + + _proto.geocode = function geocode(query, cb, context) { + var _this = this; + + var params = geocodingParams(this.options, { + api_key: this.options.apiKey, + text: query + }); + getJSON(this.options.serviceUrl + '/search', params, function (data) { + cb.call(context, _this._parseResults(data, 'bbox')); + }); + }; + + _proto.suggest = function suggest(query, cb, context) { + var _this2 = this; + + var params = geocodingParams(this.options, { + api_key: this.options.apiKey, + text: query + }); + getJSON(this.options.serviceUrl + '/autocomplete', params, function (data) { + if (data.geocoding.timestamp > _this2._lastSuggest) { + _this2._lastSuggest = data.geocoding.timestamp; + cb.call(context, _this2._parseResults(data, 'bbox')); + } + }); + }; + + _proto.reverse = function reverse(location, scale, cb, context) { + 
var _this3 = this; + + var params = reverseParams(this.options, { + api_key: this.options.apiKey, + 'point.lat': location.lat, + 'point.lon': location.lng + }); + getJSON(this.options.serviceUrl + '/reverse', params, function (data) { + cb.call(context, _this3._parseResults(data, 'bounds')); + }); + }; + + _proto._parseResults = function _parseResults(data, bboxname) { + var results = []; + L__namespace.geoJSON(data, { + pointToLayer: function pointToLayer(feature, latlng) { + return L__namespace.circleMarker(latlng); + }, + onEachFeature: function onEachFeature(feature, layer) { + var result = {}; + var bbox; + var center; + + if (layer.getBounds) { + bbox = layer.getBounds(); + center = bbox.getCenter(); + } else if (layer.feature.bbox) { + center = layer.getLatLng(); + bbox = L__namespace.latLngBounds(L__namespace.GeoJSON.coordsToLatLng(layer.feature.bbox.slice(0, 2)), L__namespace.GeoJSON.coordsToLatLng(layer.feature.bbox.slice(2, 4))); + } else { + center = layer.getLatLng(); + bbox = L__namespace.latLngBounds(center, center); + } + + result.name = layer.feature.properties.label; + result.center = center; + result[bboxname] = bbox; + result.properties = layer.feature.properties; + results.push(result); + } + }); + return results; + }; + + return Pelias; + }(); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link Pelias} + * @param options the options + */ + + function pelias(options) { + return new Pelias(options); + } + var GeocodeEarth = Pelias; + var geocodeEarth = pelias; + /** + * r.i.p. + * @deprecated + */ + + var Mapzen = Pelias; + /** + * r.i.p. + * @deprecated + */ + + var mapzen = pelias; + /** + * Implementation of the [Openrouteservice](https://openrouteservice.org/dev/#/api-docs/geocode) geocoder + */ + + var Openrouteservice = /*#__PURE__*/function (_Pelias) { + _inheritsLoose(Openrouteservice, _Pelias); + + function Openrouteservice(options) { + return _Pelias.call(this, L__namespace.Util.extend({ + serviceUrl: 'https://api.openrouteservice.org/geocode' + }, options)) || this; + } + + return Openrouteservice; + }(Pelias); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link Openrouteservice} + * @param options the options + */ + + function openrouteservice(options) { + return new Openrouteservice(options); + } + + /** + * Implementation of the [Photon](http://photon.komoot.de/) geocoder + */ + + var Photon = /*#__PURE__*/function () { + function Photon(options) { + this.options = { + serviceUrl: 'https://photon.komoot.io/api/', + reverseUrl: 'https://photon.komoot.io/reverse/', + nameProperties: ['name', 'street', 'suburb', 'hamlet', 'town', 'city', 'state', 'country'] + }; + L__namespace.Util.setOptions(this, options); + } + + var _proto = Photon.prototype; + + _proto.geocode = function geocode(query, cb, context) { + var params = geocodingParams(this.options, { + q: query + }); + getJSON(this.options.serviceUrl, params, L__namespace.Util.bind(function (data) { + cb.call(context, this._decodeFeatures(data)); + }, this)); + }; + + _proto.suggest = function suggest(query, cb, context) { + return this.geocode(query, cb, context); + }; + + _proto.reverse = function reverse(latLng, scale, cb, context) { + var params = reverseParams(this.options, { + lat: latLng.lat, + lon: latLng.lng + }); + getJSON(this.options.reverseUrl, params, L__namespace.Util.bind(function (data) { + cb.call(context, this._decodeFeatures(data)); + }, this)); + }; + + _proto._decodeFeatures = 
function _decodeFeatures(data) { + var results = []; + + if (data && data.features) { + for (var i = 0; i < data.features.length; i++) { + var f = data.features[i]; + var c = f.geometry.coordinates; + var center = L__namespace.latLng(c[1], c[0]); + var extent = f.properties.extent; + var bbox = extent ? L__namespace.latLngBounds([extent[1], extent[0]], [extent[3], extent[2]]) : L__namespace.latLngBounds(center, center); + results.push({ + name: this._decodeFeatureName(f), + html: this.options.htmlTemplate ? this.options.htmlTemplate(f) : undefined, + center: center, + bbox: bbox, + properties: f.properties + }); + } + } + + return results; + }; + + _proto._decodeFeatureName = function _decodeFeatureName(f) { + return (this.options.nameProperties || []).map(function (p) { + return f.properties[p]; + }).filter(function (v) { + return !!v; + }).join(', '); + }; + + return Photon; + }(); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link Photon} + * @param options the options + */ + + function photon(options) { + return new Photon(options); + } + + /** + * Implementation of the What3Words service + */ + + var What3Words = /*#__PURE__*/function () { + function What3Words(options) { + this.options = { + serviceUrl: 'https://api.what3words.com/v2/' + }; + L__namespace.Util.setOptions(this, options); + } + + var _proto = What3Words.prototype; + + _proto.geocode = function geocode(query, cb, context) { + //get three words and make a dot based string + getJSON(this.options.serviceUrl + 'forward', geocodingParams(this.options, { + key: this.options.apiKey, + addr: query.split(/\s+/).join('.') + }), function (data) { + var results = []; + + if (data.geometry) { + var latLng = L__namespace.latLng(data.geometry['lat'], data.geometry['lng']); + var latLngBounds = L__namespace.latLngBounds(latLng, latLng); + results[0] = { + name: data.words, + bbox: latLngBounds, + center: latLng + }; + } + + cb.call(context, results); + }); + }; + + _proto.suggest = function suggest(query, cb, context) { + return this.geocode(query, cb, context); + }; + + _proto.reverse = function reverse(location, scale, cb, context) { + getJSON(this.options.serviceUrl + 'reverse', reverseParams(this.options, { + key: this.options.apiKey, + coords: [location.lat, location.lng].join(',') + }), function (data) { + var results = []; + + if (data.status.status == 200) { + var center = L__namespace.latLng(data.geometry['lat'], data.geometry['lng']); + var bbox = L__namespace.latLngBounds(center, center); + results[0] = { + name: data.words, + bbox: bbox, + center: center + }; + } + + cb.call(context, results); + }); + }; + + return What3Words; + }(); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link What3Words} + * @param options the options + */ + + function what3words(options) { + return new What3Words(options); + } + + var geocoders = { + __proto__: null, + geocodingParams: geocodingParams, + reverseParams: reverseParams, + ArcGis: ArcGis, + arcgis: arcgis, + Bing: Bing, + bing: bing, + Google: Google, + google: google, + HERE: HERE, + HEREv2: HEREv2, + here: here, + parseLatLng: parseLatLng, + LatLng: LatLng, + latLng: latLng, + Mapbox: Mapbox, + mapbox: mapbox, + MapQuest: MapQuest, + mapQuest: mapQuest, + Neutrino: Neutrino, + neutrino: neutrino, + Nominatim: Nominatim, + nominatim: nominatim, + OpenLocationCode: OpenLocationCode, + openLocationCode: openLocationCode, + OpenCage: OpenCage, + opencage: opencage, + Pelias: Pelias, 
+ pelias: pelias, + GeocodeEarth: GeocodeEarth, + geocodeEarth: geocodeEarth, + Mapzen: Mapzen, + mapzen: mapzen, + Openrouteservice: Openrouteservice, + openrouteservice: openrouteservice, + Photon: Photon, + photon: photon, + What3Words: What3Words, + what3words: what3words + }; + + /** + * Leaflet mixins https://leafletjs.com/reference-1.7.1.html#class-includes + * for TypeScript https://www.typescriptlang.org/docs/handbook/mixins.html + * @internal + */ + + var EventedControl = // eslint-disable-next-line @typescript-eslint/no-unused-vars + function EventedControl() {// empty + }; + + L__namespace.Util.extend(EventedControl.prototype, L__namespace.Control.prototype); + L__namespace.Util.extend(EventedControl.prototype, L__namespace.Evented.prototype); + /** + * This is the geocoder control. It works like any other [Leaflet control](https://leafletjs.com/reference.html#control), and is added to the map. + */ + + var GeocoderControl = /*#__PURE__*/function (_EventedControl) { + _inheritsLoose(GeocoderControl, _EventedControl); + + /** + * Instantiates a geocoder control (to be invoked using `new`) + * @param options the options + */ + function GeocoderControl(options) { + var _this; + + _this = _EventedControl.call(this, options) || this; + _this.options = { + showUniqueResult: true, + showResultIcons: false, + collapsed: true, + expand: 'touch', + position: 'topright', + placeholder: 'Search...', + errorMessage: 'Nothing found.', + iconLabel: 'Initiate a new search', + query: '', + queryMinLength: 1, + suggestMinLength: 3, + suggestTimeout: 250, + defaultMarkGeocode: true + }; + _this._requestCount = 0; + L__namespace.Util.setOptions(_assertThisInitialized(_this), options); + + if (!_this.options.geocoder) { + _this.options.geocoder = new Nominatim(); + } + + return _this; + } + + var _proto = GeocoderControl.prototype; + + _proto.addThrobberClass = function addThrobberClass() { + L__namespace.DomUtil.addClass(this._container, 'leaflet-control-geocoder-throbber'); + }; + + _proto.removeThrobberClass = function removeThrobberClass() { + L__namespace.DomUtil.removeClass(this._container, 'leaflet-control-geocoder-throbber'); + } + /** + * Returns the container DOM element for the control and add listeners on relevant map events. 
+ * @param map the map instance + * @see https://leafletjs.com/reference.html#control-onadd + */ + ; + + _proto.onAdd = function onAdd(map) { + var _this2 = this; + + var className = 'leaflet-control-geocoder'; + var container = L__namespace.DomUtil.create('div', className + ' leaflet-bar'); + var icon = L__namespace.DomUtil.create('button', className + '-icon', container); + var form = this._form = L__namespace.DomUtil.create('div', className + '-form', container); + this._map = map; + this._container = container; + icon.innerHTML = ' '; + icon.type = 'button'; + icon.setAttribute('aria-label', this.options.iconLabel); + var input = this._input = L__namespace.DomUtil.create('input', '', form); + input.type = 'text'; + input.value = this.options.query; + input.placeholder = this.options.placeholder; + L__namespace.DomEvent.disableClickPropagation(input); + this._errorElement = L__namespace.DomUtil.create('div', className + '-form-no-error', container); + this._errorElement.innerHTML = this.options.errorMessage; + this._alts = L__namespace.DomUtil.create('ul', className + '-alternatives leaflet-control-geocoder-alternatives-minimized', container); + L__namespace.DomEvent.disableClickPropagation(this._alts); + L__namespace.DomEvent.addListener(input, 'keydown', this._keydown, this); + + if (this.options.geocoder.suggest) { + L__namespace.DomEvent.addListener(input, 'input', this._change, this); + } + + L__namespace.DomEvent.addListener(input, 'blur', function () { + if (_this2.options.collapsed && !_this2._preventBlurCollapse) { + _this2._collapse(); + } + + _this2._preventBlurCollapse = false; + }); + + if (this.options.collapsed) { + if (this.options.expand === 'click') { + L__namespace.DomEvent.addListener(container, 'click', function (e) { + if (e.button === 0 && e.detail !== 2) { + _this2._toggle(); + } + }); + } else if (this.options.expand === 'touch') { + L__namespace.DomEvent.addListener(container, L__namespace.Browser.touch ? 
'touchstart mousedown' : 'mousedown', function (e) { + _this2._toggle(); + + e.preventDefault(); // mobile: clicking focuses the icon, so UI expands and immediately collapses + + e.stopPropagation(); + }, this); + } else { + L__namespace.DomEvent.addListener(container, 'mouseover', this._expand, this); + L__namespace.DomEvent.addListener(container, 'mouseout', this._collapse, this); + + this._map.on('movestart', this._collapse, this); + } + } else { + this._expand(); + + if (L__namespace.Browser.touch) { + L__namespace.DomEvent.addListener(container, 'touchstart', function () { + return _this2._geocode(); + }); + } else { + L__namespace.DomEvent.addListener(container, 'click', function () { + return _this2._geocode(); + }); + } + } + + if (this.options.defaultMarkGeocode) { + this.on('markgeocode', this.markGeocode, this); + } + + this.on('startgeocode', this.addThrobberClass, this); + this.on('finishgeocode', this.removeThrobberClass, this); + this.on('startsuggest', this.addThrobberClass, this); + this.on('finishsuggest', this.removeThrobberClass, this); + L__namespace.DomEvent.disableClickPropagation(container); + return container; + } + /** + * Sets the query string on the text input + * @param string the query string + */ + ; + + _proto.setQuery = function setQuery(string) { + this._input.value = string; + return this; + }; + + _proto._geocodeResult = function _geocodeResult(results, suggest) { + if (!suggest && this.options.showUniqueResult && results.length === 1) { + this._geocodeResultSelected(results[0]); + } else if (results.length > 0) { + this._alts.innerHTML = ''; + this._results = results; + L__namespace.DomUtil.removeClass(this._alts, 'leaflet-control-geocoder-alternatives-minimized'); + L__namespace.DomUtil.addClass(this._container, 'leaflet-control-geocoder-options-open'); + + for (var i = 0; i < results.length; i++) { + this._alts.appendChild(this._createAlt(results[i], i)); + } + } else { + L__namespace.DomUtil.addClass(this._container, 'leaflet-control-geocoder-options-error'); + L__namespace.DomUtil.addClass(this._errorElement, 'leaflet-control-geocoder-error'); + } + } + /** + * Marks a geocoding result on the map + * @param result the geocoding result + */ + ; + + _proto.markGeocode = function markGeocode(event) { + var result = event.geocode; + + this._map.fitBounds(result.bbox); + + if (this._geocodeMarker) { + this._map.removeLayer(this._geocodeMarker); + } + + this._geocodeMarker = new L__namespace.Marker(result.center).bindPopup(result.html || result.name).addTo(this._map).openPopup(); + return this; + }; + + _proto._geocode = function _geocode(suggest) { + var _this3 = this; + + var value = this._input.value; + + if (!suggest && value.length < this.options.queryMinLength) { + return; + } + + var requestCount = ++this._requestCount; + + var cb = function cb(results) { + if (requestCount === _this3._requestCount) { + var _event = { + input: value, + results: results + }; + + _this3.fire(suggest ? 'finishsuggest' : 'finishgeocode', _event); + + _this3._geocodeResult(results, suggest); + } + }; + + this._lastGeocode = value; + + if (!suggest) { + this._clearResults(); + } + + var event = { + input: value + }; + this.fire(suggest ? 
'startsuggest' : 'startgeocode', event); + + if (suggest) { + this.options.geocoder.suggest(value, cb); + } else { + this.options.geocoder.geocode(value, cb); + } + }; + + _proto._geocodeResultSelected = function _geocodeResultSelected(geocode) { + var event = { + geocode: geocode + }; + this.fire('markgeocode', event); + }; + + _proto._toggle = function _toggle() { + if (L__namespace.DomUtil.hasClass(this._container, 'leaflet-control-geocoder-expanded')) { + this._collapse(); + } else { + this._expand(); + } + }; + + _proto._expand = function _expand() { + L__namespace.DomUtil.addClass(this._container, 'leaflet-control-geocoder-expanded'); + + this._input.select(); + + this.fire('expand'); + }; + + _proto._collapse = function _collapse() { + L__namespace.DomUtil.removeClass(this._container, 'leaflet-control-geocoder-expanded'); + L__namespace.DomUtil.addClass(this._alts, 'leaflet-control-geocoder-alternatives-minimized'); + L__namespace.DomUtil.removeClass(this._errorElement, 'leaflet-control-geocoder-error'); + L__namespace.DomUtil.removeClass(this._container, 'leaflet-control-geocoder-options-open'); + L__namespace.DomUtil.removeClass(this._container, 'leaflet-control-geocoder-options-error'); + + this._input.blur(); // mobile: keyboard shouldn't stay expanded + + + this.fire('collapse'); + }; + + _proto._clearResults = function _clearResults() { + L__namespace.DomUtil.addClass(this._alts, 'leaflet-control-geocoder-alternatives-minimized'); + this._selection = null; + L__namespace.DomUtil.removeClass(this._errorElement, 'leaflet-control-geocoder-error'); + L__namespace.DomUtil.removeClass(this._container, 'leaflet-control-geocoder-options-open'); + L__namespace.DomUtil.removeClass(this._container, 'leaflet-control-geocoder-options-error'); + }; + + _proto._createAlt = function _createAlt(result, index) { + var _this4 = this; + + var li = L__namespace.DomUtil.create('li', ''), + a = L__namespace.DomUtil.create('a', '', li), + icon = this.options.showResultIcons && result.icon ? L__namespace.DomUtil.create('img', '', a) : null, + text = result.html ? undefined : document.createTextNode(result.name), + mouseDownHandler = function mouseDownHandler(e) { + // In some browsers, a click will fire on the map if the control is + // collapsed directly after mousedown. To work around this, we + // wait until the click is completed, and _then_ collapse the + // control. Messy, but this is the workaround I could come up with + // for #142. + _this4._preventBlurCollapse = true; + L__namespace.DomEvent.stop(e); + + _this4._geocodeResultSelected(result); + + L__namespace.DomEvent.on(li, 'click touchend', function () { + if (_this4.options.collapsed) { + _this4._collapse(); + } else { + _this4._clearResults(); + } + }); + }; + + if (icon) { + icon.src = result.icon; + } + + li.setAttribute('data-result-index', String(index)); + + if (result.html) { + a.innerHTML = a.innerHTML + result.html; + } else if (text) { + a.appendChild(text); + } // Use mousedown and not click, since click will fire _after_ blur, + // causing the control to have collapsed and removed the items + // before the click can fire. + + + L__namespace.DomEvent.addListener(li, 'mousedown touchstart', mouseDownHandler, this); + return li; + }; + + _proto._keydown = function _keydown(e) { + var _this5 = this; + + var select = function select(dir) { + if (_this5._selection) { + L__namespace.DomUtil.removeClass(_this5._selection, 'leaflet-control-geocoder-selected'); + _this5._selection = _this5._selection[dir > 0 ? 
'nextSibling' : 'previousSibling']; + } + + if (!_this5._selection) { + _this5._selection = _this5._alts[dir > 0 ? 'firstChild' : 'lastChild']; + } + + if (_this5._selection) { + L__namespace.DomUtil.addClass(_this5._selection, 'leaflet-control-geocoder-selected'); + } + }; + + switch (e.keyCode) { + // Escape + case 27: + if (this.options.collapsed) { + this._collapse(); + } else { + this._clearResults(); + } + + break; + // Up + + case 38: + select(-1); + break; + // Up + + case 40: + select(1); + break; + // Enter + + case 13: + if (this._selection) { + var index = parseInt(this._selection.getAttribute('data-result-index'), 10); + + this._geocodeResultSelected(this._results[index]); + + this._clearResults(); + } else { + this._geocode(); + } + + break; + + default: + return; + } + + L__namespace.DomEvent.preventDefault(e); + }; + + _proto._change = function _change() { + var _this6 = this; + + var v = this._input.value; + + if (v !== this._lastGeocode) { + clearTimeout(this._suggestTimeout); + + if (v.length >= this.options.suggestMinLength) { + this._suggestTimeout = setTimeout(function () { + return _this6._geocode(true); + }, this.options.suggestTimeout); + } else { + this._clearResults(); + } + } + }; + + return GeocoderControl; + }(EventedControl); + /** + * [Class factory method](https://leafletjs.com/reference.html#class-class-factories) for {@link GeocoderControl} + * @param options the options + */ + + function geocoder(options) { + return new GeocoderControl(options); + } + + /* @preserve + * Leaflet Control Geocoder + * https://github.com/perliedman/leaflet-control-geocoder + * + * Copyright (c) 2012 sa3m (https://github.com/sa3m) + * Copyright (c) 2018 Per Liedman + * All rights reserved. + */ + L__namespace.Util.extend(GeocoderControl, geocoders); + L__namespace.Util.extend(L__namespace.Control, { + Geocoder: GeocoderControl, + geocoder: geocoder + }); + + exports.Geocoder = GeocoderControl; + exports.default = GeocoderControl; + exports.geocoder = geocoder; + exports.geocoders = geocoders; + + return exports; + +}({}, L)); +//# sourceMappingURL=Control.Geocoder.js.map diff --git a/publications/static/js/main.js b/publications/static/js/main.js index 4512e82..206672f 100644 --- a/publications/static/js/main.js +++ b/publications/static/js/main.js @@ -39,13 +39,16 @@ async function initMap() { // Controls: scale and layer switcher L.control.scale({ position: 'bottomright' }).addTo(map); - L.control + const layerControl = L.control .layers( { 'OpenStreetMap': osmLayer }, - { Publications: publicationsGroup } + { 'All works': publicationsGroup } ) .addTo(map); + // Make layer control globally available for search manager + window.mapLayerControl = layerControl; + // Fetch data and add to map const pubs = await load_publications(); const pubsLayer = L.geoJSON(pubs, { @@ -54,12 +57,59 @@ async function initMap() { }); pubsLayer.eachLayer((layer) => publicationsGroup.addLayer(layer)); + // Make style and popup functions globally available for search manager + window.publicationStyle = publicationStyle; + window.publicationPopup = publicationPopup; + // Initialize enhanced interaction manager for handling overlapping polygons + let interactionManager = null; if (typeof MapInteractionManager !== 'undefined') { - const interactionManager = new MapInteractionManager(map, pubsLayer); + interactionManager = new MapInteractionManager(map, pubsLayer); console.log('Enhanced map interaction enabled: overlapping polygon selection and geometry highlighting'); } + // Initialize keyboard 
navigation for accessibility
+  if (typeof MapKeyboardNavigation !== 'undefined' && interactionManager) {
+    const keyboardNav = new MapKeyboardNavigation(map, pubsLayer, interactionManager);
+    console.log('Keyboard navigation enabled for accessibility');
+  }
+
+  // Initialize map search functionality
+  if (typeof MapSearchManager !== 'undefined') {
+    const searchManager = new MapSearchManager(map, pubsLayer, pubs, publicationsGroup);
+    console.log('Map search enabled');
+
+    // Make search manager globally available for potential use by other components
+    window.mapSearchManager = searchManager;
+  }
+
+  // Initialize gazetteer (location search)
+  if (typeof MapGazetteerManager !== 'undefined' && window.OPTIMAP_SETTINGS?.gazetteer) {
+    const gazetteerManager = new MapGazetteerManager(map, window.OPTIMAP_SETTINGS.gazetteer);
+    console.log('Gazetteer enabled');
+
+    // Make gazetteer manager globally available
+    window.mapGazetteerManager = gazetteerManager;
+  }
+
+  // Initialize zoom to all features control
+  if (typeof MapZoomToAllControl !== 'undefined') {
+    const zoomToAllControl = new MapZoomToAllControl(map, publicationsGroup);
+    console.log('Zoom to all features control enabled');
+
+    // Make zoom control globally available
+    window.mapZoomToAllControl = zoomToAllControl;
+  }
+
+  // Fit map to markers
   if (publicationsGroup.getBounds().isValid()) {
     map.fitBounds(publicationsGroup.getBounds());
diff --git a/publications/static/js/map-gazetteer.js b/publications/static/js/map-gazetteer.js
new file mode 100644
index 0000000..ded2ae0
--- /dev/null
+++ b/publications/static/js/map-gazetteer.js
@@ -0,0 +1,194 @@
+// publications/static/js/map-gazetteer.js
+// Gazetteer (location search) functionality for the map
+
+/**
+ * Map Gazetteer Manager
+ * Provides location search using configurable geocoding providers
+ * - Separate from publication search (doesn't filter publications)
+ * - Pans/zooms map to searched location
+ * - Supports multiple geocoding services (Nominatim, Photon, etc.)
+ */ +class MapGazetteerManager { + constructor(map, options = {}) { + this.map = map; + this.provider = options.provider || 'nominatim'; + this.placeholder = options.placeholder || 'Search for a location...'; + this.geocoder = null; + + console.group('📍 Map Gazetteer Initialization'); + console.log('Provider:', this.provider); + console.log('Placeholder:', this.placeholder); + + this.init(); + console.groupEnd(); + } + + /** + * Initialize the geocoder control + */ + init() { + if (!this.map) { + console.warn('⚠️ Map not found, cannot initialize gazetteer'); + return; + } + + if (typeof L === 'undefined' || !L.Control || !L.Control.Geocoder) { + console.warn('⚠️ Leaflet Control Geocoder not loaded, cannot initialize gazetteer'); + return; + } + + // Get the geocoder instance based on provider + const geocoderInstance = this.getGeocoderInstance(); + + if (!geocoderInstance) { + console.warn('⚠️ Unknown geocoder provider:', this.provider); + return; + } + + // Create the geocoder control + this.geocoder = L.Control.geocoder({ + geocoder: geocoderInstance, + placeholder: this.placeholder, + defaultMarkGeocode: false, // Custom handling + position: 'topleft', + collapsed: true, + errorMessage: 'No location found', + }); + + // Add custom handler for geocoding results + this.geocoder.on('markgeocode', (e) => { + this.handleGeocode(e); + }); + + // Add to map + this.geocoder.addTo(this.map); + + // Add accessibility attributes to the geocoder button + this.addAccessibilityAttributes(); + + console.log('✅ Gazetteer initialized with', this.provider); + } + + /** + * Add accessibility attributes to the geocoder button + */ + addAccessibilityAttributes() { + // Wait for DOM to be ready + setTimeout(() => { + const geocoderButton = document.querySelector('.leaflet-control-geocoder-icon'); + if (geocoderButton) { + geocoderButton.setAttribute('title', 'Search locations on the map'); + geocoderButton.setAttribute('aria-label', 'Search locations on the map'); + console.log('✅ Added accessibility attributes to gazetteer button'); + } else { + console.warn('⚠️ Could not find geocoder button to add accessibility attributes'); + } + }, 100); + } + + /** + * Get geocoder instance based on provider name + */ + getGeocoderInstance() { + const provider = this.provider.toLowerCase(); + + switch (provider) { + case 'nominatim': + // Use built-in Nominatim geocoder with proxy + // Need full URL (with protocol and host) for URL constructor + const nominatimUrl = `${window.location.origin}/api/v1/gazetteer/nominatim/`; + console.log('Using built-in Nominatim geocoder with proxy URL:', nominatimUrl); + return L.Control.Geocoder.nominatim({ + serviceUrl: nominatimUrl, + geocodingQueryParams: { + format: 'json', + addressdetails: 1 + } + }); + + case 'photon': + // Use built-in Photon geocoder with proxy + const photonUrl = `${window.location.origin}/api/v1/gazetteer/photon/`; + console.log('Using built-in Photon geocoder with proxy URL:', photonUrl); + return L.Control.Geocoder.photon({ + serviceUrl: photonUrl + }); + + default: + console.warn('⚠️ Unknown geocoder provider:', provider); + return null; + } + } + + /** + * Handle geocoding result + * Pans to location and adds temporary marker + */ + handleGeocode(e) { + const result = e.geocode; + const latlng = result.center; + + console.group('📍 Gazetteer Result'); + console.log('Name:', result.name); + console.log('Location:', latlng); + console.log('Bounds:', result.bbox); + console.groupEnd(); + + // Fit to bounds if available, otherwise pan to point + if 
(result.bbox) { + const bbox = result.bbox; + const bounds = L.latLngBounds( + L.latLng(bbox.getSouth(), bbox.getWest()), + L.latLng(bbox.getNorth(), bbox.getEast()) + ); + this.map.fitBounds(bounds, { maxZoom: 16 }); + } else { + this.map.setView(latlng, 13); + } + + // Add temporary marker that disappears after 5 seconds + const marker = L.marker(latlng, { + icon: L.divIcon({ + className: 'gazetteer-marker', + html: '', + iconSize: [32, 32], + iconAnchor: [16, 32], + }) + }) + .addTo(this.map) + .bindPopup(result.name) + .openPopup(); + + // Remove marker after 5 seconds + setTimeout(() => { + this.map.removeLayer(marker); + console.log('🗑️ Temporary gazetteer marker removed'); + }, 5000); + } + + /** + * Programmatically search for a location + */ + search(query) { + if (!this.geocoder) { + console.warn('⚠️ Gazetteer not initialized'); + return; + } + + console.log('🔍 Searching for location:', query); + + const geocoderInstance = this.geocoder.options.geocoder; + geocoderInstance.geocode(query, (results) => { + if (results && results.length > 0) { + console.log(`📍 Found ${results.length} location(s)`); + const result = results[0]; + this.handleGeocode({ geocode: result }); + } else { + console.warn('⚠️ No location found for query:', query); + } + }); + } +} + +// Make available globally +window.MapGazetteerManager = MapGazetteerManager; diff --git a/publications/static/js/map-keyboard-navigation.js b/publications/static/js/map-keyboard-navigation.js new file mode 100644 index 0000000..9be334c --- /dev/null +++ b/publications/static/js/map-keyboard-navigation.js @@ -0,0 +1,281 @@ +// publications/static/js/map-keyboard-navigation.js +// Keyboard navigation accessibility for interactive map + +/** + * Map Keyboard Navigation Manager + * Provides keyboard accessibility for the Leaflet map + * - Arrow keys: Pan map + * - +/- keys: Zoom in/out + * - Enter/Space: Activate focused feature + * - Tab: Cycle through features + * - Escape: Close popup + */ +class MapKeyboardNavigation { + constructor(map, publicationsLayer, interactionManager) { + this.map = map; + this.publicationsLayer = publicationsLayer; + this.interactionManager = interactionManager; + this.focusedFeatureIndex = -1; + this.features = []; + this.isMapFocused = false; + + this.init(); + } + + init() { + // Make map container focusable + const mapContainer = this.map.getContainer(); + mapContainer.setAttribute('tabindex', '0'); + mapContainer.setAttribute('role', 'application'); + mapContainer.setAttribute('aria-label', 'Interactive map of publications. 
Use arrow keys to pan, plus and minus keys to zoom, tab to cycle through publications, enter to select.'); + + // Collect all features + this.collectFeatures(); + + // Add keyboard event listeners + this.setupKeyboardHandlers(); + + // Add focus/blur handlers + this.setupFocusHandlers(); + } + + /** + * Collect all features from the publications layer + */ + collectFeatures() { + this.features = []; + this.publicationsLayer.eachLayer((layer) => { + if (layer.feature) { + this.features.push({ + layer: layer, + feature: layer.feature, + publicationId: layer.feature.id || layer.feature.properties.id + }); + } + }); + console.log(`Keyboard navigation: ${this.features.length} features available`); + } + + /** + * Setup keyboard event handlers + */ + setupKeyboardHandlers() { + const mapContainer = this.map.getContainer(); + + mapContainer.addEventListener('keydown', (e) => { + if (!this.isMapFocused) return; + + const handled = this.handleKeyPress(e); + if (handled) { + e.preventDefault(); + e.stopPropagation(); + } + }); + } + + /** + * Setup focus handlers to track when map has focus + */ + setupFocusHandlers() { + const mapContainer = this.map.getContainer(); + + mapContainer.addEventListener('focus', () => { + this.isMapFocused = true; + console.log('Map focused - keyboard navigation active'); + this.announce('Map focused. Use arrow keys to pan, plus and minus to zoom, tab to cycle through publications.'); + }); + + mapContainer.addEventListener('blur', () => { + this.isMapFocused = false; + console.log('Map unfocused - keyboard navigation inactive'); + }); + } + + /** + * Handle keyboard input + */ + handleKeyPress(e) { + const key = e.key; + const panAmount = 100; // pixels + + switch(key) { + // Arrow keys - pan map + case 'ArrowUp': + this.map.panBy([0, -panAmount]); + this.announce('Panned up'); + return true; + + case 'ArrowDown': + this.map.panBy([0, panAmount]); + this.announce('Panned down'); + return true; + + case 'ArrowLeft': + this.map.panBy([-panAmount, 0]); + this.announce('Panned left'); + return true; + + case 'ArrowRight': + this.map.panBy([panAmount, 0]); + this.announce('Panned right'); + return true; + + // Zoom keys + case '+': + case '=': + this.map.zoomIn(); + this.announce(`Zoomed in to level ${this.map.getZoom()}`); + return true; + + case '-': + case '_': + this.map.zoomOut(); + this.announce(`Zoomed out to level ${this.map.getZoom()}`); + return true; + + // Tab - cycle through features + case 'Tab': + if (e.shiftKey) { + this.focusPreviousFeature(); + } else { + this.focusNextFeature(); + } + return true; + + // Enter or Space - activate focused feature + case 'Enter': + case ' ': + this.activateFocusedFeature(); + return true; + + // Escape - close popup + case 'Escape': + this.map.closePopup(); + this.focusedFeatureIndex = -1; + this.announce('Popup closed'); + return true; + + // Home - zoom to all features + case 'Home': + if (this.publicationsLayer.getBounds && this.publicationsLayer.getBounds().isValid()) { + this.map.fitBounds(this.publicationsLayer.getBounds()); + this.announce('Zoomed to show all publications'); + } + return true; + + default: + return false; + } + } + + /** + * Focus next feature in the list + */ + focusNextFeature() { + if (this.features.length === 0) { + this.announce('No publications available'); + return; + } + + this.focusedFeatureIndex = (this.focusedFeatureIndex + 1) % this.features.length; + this.focusFeature(this.focusedFeatureIndex); + } + + /** + * Focus previous feature in the list + */ + focusPreviousFeature() { + if 
(this.features.length === 0) { + this.announce('No publications available'); + return; + } + + this.focusedFeatureIndex = (this.focusedFeatureIndex - 1 + this.features.length) % this.features.length; + this.focusFeature(this.focusedFeatureIndex); + } + + /** + * Focus a specific feature + */ + focusFeature(index) { + if (index < 0 || index >= this.features.length) return; + + const featureData = this.features[index]; + const layer = featureData.layer; + const properties = featureData.feature.properties; + + // Pan to feature + if (layer.getBounds) { + this.map.fitBounds(layer.getBounds(), { padding: [50, 50] }); + } else if (layer.getLatLng) { + this.map.setView(layer.getLatLng(), Math.max(this.map.getZoom(), 10)); + } + + // Highlight feature + if (this.interactionManager) { + this.interactionManager.selectPublication(featureData); + } + + // Announce feature + const title = properties.title || 'Untitled publication'; + const doi = properties.doi || ''; + this.announce(`Publication ${index + 1} of ${this.features.length}: ${title}. Press Enter to view details.`); + } + + /** + * Activate the currently focused feature + */ + activateFocusedFeature() { + if (this.focusedFeatureIndex < 0 || this.focusedFeatureIndex >= this.features.length) { + this.announce('No publication selected. Use Tab to select a publication.'); + return; + } + + const featureData = this.features[this.focusedFeatureIndex]; + const layer = featureData.layer; + + // Get center point for popup + let latlng; + if (layer.getBounds) { + latlng = layer.getBounds().getCenter(); + } else if (layer.getLatLng) { + latlng = layer.getLatLng(); + } + + if (latlng && this.interactionManager) { + // Check for overlapping features at this location + const overlapping = this.interactionManager.findOverlappingFeatures(latlng); + + if (overlapping.length > 1) { + this.interactionManager.showPaginatedPopup(overlapping, latlng); + this.announce(`Multiple publications at this location. 
Use arrow buttons to navigate.`); + } else { + this.interactionManager.showPublicationPopup(featureData, latlng); + this.announce('Publication details opened'); + } + } + } + + /** + * Announce message to screen readers + */ + announce(message) { + // Find or create announcer element + let announcer = document.getElementById('map-announcer'); + if (!announcer) { + announcer = document.createElement('div'); + announcer.id = 'map-announcer'; + announcer.className = 'sr-only'; + announcer.setAttribute('role', 'status'); + announcer.setAttribute('aria-live', 'polite'); + announcer.setAttribute('aria-atomic', 'true'); + document.body.appendChild(announcer); + } + + // Update message + announcer.textContent = message; + + // Log for debugging + console.log('Screen reader announcement:', message); + } +} diff --git a/publications/static/js/map-search.js b/publications/static/js/map-search.js new file mode 100644 index 0000000..0f62412 --- /dev/null +++ b/publications/static/js/map-search.js @@ -0,0 +1,517 @@ +// publications/static/js/map-search.js +// Full-text search filtering for map publications + +/** + * Map Search Manager + * Provides real-time filtering of publications on the map + * - Searches across all text fields in publication data + * - Minimum 3 characters to activate + * - Debounced for performance + * - Accessible with keyboard and screen readers + */ +class MapSearchManager { + constructor(map, publicationsLayer, allPublications, publicationsGroup = null) { + this.map = map; + this.publicationsLayer = publicationsLayer; // The GeoJSON layer + this.publicationsGroup = publicationsGroup; // The layer group (for layer control) + + // Extract features array from GeoJSON object if needed + if (allPublications && allPublications.type === 'FeatureCollection') { + this.allPublications = allPublications.features || []; + } else if (Array.isArray(allPublications)) { + this.allPublications = allPublications; + } else { + this.allPublications = []; + } + + this.filteredPublications = []; + this.filteredLayer = null; // NEW: Separate layer for search results + this.searchInput = null; + this.searchButton = null; + this.clearButton = null; + this.searchContainer = null; + this.searchForm = null; + this.statusElement = null; + this.searchTimeout = null; + this.minSearchLength = 3; + this.isSearchActive = false; + this.searchStartTime = null; + + console.group('🔍 Map Search Initialization'); + console.log('Publications object type:', allPublications?.type || 'unknown'); + console.log('Total publications loaded:', this.allPublications.length); + if (this.allPublications.length > 0) { + console.log('Sample publication:', this.allPublications[0]); + } + this.init(); + console.groupEnd(); + } + + /** + * Initialize search functionality + */ + init() { + // Find search elements + this.searchInput = document.getElementById('map-search-input'); + this.searchButton = document.getElementById('search-submit-btn'); + this.clearButton = document.getElementById('clear-search-btn'); + this.searchContainer = document.getElementById('navbar-search-container'); + this.searchForm = document.querySelector('.navbar-search-form'); + this.statusElement = document.getElementById('search-results-status'); + + console.log('Search elements found:', { + input: !!this.searchInput, + searchButton: !!this.searchButton, + clearButton: !!this.clearButton, + container: !!this.searchContainer, + form: !!this.searchForm, + statusElement: !!this.statusElement + }); + + if (!this.searchInput) { + console.warn('⚠️ Map search input 
not found'); + return; + } + + // Setup event listeners + this.setupEventListeners(); + + console.log(`✅ Map search initialized with ${this.allPublications.length} publications`); + } + + /** + * Check if we're on a map page + */ + isMapPage() { + // Check if map element exists + return document.getElementById('map') !== null; + } + + /** + * Setup event listeners + */ + setupEventListeners() { + if (!this.searchInput) return; + + console.log('📋 Setting up event listeners...'); + + // Form submit (Enter key) + if (this.searchForm) { + this.searchForm.addEventListener('submit', (e) => { + e.preventDefault(); + console.log('📝 Form submitted (Enter key pressed)'); + const query = this.searchInput.value; + if (query.trim().length >= this.minSearchLength) { + // Clear debounce and search immediately + if (this.searchTimeout) { + clearTimeout(this.searchTimeout); + } + this.performSearch(query); + } else { + console.warn(`⚠️ Search query too short: "${query}" (minimum ${this.minSearchLength} characters)`); + } + }); + } + + // Search button click + if (this.searchButton) { + this.searchButton.addEventListener('click', (e) => { + e.preventDefault(); + console.log('🔍 Search button clicked'); + const query = this.searchInput.value; + if (query.trim().length >= this.minSearchLength) { + // Clear debounce and search immediately + if (this.searchTimeout) { + clearTimeout(this.searchTimeout); + } + this.performSearch(query); + } else { + console.warn(`⚠️ Search query too short: "${query}" (minimum ${this.minSearchLength} characters)`); + } + }); + } + + // Input event with debouncing + this.searchInput.addEventListener('input', (e) => { + console.log(`⌨️ Input changed: "${e.target.value}"`); + this.handleSearchInput(e.target.value); + }); + + // Keydown for special keys + this.searchInput.addEventListener('keydown', (e) => { + if (e.key === 'Escape') { + e.preventDefault(); + console.log('⎋ Escape key pressed - clearing search'); + this.clearSearch(); + } + }); + + // Clear button + if (this.clearButton) { + this.clearButton.addEventListener('click', () => { + console.log('❌ Clear button clicked'); + this.clearSearch(); + this.searchInput.focus(); + }); + } + + // Focus events for accessibility + this.searchInput.addEventListener('focus', () => { + console.log('🎯 Search field focused'); + this.announce('Search field focused. Type at least 3 characters to filter publications.'); + }); + + console.log('✅ Event listeners set up successfully'); + } + + /** + * Handle search input with debouncing + */ + handleSearchInput(query) { + // Clear previous timeout + if (this.searchTimeout) { + clearTimeout(this.searchTimeout); + } + + // Show/hide clear button + if (this.clearButton) { + this.clearButton.style.display = query.length > 0 ? 
'block' : 'none'; + } + + // Debounce search + this.searchTimeout = setTimeout(() => { + this.performSearch(query); + }, 300); + } + + /** + * Perform the actual search + */ + performSearch(query) { + this.searchStartTime = performance.now(); + const trimmedQuery = query.trim(); + + console.group(`🔎 Performing Search`); + console.log('Query:', `"${trimmedQuery}"`); + console.log('Query length:', trimmedQuery.length); + console.log('Minimum required:', this.minSearchLength); + + // Clear search if less than minimum length + if (trimmedQuery.length < this.minSearchLength) { + console.warn(`⚠️ Query too short (${trimmedQuery.length} < ${this.minSearchLength})`); + if (this.isSearchActive) { + console.log('Clearing active search...'); + this.showAllPublications(); + this.announce('Search cleared. Showing all publications.'); + } + console.groupEnd(); + return; + } + + // Add searching class for loading indicator + if (this.searchInput) { + this.searchInput.classList.add('searching'); + } + + // Perform the search + const searchTerms = trimmedQuery.toLowerCase().split(/\s+/); + console.log('Search terms:', searchTerms); + console.log('Total publications to search:', this.allPublications.length); + + const filterStartTime = performance.now(); + this.filteredPublications = this.allPublications.filter(pub => { + return this.matchesSearch(pub, searchTerms); + }); + const filterTime = performance.now() - filterStartTime; + + console.log(`⏱️ Filtering took: ${filterTime.toFixed(2)}ms`); + console.log(`📊 Results: ${this.filteredPublications.length} / ${this.allPublications.length}`); + + // Log sample of matched publications + if (this.filteredPublications.length > 0) { + console.log('Sample matches (first 3):'); + this.filteredPublications.slice(0, 3).forEach((pub, index) => { + console.log(` ${index + 1}. ${pub.properties?.title || 'Untitled'}`); + }); + } + + // Update map + const mapUpdateStart = performance.now(); + this.updateMap(); + const mapUpdateTime = performance.now() - mapUpdateStart; + console.log(`🗺️ Map update took: ${mapUpdateTime.toFixed(2)}ms`); + + // Remove searching class + if (this.searchInput) { + setTimeout(() => { + this.searchInput.classList.remove('searching'); + }, 300); + } + + // Announce results + const count = this.filteredPublications.length; + const total = this.allPublications.length; + const percentage = ((count / total) * 100).toFixed(1); + const totalTime = performance.now() - this.searchStartTime; + + const message = count === 1 + ? 
`1 publication found matching "${trimmedQuery}"` + : `${count} publications found matching "${trimmedQuery}" (${percentage}% of total)`; + + console.log(`✅ ${message}`); + console.log(`⏱️ Total search time: ${totalTime.toFixed(2)}ms`); + console.groupEnd(); + + this.announce(message); + + this.isSearchActive = true; + } + + /** + * Check if publication matches search terms + * Searches across all text fields in the publication + */ + matchesSearch(publication, searchTerms) { + if (!publication) return false; + + // Build searchable text from all fields + const searchableText = this.buildSearchableText(publication); + + // Check if all search terms are found + return searchTerms.every(term => searchableText.includes(term)); + } + + /** + * Build searchable text from publication object + * Includes all text fields from the API response + */ + buildSearchableText(pub) { + const parts = []; + + // GeoJSON properties (primary source of data) + if (pub.properties) { + const props = pub.properties; + + // Title + if (props.title) parts.push(props.title); + + // DOI + if (props.doi) parts.push(props.doi); + + // Abstract + if (props.abstract) parts.push(props.abstract); + + // Authors (array of strings) + if (Array.isArray(props.authors)) { + parts.push(...props.authors); + } + + // Keywords (array of strings) + if (Array.isArray(props.keywords)) { + parts.push(...props.keywords); + } + + // Topics (array of objects with display_name) + if (Array.isArray(props.topics)) { + props.topics.forEach(topic => { + if (topic.display_name) parts.push(topic.display_name); + if (topic.subfield) parts.push(topic.subfield); + if (topic.field) parts.push(topic.field); + if (topic.domain) parts.push(topic.domain); + }); + } + + // Source details + if (props.source_details) { + const source = props.source_details; + if (source.name) parts.push(source.name); + if (source.display_name) parts.push(source.display_name); + if (source.abbreviated_title) parts.push(source.abbreviated_title); + if (source.publisher_name) parts.push(source.publisher_name); + if (source.issn_l) parts.push(source.issn_l); + } + + // URL + if (props.url) parts.push(props.url); + + // OpenAlex ID + if (props.openalex_id) parts.push(props.openalex_id); + + // PMID, PMCID + if (props.pmid) parts.push(props.pmid); + if (props.pmcid) parts.push(props.pmcid); + + // Time period + if (props.timeperiod_startdate) parts.push(props.timeperiod_startdate); + if (props.timeperiod_enddate) parts.push(props.timeperiod_enddate); + + // Region description + if (props.region_description) parts.push(props.region_description); + } + + // Join all parts and convert to lowercase + return parts.join(' ').toLowerCase(); + } + + /** + * Update map to show only filtered publications + * Uses layer replacement strategy for clean display + */ + updateMap() { + if (!this.map) return; + + console.log('🗺️ Updating map display...'); + console.log('Filtered publications count:', this.filteredPublications.length); + + // Remove existing filtered layer if any + if (this.filteredLayer) { + this.map.removeLayer(this.filteredLayer); + + // Remove from layer control if present + if (window.mapLayerControl) { + window.mapLayerControl.removeLayer(this.filteredLayer); + } + + this.filteredLayer = null; + console.log('🗑️ Removed previous filtered layer'); + } + + // Hide the original publications layer + if (this.publicationsGroup && this.map.hasLayer(this.publicationsGroup)) { + this.map.removeLayer(this.publicationsGroup); + console.log('👻 Hid original "All works" layer'); + } + + 
// Create a new GeoJSON FeatureCollection with filtered publications + const filteredGeoJSON = { + type: 'FeatureCollection', + features: this.filteredPublications + }; + + console.log('📦 Creating filtered layer with', this.filteredPublications.length, 'features'); + + // Import the style and popup functions from the global scope + const styleFunc = window.publicationStyle || this.publicationsLayer.options.style; + const popupFunc = window.publicationPopup || this.publicationsLayer.options.onEachFeature; + + // Create a new layer with the filtered publications + this.filteredLayer = L.geoJSON(filteredGeoJSON, { + style: styleFunc, + onEachFeature: popupFunc + }); + + // Add the filtered layer to the map + this.filteredLayer.addTo(this.map); + console.log('✅ Added filtered layer to map'); + + // Add to layer control + if (window.mapLayerControl) { + const resultCount = this.filteredPublications.length; + const layerName = `Search results (${resultCount})`; + window.mapLayerControl.addOverlay(this.filteredLayer, layerName); + console.log('📋 Added to layer control as:', layerName); + } + + // Fit map to filtered results + if (this.filteredPublications.length > 0) { + const bounds = this.filteredLayer.getBounds(); + if (bounds.isValid()) { + this.map.fitBounds(bounds, { padding: [50, 50] }); + console.log('🗺️ Map fitted to filtered results'); + } + } + } + + /** + * Show all publications (clear filter) + * Removes filtered layer and restores original layer + */ + showAllPublications() { + if (!this.map) return; + + console.log('🗺️ Showing all publications...'); + + // Remove filtered layer if it exists + if (this.filteredLayer) { + this.map.removeLayer(this.filteredLayer); + + // Remove from layer control + if (window.mapLayerControl) { + window.mapLayerControl.removeLayer(this.filteredLayer); + console.log('📋 Removed from layer control'); + } + + this.filteredLayer = null; + console.log('🗑️ Removed filtered layer'); + } + + // Restore the original publications layer + if (this.publicationsGroup && !this.map.hasLayer(this.publicationsGroup)) { + this.publicationsGroup.addTo(this.map); + console.log('✅ Restored original "All works" layer'); + } + + // Fit to all publications + if (this.publicationsGroup) { + const bounds = this.publicationsGroup.getBounds(); + if (bounds.isValid()) { + this.map.fitBounds(bounds); + console.log('🗺️ Map fitted to all publications'); + } + } + + this.filteredPublications = []; + this.isSearchActive = false; + } + + /** + * Clear search + */ + clearSearch() { + if (this.searchInput) { + this.searchInput.value = ''; + } + + if (this.clearButton) { + this.clearButton.style.display = 'none'; + } + + this.showAllPublications(); + this.announce('Search cleared. 
Showing all publications.'); + } + + /** + * Announce message to screen readers + */ + announce(message) { + if (!this.statusElement) { + // Try to find or create status element + this.statusElement = document.getElementById('search-results-status'); + if (!this.statusElement) { + this.statusElement = document.createElement('div'); + this.statusElement.id = 'search-results-status'; + this.statusElement.className = 'sr-only'; + this.statusElement.setAttribute('role', 'status'); + this.statusElement.setAttribute('aria-live', 'polite'); + this.statusElement.setAttribute('aria-atomic', 'true'); + document.body.appendChild(this.statusElement); + } + } + + this.statusElement.textContent = message; + console.log('Screen reader announcement:', message); + } + + /** + * Update publications data (called when new data is loaded) + */ + updatePublications(publications) { + this.allPublications = publications || []; + console.log(`Map search updated with ${this.allPublications.length} publications`); + + // If search is active, re-run search + if (this.isSearchActive && this.searchInput && this.searchInput.value.trim().length >= this.minSearchLength) { + this.performSearch(this.searchInput.value); + } + } +} diff --git a/publications/static/js/map-zoom-to-all.js b/publications/static/js/map-zoom-to-all.js new file mode 100644 index 0000000..436db83 --- /dev/null +++ b/publications/static/js/map-zoom-to-all.js @@ -0,0 +1,123 @@ +// publications/static/js/map-zoom-to-all.js + +/** + * MapZoomToAllControl + * + * Adds a custom Leaflet control button that zooms the map to show all features. + * This provides users with an easy way to reset the map view to display all publications. + * + * Usage: + * const zoomControl = new MapZoomToAllControl(map, featureGroup); + * + * @param {L.Map} map - The Leaflet map instance + * @param {L.FeatureGroup} featureGroup - The feature group containing all features to zoom to + */ +class MapZoomToAllControl { + constructor(map, featureGroup) { + this.map = map; + this.featureGroup = featureGroup; + this.control = null; + + this.init(); + } + + /** + * Initialize the control and add it to the map + */ + init() { + const ZoomToAllControl = L.Control.extend({ + options: { + position: 'topleft' + }, + + onAdd: (map) => { + // Create the control container + const container = L.DomUtil.create('div', 'leaflet-bar leaflet-control leaflet-control-zoom-to-all'); + + // Create the button + const button = L.DomUtil.create('a', 'leaflet-control-zoom-to-all-button', container); + button.href = '#'; + button.title = 'Zoom to all features'; + button.setAttribute('role', 'button'); + button.setAttribute('aria-label', 'Zoom to all features'); + + // Add icon using FontAwesome icon or Unicode fallback + button.innerHTML = ''; + + // Prevent map interactions when clicking the button + L.DomEvent.disableClickPropagation(container); + L.DomEvent.disableScrollPropagation(container); + + // Add click event handler + L.DomEvent.on(button, 'click', (e) => { + L.DomEvent.preventDefault(e); + this.zoomToAllFeatures(); + }); + + return container; + } + }); + + // Add the control to the map + this.control = new ZoomToAllControl(); + this.control.addTo(this.map); + + console.log('Zoom to all features control added'); + } + + /** + * Zoom the map to fit all features in the feature group + */ + zoomToAllFeatures() { + const bounds = this.featureGroup.getBounds(); + + if (bounds.isValid()) { + // Fit the map to the bounds with some padding + this.map.fitBounds(bounds, { + padding: [50, 50], + maxZoom: 18 + 
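+                // maxZoom caps how far fitBounds zooms in when the bounds are very
+                // small (e.g. a single feature), so the view does not jump straight
+                // to the map's maximum zoom level.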
}); + + console.log('Zoomed to all features'); + + // Announce to screen readers + this.announceToScreenReader('Map zoomed to show all features'); + } else { + console.warn('No valid bounds to zoom to'); + this.announceToScreenReader('No features to display'); + } + } + + /** + * Announce messages to screen readers + * @param {string} message - The message to announce + */ + announceToScreenReader(message) { + // Use existing status element if available, or create a temporary one + let statusElement = document.getElementById('search-results-status'); + + if (!statusElement) { + statusElement = document.createElement('div'); + statusElement.setAttribute('role', 'status'); + statusElement.setAttribute('aria-live', 'polite'); + statusElement.className = 'sr-only'; + document.body.appendChild(statusElement); + } + + statusElement.textContent = message; + } + + /** + * Remove the control from the map + */ + destroy() { + if (this.control) { + this.map.removeControl(this.control); + this.control = null; + console.log('Zoom to all features control removed'); + } + } +} + +// Make the class globally available +window.MapZoomToAllControl = MapZoomToAllControl; diff --git a/publications/templates/base.html b/publications/templates/base.html index ffb423e..56696fd 100644 --- a/publications/templates/base.html +++ b/publications/templates/base.html @@ -19,7 +19,7 @@ href="{% static 'fontawesome/css/solid.min.css' %}" /> - + {{ block.super }} @@ -48,9 +48,15 @@ {% endblock head %} + + + + + + -
+
{% block alert %}{% endblock %} {% block content %}{% endblock %}
diff --git a/publications/templates/geoextent.html b/publications/templates/geoextent.html new file mode 100644 index 0000000..15335e1 --- /dev/null +++ b/publications/templates/geoextent.html @@ -0,0 +1,1133 @@ +{% extends "base.html" %} +{% load static %} + +{% block head %} +{{ block.super }} + + +{% endblock head %} + +{% block content %} +
+

Geoextent Extraction

+ +

+ Extract geospatial and temporal extent from your data files or remote repositories. + This tool analyzes files and returns bounding boxes, time ranges, and other metadata. +

+ + +
+
+ +
+
+
+ +
+
+
+ +
+ + +
+ + Click "Browse Files..." to select files. You can click multiple times to add files from different locations. + Supports individual files and ZIP archives containing multiple files. + Maximum file size: {{ max_file_size_mb }}MB per file. + Maximum batch size: {{ max_batch_size_mb }}MB total. + +
+ +
+
+ + +
+
+
+ + + + Enter one identifier or URL per line. Maximum download size: {{ max_download_size_mb }}MB. + +
+
+
+ + +
+
+ + +
+
+
+
+
+ + +
+
+
Extraction Options
+ +
+
+
+
+ + +
+
+ + +
+
+
+
+
+ + +
+
+
+
+ + +
+
+
+
+ + +
+
+
+
+ + +
+
+
+
+
+
+ + +
+
+
+
+
+ + +
+ +
+
+
+ + +
+
+
+
Extracted Extents (0)
+
+ + +
+
+
+ +
+
+
+ + +
+ +
+
+
+
+ + + + + +
+
+
Documentation & Supported Formats (geoextent v{{ geoextent_version }})
+
+
+
Supported File Formats
+

Formats are dynamically loaded from geoextent's features API.

+ {% for format in supported_formats %} +
+
+ {{ format.name }}: + {% for ext in format.extensions %} + .{{ ext }} + {% endfor %} +
+ {% if format.description %} + {{ format.description }} + {% endif %} +
+ {% endfor %} +

ZIP archives: Upload ZIP files containing multiple data files. The extraction will process all supported files within the archive.

+ +
Supported Repository Providers
+

Provider information is dynamically loaded from geoextent's features API.

+
+ {% for provider in supported_providers %} +
+
+ + {% if provider.website %} + {{ provider.name }} + {% else %} + {{ provider.name }} + {% endif %} + +
+ {% if provider.description %} +

{{ provider.description }}

+ {% endif %} + {% if provider.examples %} +
+ Examples: + +
+ {% endif %} +
+ {% endfor %} +
+ +
External Resources
+ +
+
+
+{% endblock content %} + +{% block scripts %} + + +{% endblock scripts %} diff --git a/publications/templates/main.html b/publications/templates/main.html index c0a48f1..073e9da 100644 --- a/publications/templates/main.html +++ b/publications/templates/main.html @@ -10,16 +10,30 @@ + + + + + + {% endblock head %} {% block content %}
- + {# {%include timeline_snippet.html %} #}
+ + +
{% endblock content %} @@ -28,6 +42,19 @@ {% block scripts %} + + + + + {# #} diff --git a/publications/templates/sitemap_page.html b/publications/templates/sitemap_page.html index b15b2e5..e620856 100644 --- a/publications/templates/sitemap_page.html +++ b/publications/templates/sitemap_page.html @@ -41,6 +41,10 @@
Data & Technical
Feeds

Subscribe to RSS, Atom, and GeoRSS feeds for updates on new publications

+
  • + Geoextent +

    Extract spatial and temporal extent from geospatial data files and remote repositories

    +
  • diff --git a/publications/templates/unified_menu_snippet.html b/publications/templates/unified_menu_snippet.html index fe61fd0..bb66c32 100644 --- a/publications/templates/unified_menu_snippet.html +++ b/publications/templates/unified_menu_snippet.html @@ -23,12 +23,13 @@
  • Data & API
  • Feeds
  • +
  • Geoextent
  • About
  • Contact
  • Accessibility
  • -
  • Code on GitHub
  • +
  • Code on GitHub
  • {% if request.user.is_authenticated %} @@ -59,13 +60,16 @@ {% endif %}
    +
    @@ -73,7 +77,7 @@

    - Want to stay anonymous? Use Mailinator or check our privacy info. + Want to stay anonymous? Use Mailinator or check our privacy info.

    {% endif %} diff --git a/publications/templates/work_landing_page.html b/publications/templates/work_landing_page.html index 1e38e25..7f1b778 100644 --- a/publications/templates/work_landing_page.html +++ b/publications/templates/work_landing_page.html @@ -242,6 +242,8 @@
    Temporal extent (ti {% endif %} + +

    diff --git a/publications/templates/works.html b/publications/templates/works.html index d9521e6..f1bca32 100644 --- a/publications/templates/works.html +++ b/publications/templates/works.html @@ -4,30 +4,263 @@ {% block content %}

    -

    All Article Links

    +

    All Works

    + {% if is_admin %}

    Admin view: You can see all publications regardless of status. Status labels are shown next to each entry.

    {% endif %} -
      - {% for item in links %} -
    • - {{ item.title }} - {% if is_admin and item.status %} - {{ item.status }} + + + {% if page_obj %} +
      +
      +
      +

      + Showing {{ page_obj.start_index }} to {{ page_obj.end_index }} of {{ page_obj.paginator.count }} works +

      +
      +
      +
      + + + +
      +
      +
      + + + +
      + {% endif %} + + +
      + {% for work in works %} +
      +
      + + {{ work.title }} + + {% if is_admin and work.status %} + {{ work.status }} + {% endif %} +
      + +
      {% empty %} -
    • No publications found.
    • +

      No publications found.

      {% endfor %} -
    +
    + + + {% if page_obj and page_obj.paginator.num_pages > 1 %} +
    + +
    + {% endif %} + + + {% if statistics %} +
    +

    Statistics

    +
    +
    +
    +
    Total works in database:
    +
    {{ statistics.total_works|default:"0" }}
    + +
    Published works:
    +
    {{ statistics.published_works|default:"0" }}
    + +
    With geographic data:
    +
    {{ statistics.with_geometry|default:"0" }}
    + +
    With temporal extent:
    +
    {{ statistics.with_temporal|default:"0" }}
    +
    +
    +
    +
    +
    With author information:
    +
    {{ statistics.with_authors|default:"0" }}
    + +
    With DOI:
    +
    {{ statistics.with_doi|default:"0" }}
    + +
    With abstract:
    +
    {{ statistics.with_abstract|default:"0" }}
    + +
    Open access:
    +
    {{ statistics.open_access|default:"0" }}
    +
    +
    +
    +
    +
    +
    + Complete metadata coverage: + {{ statistics.with_complete_metadata|default:"0" }} works ({{ statistics.complete_percentage|default:"0" }}%) + have geographic data, temporal extent, and author information. +
    +
    +
    +
    + {% endif %} + + + {% if api_url %} + + {% endif %} + {% endblock %} diff --git a/publications/urls.py b/publications/urls.py index 0532d95..3f0be1e 100644 --- a/publications/urls.py +++ b/publications/urls.py @@ -6,6 +6,7 @@ from publications import views from publications import views_geometry from publications import views_feeds +from publications import views_gazetteer from .feeds import GeoFeed from .feeds_v2 import GlobalGeoFeed, RegionalGeoFeed from django.views.generic import RedirectView @@ -26,6 +27,10 @@ path('api/schema/', SpectacularAPIView.as_view(), name='schema'), path('api/schema/ui/', SpectacularRedocView.as_view(url_name='optimap:schema'), name='redoc'), + # API v1 Gazetteer proxy endpoints + path('api/v1/gazetteer//search/', views_gazetteer.gazetteer_search, name='gazetteer-search'), + path('api/v1/gazetteer//reverse/', views_gazetteer.gazetteer_reverse, name='gazetteer-reverse'), + # API v1 Feed endpoints - GeoRSS format (with .rss extension) path('api/v1/feeds/optimap-global.rss', GlobalGeoFeed(feed_type_variant="georss"), name='api-feed-georss'), path('api/v1/feeds/optimap-.rss', RegionalGeoFeed(feed_type_variant="georss"), name='api-continent-georss'), @@ -85,5 +90,6 @@ path("feeds/geoatom//", GeoFeedByGeometry(feed_type_variant="geoatom"), name="feed-geoatom-by-slug"), path('contribute/', views.contribute, name="contribute"), + path("geoextent/", views.geoextent, name="geoextent"), ] diff --git a/publications/utils/__init__.py b/publications/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/publications/utils/statistics.py b/publications/utils/statistics.py new file mode 100644 index 0000000..c7ef0ec --- /dev/null +++ b/publications/utils/statistics.py @@ -0,0 +1,101 @@ +# publications/utils/statistics.py +""" +Statistics utilities for OPTIMAP publications. +Provides cached statistics about the publication database. +""" + +from django.core.cache import cache +from django.db.models import Count, Q +from publications.models import Publication + + +STATS_CACHE_KEY = 'publications_statistics' +STATS_CACHE_TIMEOUT = 86400 # 24 hours in seconds + + +def calculate_statistics(): + """ + Calculate comprehensive statistics about publications. + + Returns: + dict: Statistics including total count, published count, + counts with geometry, temporal data, authors, etc. 
+ """ + # Base queryset for published works + published = Publication.objects.filter(status='p') + + stats = { + 'total_works': Publication.objects.count(), + 'published_works': published.count(), + 'with_geometry': published.exclude(geometry__isnull=True).count(), + 'with_temporal': published.filter( + Q(timeperiod_startdate__isnull=False) | + Q(timeperiod_enddate__isnull=False) + ).count(), + 'with_authors': published.exclude(authors__isnull=True).exclude(authors=[]).count(), + 'with_doi': published.exclude(doi__isnull=True).exclude(doi='').count(), + 'with_abstract': published.exclude(abstract__isnull=True).exclude(abstract='').count(), + 'open_access': published.exclude( + openalex_open_access_status__isnull=True + ).exclude( + openalex_open_access_status='' + ).count(), + 'from_openalex': published.exclude( + openalex_id__isnull=True + ).exclude( + openalex_id='' + ).count(), + } + + # Calculate percentage with complete metadata (geometry + temporal + authors) + complete = published.exclude(geometry__isnull=True).filter( + Q(timeperiod_startdate__isnull=False) | Q(timeperiod_enddate__isnull=False) + ).exclude(authors__isnull=True).exclude(authors=[]) + stats['with_complete_metadata'] = complete.count() + + # Calculate percentage + if stats['published_works'] > 0: + stats['complete_percentage'] = round( + (stats['with_complete_metadata'] / stats['published_works']) * 100, 1 + ) + else: + stats['complete_percentage'] = 0 + + return stats + + +def get_cached_statistics(): + """ + Get statistics from cache or calculate if not cached. + + Returns: + dict: Cached or freshly calculated statistics + """ + stats = cache.get(STATS_CACHE_KEY) + + if stats is None: + stats = calculate_statistics() + cache.set(STATS_CACHE_KEY, stats, STATS_CACHE_TIMEOUT) + + return stats + + +def update_statistics_cache(): + """ + Force recalculation and update of statistics cache. + Called by management command for nightly updates. + + Returns: + dict: The updated statistics + """ + stats = calculate_statistics() + cache.set(STATS_CACHE_KEY, stats, STATS_CACHE_TIMEOUT) + return stats + + +def clear_statistics_cache(): + """ + Clear the statistics cache. + Useful when publications are added/removed/updated. 
+ """ + cache.delete(STATS_CACHE_KEY) diff --git a/publications/views.py b/publications/views.py index fb9ff0e..95628ee 100644 --- a/publications/views.py +++ b/publications/views.py @@ -651,6 +651,47 @@ def feeds(request): "regions": regions_with_slugs, }) +def geoextent(request): + """Geoextent extraction UI page.""" + from geoextent.lib.features import get_supported_features + + # Get supported formats and providers from geoextent's features API + features = get_supported_features() + + # Organize file formats by handler type with display names + supported_formats = [] + for handler in features.get('file_formats', []): + display_name = handler.get('display_name', handler['handler']) + extensions = [ext.lstrip('.') for ext in handler.get('file_extensions', [])] + description = handler.get('description', '') + if extensions: + supported_formats.append({ + 'name': display_name, + 'extensions': extensions, + 'description': description + }) + + # Extract provider details with descriptions and URLs + supported_providers = [] + for provider in features.get('content_providers', []): + supported_providers.append({ + 'name': provider.get('name', 'Unknown'), + 'description': provider.get('description', ''), + 'website': provider.get('website', ''), + 'examples': provider.get('examples', []) + }) + + context = { + 'supported_formats': supported_formats, + 'supported_providers': supported_providers, + 'geoextent_version': features.get('version', 'unknown'), + 'max_file_size_mb': getattr(settings, 'GEOEXTENT_MAX_FILE_SIZE_MB', 100), + 'max_batch_size_mb': getattr(settings, 'GEOEXTENT_MAX_BATCH_SIZE_MB', 500), + 'max_download_size_mb': getattr(settings, 'GEOEXTENT_MAX_DOWNLOAD_SIZE_MB', 1000), + } + + return render(request, 'geoextent.html', context) + class RobotsView(View): http_method_names = ['get'] def get(self, request): @@ -740,37 +781,94 @@ def _normalize_authors(pub): def works_list(request): """ - Public page that lists a link for every work: + Public page that lists all works with pagination: - DOI present -> /work/ (site-local landing page) - no DOI -> fall back to Publication.url (external/original) Only published works (status='p') are shown to non-admin users. Admin users see all works with status labels. + + Supports pagination with user-selectable page size. 
""" + from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger + from publications.utils.statistics import get_cached_statistics + is_admin = request.user.is_authenticated and request.user.is_staff + # Get page size from request or use default + page_size = request.GET.get('size', settings.WORKS_PAGE_SIZE_DEFAULT) + try: + page_size = int(page_size) + # Clamp page size within allowed limits + page_size = max(settings.WORKS_PAGE_SIZE_MIN, min(page_size, settings.WORKS_PAGE_SIZE_MAX)) + except (ValueError, TypeError): + page_size = settings.WORKS_PAGE_SIZE_DEFAULT + + # Get page number from request + page_number = request.GET.get('page', 1) + + # Base queryset if is_admin: - pubs = Publication.objects.all().order_by("-creationDate", "-id") + pubs = Publication.objects.all().select_related('source') else: - pubs = Publication.objects.filter(status='p').order_by("-creationDate", "-id") + pubs = Publication.objects.filter(status='p').select_related('source') + + pubs = pubs.order_by("-creationDate", "-id") + + # Create paginator + paginator = Paginator(pubs, page_size) - links = [] - for pub in pubs: - link_data = {"title": pub.title} + try: + page_obj = paginator.page(page_number) + except PageNotAnInteger: + page_obj = paginator.page(1) + except EmptyPage: + page_obj = paginator.page(paginator.num_pages) + + # Build work data for current page + works = [] + for pub in page_obj: + work_data = { + "title": pub.title, + "doi": pub.doi, + "authors": pub.authors or [], + "source": pub.source.name if pub.source else None, + } if pub.doi: - link_data["href"] = reverse("optimap:article-landing", args=[pub.doi]) + work_data["href"] = reverse("optimap:article-landing", args=[pub.doi]) elif pub.url: - link_data["href"] = pub.url + work_data["href"] = pub.url # Add status info for admin users if is_admin: - link_data["status"] = pub.get_status_display() - link_data["status_code"] = pub.status + work_data["status"] = pub.get_status_display() + work_data["status_code"] = pub.status + + works.append(work_data) - links.append(link_data) + # Get cached statistics + stats = get_cached_statistics() + + # Build API URL for current page/size + # DRF uses limit/offset pagination, so calculate offset from page number + offset = (page_obj.number - 1) * page_size + api_url = request.build_absolute_uri( + '/api/v1/publications/' + + f'?limit={page_size}&offset={offset}' + ) + + context = { + "works": works, + "page_obj": page_obj, + "page_size": page_size, + "page_size_options": settings.WORKS_PAGE_SIZE_OPTIONS, + "is_admin": is_admin, + "statistics": stats, + "api_url": api_url, + } - return render(request, "works.html", {"links": links, "is_admin": is_admin}) + return render(request, "works.html", context) def work_landing(request, doi): diff --git a/publications/views_gazetteer.py b/publications/views_gazetteer.py new file mode 100644 index 0000000..15ef057 --- /dev/null +++ b/publications/views_gazetteer.py @@ -0,0 +1,217 @@ +""" +OPTIMAP gazetteer proxy views. +Provides CORS-safe proxying for geocoding services. 
+""" + +import requests +from django.http import JsonResponse +from django.views.decorators.http import require_http_methods +from django.conf import settings +import logging + +logger = logging.getLogger(__name__) + +# Geocoding service configurations +GEOCODING_SERVICES = { + 'nominatim': { + 'search_url': 'https://nominatim.openstreetmap.org/search', + 'reverse_url': 'https://nominatim.openstreetmap.org/reverse', + 'requires_key': False, + 'user_agent': 'OPTIMAP/1.0', + }, + 'photon': { + 'search_url': 'https://photon.komoot.io/api/', + 'reverse_url': 'https://photon.komoot.io/reverse', + 'requires_key': False, + }, +} + + +@require_http_methods(["GET"]) +def gazetteer_search(request, provider): + """ + Proxy geocoding search requests to avoid CORS issues. + + Args: + request: Django request object + provider: Geocoding provider name (nominatim, photon, etc.) + + Returns: + JsonResponse with geocoding results + """ + # Validate provider + provider = provider.lower() + if provider not in GEOCODING_SERVICES: + return JsonResponse({ + 'error': f'Unknown provider: {provider}', + 'available_providers': list(GEOCODING_SERVICES.keys()) + }, status=400) + + service_config = GEOCODING_SERVICES[provider] + + # Check if API key is required + if service_config.get('requires_key', False): + api_key = getattr(settings, 'GAZETTEER_API_KEY', '') + if not api_key: + return JsonResponse({ + 'error': f'Provider {provider} requires an API key' + }, status=400) + + # Get search query + query = request.GET.get('q', '').strip() + if not query: + return JsonResponse({ + 'error': 'Missing search query parameter "q"' + }, status=400) + + try: + # Build request parameters based on provider + if provider == 'nominatim': + params = { + 'q': query, + 'format': request.GET.get('format', 'json'), + 'limit': request.GET.get('limit', '5'), + 'addressdetails': request.GET.get('addressdetails', '1'), + } + headers = { + 'User-Agent': service_config.get('user_agent', 'OPTIMAP/1.0'), + } + + elif provider == 'photon': + params = { + 'q': query, + 'limit': request.GET.get('limit', '5'), + 'lang': request.GET.get('lang', 'en'), + } + headers = {} + + else: + # Generic parameter passthrough + params = dict(request.GET) + params['q'] = query + headers = {} + + # Make request to geocoding service + logger.info(f'Geocoding request: {provider} - {query}') + + response = requests.get( + service_config['search_url'], + params=params, + headers=headers, + timeout=10 + ) + + response.raise_for_status() + + # Return the response as-is + try: + data = response.json() + except ValueError: + return JsonResponse({ + 'error': 'Invalid JSON response from geocoding service' + }, status=502) + + logger.info(f'Geocoding results: {len(data) if isinstance(data, list) else 1} results') + + return JsonResponse(data, safe=False) + + except requests.exceptions.Timeout: + logger.error(f'Geocoding timeout: {provider}') + return JsonResponse({ + 'error': 'Geocoding service timeout' + }, status=504) + + except requests.exceptions.RequestException as e: + logger.error(f'Geocoding error: {provider} - {str(e)}') + return JsonResponse({ + 'error': f'Geocoding service error: {str(e)}' + }, status=502) + + +@require_http_methods(["GET"]) +def gazetteer_reverse(request, provider): + """ + Proxy reverse geocoding requests (coordinates to address). 
+ + Args: + request: Django request object + provider: Geocoding provider name + + Returns: + JsonResponse with reverse geocoding result + """ + # Validate provider + provider = provider.lower() + if provider not in GEOCODING_SERVICES: + return JsonResponse({ + 'error': f'Unknown provider: {provider}', + 'available_providers': list(GEOCODING_SERVICES.keys()) + }, status=400) + + service_config = GEOCODING_SERVICES[provider] + + # Get coordinates + lat = request.GET.get('lat', '').strip() + lon = request.GET.get('lon', '').strip() + + if not lat or not lon: + return JsonResponse({ + 'error': 'Missing lat/lon parameters' + }, status=400) + + try: + # Validate coordinates + lat_float = float(lat) + lon_float = float(lon) + + if not (-90 <= lat_float <= 90): + return JsonResponse({'error': 'Invalid latitude'}, status=400) + if not (-180 <= lon_float <= 180): + return JsonResponse({'error': 'Invalid longitude'}, status=400) + + except ValueError: + return JsonResponse({'error': 'Invalid coordinate format'}, status=400) + + try: + # Build request parameters + if provider == 'nominatim': + params = { + 'lat': lat, + 'lon': lon, + 'format': request.GET.get('format', 'json'), + } + headers = { + 'User-Agent': service_config.get('user_agent', 'OPTIMAP/1.0'), + } + + elif provider == 'photon': + params = { + 'lat': lat, + 'lon': lon, + } + headers = {} + + else: + params = dict(request.GET) + headers = {} + + # Make request + logger.info(f'Reverse geocoding: {provider} - {lat},{lon}') + + response = requests.get( + service_config['reverse_url'], + params=params, + headers=headers, + timeout=10 + ) + + response.raise_for_status() + data = response.json() + + return JsonResponse(data, safe=False) + + except requests.exceptions.RequestException as e: + logger.error(f'Reverse geocoding error: {provider} - {str(e)}') + return JsonResponse({ + 'error': f'Reverse geocoding service error: {str(e)}' + }, status=502) diff --git a/publications/viewsets.py b/publications/viewsets.py index a9ba4cb..1e0cedd 100644 --- a/publications/viewsets.py +++ b/publications/viewsets.py @@ -1,15 +1,38 @@ """publications API views.""" -from rest_framework import viewsets +import json +import logging +import os +import shutil +import tempfile +import uuid +import zipfile +from pathlib import Path + +from rest_framework import viewsets, status from rest_framework_gis import filters -from rest_framework.permissions import IsAuthenticatedOrReadOnly +from rest_framework.permissions import IsAuthenticatedOrReadOnly, AllowAny +from rest_framework.decorators import action +from rest_framework.response import Response +from django.conf import settings +from django.contrib.gis.geos import Polygon, Point, MultiPoint + +# Import geoextent at module level +import geoextent.lib.extent as geoextent + from .models import Publication, Source, Subscription from .serializers import ( PublicationSerializer, SourceSerializer, SubscriptionSerializer, + GeoextentExtractSerializer, + GeoextentRemoteSerializer, + GeoextentRemoteGetSerializer, + GeoextentBatchSerializer, ) +logger = logging.getLogger(__name__) + class SourceViewSet(viewsets.ReadOnlyModelViewSet): queryset = Source.objects.all() serializer_class = SourceSerializer @@ -24,10 +47,11 @@ class PublicationViewSet(viewsets.ReadOnlyModelViewSet): def get_queryset(self): """ Return all publications for admin users, only published ones for others. + Sorted by creation date (newest first) to match the works list page. 
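+        Example (illustrative): GET /api/v1/publications/?limit=10&offset=0 returns
+        the ten most recently created works visible to the requesting user.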
""" if self.request.user.is_authenticated and self.request.user.is_staff: - return Publication.objects.all().distinct() - return Publication.objects.filter(status="p").distinct() + return Publication.objects.all().order_by("-creationDate", "-id").distinct() + return Publication.objects.filter(status="p").order_by("-creationDate", "-id").distinct() class SubscriptionViewSet(viewsets.ModelViewSet): """ @@ -44,4 +68,638 @@ def get_queryset(self): return Subscription.objects.filter(user=user) def perform_create(self, serializer): - serializer.save(user=self.request.user) \ No newline at end of file + serializer.save(user=self.request.user) + + +class GeoextentViewSet(viewsets.ViewSet): + """ + ViewSet for extracting geospatial and temporal extents from files. + + Provides three endpoints: + - extract: Extract from uploaded file + - extract-remote: Extract from remote repository (Zenodo, PANGAEA, etc.) + - extract-batch: Extract from multiple uploaded files + + Public API - no authentication required. + """ + permission_classes = [AllowAny] + + def _cleanup_temp_file(self, filepath): + """Delete temporary file safely.""" + try: + if filepath and os.path.exists(filepath): + os.remove(filepath) + logger.debug(f"Cleaned up temp file: {filepath}") + except Exception as e: + logger.warning(f"Failed to cleanup temp file {filepath}: {e}") + + def _save_uploaded_file(self, uploaded_file): + """Save uploaded file to temporary location.""" + temp_dir = Path(settings.GEOEXTENT_TEMP_DIR) + temp_dir.mkdir(exist_ok=True, parents=True) + + # Generate unique filename + file_ext = Path(uploaded_file.name).suffix + temp_filename = f"{uuid.uuid4()}{file_ext}" + temp_path = temp_dir / temp_filename + + # Save file + with open(temp_path, 'wb+') as destination: + for chunk in uploaded_file.chunks(): + destination.write(chunk) + + logger.info(f"Saved uploaded file to: {temp_path}") + return str(temp_path) + + def _process_geoextent_result(self, result): + """ + Process geoextent result and format for API response. + Geoextent returns the extent information directly. + """ + try: + # Check if result is None or empty + if result is None: + logger.error("Geoextent returned None - no valid spatial data found") + return None + + if not isinstance(result, dict): + logger.error(f"Geoextent returned unexpected type: {type(result)}") + return None + + response = { + 'success': True, + } + + # Add spatial extent if present + if 'bbox' in result: + response['spatial_extent'] = result['bbox'] + + # Add temporal extent if present + if 'tbox' in result: + response['temporal_extent'] = result['tbox'] + + # Add placename if present (geoextent extracts this) + if 'placename' in result and result['placename']: + response['placename'] = result['placename'] + + # Add external metadata if present (from CrossRef/DataCite) + if 'external_metadata' in result and result['external_metadata']: + response['external_metadata'] = result['external_metadata'] + + # Add metadata + response['metadata'] = {} + if 'format' in result: + response['metadata']['file_format'] = result['format'] + if 'crs' in result: + response['metadata']['crs'] = result['crs'] + if 'file_size_bytes' in result: + response['metadata']['file_size_bytes'] = result['file_size_bytes'] + + return response + except Exception as e: + logger.error(f"Error processing geoextent result: {e}") + raise + + def _build_geoextent_extraction_metadata(self, geoextent_result, identifiers=None): + """ + Build geoextent_extraction metadata object matching CLI output format. 
+ + Directly copies geoextent output structure to avoid confusion between API and CLI. + """ + import geoextent + + metadata = { + 'version': geoextent.__version__, + 'inputs': identifiers if identifiers else [], + } + + # Directly copy statistics from extraction_metadata if available + if 'extraction_metadata' in geoextent_result: + em = geoextent_result['extraction_metadata'] + stats = {} + # Copy exactly as geoextent CLI returns them + if 'total_resources' in em: + stats['files_processed'] = em['total_resources'] + if 'successful_resources' in em: + stats['files_with_extent'] = em['successful_resources'] + if 'total_size' in em: + stats['total_size'] = em['total_size'] + if stats: + metadata['statistics'] = stats + + # Directly copy format and CRS from geoextent result + if 'format' in geoextent_result: + metadata['format'] = geoextent_result['format'] + if 'crs' in geoextent_result: + metadata['crs'] = geoextent_result['crs'] + + # Determine extent type + if geoextent_result.get('convex_hull'): + metadata['extent_type'] = 'convex_hull' + else: + metadata['extent_type'] = 'bounding_box' + + return metadata + + def _format_response(self, geoextent_result, structured_result, response_format, identifiers=None): + """ + Format the response based on the requested format. + + Args: + geoextent_result: Raw result from geoextent (dict with bbox, tbox, etc.) + structured_result: Processed structured result from _process_geoextent_result + response_format: One of 'geojson', 'wkt', 'wkb' + identifiers: List of input identifiers (for metadata) + + Returns: + Formatted response based on response_format + """ + if response_format == 'geojson': + # Use geoextent's format_extent_output to create proper GeoJSON + # This ensures we match CLI output exactly and don't need to manually + # reconstruct GeoJSON from bbox + import geoextent.lib.helpfunctions as hf + + # Build extraction metadata for geoextent's formatter + extraction_metadata = self._build_geoextent_extraction_metadata( + geoextent_result, + identifiers=identifiers + ) + + # Use geoextent's official formatter to create GeoJSON FeatureCollection + # This handles bbox, convex_hull, tbox, placename, external_metadata automatically + formatted_output = hf.format_extent_output( + geoextent_result, + output_format='geojson', + extraction_metadata=extraction_metadata + ) + + return formatted_output + + elif response_format in ['wkt', 'wkb']: + # For WKT/WKB, we need to convert bbox to geometry + if not structured_result.get('spatial_extent'): + return { + 'success': False, + 'error': f'Cannot convert to {response_format}: no spatial extent available' + } + + bbox = structured_result['spatial_extent'] + + # Handle convex hull format (list of points) + if isinstance(bbox, list) and len(bbox) > 0 and isinstance(bbox[0], list): + # Convex hull: list of [lon, lat] points + if len(bbox) == 1: + # Single point + geom = Point(bbox[0][0], bbox[0][1], srid=4326) + else: + # Multiple points - create polygon from points + points = [(point[0], point[1]) for point in bbox] + # Close the polygon if not already closed + if points[0] != points[-1]: + points.append(points[0]) + geom = Polygon(points, srid=4326) + + # Handle standard bbox format [min_lon, min_lat, max_lon, max_lat] + elif isinstance(bbox, list) and len(bbox) == 4: + geom = Polygon.from_bbox(bbox) + geom.srid = 4326 + else: + return { + 'success': False, + 'error': f'Cannot convert bbox format {bbox} to {response_format}' + } + + # Build geoextent_extraction metadata + geoextent_extraction = 
self._build_geoextent_extraction_metadata( + geoextent_result, + identifiers=identifiers + ) + + # Create result with geometry in requested format + if response_format == 'wkt': + result = {'wkt': geom.wkt} + else: # wkb + result = {'wkb': geom.wkb.hex()} + + # Add common fields + result['crs'] = 'EPSG:4326' + result['geoextent_extraction'] = geoextent_extraction + + # Add tbox if present (using same property name as CLI) + if structured_result.get('temporal_extent'): + result['tbox'] = structured_result['temporal_extent'] + if structured_result.get('placename'): + result['placename'] = structured_result['placename'] + if structured_result.get('external_metadata'): + result['external_metadata'] = structured_result['external_metadata'] + + return result + + # Default fallback + return structured_result + + @action(detail=False, methods=['post']) + def extract(self, request): + """ + Extract geospatial and temporal extent from uploaded file. + + POST /api/v1/geoextent/extract/ + """ + serializer = GeoextentExtractSerializer(data=request.data) + if not serializer.is_valid(): + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + uploaded_file = serializer.validated_data['file'] + bbox = serializer.validated_data['bbox'] + tbox = serializer.validated_data['tbox'] + convex_hull = serializer.validated_data['convex_hull'] + response_format = serializer.validated_data['response_format'] + placename = serializer.validated_data['placename'] + gazetteer = serializer.validated_data['gazetteer'] + + temp_path = None + + try: + # Check file size + max_size_bytes = settings.GEOEXTENT_MAX_FILE_SIZE_MB * 1024 * 1024 + if uploaded_file.size > max_size_bytes: + return Response( + { + 'success': False, + 'error': 'File too large', + 'details': f'File size ({uploaded_file.size} bytes) exceeds maximum ({max_size_bytes} bytes)' + }, + status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE + ) + + # Save uploaded file + temp_path = self._save_uploaded_file(uploaded_file) + + # Check if the file is a ZIP archive + is_zip = zipfile.is_zipfile(temp_path) + temp_dir = None + + if is_zip: + # Extract ZIP to temporary directory and process with fromDirectory + temp_dir = tempfile.mkdtemp(prefix='geoextent_zip_') + logger.info(f"Extracting ZIP file to: {temp_dir}") + + with zipfile.ZipFile(temp_path, 'r') as zip_ref: + zip_ref.extractall(temp_dir) + + # Call geoextent.fromDirectory on extracted contents + geoextent_result = geoextent.fromDirectory( + temp_dir, + bbox=bbox, + tbox=tbox, + convex_hull=convex_hull, + placename=gazetteer if placename else None, + show_progress=False, # Disable progress bar in API + recursive=True, # Process subdirectories in ZIP + ) + else: + # Call geoextent once with all parameters + # placename parameter: None, 'nominatim', 'geonames', or 'photon' + geoextent_result = geoextent.fromFile( + temp_path, + bbox=bbox, + tbox=tbox, + convex_hull=convex_hull, + placename=gazetteer if placename else None, + show_progress=False, # Disable progress bar in API + ) + + # Process result to structured format + structured_result = self._process_geoextent_result(geoextent_result) + + # Check if processing failed + if structured_result is None: + return Response({ + 'error': f'Could not extract spatial extent from "{uploaded_file.name}". The file may not contain valid spatial data or may be in an unsupported format.' 
+ }, status=status.HTTP_400_BAD_REQUEST) + + structured_result['filename'] = uploaded_file.name + + # Format response based on requested format + result = self._format_response( + geoextent_result, + structured_result, + response_format, + identifiers=[uploaded_file.name] + ) + + return Response(result, status=status.HTTP_200_OK) + + except Exception as e: + logger.error(f"Error processing file extraction: {e}", exc_info=True) + return Response( + { + 'success': False, + 'error': 'Processing error', + 'details': str(e) + }, + status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) + finally: + # Cleanup temp file + if temp_path: + self._cleanup_temp_file(temp_path) + # Cleanup temp directory if ZIP was extracted + if 'temp_dir' in locals() and temp_dir and os.path.exists(temp_dir): + try: + shutil.rmtree(temp_dir) + logger.info(f"Cleaned up temp directory: {temp_dir}") + except Exception as e: + logger.warning(f"Failed to cleanup temp directory {temp_dir}: {e}") + + @action(detail=False, methods=['get', 'post'], url_path='extract-remote') + def extract_remote(self, request): + """ + Extract geospatial and temporal extent from one or more remote repositories. + + POST /api/v1/geoextent/extract-remote/ - JSON body with identifiers array + GET /api/v1/geoextent/extract-remote/?identifiers=doi1,doi2 - URL parameters with comma-separated identifiers + """ + # Use different serializers for GET vs POST + if request.method == 'GET': + serializer = GeoextentRemoteGetSerializer(data=request.query_params) + else: + serializer = GeoextentRemoteSerializer(data=request.data) + + if not serializer.is_valid(): + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + identifiers = serializer.validated_data['identifiers'] + bbox = serializer.validated_data['bbox'] + tbox = serializer.validated_data['tbox'] + convex_hull = serializer.validated_data['convex_hull'] + response_format = serializer.validated_data['response_format'] + placename = serializer.validated_data['placename'] + gazetteer = serializer.validated_data['gazetteer'] + file_limit = serializer.validated_data['file_limit'] + size_limit_mb = serializer.validated_data['size_limit_mb'] + external_metadata = serializer.validated_data['external_metadata'] + external_metadata_method = serializer.validated_data['external_metadata_method'] + + try: + workers = settings.GEOEXTENT_DOWNLOAD_WORKERS + + # Pass identifiers as list or string to geoextent.fromRemote + # It will handle combining extents natively + geoextent_input = identifiers[0] if len(identifiers) == 1 else identifiers + + # Call geoextent once with all identifiers + geoextent_result = geoextent.fromRemote( + geoextent_input, + bbox=bbox, + tbox=tbox, + convex_hull=convex_hull, + details=True, # Get individual results + placename=gazetteer if placename else None, + max_download_workers=workers, + max_download_size=f"{size_limit_mb}MB" if size_limit_mb else None, + show_progress=False, # Disable progress bar in API + download_skip_nogeo=True, # Skip non-geospatial files + ext_metadata=external_metadata, + ext_metadata_method=external_metadata_method, + ) + + # For single identifier, geoextent returns simple format + if len(identifiers) == 1: + structured_result = self._process_geoextent_result(geoextent_result) + + # Check if processing failed + if structured_result is None: + return Response({ + 'error': f'Could not extract spatial extent from "{identifiers[0]}". The resource may not contain valid spatial data or may be inaccessible.' 
+ }, status=status.HTTP_400_BAD_REQUEST) + + structured_result['identifier'] = identifiers[0] + formatted_result = self._format_response( + geoextent_result, + structured_result, + response_format, + identifiers=identifiers + ) + return Response(formatted_result, status=status.HTTP_200_OK) + + # For multiple identifiers, geoextent returns remote_bulk format + # Extract individual results from details + individual_results = [] + if 'details' in geoextent_result: + for identifier, file_result in geoextent_result['details'].items(): + # Check if this result has an error + if 'error' in file_result: + individual_results.append({ + 'identifier': identifier, + 'success': False, + 'error': file_result['error'] + }) + continue + + structured_result = self._process_geoextent_result(file_result) + structured_result['identifier'] = identifier + + # Format based on response_format + formatted_result = self._format_response( + file_result, + structured_result, + response_format, + identifiers=[identifier] + ) + if response_format not in ['geojson', 'wkt', 'wkb']: + formatted_result['identifier'] = identifier + + individual_results.append(formatted_result) + + # Build response with combined extent (geoextent always combines) + combined_structured = self._process_geoextent_result(geoextent_result) + combined_formatted = self._format_response( + geoextent_result, + combined_structured, + response_format, + identifiers=identifiers + ) + + # For multiple identifiers, return structured response with combined + individual + # For GeoJSON format, return FeatureCollection with all features + if response_format == 'geojson': + # Merge all features into single FeatureCollection + all_features = [] + if isinstance(combined_formatted, dict) and 'features' in combined_formatted: + all_features = combined_formatted['features'].copy() + + # Add individual features + for result in individual_results: + if isinstance(result, dict) and 'features' in result: + all_features.extend(result['features']) + + response_data = { + 'type': 'FeatureCollection', + 'features': all_features, + 'geoextent_extraction': combined_formatted.get('geoextent_extraction', {}) + } + else: + # For WKT/WKB, return combined with metadata + response_data = combined_formatted + + return Response(response_data, status=status.HTTP_200_OK) + + except Exception as e: + logger.error(f"Error processing remote extraction: {e}", exc_info=True) + return Response( + {'error': str(e)}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) + + @action(detail=False, methods=['post'], url_path='extract-batch') + def extract_batch(self, request): + """ + Extract geospatial and temporal extent from multiple uploaded files. 
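+
+        Illustrative request: multipart/form-data with the "files" field repeated
+        once per file, plus the option fields validated by GeoextentBatchSerializer
+        (bbox, tbox, convex_hull, response_format, placename, gazetteer,
+        size_limit_mb).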
+ + POST /api/v1/geoextent/extract-batch/ + """ + serializer = GeoextentBatchSerializer(data=request.data) + if not serializer.is_valid(): + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + bbox = serializer.validated_data['bbox'] + tbox = serializer.validated_data['tbox'] + convex_hull = serializer.validated_data['convex_hull'] + response_format = serializer.validated_data['response_format'] + placename = serializer.validated_data['placename'] + gazetteer = serializer.validated_data['gazetteer'] + size_limit_mb = serializer.validated_data['size_limit_mb'] + + # Get uploaded files from request + files = request.FILES.getlist('files') + if not files: + return Response( + { + 'success': False, + 'error': 'No files provided', + 'details': 'At least one file must be uploaded' + }, + status=status.HTTP_400_BAD_REQUEST + ) + + temp_dir = None + + try: + # Check total size + total_size = sum(f.size for f in files) + max_size_bytes = size_limit_mb * 1024 * 1024 + if total_size > max_size_bytes: + return Response( + { + 'success': False, + 'error': 'Total size exceeds limit', + 'details': f'Total size ({total_size} bytes) exceeds limit ({max_size_bytes} bytes)' + }, + status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE + ) + + # Create a temporary directory for all uploaded files + temp_dir = tempfile.mkdtemp(prefix='geoextent_batch_') + logger.info(f"Created temp directory for batch processing: {temp_dir}") + + # Save all files to the temporary directory + for uploaded_file in files: + temp_path = os.path.join(temp_dir, uploaded_file.name) + with open(temp_path, 'wb') as destination: + for chunk in uploaded_file.chunks(): + destination.write(chunk) + logger.debug(f"Saved {uploaded_file.name} to {temp_path}") + + # Use geoextent.fromDirectory to process all files at once + # details=True provides individual file results + geoextent_result = geoextent.fromDirectory( + temp_dir, + bbox=bbox, + tbox=tbox, + convex_hull=convex_hull, + details=True, # Get individual file details + placename=gazetteer if placename else None, + show_progress=False, # Disable progress bar in API + recursive=False, # Don't traverse subdirectories + ) + + # Process combined result + combined_structured = self._process_geoextent_result(geoextent_result) + + # Check if processing failed for combined result + if combined_structured is None: + filenames = ', '.join([f.name for f in files]) + return Response({ + 'success': False, + 'error': f'Could not extract spatial extent from the uploaded files: {filenames}', + 'details': 'The files may not contain valid spatial data or may be in unsupported formats.' + }, status=status.HTTP_400_BAD_REQUEST) + + # Process individual file results from details + individual_results = [] + if 'details' in geoextent_result: + for filename, file_result in geoextent_result['details'].items(): + structured_result = self._process_geoextent_result(file_result) + + # Skip files that failed processing + if structured_result is None: + logger.warning(f"Could not extract extent from {filename}") + individual_results.append({ + 'filename': filename, + 'error': 'Could not extract spatial extent', + 'details': 'The file may not contain valid spatial data or may be in an unsupported format.' 
+ }) + continue + + structured_result['filename'] = filename + + # Format based on response_format + formatted_result = self._format_response( + file_result, + structured_result, + response_format, + identifiers=[filename] + ) + if response_format not in ['geojson', 'wkt', 'wkb']: + formatted_result['filename'] = filename + + individual_results.append(formatted_result) + + # Build response with combined extent (geoextent always combines) + filenames = [f.name for f in files] + combined_formatted = self._format_response( + geoextent_result, + combined_structured, + response_format, + identifiers=filenames + ) + + response_data = { + 'success': True, + 'files_processed': len(files), + 'combined_extent': combined_formatted, + 'individual_results': individual_results + } + + return Response(response_data, status=status.HTTP_200_OK) + + except Exception as e: + logger.error(f"Error processing batch extraction: {e}", exc_info=True) + return Response( + { + 'success': False, + 'error': 'Processing error', + 'details': str(e) + }, + status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) + finally: + # Cleanup temporary directory and all files + if temp_dir and os.path.exists(temp_dir): + try: + shutil.rmtree(temp_dir) + logger.info(f"Cleaned up temp directory: {temp_dir}") + except Exception as e: + logger.error(f"Error cleaning up temp directory {temp_dir}: {e}") \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 2c89786..7607e46 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,7 +12,6 @@ django-leaflet==0.31.0 dj-database-url==2.3.0 django-picklefield==3.2 django-q2==1.7.4 -django-sesame==3.2.2 djangorestframework==3.15.2 djangorestframework-gis==1.1 drf-spectacular==0.28.0 @@ -43,3 +42,7 @@ oaipmh-scythe==0.13.0 feedparser==6.0.12 wikibaseintegrator>=0.12.4 requests-oauthlib>=1.3.1 + + +# Geoextent library for spatial/temporal extent extraction +git+https://github.com/nuest/geoextent.git@main#egg=geoextent \ No newline at end of file diff --git a/tests-ui/test_geoextent.py b/tests-ui/test_geoextent.py new file mode 100644 index 0000000..3019097 --- /dev/null +++ b/tests-ui/test_geoextent.py @@ -0,0 +1,258 @@ +import unittest +import os +import tempfile +from django.test import TestCase +from django.urls import reverse +from helium import ( + start_chrome, + click, + get_driver, + kill_browser, + write, + Text, + Button, + wait_until, + find_all +) + + +class GeoextentPageTests(TestCase): + """UI tests for the geoextent extraction page.""" + + def test_url_exists_at_correct_location(self): + """Test that the geoextent URL returns 200.""" + response = self.client.get("/geoextent/") + self.assertEqual(response.status_code, 200) + + def test_url_available_by_name(self): + """Test that the geoextent URL is accessible by name.""" + response = self.client.get(reverse("optimap:geoextent")) + self.assertEqual(response.status_code, 200) + + def test_template_name_correct(self): + """Test that the correct template is used.""" + response = self.client.get(reverse("optimap:geoextent")) + self.assertTemplateUsed(response, "geoextent.html") + + def test_template_content(self): + """Test that the page contains expected content.""" + response = self.client.get(reverse("optimap:geoextent")) + self.assertContains(response, "Geoextent Extraction") + self.assertContains(response, "Upload Files") + self.assertContains(response, "Remote Resource") + self.assertContains(response, "Browse Files...") + self.assertContains(response, "Extract Extent") + + def 
test_page_has_file_upload_form(self): + """Test that the page has file upload form elements.""" + response = self.client.get(reverse("optimap:geoextent")) + self.assertContains(response, 'id="file-upload-form"') + self.assertContains(response, 'id="browse-files-btn"') + self.assertContains(response, 'id="files"') + self.assertContains(response, 'id="extract-files-btn"') + + def test_page_has_remote_resource_form(self): + """Test that the page has remote resource form elements.""" + response = self.client.get(reverse("optimap:geoextent")) + self.assertContains(response, 'id="remote-form"') + self.assertContains(response, 'id="identifiers"') + self.assertContains(response, 'id="file_limit"') + self.assertContains(response, 'id="size_limit_mb"') + + def test_page_has_extraction_options(self): + """Test that the page has all extraction option checkboxes.""" + response = self.client.get(reverse("optimap:geoextent")) + self.assertContains(response, 'id="bbox"') + self.assertContains(response, 'id="tbox"') + self.assertContains(response, 'id="convex_hull"') + self.assertContains(response, 'id="placename"') + self.assertContains(response, 'id="response_format"') + self.assertContains(response, 'id="gazetteer"') + + def test_page_has_documentation_section(self): + """Test that the page has documentation section.""" + response = self.client.get(reverse("optimap:geoextent")) + self.assertContains(response, "Supported File Formats") + self.assertContains(response, "Supported Repository Providers") + self.assertContains(response, "geoextent") # Should show version + + def test_page_displays_geoextent_version(self): + """Test that the page displays the geoextent version.""" + response = self.client.get(reverse("optimap:geoextent")) + # Should contain version information + self.assertContains(response, "geoextent v") + + def test_page_has_map_container(self): + """Test that the page has a map container.""" + response = self.client.get(reverse("optimap:geoextent")) + self.assertContains(response, 'id="geoextent-map"') + + def test_footer_link_exists(self): + """Test that geoextent link exists in footer.""" + response = self.client.get("/") + self.assertContains(response, 'href="/geoextent/"') + self.assertContains(response, 'Geoextent') + + +class GeoextentUIInteractionTests(TestCase): + """Browser-based UI interaction tests for geoextent page.""" + + @classmethod + def setUpClass(cls): + """Set up test fixtures.""" + super().setUpClass() + cls.base_url = 'localhost:8000' + cls.screenshot_dir = os.path.join(os.getcwd(), 'tests-ui', 'screenshots') + os.makedirs(cls.screenshot_dir, exist_ok=True) + + def test_geoextent_page_loads(self): + """Test that the geoextent page loads correctly in browser.""" + try: + start_chrome(f'{self.base_url}/geoextent/', headless=True) + + # Check page title + driver = get_driver() + self.assertIn("OPTIMAP", driver.title) + + # Check main heading exists + self.assertTrue(Text("Geoextent Extraction").exists()) + + # Take screenshot + driver.save_screenshot( + os.path.join(self.screenshot_dir, 'geoextent_page.png') + ) + finally: + kill_browser() + + def test_tab_navigation(self): + """Test switching between Upload Files and Remote Resource tabs.""" + try: + start_chrome(f'{self.base_url}/geoextent/', headless=True) + + # Check default tab is Upload Files + self.assertTrue(Text("Browse Files...").exists()) + + # Click Remote Resource tab + click("Remote Resource") + + # Wait for tab content to appear + wait_until(lambda: Text("Resource Identifiers").exists(), timeout_secs=5) + 
+ # Check remote form elements are visible + self.assertTrue(Text("File Limit").exists()) + + # Take screenshot + get_driver().save_screenshot( + os.path.join(self.screenshot_dir, 'geoextent_remote_tab.png') + ) + finally: + kill_browser() + + def test_browse_files_button_exists(self): + """Test that browse files button exists and is clickable.""" + try: + start_chrome(f'{self.base_url}/geoextent/', headless=True) + + # Check browse button exists + self.assertTrue(Button("Browse Files...").exists()) + + # Check extract button exists and is disabled initially + driver = get_driver() + extract_btn = driver.find_element("id", "extract-files-btn") + self.assertTrue(extract_btn.get_attribute("disabled")) + + finally: + kill_browser() + + def test_remote_form_validation(self): + """Test that remote form shows validation when submitted empty.""" + try: + start_chrome(f'{self.base_url}/geoextent/', headless=True) + + # Switch to Remote Resource tab + click("Remote Resource") + wait_until(lambda: Text("Resource Identifiers").exists(), timeout_secs=5) + + # Try to submit without entering identifier + # Note: The form submission button in remote tab + buttons = find_all(Button) + submit_button = None + for btn in buttons: + if "Extract Extent" in btn.web_element.text: + submit_button = btn + break + + if submit_button: + click(submit_button) + + # Wait for error message (should appear) + wait_until(lambda: Text("Error").exists() or True, timeout_secs=2) + + finally: + kill_browser() + + def test_extraction_options_visible(self): + """Test that all extraction options are visible.""" + try: + start_chrome(f'{self.base_url}/geoextent/', headless=True) + + # Check all option labels exist + self.assertTrue(Text("Bounding Box").exists()) + self.assertTrue(Text("Time Box").exists()) + self.assertTrue(Text("Convex Hull").exists()) + self.assertTrue(Text("Place Name").exists()) + self.assertTrue(Text("Output Format").exists()) + self.assertTrue(Text("Gazetteer Service").exists()) + + # Take screenshot of options + get_driver().save_screenshot( + os.path.join(self.screenshot_dir, 'geoextent_options.png') + ) + finally: + kill_browser() + + def test_documentation_section_visible(self): + """Test that documentation section is visible and scrollable.""" + try: + start_chrome(f'{self.base_url}/geoextent/', headless=True) + + # Scroll to bottom to see documentation + driver = get_driver() + driver.execute_script("window.scrollTo(0, document.body.scrollHeight);") + + # Check documentation headers exist + self.assertTrue(Text("Documentation & Supported Formats").exists()) + self.assertTrue(Text("Supported File Formats").exists()) + self.assertTrue(Text("Supported Repository Providers").exists()) + + # Take screenshot of documentation section + driver.save_screenshot( + os.path.join(self.screenshot_dir, 'geoextent_documentation.png') + ) + finally: + kill_browser() + + def test_footer_link_navigates_to_geoextent(self): + """Test that clicking geoextent link in footer navigates to the page.""" + try: + start_chrome(f'{self.base_url}/', headless=True) + + # Scroll to footer + driver = get_driver() + driver.execute_script("window.scrollTo(0, document.body.scrollHeight);") + + # Click geoextent link in footer + click("Geoextent") + + # Wait for page to load + wait_until(lambda: Text("Geoextent Extraction").exists(), timeout_secs=5) + + # Check URL changed + self.assertIn("geoextent", driver.current_url) + + finally: + kill_browser() + + +if __name__ == '__main__': + unittest.main() diff --git a/tests-ui/test_works_page.py 
b/tests-ui/test_works_page.py new file mode 100644 index 0000000..f7caa85 --- /dev/null +++ b/tests-ui/test_works_page.py @@ -0,0 +1,285 @@ +# tests-ui/test_works_page.py +""" +Tests for the works list page (/works). +Tests pagination, statistics, and work display features. +""" + +from django.test import TestCase, Client +from django.urls import reverse +from django.contrib.auth import get_user_model +from django.core.cache import cache +from django.conf import settings +from publications.models import Publication, Source +from publications.utils.statistics import update_statistics_cache, STATS_CACHE_KEY + +User = get_user_model() + + +class WorksListViewTest(TestCase): + """Test the works list view with pagination and statistics""" + + @classmethod + def setUpTestData(cls): + """Create test data once for all tests""" + # Create a test source + cls.source = Source.objects.create( + name="Test Journal", + issn_l="1234-5678" + ) + + # Create test publications (75 total: 60 published, 15 draft) + cls.publications = [] + for i in range(75): + status = 'p' if i < 60 else 'd' + authors = [f"Author {i}A", f"Author {i}B", f"Author {i}C", f"Author {i}D"] if i % 2 == 0 else [f"Author {i}"] + pub = Publication.objects.create( + title=f"Test Publication {i}", + status=status, + doi=f"10.1234/test.{i}" if i % 3 == 0 else None, + source=cls.source if i % 4 == 0 else None, + authors=authors, + abstract=f"Abstract for publication {i}" if i % 5 == 0 else None, + ) + cls.publications.append(pub) + + def setUp(self): + """Set up for each test""" + self.client = Client() + # Clear cache before each test + cache.clear() + + def test_works_page_loads(self): + """Test that the works page loads successfully""" + response = self.client.get(reverse('optimap:works')) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'works.html') + + def test_pagination_default_page_size(self): + """Test that default page size is applied""" + response = self.client.get(reverse('optimap:works')) + self.assertEqual(response.status_code, 200) + + # Should use default page size (50) + self.assertEqual(len(response.context['works']), 50) + self.assertEqual(response.context['page_size'], settings.WORKS_PAGE_SIZE_DEFAULT) + + def test_pagination_custom_page_size(self): + """Test custom page size selection""" + response = self.client.get(reverse('optimap:works') + '?size=25') + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.context['works']), 25) + self.assertEqual(response.context['page_size'], 25) + + def test_pagination_max_limit(self): + """Test that page size is clamped to maximum""" + response = self.client.get(reverse('optimap:works') + '?size=1000') + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context['page_size'], settings.WORKS_PAGE_SIZE_MAX) + + def test_pagination_min_limit(self): + """Test that page size is clamped to minimum""" + response = self.client.get(reverse('optimap:works') + '?size=1') + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context['page_size'], settings.WORKS_PAGE_SIZE_MIN) + + def test_pagination_page_navigation(self): + """Test navigating between pages""" + # First page + response = self.client.get(reverse('optimap:works') + '?page=1&size=25') + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context['page_obj'].number, 1) + self.assertTrue(response.context['page_obj'].has_next()) + self.assertFalse(response.context['page_obj'].has_previous()) + + # Second page + 
response = self.client.get(reverse('optimap:works') + '?page=2&size=25') + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context['page_obj'].number, 2) + self.assertTrue(response.context['page_obj'].has_next()) + self.assertTrue(response.context['page_obj'].has_previous()) + + def test_only_published_works_shown_to_public(self): + """Test that non-admin users only see published works""" + response = self.client.get(reverse('optimap:works')) + self.assertEqual(response.status_code, 200) + + # Should show 60 published works (not 75 total) + self.assertEqual(response.context['page_obj'].paginator.count, 60) + + def test_admin_sees_all_works(self): + """Test that admin users see all works including drafts""" + # Create admin user + admin = User.objects.create_user( + username='admin', + email='admin@test.com', + password='testpass123' + ) + admin.is_staff = True + admin.save() + + self.client.login(username='admin', password='testpass123') + response = self.client.get(reverse('optimap:works')) + + self.assertEqual(response.status_code, 200) + # Should show all 75 works + self.assertEqual(response.context['page_obj'].paginator.count, 75) + self.assertTrue(response.context['is_admin']) + + def test_work_includes_authors(self): + """Test that work data includes author information""" + response = self.client.get(reverse('optimap:works')) + self.assertEqual(response.status_code, 200) + + # Check first work has authors + first_work = response.context['works'][0] + self.assertIn('authors', first_work) + self.assertIsInstance(first_work['authors'], list) + + def test_work_includes_doi(self): + """Test that work data includes DOI""" + response = self.client.get(reverse('optimap:works')) + self.assertEqual(response.status_code, 200) + + # Find a work with DOI + works_with_doi = [w for w in response.context['works'] if w['doi']] + self.assertGreater(len(works_with_doi), 0) + + def test_work_includes_source(self): + """Test that work data includes source information""" + response = self.client.get(reverse('optimap:works')) + self.assertEqual(response.status_code, 200) + + # Find a work with source + works_with_source = [w for w in response.context['works'] if w['source']] + self.assertGreater(len(works_with_source), 0) + + def test_statistics_displayed(self): + """Test that statistics are included in context""" + # Update statistics cache + update_statistics_cache() + + response = self.client.get(reverse('optimap:works')) + self.assertEqual(response.status_code, 200) + + self.assertIn('statistics', response.context) + stats = response.context['statistics'] + + self.assertIn('total_works', stats) + self.assertIn('published_works', stats) + self.assertIn('with_geometry', stats) + self.assertIn('with_temporal', stats) + self.assertIn('with_authors', stats) + self.assertIn('with_doi', stats) + + def test_statistics_cached(self): + """Test that statistics are cached""" + # First request should calculate and cache + cache.delete(STATS_CACHE_KEY) + response1 = self.client.get(reverse('optimap:works')) + stats1 = response1.context['statistics'] + + # Second request should use cache + response2 = self.client.get(reverse('optimap:works')) + stats2 = response2.context['statistics'] + + self.assertEqual(stats1, stats2) + # Verify cache was used + self.assertIsNotNone(cache.get(STATS_CACHE_KEY)) + + def test_api_url_present(self): + """Test that API URL is included in context""" + response = self.client.get(reverse('optimap:works') + '?page=2&size=25') + self.assertEqual(response.status_code, 
200) + + self.assertIn('api_url', response.context) + api_url = response.context['api_url'] + + # API URL should include current page and size + self.assertIn('page=2', api_url) + self.assertIn('limit=25', api_url) + + def test_pagination_controls_in_template(self): + """Test that pagination controls are rendered""" + response = self.client.get(reverse('optimap:works')) + self.assertEqual(response.status_code, 200) + + content = response.content.decode('utf-8') + + # Check for pagination elements + self.assertIn('pagination', content) + self.assertIn('Works per page:', content) + self.assertIn('page-size', content) + + def test_statistics_section_in_template(self): + """Test that statistics section is rendered""" + update_statistics_cache() + response = self.client.get(reverse('optimap:works')) + self.assertEqual(response.status_code, 200) + + content = response.content.decode('utf-8') + + # Check for statistics section + self.assertIn('Statistics', content) + self.assertIn('Total works in database:', content) + self.assertIn('Published works:', content) + self.assertIn('Complete metadata coverage:', content) + + def test_api_link_in_template(self): + """Test that API link is rendered""" + response = self.client.get(reverse('optimap:works')) + self.assertEqual(response.status_code, 200) + + content = response.content.decode('utf-8') + + # Check for API link section + self.assertIn('API Access:', content) + self.assertIn('View this page as JSON (API)', content) + + def test_authors_abbreviated_for_many(self): + """Test that author list is abbreviated for >3 authors""" + response = self.client.get(reverse('optimap:works')) + self.assertEqual(response.status_code, 200) + + content = response.content.decode('utf-8') + + # Should find "et al." for publications with >3 authors + self.assertIn('et al.', content) + + def test_doi_link_external(self): + """Test that DOI links are external""" + response = self.client.get(reverse('optimap:works')) + self.assertEqual(response.status_code, 200) + + content = response.content.decode('utf-8') + + # Check for DOI links + if 'doi.org' in content: + self.assertIn('target="_blank"', content) + self.assertIn('rel="noopener"', content) + + def test_invalid_page_number_handled(self): + """Test that invalid page numbers are handled gracefully""" + # Non-integer page number + response = self.client.get(reverse('optimap:works') + '?page=abc') + self.assertEqual(response.status_code, 200) + self.assertEqual(response.context['page_obj'].number, 1) + + # Out of range page number + response = self.client.get(reverse('optimap:works') + '?page=9999') + self.assertEqual(response.status_code, 200) + # Should show last page + self.assertEqual( + response.context['page_obj'].number, + response.context['page_obj'].paginator.num_pages + ) + + def test_page_size_options_in_context(self): + """Test that page size options are in context""" + response = self.client.get(reverse('optimap:works')) + self.assertEqual(response.status_code, 200) + + self.assertIn('page_size_options', response.context) + self.assertEqual( + response.context['page_size_options'], + settings.WORKS_PAGE_SIZE_OPTIONS + ) diff --git a/tests/fixtures/geoextent/test_linestring.geojson b/tests/fixtures/geoextent/test_linestring.geojson new file mode 100644 index 0000000..530ba89 --- /dev/null +++ b/tests/fixtures/geoextent/test_linestring.geojson @@ -0,0 +1,15 @@ +{ + "type": "Feature", + "geometry": { + "type": "LineString", + "coordinates": [ + [13.4, 52.5], + [13.5, 52.6], + [13.6, 52.7] + ] + }, + 
"properties": { + "name": "Berlin Route", + "date": "2023-12-25" + } +} diff --git a/tests/fixtures/geoextent/test_point.geojson b/tests/fixtures/geoextent/test_point.geojson new file mode 100644 index 0000000..6dd39c6 --- /dev/null +++ b/tests/fixtures/geoextent/test_point.geojson @@ -0,0 +1,11 @@ +{ + "type": "Feature", + "geometry": { + "type": "Point", + "coordinates": [13.405, 52.52] + }, + "properties": { + "name": "Berlin", + "date": "2023-01-15" + } +} diff --git a/tests/fixtures/geoextent/test_polygon.geojson b/tests/fixtures/geoextent/test_polygon.geojson new file mode 100644 index 0000000..e9242c9 --- /dev/null +++ b/tests/fixtures/geoextent/test_polygon.geojson @@ -0,0 +1,18 @@ +{ + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [[ + [9.5, 53.0], + [9.5, 54.0], + [10.5, 54.0], + [10.5, 53.0], + [9.5, 53.0] + ]] + }, + "properties": { + "name": "Hamburg Area", + "start_date": "2023-06-01", + "end_date": "2023-06-30" + } +} diff --git a/tests/test_geoextent.py b/tests/test_geoextent.py new file mode 100644 index 0000000..d2730f1 --- /dev/null +++ b/tests/test_geoextent.py @@ -0,0 +1,774 @@ +""" +Integration tests for Geoextent API endpoints. + +Tests compare API responses against reference results from geoextent library. +Reference values are pre-computed and hardcoded for reliability and speed. +""" + +import json +import os +from django.test import Client, TestCase +from django.core.files.uploadedfile import SimpleUploadedFile +from django.contrib.auth import get_user_model + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'optimap.settings') + +User = get_user_model() + + +# Reference values generated from geoextent library +REFERENCE_VALUES = { + 'test_point': { + 'format': 'geojson', + 'geoextent_handler': 'handleVector', + 'bbox': [13.405, 52.52, 13.405, 52.52], + 'crs': '4326', + 'tbox': ['2023-01-15', '2023-01-15'], + 'file_size_bytes': 171 + }, + 'test_polygon': { + 'format': 'geojson', + 'geoextent_handler': 'handleVector', + 'bbox': [9.5, 53.0, 10.5, 54.0], + 'crs': '4326', + 'tbox': ['2023-06-01', '2023-06-30'], + 'file_size_bytes': 305 + }, + 'test_linestring': { + 'format': 'geojson', + 'geoextent_handler': 'handleVector', + 'bbox': [13.4, 52.5, 13.6, 52.7], + 'crs': '4326', + 'tbox': ['2023-12-25', '2023-12-25'], + 'file_size_bytes': 233 + }, + 'directory_combined': { + 'format': 'folder', + 'crs': '4326', + 'bbox': [9.5, 52.5, 13.6, 54.0], + 'tbox': ['2023-01-15', '2023-12-25'], + 'file_size_bytes': 709 + } +} + + +class GeoextentExtractTest(TestCase): + """Tests for /api/v1/geoextent/extract/ endpoint""" + + def setUp(self): + self.client = Client() + # Create test user and login + self.user = User.objects.create_user('testuser', 'test@example.com', 'testpass') + self.client.login(username='testuser', password='testpass') + + self.fixtures_dir = os.path.join( + os.path.dirname(__file__), 'fixtures', 'geoextent' + ) + + def test_extract_point_geojson_format(self): + """Test extracting extent from point GeoJSON with GeoJSON response format""" + # Load test file + with open(os.path.join(self.fixtures_dir, 'test_point.geojson'), 'rb') as f: + file_content = f.read() + + # Reference values + reference = REFERENCE_VALUES['test_point'] + + # Call API + response = self.client.post( + '/api/v1/geoextent/extract/', + { + 'file': SimpleUploadedFile('test_point.geojson', file_content), + 'bbox': 'true', + 'tbox': 'true', + 'response_format': 'geojson' + } + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # Should 
return GeoJSON FeatureCollection + self.assertEqual(data['type'], 'FeatureCollection') + self.assertIn('features', data) + self.assertIn('geoextent_extraction', data) + + # Check feature properties contain temporal extent + if len(data['features']) > 0: + feature = data['features'][0] + self.assertIn('tbox', feature['properties']) + self.assertEqual(feature['properties']['tbox'], reference['tbox']) + + def test_extract_polygon_wkt_format(self): + """Test extracting extent from polygon GeoJSON with WKT response format""" + with open(os.path.join(self.fixtures_dir, 'test_polygon.geojson'), 'rb') as f: + file_content = f.read() + + # Reference values + reference = REFERENCE_VALUES['test_polygon'] + + # Call API with WKT format + response = self.client.post( + '/api/v1/geoextent/extract/', + { + 'file': SimpleUploadedFile('test_polygon.geojson', file_content), + 'bbox': 'true', + 'tbox': 'true', + 'response_format': 'wkt' + } + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # WKT format should have wkt string and metadata + self.assertIn('wkt', data) + self.assertIn('crs', data) + self.assertIn('geoextent_extraction', data) + self.assertEqual(data['crs'], f"EPSG:{reference['crs']}") + + # Should have temporal extent + self.assertIn('tbox', data) + self.assertEqual(data['tbox'], reference['tbox']) + + def test_extract_geojson_response_format(self): + """Test extracting extent with GeoJSON response format""" + with open(os.path.join(self.fixtures_dir, 'test_linestring.geojson'), 'rb') as f: + file_content = f.read() + + # Reference values + reference = REFERENCE_VALUES['test_linestring'] + + # Call API with geojson format + response = self.client.post( + '/api/v1/geoextent/extract/', + { + 'file': SimpleUploadedFile('test_linestring.geojson', file_content), + 'bbox': 'true', + 'tbox': 'true', + 'response_format': 'geojson' + } + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # Should return GeoJSON FeatureCollection + self.assertEqual(data['type'], 'FeatureCollection') + self.assertIn('features', data) + self.assertIn('geoextent_extraction', data) + + # Check first feature has geometry and temporal extent + if len(data['features']) > 0: + feature = data['features'][0] + self.assertIn('geometry', feature) + self.assertEqual(feature['geometry']['type'], 'Polygon') + self.assertIn('tbox', feature['properties']) + self.assertEqual(feature['properties']['tbox'], + reference['tbox'] + ) + + def test_extract_wkt_response_format(self): + """Test extracting extent with WKT response format""" + with open(os.path.join(self.fixtures_dir, 'test_polygon.geojson'), 'rb') as f: + file_content = f.read() + + response = self.client.post( + '/api/v1/geoextent/extract/', + { + 'file': SimpleUploadedFile('test_polygon.geojson', file_content), + 'bbox': 'true', + 'tbox': 'true', + 'response_format': 'wkt' + } + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # Check WKT structure + self.assertIn('wkt', data) + self.assertTrue(data['wkt'].startswith('POLYGON')) + self.assertEqual(data['crs'], 'EPSG:4326') + self.assertIn('tbox', data) + self.assertEqual(data['tbox'], REFERENCE_VALUES['test_polygon']['tbox']) + + def test_extract_wkb_response_format(self): + """Test extracting extent with WKB response format""" + with open(os.path.join(self.fixtures_dir, 'test_point.geojson'), 'rb') as f: + file_content = f.read() + + response = self.client.post( + '/api/v1/geoextent/extract/', + { + 'file': 
SimpleUploadedFile('test_point.geojson', file_content), + 'bbox': 'true', + 'tbox': 'true', + 'response_format': 'wkb' + } + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # Check WKB structure + self.assertIn('wkb', data) + self.assertIsInstance(data['wkb'], str) # Hex string + self.assertEqual(data['crs'], 'EPSG:4326') + self.assertIn('tbox', data) + self.assertEqual(data['tbox'], REFERENCE_VALUES['test_point']['tbox']) + + def test_extract_without_bbox(self): + """Test extracting only temporal extent without bbox - should fail gracefully""" + with open(os.path.join(self.fixtures_dir, 'test_point.geojson'), 'rb') as f: + file_content = f.read() + + response = self.client.post( + '/api/v1/geoextent/extract/', + { + 'file': SimpleUploadedFile('test_point.geojson', file_content), + 'bbox': 'false', + 'tbox': 'true', + 'response_format': 'geojson' + } + ) + + # When bbox=false and response_format=geojson, the API cannot create valid + # GeoJSON without geometry, so it returns an error or empty result + self.assertEqual(response.status_code, 200) + data = response.json() + + # Should either be an error response or have features + # For now, just check it's a valid JSON response + self.assertIsInstance(data, dict) + + # If it has features, check temporal extent + if 'features' in data and len(data['features']) > 0: + feature = data['features'][0] + if 'tbox' in feature.get('properties', {}): + self.assertEqual(feature['properties']['tbox'], REFERENCE_VALUES['test_point']['tbox']) + + def test_extract_convex_hull(self): + """Test extracting convex hull instead of bbox""" + with open(os.path.join(self.fixtures_dir, 'test_polygon.geojson'), 'rb') as f: + file_content = f.read() + + response = self.client.post( + '/api/v1/geoextent/extract/', + { + 'file': SimpleUploadedFile('test_polygon.geojson', file_content), + 'bbox': 'true', + 'convex_hull': 'true', + 'response_format': 'geojson' + } + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # Should return GeoJSON FeatureCollection with convex hull + self.assertEqual(data['type'], 'FeatureCollection') + self.assertIn('features', data) + self.assertIn('geoextent_extraction', data) + + # Check that extent_type is convex_hull + self.assertEqual(data['geoextent_extraction']['extent_type'], 'convex_hull') + + # Features should have geometry (convex hull polygon) + if len(data['features']) > 0: + feature = data['features'][0] + self.assertIn('geometry', feature) + self.assertEqual(feature['geometry']['type'], 'Polygon') + + +class GeoextentBatchTest(TestCase): + """Tests for /api/v1/geoextent/extract-batch/ endpoint""" + + def setUp(self): + self.client = Client() + # Create test user and login + self.user = User.objects.create_user('testuser', 'test@example.com', 'testpass') + self.client.login(username='testuser', password='testpass') + + self.fixtures_dir = os.path.join( + os.path.dirname(__file__), 'fixtures', 'geoextent' + ) + + def test_batch_multiple_files_combined(self): + """Test batch processing with extent combination""" + # Load all test files + files = [] + for filename in ['test_point.geojson', 'test_polygon.geojson', 'test_linestring.geojson']: + with open(os.path.join(self.fixtures_dir, filename), 'rb') as f: + files.append(SimpleUploadedFile(filename, f.read())) + + # Call API + response = self.client.post( + '/api/v1/geoextent/extract-batch/', + { + 'files': files, + 'bbox': 'true', + 'tbox': 'true', + 'response_format': 'geojson' + } + ) + + 
self.assertEqual(response.status_code, 200) + data = response.json() + + # Check response structure + self.assertTrue(data['success']) + self.assertEqual(data['files_processed'], 3) + self.assertIn('combined_extent', data) + self.assertIn('individual_results', data) + + # Combined extent should be GeoJSON FeatureCollection + combined = data['combined_extent'] + self.assertEqual(combined['type'], 'FeatureCollection') + self.assertIn('features', combined) + self.assertIn('geoextent_extraction', combined) + + # Check that we have features with temporal extent + if len(combined['features']) > 0: + feature = combined['features'][0] + self.assertIn('tbox', feature['properties']) + + # Check individual results count + self.assertEqual(len(data['individual_results']), 3) + + def test_batch_individual_results(self): + """Test batch processing returns both combined and individual results""" + files = [] + for filename in ['test_point.geojson', 'test_polygon.geojson']: + with open(os.path.join(self.fixtures_dir, filename), 'rb') as f: + files.append(SimpleUploadedFile(filename, f.read())) + + response = self.client.post( + '/api/v1/geoextent/extract-batch/', + { + 'files': files, + 'bbox': 'true', + 'tbox': 'true', + 'response_format': 'geojson' + } + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # Should have both combined extent and individual results + self.assertTrue(data['success']) + self.assertIn('combined_extent', data) + self.assertIn('individual_results', data) + self.assertEqual(len(data['individual_results']), 2) + + # Combined extent should be GeoJSON FeatureCollection + combined = data['combined_extent'] + self.assertEqual(combined['type'], 'FeatureCollection') + self.assertIn('features', combined) + + # Individual results should also be GeoJSON FeatureCollections + for result in data['individual_results']: + self.assertEqual(result['type'], 'FeatureCollection') + self.assertIn('features', result) + self.assertIn('geoextent_extraction', result) + + def test_batch_wkt_format(self): + """Test batch processing with WKT response format""" + with open(os.path.join(self.fixtures_dir, 'test_point.geojson'), 'rb') as f: + file_content = f.read() + + response = self.client.post( + '/api/v1/geoextent/extract-batch/', + { + 'files': [SimpleUploadedFile('test_point.geojson', file_content)], + 'bbox': 'true', + 'tbox': 'true', + 'response_format': 'wkt' + } + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # Response should have batch metadata + self.assertTrue(data['success']) + self.assertEqual(data['files_processed'], 1) + + # Check that combined extent is in WKT format + combined = data['combined_extent'] + self.assertIn('wkt', combined) + self.assertIn('crs', combined) + self.assertIn('geoextent_extraction', combined) + + # Check that individual results are also in WKT format + self.assertEqual(len(data['individual_results']), 1) + result = data['individual_results'][0] + self.assertIn('wkt', result) + self.assertIn('crs', result) + + def test_batch_geojson_format(self): + """Test batch processing with GeoJSON response format""" + with open(os.path.join(self.fixtures_dir, 'test_polygon.geojson'), 'rb') as f: + file_content = f.read() + + response = self.client.post( + '/api/v1/geoextent/extract-batch/', + { + 'files': [SimpleUploadedFile('test_polygon.geojson', file_content)], + 'bbox': 'true', + 'response_format': 'geojson' + } + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # Response should have 
batch metadata + self.assertTrue(data['success']) + self.assertEqual(data['files_processed'], 1) + + # Combined extent should be GeoJSON FeatureCollection + combined = data['combined_extent'] + self.assertEqual(combined['type'], 'FeatureCollection') + self.assertIn('features', combined) + self.assertIn('geoextent_extraction', combined) + + # Individual results should also be GeoJSON FeatureCollections + self.assertEqual(data['individual_results'][0]['type'], 'FeatureCollection') + + +class GeoextentRemoteTest(TestCase): + """Tests for /api/v1/geoextent/extract-remote/ endpoint + + Note: These tests make actual network calls and may fail if: + - No internet connection + - Repository is unavailable + - DOI resolver is down + """ + + # Reference value from Zenodo dataset 10.5281/zenodo.4593540 + # NOTE: geoextent.fromRemote() has a bug where it returns coordinates in + # [minLat, minLon, maxLat, maxLon] format instead of the GeoJSON standard + # [minLon, minLat, maxLon, maxLat]. This needs to be fixed upstream in geoextent. + # Pennsylvania coordinates: ~40°N latitude, ~75-80°W longitude + ZENODO_REFERENCE = { + 'identifier': '10.5281/zenodo.4593540', + 'bbox': [39.642802545572735, -80.71456319678893, 42.256308231814586, -74.78657735361809], + 'tbox': ['2006-02-02', '2018-08-27'], + 'crs': '4326' + } + + def setUp(self): + self.client = Client() + # Create test user and login + self.user = User.objects.create_user('testuser', 'test@example.com', 'testpass') + self.client.login(username='testuser', password='testpass') + + def test_remote_single_identifier(self): + """Test extracting from single remote identifier""" + identifier = self.ZENODO_REFERENCE['identifier'] + + # Call API + response = self.client.post( + '/api/v1/geoextent/extract-remote/', + json.dumps({ + 'identifiers': [identifier], + 'bbox': True, + 'tbox': True, + 'response_format': 'geojson' + }), + content_type='application/json' + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # Should return GeoJSON FeatureCollection + self.assertEqual(data['type'], 'FeatureCollection') + self.assertIn('features', data) + self.assertIn('geoextent_extraction', data) + + # Check extraction metadata + self.assertEqual(data['geoextent_extraction']['inputs'], [identifier]) + self.assertEqual(data['geoextent_extraction']['format'], 'remote') + + # Check temporal extent in feature properties + if len(data['features']) > 0: + feature = data['features'][0] + self.assertIn('tbox', feature['properties']) + self.assertEqual(feature['properties']['tbox'], self.ZENODO_REFERENCE['tbox']) + + def test_remote_multiple_identifiers(self): + """Test extracting from multiple remote identifiers""" + identifiers = [ + '10.5281/zenodo.4593540', + '10.5281/zenodo.7416089' # This might fail (404), but should be handled + ] + + # Call API + response = self.client.post( + '/api/v1/geoextent/extract-remote/', + json.dumps({ + 'identifiers': identifiers, + 'bbox': True, + 'tbox': True, + 'response_format': 'geojson' + }), + content_type='application/json' + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # Should return GeoJSON FeatureCollection + self.assertEqual(data['type'], 'FeatureCollection') + self.assertIn('features', data) + self.assertIn('geoextent_extraction', data) + + # Check that multiple identifiers were processed + self.assertEqual(data['geoextent_extraction']['inputs'], identifiers) + + def test_remote_wkt_format(self): + """Test remote extraction with WKT format""" + identifier = 
self.ZENODO_REFERENCE['identifier'] + + response = self.client.post( + '/api/v1/geoextent/extract-remote/', + json.dumps({ + 'identifiers': [identifier], + 'bbox': True, + 'tbox': True, + 'response_format': 'wkt' + }), + content_type='application/json' + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # WKT format should have wkt string and metadata + self.assertIn('wkt', data) + self.assertIn('crs', data) + self.assertIn('geoextent_extraction', data) + self.assertEqual(data['geoextent_extraction']['format'], 'remote') + + def test_remote_single_identifier_simple_response(self): + """Test single identifier returns GeoJSON FeatureCollection""" + identifier = self.ZENODO_REFERENCE['identifier'] + + response = self.client.post( + '/api/v1/geoextent/extract-remote/', + json.dumps({ + 'identifiers': [identifier], + 'bbox': True + }), + content_type='application/json' + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # Should return GeoJSON FeatureCollection (default format) + self.assertEqual(data['type'], 'FeatureCollection') + self.assertIn('features', data) + self.assertIn('geoextent_extraction', data) + + +class GeoextentRemoteGetTest(TestCase): + """Tests for /api/v1/geoextent/extract-remote/ GET endpoint""" + + # Reference value from Zenodo dataset 10.5281/zenodo.4593540 + # NOTE: geoextent.fromRemote() has a bug where it returns coordinates in + # [minLat, minLon, maxLat, maxLon] format instead of the GeoJSON standard + # [minLon, minLat, maxLon, maxLat]. This needs to be fixed upstream in geoextent. + # Pennsylvania coordinates: ~40°N latitude, ~75-80°W longitude + ZENODO_REFERENCE = { + 'identifier': '10.5281/zenodo.4593540', + 'bbox': [39.642802545572735, -80.71456319678893, 42.256308231814586, -74.78657735361809], + 'tbox': ['2006-02-02', '2018-08-27'], + 'crs': '4326' + } + + def setUp(self): + self.client = Client() + # Create test user and login + self.user = User.objects.create_user('testuser', 'test@example.com', 'testpass') + self.client.login(username='testuser', password='testpass') + + def test_get_single_identifier(self): + """Test GET request with single identifier""" + identifier = self.ZENODO_REFERENCE['identifier'] + + response = self.client.get( + '/api/v1/geoextent/extract-remote/', + { + 'identifiers': identifier, + 'bbox': 'true', + 'tbox': 'true', + 'response_format': 'geojson' + } + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # Should return GeoJSON FeatureCollection + self.assertEqual(data['type'], 'FeatureCollection') + self.assertIn('features', data) + self.assertIn('geoextent_extraction', data) + + # Check temporal extent in feature properties + if len(data['features']) > 0: + feature = data['features'][0] + self.assertIn('tbox', feature['properties']) + self.assertEqual(feature['properties']['tbox'], self.ZENODO_REFERENCE['tbox']) + + def test_get_multiple_identifiers(self): + """Test GET request with comma-separated identifiers""" + identifiers = '10.5281/zenodo.4593540,10.5281/zenodo.7416089' + + response = self.client.get( + '/api/v1/geoextent/extract-remote/', + { + 'identifiers': identifiers, + 'bbox': 'true', + 'tbox': 'true', + 'response_format': 'geojson' + } + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # Should return GeoJSON FeatureCollection + self.assertEqual(data['type'], 'FeatureCollection') + self.assertIn('features', data) + self.assertIn('geoextent_extraction', data) + + # Should have processed multiple 
identifiers + self.assertEqual(len(data['geoextent_extraction']['inputs']), 2) + + def test_get_geojson_format(self): + """Test GET request with GeoJSON response format""" + identifier = self.ZENODO_REFERENCE['identifier'] + + response = self.client.get( + '/api/v1/geoextent/extract-remote/', + { + 'identifiers': identifier, + 'bbox': 'true', + 'response_format': 'geojson' + } + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # GeoJSON format should return FeatureCollection + self.assertEqual(data['type'], 'FeatureCollection') + self.assertIn('features', data) + self.assertIn('geoextent_extraction', data) + + # Should have at least one feature with geometry + if len(data['features']) > 0: + feature = data['features'][0] + self.assertEqual(feature['type'], 'Feature') + self.assertIn('geometry', feature) + self.assertIn('properties', feature) + + def test_get_wkt_format(self): + """Test GET request with WKT response format""" + identifier = self.ZENODO_REFERENCE['identifier'] + + response = self.client.get( + '/api/v1/geoextent/extract-remote/', + { + 'identifiers': identifier, + 'bbox': 'true', + 'response_format': 'wkt' + } + ) + + self.assertEqual(response.status_code, 200) + data = response.json() + + # WKT format returns string + self.assertIn('wkt', data) + self.assertIn('crs', data) + self.assertIsInstance(data['wkt'], str) + self.assertTrue(data['wkt'].startswith('POLYGON')) + + def test_get_missing_identifiers(self): + """Test GET request without identifiers parameter""" + response = self.client.get( + '/api/v1/geoextent/extract-remote/', + {'bbox': 'true'} + ) + + self.assertEqual(response.status_code, 400) + data = response.json() + self.assertIn('identifiers', data) + + +class GeoextentErrorHandlingTest(TestCase): + """Tests for error handling in geoextent endpoints""" + + def setUp(self): + self.client = Client() + # Create test user and login + self.user = User.objects.create_user('testuser', 'test@example.com', 'testpass') + self.client.login(username='testuser', password='testpass') + + def test_extract_no_file(self): + """Test extract endpoint with no file provided""" + response = self.client.post( + '/api/v1/geoextent/extract/', + {'bbox': 'true'} + ) + + self.assertEqual(response.status_code, 400) + + def test_batch_no_files(self): + """Test batch endpoint with no files provided""" + response = self.client.post( + '/api/v1/geoextent/extract-batch/', + {'bbox': 'true'} + ) + + self.assertEqual(response.status_code, 400) + data = response.json() + self.assertFalse(data['success']) + self.assertIn('error', data) + + def test_remote_empty_identifiers(self): + """Test remote endpoint with empty identifiers list""" + response = self.client.post( + '/api/v1/geoextent/extract-remote/', + json.dumps({ + 'identifiers': [], + 'bbox': True + }), + content_type='application/json' + ) + + self.assertEqual(response.status_code, 400) + + def test_extract_invalid_format(self): + """Test extract with invalid response format""" + fixtures_dir = os.path.join( + os.path.dirname(__file__), 'fixtures', 'geoextent' + ) + with open(os.path.join(fixtures_dir, 'test_point.geojson'), 'rb') as f: + file_content = f.read() + + response = self.client.post( + '/api/v1/geoextent/extract/', + { + 'file': SimpleUploadedFile('test_point.geojson', file_content), + 'bbox': 'true', + 'response_format': 'invalid_format' + } + ) + + self.assertEqual(response.status_code, 400)
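The module docstring of `tests/test_geoextent.py` notes that the values in `REFERENCE_VALUES` are pre-computed with the geoextent library and hardcoded for reliability and speed. The sketch below (not part of the diff above) shows how such values could be regenerated from the fixtures when geoextent is upgraded; it assumes the geoextent Python API as described in the upstream geoextent documentation (`geoextent.lib.extent.fromFile`), whose import path and keyword arguments may differ between versions.

```python
# Illustrative sketch only, not part of the committed test files.
# Assumes the geoextent Python API documented upstream (geoextent.lib.extent.fromFile);
# the import path and arguments may vary between geoextent versions.
import os
import pprint

import geoextent.lib.extent as geoextent

FIXTURES_DIR = os.path.join("tests", "fixtures", "geoextent")

for name in ("test_point", "test_polygon", "test_linestring"):
    path = os.path.join(FIXTURES_DIR, f"{name}.geojson")
    # Returns a dict with keys such as 'format', 'geoextent_handler', 'bbox',
    # 'crs', and 'tbox' -- the same keys hardcoded in REFERENCE_VALUES above.
    result = geoextent.fromFile(path, bbox=True, tbox=True)
    pprint.pprint({name: result})
```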
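Separately, the comments in `GeoextentRemoteTest` and `GeoextentRemoteGetTest` describe an upstream quirk where `geoextent.fromRemote()` returns bounding boxes as `[minLat, minLon, maxLat, maxLon]` rather than the GeoJSON-style `[minLon, minLat, maxLon, maxLat]`. The helper below is a minimal sketch of how a caller could normalize that order until the upstream fix lands; the function name is a hypothetical assumption, not an existing helper in the OPTIMAP codebase.

```python
# Minimal sketch, assuming the coordinate order described in the test comments above.
# normalize_remote_bbox is a hypothetical name, not an existing OPTIMAP helper.

def normalize_remote_bbox(bbox):
    """Reorder [minLat, minLon, maxLat, maxLon] into GeoJSON [minLon, minLat, maxLon, maxLat]."""
    min_lat, min_lon, max_lat, max_lon = bbox
    return [min_lon, min_lat, max_lon, max_lat]


# Using the Zenodo reference values from the tests above:
zenodo_bbox = [39.642802545572735, -80.71456319678893,
               42.256308231814586, -74.78657735361809]
print(normalize_remote_bbox(zenodo_bbox))
# [-80.71456319678893, 39.642802545572735, -74.78657735361809, 42.256308231814586]
```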