diff --git a/.cursorindexingignore b/.cursorindexingignore new file mode 100644 index 0000000..953908e --- /dev/null +++ b/.cursorindexingignore @@ -0,0 +1,3 @@ + +# Don't index SpecStory auto-save files, but allow explicit context inclusion via @ references +.specstory/** diff --git a/.docs/EMAIL_SERVICE.md b/.docs/EMAIL_SERVICE.md new file mode 100644 index 0000000..554674a --- /dev/null +++ b/.docs/EMAIL_SERVICE.md @@ -0,0 +1,103 @@ +# Email Service Configuration + +This document explains how to set up and use the email service with Microsoft Graph API and the Mail.Send permission. + +## Requirements + +To use the email service with Microsoft Graph API, you need: + +1. An Azure AD application registration with the following delegated permissions: + - `Mail.Send` + - `User.Read` + +2. The following environment variables: + - `MS_FROM_EMAIL`: The email address that will be used as the sender + - `EMAIL_SERVICE_TYPE`: The type of email service to use (default: `ms_graph`) + +## Configuration + +### Setting Up the Email Service + +The application uses a factory pattern to create the appropriate email service. By default, it uses the Microsoft Graph API with the `Mail.Send` permission. 
+ +```python +# The factory creates the appropriate email service based on the environment variables +from src.services.email_factory import create_email_service + +# Create an email service instance +email_service = create_email_service() +``` + +### Environment Variables + +Configure the following environment variables: + +```bash +# Required for Microsoft Graph Email Service +MS_FROM_EMAIL=your-sender-email@example.com +EMAIL_SERVICE_TYPE=ms_graph # Options: ms_graph, sendgrid +``` + +## Usage Examples + +### Sending a Simple Email + +```python +from src.services.email_factory import create_email_service + +# Create an email service instance +email_service = create_email_service() + +# Send an email +await email_service.send_email( + to_emails=["recipient@example.com"], + subject="Test Subject", + content="This is the email content", + content_type="text/plain" # or "text/html" for HTML content +) +``` + +### Sending a Templated Notification Email + +```python +from src.services.email_factory import create_email_service + +# Create an email service instance +email_service = create_email_service() + +# Send a notification email using a template +await email_service.send_notification_email( + to_email="recipient@example.com", + subject="Notification Subject", + template_id="welcome-template", + dynamic_data={ + "name": "John Doe", + "organization": "UNDP", + "role": "Admin" + } +) +``` + +## Testing the Email Service + +You can test the email service by running the provided test script: + +```bash +# Make the script executable +chmod +x test_mail_send.py + +# Run the test script +./test_mail_send.py +``` + +The script will prompt you to enter a recipient email address and will send a test email to verify that the `Mail.Send` permission is working correctly. + +## Troubleshooting + +If you encounter issues with sending emails: + +1. Verify that the Azure AD application has the required permissions (Mail.Send and User.Read) +2. 
Ensure that the permissions have been admin-consented +3. Check that the MS_FROM_EMAIL environment variable is set correctly +4. Check the application logs for detailed error messages +5. Verify that the DefaultAzureCredential is properly configured \ No newline at end of file diff --git a/.env.example b/.env.example index db22666..33e898f 100644 --- a/.env.example +++ b/.env.example @@ -1,40 +1,17 @@ -# Authentication -TENANT_ID="" -CLIENT_ID="" -API_KEY="" # for accessing "public" endpoints - -# Database and Storage -DB_CONNECTION="postgresql://:@:5432/" -SAS_URL="https://.blob.core.windows.net/?" - -# Azure OpenAI, only required for `/signals/generation` -AZURE_OPENAI_ENDPOINT="https://.openai.azure.com/" -AZURE_OPENAI_API_KEY="" - -# Testing, only required to run tests, must be a valid token of a regular user -API_JWT="" # Email Configuration MS_FROM_EMAIL=futureofdevelopment@undp.org EMAIL_SERVICE_TYPE=ms_graph -# SendGrid Configuration (if using SendGrid email service) -SENDGRID_API_KEY= -SENDGRID_FROM_EMAIL= - -# Azure Authentication +# Authentication TENANT_ID= CLIENT_ID= - -# API Authentication API_KEY= API_JWT= -# Database Connection +# Database and Storage DB_CONNECTION= - -# Azure Storage SAS_URL= # Azure OpenAI Configuration AZURE_OPENAI_ENDPOINT= -AZURE_OPENAI_API_KEY= +AZURE_OPENAI_API_KEY= \ No newline at end of file diff --git a/.gitignore b/.gitignore index 8e2eb1c..12518e9 100644 --- a/.gitignore +++ b/.gitignore @@ -149,3 +149,9 @@ Taskfile.yml /logs webapp_logs.zip /.schemas +app_logs.zip +/deployments +/LogFiles +/.exports +schema.sql +schema.dbml diff --git a/docs/azure_graph_mail_send_setup.md b/docs/azure_graph_mail_send_setup.md new file mode 100644 index 0000000..454c1f6 --- /dev/null +++ b/docs/azure_graph_mail_send_setup.md @@ -0,0 +1,55 @@ +# Enabling Automated Email Sending via Microsoft Graph + +To allow the Future of Development platform to send emails automatically (e.g., for digests, notifications) without manual authentication, 
you must configure Microsoft Graph **Application permissions** for your Azure AD app registration. + +## Why Application Permissions? +- **Delegated permissions** require a user to be logged in interactivelyβ€”this is not suitable for scheduled/automated jobs. +- **Application permissions** allow your backend/server to send emails as a service account using only a client ID and secret. + +## Steps for Admin + +1. **Go to Azure Portal > Azure Active Directory > App registrations > [Your App]** +2. **API permissions**: + - Click **Add a permission** > **Microsoft Graph** > **Application permissions** + - Search for and add **Mail.Send** (Application) +3. **Grant admin consent**: + - Click **Grant admin consent for [Your Org]** +4. **Verify**: + - You (or your admin) can run: + ```sh + az ad app permission list --id + ``` + - You should see a `"type": "Role"` for Mail.Send. + +## Template Email/Message to Admin + +``` +Subject: Request: Grant Application Mail.Send Permission to Azure App for Automated Email Sending + +Hi [Admin], + +We need to enable automated email sending from the "Future of Development" app (Client ID: 4b179bfc-6621-409a-a1ed-ad141c12eb11) using Microsoft Graph. + +**Please:** +1. Go to Azure Portal > Azure Active Directory > App registrations > "Future of Development". +2. Under **API permissions**, click **Add a permission** > **Microsoft Graph** > **Application permissions**. +3. Add **Mail.Send** (Application). +4. Click **Grant admin consent for [Your Org]**. + +This will allow our backend to send emails on a schedule without manual login. + +Thank you! +``` + +## After Admin Consent +- You can now use the client ID, tenant ID, and client secret to send emails via Microsoft Graph API using `/users/{user_id}/sendMail`. +- No manual login will be required for scheduled jobs. 
+ +--- + +**If you need to check the current permissions or verify setup, use:** +```sh +az ad app permission list --id +``` + +--- \ No newline at end of file diff --git a/docs/email_digest_delivery_methods.md b/docs/email_digest_delivery_methods.md new file mode 100644 index 0000000..96cb876 --- /dev/null +++ b/docs/email_digest_delivery_methods.md @@ -0,0 +1,82 @@ +# Email Digest Delivery Methods: Summary & Lessons Learned + +This document summarizes all the methods we have tried (and considered) for sending automated email digests from the Future Trends & Signals platform, including their outcomes, blockers, and references to official documentation. + +--- + +## 1. Microsoft Graph API (Recommended, but Blocked) + +- **Approach:** Use Microsoft Graph API with Application permissions to send as `futureofdevelopment@undp.org`. +- **Status:** **Blocked** (admin consent for Application permissions not yet granted). +- **What we did:** + - Registered the app in Azure AD. + - Attempted to use `/users/{user_id}/sendMail` endpoint with client credentials. + - Only Delegated permissions are currently granted; Application permissions are missing. +- **Blocker:** + - Cannot send as a service account without `Mail.Send` Application permission and admin consent. +- **Reference:** + - See [azure_graph_mail_send_setup.md](./azure_graph_mail_send_setup.md) for detailed setup and admin request template. + - [Microsoft Docs: Send mail as any user](https://learn.microsoft.com/en-us/graph/api/user-sendmail?view=graph-rest-1.0&tabs=http) + +--- + +## 2. Microsoft Graph API (Delegated Permissions) + +- **Approach:** Use Microsoft Graph API with Delegated permissions, logging in as the sender. +- **Status:** **Not suitable for automation** +- **What we did:** + - Successfully authenticated as a user and sent test emails using `/me/sendMail`. +- **Blocker:** + - Requires interactive login; not suitable for scheduled/automated jobs. + +--- + +## 3. 
SMTP (Office 365/Exchange Online) + +- **Approach:** Use SMTP to send as `futureofdevelopment@undp.org` via `smtp.office365.com`. +- **Status:** **Blocked** (SMTP AUTH is disabled for the tenant). +- **What we did:** + - Created a script (`send_digest_smtp.py`) to send the digest via SMTP. + - Attempted to authenticate with valid credentials. + - Received error: `SMTPAuthenticationError: 5.7.139 Authentication unsuccessful, SmtpClientAuthentication is disabled for the Tenant.` +- **Blocker:** + - SMTP AUTH is disabled for all users by default in modern Microsoft 365 tenants for security reasons. + - Would require IT to enable SMTP AUTH for the sending account. +- **Reference:** + - [Enable or disable SMTP AUTH in Exchange Online](https://aka.ms/smtp_auth_disabled) + +--- + +--- + +## 5. Distribution List/Group Delivery + +- **Approach:** Send the digest to a mail-enabled group (`futures.curator@undp.org`). +- **Status:** **Group is mail-enabled and can receive mail** +- **What we did:** + - Verified the group exists and is mail-enabled in Azure AD. + - All sending methods above (if working) can target this group. +- **Blocker:** + - Blocked by the same issues as above (Graph permissions or SMTP AUTH). + +--- + +## **Summary Table** + +| Method | Automation | Current Status | Blocker/Notes | +|-----------------------|------------|-----------------------|--------------------------------------| +| MS Graph (App perms) | Yes | Blocked | Need admin to grant permissions | +| MS Graph (Delegated) | No | Works (manual only) | Not suitable for automation | +| SMTP (O365) | Yes | Blocked | SMTP AUTH disabled for tenant | +| Distribution List | Yes | Ready | Blocked by above sending method | + +--- + +## **Next Steps** +- Await admin action to grant Application permissions for Microsoft Graph (see [azure_graph_mail_send_setup.md](./azure_graph_mail_send_setup.md)). +- Alternatively, request IT to enable SMTP AUTH for the sending account (less secure, not recommended). 
+- Consider third-party relay if allowed by policy. + +--- + +**This document should be updated as our setup or permissions change.** \ No newline at end of file diff --git a/docs/email_system.md b/docs/email_system.md new file mode 100644 index 0000000..2bf9cd7 --- /dev/null +++ b/docs/email_system.md @@ -0,0 +1,199 @@ +# UNDP Futures Trends & Signals Platform - Email System + +## Overview + +The UNDP Future Trends & Signals platform includes functionality to send weekly digest emails containing summaries of recently published signals. This email system keeps curators and other stakeholders informed about new content without requiring them to regularly visit the platform. + +## Components + +The email system consists of the following components: + +1. **Email Service Architecture** + - `EmailServiceBase`: Abstract base class defining the interface for all email services + - `MSGraphEmailService`: Implementation using Microsoft Graph API with enterprise application authentication + - `UserAuthEmailService`: Implementation using Azure CLI authentication + - `EmailFactory`: Factory pattern for creating the appropriate service based on configuration + +2. **Weekly Digest Feature** + - `WeeklyDigestService`: Core service that fetches recent signals and generates digest emails + - HTML email template with responsive design for signal summaries + - Filtering for approved/published signals within a specified date range + +3. **Testing Tools** + - `send_digest.py`: CLI script for sending weekly digests with parameterized options + - `test_email_direct.py`: Script for testing email configuration without database dependencies + +## Setup and Configuration + +### Requirements + +1. 
Install the required Python packages: + +```bash +# Activate your virtual environment +source venv/bin/activate + +# Install the required packages +pip install python-dotenv msgraph-core azure-identity httpx +``` + +### Environment Variables + +The following environment variables need to be set in your `.env.local` file: + +``` +# Email Configuration +MS_FROM_EMAIL=exo.futures.curators@undp.org # Email that will appear as the sender +EMAIL_SERVICE_TYPE=ms_graph # Authentication type (ms_graph or user_auth) + +# Azure Authentication for UNDP Enterprise Application +TENANT_ID=b3e5db5e-2944-4837-99f5-7488ace54319 # UNDP tenant ID +CLIENT_ID=4b179bfc-6621-409a-a1ed-ad141c12eb11 # UNDP Future Trends and Signals System App ID +CLIENT_SECRET=YOUR_CLIENT_SECRET_HERE # Generate this in Azure Portal +``` + +### Authentication Methods + +The platform supports multiple authentication methods for sending emails: + +#### 1. Enterprise Application Authentication (Recommended for Production) + +This method uses an Azure AD enterprise application with client credentials flow to authenticate and send emails on behalf of a mailbox. + +Requirements: +- UNDP Enterprise Application "UNDP Future Trends and Signals System" +- App ID: `4b179bfc-6621-409a-a1ed-ad141c12eb11` +- Tenant ID: `b3e5db5e-2944-4837-99f5-7488ace54319` (UNDP tenant) +- Client Secret (generated in Azure Portal) +- Mail.Send API permissions granted to the application + +This is the recommended approach for production as it doesn't require user presence and provides a more secure, managed identity for the application. + +#### 2. User Authentication (For Development) + +This method uses the Azure CLI authentication that's already set up on your machine. This is easier for development and testing as it doesn't require setting up app registrations or API credentials. 
+ +Requirements: +- Azure CLI installed and logged in with `az login` +- User must have Mail.Send permissions in Microsoft Graph + + +### Azure AD Enterprise Application Configuration + +To configure the enterprise application for sending emails: + +1. Sign in to the [Azure Portal](https://portal.azure.com) +2. Navigate to "Azure Active Directory" > "App registrations" +3. Search for "UNDP Future Trends and Signals System" (App ID: `4b179bfc-6621-409a-a1ed-ad141c12eb11`) +4. Under "Certificates & secrets", create a new client secret: + - Click "New client secret" + - Provide a description (e.g., "Email Sending Service") + - Set an appropriate expiration (e.g., 1 year, 2 years) + - Copy the generated secret value (only shown once) +5. Under "API permissions", verify the following permissions: + - Microsoft Graph > Application permissions > Mail.Send + - Microsoft Graph > Application permissions > User.Read.All (for accessing user profiles) +6. Ensure admin consent has been granted for these permissions +7. 
Update your `.env.local` file with the client secret + +## Using the Weekly Digest Feature + +### Manual Testing + +To send a test digest email: + +```bash +# Test with enterprise application authentication +python scripts/test_email_direct.py recipient@example.com + +# Test weekly digest +python scripts/send_digest.py --recipients recipient@example.com --days 7 --test +``` + +Parameters: +- `--recipients`: One or more email addresses (space-separated) +- `--days`: Number of days to look back for signals (default: 7) +- `--test`: Adds [TEST] to the email subject + +### Production Scheduling + +For regular weekly emails, set up a cron job or Azure scheduled task: + +```bash +# Example cron job (every Monday at 8am) +0 8 * * 1 /path/to/python /path/to/scripts/send_digest.py --recipients email1@undp.org email2@undp.org +``` + +## Customization + +### Email Templates + +The HTML email template is embedded in the `generate_email_html` method of the `WeeklyDigestService` class. To customize: + +1. Modify the HTML structure in the method +2. Update CSS styles to match UNDP branding guidelines +3. Adjust the content formatting as needed + +### Recipients Management + +Currently, recipients are specified manually when calling the script. Future enhancements could include: + +- Storing recipient lists in the database +- Building a subscription management UI +- Supporting user-specific preferences for digest contents + +## Troubleshooting + +### Permission Issues + +If you encounter "Access Denied" errors when sending emails: + +1. Check that the enterprise application has the necessary Mail.Send permissions +2. Ensure the permissions have been granted admin consent +3. Verify that the sender email matches an email address the application has permission to send from + +### Common Issues and Solutions + +1. **401 Unauthorized Error** + - Check that client secret is valid and not expired + - Ensure TENANT_ID and CLIENT_ID are correct + +2. 
**403 Forbidden Error** + - Check that the enterprise application has been granted proper permissions + - Ensure permissions have been admin consented + - Verify that the sender email has proper mailbox permissions + +3. **Connection Issues** + - Check network connectivity + - Ensure firewall rules allow outbound connections to graph.microsoft.com + +4. **Email Delivery Problems** + - Verify that the sender email address is configured correctly + - Check if the email address has sending limits or restrictions + +For detailed error logging, set `LOGLEVEL=DEBUG` in your environment variables. + +## Planned Enhancements + +### Near-term + +1. Set up scheduled task for automated weekly emails +2. Configure environment variables in production environment +3. Implement more sophisticated email templates + +### Future Enhancements + +1. **Recipient Management** + - Database table for storing subscriber information + - API endpoints for subscribing/unsubscribing + - User preferences for digest frequency and content + +2. **Email Customization** + - Different email templates for different types of notifications + - Personalized content based on user interests or roles + - Multiple language support + +3. **Analytics** + - Tracking email opens and clicks + - Reporting on engagement metrics + - A/B testing of email content and formats \ No newline at end of file diff --git a/docs/sample_digest_email.html b/docs/sample_digest_email.html new file mode 100644 index 0000000..87e480a --- /dev/null +++ b/docs/sample_digest_email.html @@ -0,0 +1,121 @@ + + + + + + UNDP Futures - Weekly Signal Digest + + + +
+

UNDP Futures - Weekly Signal Digest

+

Stay updated with the latest signals from around the world

+
+ +

Hello,

+

Here's your weekly digest of new signals from the UNDP Futures platform. Below are the latest signals that might be of interest:

+ +
+
+

Climate-Resilient Agriculture Technology in East Africa

+
+ Location: Africa + • Source: View Source +
+

New irrigation and seed technologies are enabling farmers in East Africa to adapt to changing rainfall patterns, with early pilots showing up to 40% increase in crop yields during drought conditions.

+
+ agriculture + climate + technology +
+
+ +
+

Digital Identity Systems and Financial Inclusion

+
+ Location: Global + • Source: View Source +
+

Digital identity systems are creating pathways to financial services for previously unbanked populations, with innovative biometric solutions addressing challenges in regions with limited documentation.

+
+ digital + finance + inclusion +
+
+ +
+

Community-Led Waste Management Innovations

+
+ Location: Asia + • Source: View Source +
+

Local communities in Southeast Asia are developing scalable waste management systems that combine traditional knowledge with new recycling technologies, reducing plastic pollution and creating economic opportunities.

+
+ environment + community + innovation +
+
+
+ + + + \ No newline at end of file diff --git a/docs/support_ticket_automated_email.md b/docs/support_ticket_automated_email.md new file mode 100644 index 0000000..efd0988 --- /dev/null +++ b/docs/support_ticket_automated_email.md @@ -0,0 +1,46 @@ +# Support Ticket: Enable Automated Email Sending for Future of Development Platform + +**Subject:** +Enable Automated Email Sending for Future Trends & Signals (Microsoft Graph Application Permissions) + +**Description:** +We are building a feature for the Future of Development platform that sends automated email digests (e.g., weekly summaries, notifications) to users. To do this securely and reliably, we need to configure Microsoft Graph **Application permissions** for our Azure AD app registration, and set up a dedicated internal email account for sending these digests. + +## Requirements + +1. **Azure AD App Registration:** + - App Name: **Future Trends & Signals** + - Client ID: `4b179bfc-6621-409a-a1ed-ad141c12eb11` + - The app must be able to send emails automatically (without manual login) using Microsoft Graph. + +2. **Permissions Needed:** + - Add **Mail.Send** (Application) permission to the app registration. + - Grant **admin consent** for this permission. + +3. **Service Account:** + - Please create or confirm an internal mailbox (e.g., `futureofdevelopment@undp.org`) to be used as the sender for these digests. + - Ensure this mailbox is licensed and can send emails. + +4. **Configuration Steps (for ITU):** + - Go to Azure Portal > Azure Active Directory > App registrations > "Future of Development". + - Under **API permissions**, click **Add a permission** > **Microsoft Graph** > **Application permissions**. + - Add **Mail.Send** (Application). + - Click **Grant admin consent for [Your Org]**. + - Confirm that the mailbox `futureofdevelopment@undp.org` is active and can be used by the app for sending emails. + +5. 
**Verification:** + - After configuration, we will verify by running: + ```sh + az ad app permission list --id 4b179bfc-6621-409a-a1ed-ad141c12eb11 + ``` + - We should see a `"type": "Role"` for Mail.Send. + +## Why This Is Needed +- Delegated permissions require a user to log in interactively, which is not suitable for scheduled/automated jobs. +- Application permissions allow our backend to send emails on a schedule, securely and without manual intervention. + +## What We Need from ITU +- Add and grant the required permissions as described above. +- Confirm the service account is ready and provide any additional configuration details if needed. + +Thank you for your support! If you need more technical details, please see the attached documentation or contact our team. \ No newline at end of file diff --git a/main.py b/main.py index fab43eb..dfd3d79 100644 --- a/main.py +++ b/main.py @@ -22,7 +22,7 @@ setup_logging() # Get application version -app_version = os.environ.get("RELEASE_VERSION", "dev") +app_version = os.environ.get("RELEASE_VERSION", "dev-fixed") app_env = os.environ.get("ENVIRONMENT", "development") # Override environment setting if in local mode if os.environ.get("ENV_MODE") == "local": diff --git a/requirements.txt b/requirements.txt index f3145fe..0b8c731 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,4 +14,7 @@ pillow ~= 11.0.0 beautifulsoup4 ~= 4.12.3 lxml ~= 5.3.0 openai == 1.52.2 -bugsnag>=4.0.0 +azure-identity ~= 1.15.0 +msgraph-core ~= 0.2.2 +pytest-asyncio ~= 0.23.5 +bugsnag>=4.0.0 \ No newline at end of file diff --git a/scripts/run_direct_test.sh b/scripts/run_direct_test.sh new file mode 100755 index 0000000..6af9aef --- /dev/null +++ b/scripts/run_direct_test.sh @@ -0,0 +1,14 @@ +#!/bin/bash +# Script to run the direct email test in the correct virtual environment + +# Change to the project directory +cd "$(dirname "$0")/.." 
+ +# Activate the virtual environment +source venv/bin/activate + +# Run the direct test script +python scripts/test_email_direct.py andrew.maguire@undp.org + +# Deactivate the virtual environment +deactivate \ No newline at end of file diff --git a/scripts/send_digest.py b/scripts/send_digest.py new file mode 100755 index 0000000..e2d41d3 --- /dev/null +++ b/scripts/send_digest.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python +""" +Command-line script to send a weekly digest email. +This script is for manual testing and can be scheduled via cron or other job scheduler. +""" + +import os +import sys +import asyncio +import argparse +import logging +from typing import List + +# Add the parent directory to sys.path to allow importing the app modules +parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) +sys.path.insert(0, parent_dir) + +from src.services.weekly_digest import WeeklyDigestService +from src.services.weekly_digest import Status + +# Set up logging +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + handlers=[ + logging.StreamHandler() + ] +) +logger = logging.getLogger(__name__) + +async def send_weekly_digest(recipients: List[str], days: int = None, test_mode: bool = False, status: list = None, limit: int = None) -> None: + """ + Send a weekly digest email to the specified recipients. + + Parameters + ---------- + recipients : List[str] + List of email addresses to send the digest to. + days : int, optional + Number of days to look back for signals, defaults to None. + test_mode : bool, optional + If True, adds [TEST] to the subject line. + status : list, optional + List of signal statuses to filter by, defaults to None. + limit : int, optional + Maximum number of signals to include, defaults to None. 
+ """ + logger.info(f"Starting weekly digest email send to {recipients}") + + # Create the digest service + digest_service = WeeklyDigestService() + + # Prepare subject with test mode indicator if needed + subject = "UNDP Futures Weekly Digest" + if test_mode: + subject = f"[TEST] {subject}" + + # Map status strings to Status enum if provided + status_enum = None + if status: + status_enum = [Status(s) for s in status] + + # Generate and send the digest + success = await digest_service.generate_and_send_digest( + recipients=recipients, + days=days, + subject=subject, + status=status_enum, + limit=limit + ) + + if success: + logger.info("Weekly digest email sent successfully") + else: + logger.error("Failed to send weekly digest email") + +def main() -> None: + """Parse command line arguments and run the digest email process.""" + parser = argparse.ArgumentParser(description="Send weekly digest email of recent signals") + + parser.add_argument( + "--recipients", + nargs="+", + required=True, + help="Email addresses to send the digest to (space-separated)" + ) + + parser.add_argument( + "--days", + type=int, + default=None, + help="Number of days to look back for signals (optional)" + ) + + parser.add_argument( + "--test", + action="store_true", + help="Run in test mode (adds [TEST] to the subject line)" + ) + + parser.add_argument( + "--status", + nargs="+", + default=None, + help="Signal statuses to filter by (e.g. Draft Approved). Optional." 
+ ) + + parser.add_argument( + "--limit", + type=int, + default=None, + help="Maximum number of signals to include (optional)" + ) + + args = parser.parse_args() + + # Validate email addresses (basic check) + for email in args.recipients: + if "@" not in email: + logger.error(f"Invalid email address: {email}") + sys.exit(1) + + # Run the async function + asyncio.run(send_weekly_digest( + args.recipients, + args.days, + args.test, + args.status, + args.limit + )) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/scripts/send_digest_smtp.py b/scripts/send_digest_smtp.py new file mode 100644 index 0000000..20be7af --- /dev/null +++ b/scripts/send_digest_smtp.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python +""" +Script to send a weekly digest email using SMTP (e.g., Office 365, Gmail). +This is for testing SMTP-based delivery to a distribution list or group. +""" + +import os +import sys +import asyncio +import argparse +import logging +import smtplib +from email.mime.text import MIMEText +from typing import List + +# Add the parent directory to sys.path to allow importing the app modules +parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) +sys.path.insert(0, parent_dir) + +from src.services.weekly_digest import WeeklyDigestService + +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + handlers=[logging.StreamHandler()] +) +logger = logging.getLogger(__name__) + +async def generate_digest_html(days=None, status=None, limit=None): + digest_service = WeeklyDigestService() + signals_list = await digest_service.get_recent_signals(days=days, status=status, limit=limit) + logger.info(f"Fetched {len(signals_list)} signals for digest.") + html_content = digest_service.generate_email_html(signals_list) + return html_content + +def send_email_smtp(smtp_server, smtp_port, username, password, to_emails, subject, html_content): + msg = MIMEText(html_content, 'html') + 
msg['Subject'] = subject + msg['From'] = username + msg['To'] = ', '.join(to_emails) + with smtplib.SMTP(smtp_server, smtp_port) as server: + server.starttls() + server.login(username, password) + server.sendmail(msg['From'], to_emails, msg.as_string()) + logger.info(f"Email sent via SMTP to {to_emails}") + +def main(): + parser = argparse.ArgumentParser(description="Send weekly digest email via SMTP") + parser.add_argument('--recipients', nargs='+', required=True, help="Email addresses to send the digest to (space-separated)") + parser.add_argument('--days', type=int, default=None, help="Number of days to look back for signals (optional)") + parser.add_argument('--status', nargs='+', default=None, help="Signal statuses to filter by (e.g. Draft Approved). Optional.") + parser.add_argument('--limit', type=int, default=None, help="Maximum number of signals to include (optional)") + parser.add_argument('--smtp-server', type=str, default='smtp.office365.com', help="SMTP server address") + parser.add_argument('--smtp-port', type=int, default=587, help="SMTP server port") + parser.add_argument('--smtp-user', type=str, required=True, help="SMTP username (your email)") + parser.add_argument('--smtp-password', type=str, required=True, help="SMTP password (or app password)") + parser.add_argument('--test', action='store_true', help="Run in test mode (adds [TEST] to the subject line)") + args = parser.parse_args() + + subject = "UNDP Futures Weekly Digest" + if args.test: + subject = f"[TEST] {subject}" + + # Validate email addresses + for email in args.recipients: + if "@" not in email: + logger.error(f"Invalid email address: {email}") + sys.exit(1) + + # Map status strings to Status enum if provided + status_enum = None + if args.status: + from src.services.weekly_digest import Status + status_enum = [Status(s) for s in args.status] + + # Generate digest HTML + html_content = asyncio.run(generate_digest_html(days=args.days, status=status_enum, limit=args.limit)) + + # Send 
email via SMTP + send_email_smtp( + smtp_server=args.smtp_server, + smtp_port=args.smtp_port, + username=args.smtp_user, + password=args.smtp_password, + to_emails=args.recipients, + subject=subject, + html_content=html_content + ) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/scripts/test_digest_fetching.py b/scripts/test_digest_fetching.py new file mode 100644 index 0000000..cfbcd56 --- /dev/null +++ b/scripts/test_digest_fetching.py @@ -0,0 +1,180 @@ +#!/usr/bin/env python +""" +Script to test the weekly digest signal fetching and HTML generation without sending emails. +This helps verify that the core digest functionality is working properly. +""" + +import os +import sys +import asyncio +import argparse +import logging +import json +from typing import List +from datetime import datetime + +# Add the parent directory to sys.path to allow importing the app modules +parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) +sys.path.insert(0, parent_dir) + +try: + from dotenv import load_dotenv +except ImportError: + # Define a simple fallback if python-dotenv is not installed + def load_dotenv(path): + print(f"Warning: python-dotenv package not installed, loading environment manually") + if not os.path.exists(path): + return False + with open(path) as f: + for line in f: + line = line.strip() + if not line or line.startswith('#') or '=' not in line: + continue + key, value = line.split('=', 1) + os.environ[key.strip()] = value.strip().strip('"').strip("'") + return True + +# Load environment variables from .env +env_file = os.path.join(parent_dir, '.env') +if os.path.exists(env_file): + load_dotenv(env_file) + print(f"Loaded environment from {env_file}") +else: + print(f"Warning: {env_file} not found") + +# Set up logging +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + handlers=[ + logging.StreamHandler() + ] +) +logger = logging.getLogger(__name__) 
+ +# Import the weekly digest service +from src.services.weekly_digest import WeeklyDigestService + +async def test_digest_fetching(days: int = 7, save_to_file: bool = True, output_path: str = None) -> None: + """ + Test fetching recent signals and generating a digest without sending an email. + + Parameters + ---------- + days : int, optional + Number of days to look back for signals, defaults to 7. + save_to_file : bool, optional + Whether to save the generated HTML to a file, defaults to True. + output_path : str, optional + Path to save the output HTML, defaults to 'digest_output.html' in the current directory. + """ + print("\n=====================================================") + print(f"πŸ” TESTING WEEKLY DIGEST SIGNAL FETCHING") + print(f"Looking back {days} days for signals...") + print("=====================================================\n") + + try: + # Create the digest service + digest_service = WeeklyDigestService() + + # Get recent signals + signals_list = await digest_service.get_recent_signals(days) + + # Print signal count and basic info + if signals_list: + print(f"\nβœ… Successfully retrieved {len(signals_list)} signals from the last {days} days.") + print("\nSignals Summary:") + print("-" * 60) + + for i, signal in enumerate(signals_list, 1): + print(f"{i}. 
{signal.headline}") + print(f" Created: {signal.created_at}") + print(f" Location: {signal.location or 'Global'}") + if hasattr(signal, 'keywords') and signal.keywords: + print(f" Keywords: {', '.join(signal.keywords)}") + print(f" Status: {signal.status}") + print("-" * 60) + + # Generate HTML content + print("\nGenerating HTML digest content...") + html_content = digest_service.generate_email_html(signals_list) + + # Save HTML to file if requested + if save_to_file: + if output_path is None: + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + output_path = os.path.join(os.path.dirname(__file__), f"digest_output_{timestamp}.html") + + with open(output_path, "w", encoding="utf-8") as f: + f.write(html_content) + + print(f"\nβœ… HTML digest content saved to: {output_path}") + print(" You can open this file in a browser to preview the digest email.") + + # Save signals to JSON for debugging + json_path = os.path.join(os.path.dirname(__file__), "signals_data.json") + + signals_data = [] + for signal in signals_list: + # Convert to dict and handle datetime objects for JSON serialization + signal_dict = signal.model_dump() + + # Convert datetime objects to strings + for key, value in signal_dict.items(): + if isinstance(value, datetime): + signal_dict[key] = value.isoformat() + + signals_data.append(signal_dict) + + with open(json_path, "w", encoding="utf-8") as f: + json.dump(signals_data, f, indent=2) + + print(f"πŸ“Š Signal data saved to: {json_path}") + + else: + print(f"\n⚠️ No signals found in the last {days} days.") + print(" This could be because:") + print(" - There are no approved signals in the database") + print(" - The signals were created before the specified time period") + print(" - There might be an issue with the database connection") + + except Exception as e: + import traceback + logger.error(f"Error while testing digest fetching: {str(e)}") + traceback.print_exc() + print(f"\n❌ Error testing digest functionality: {str(e)}") + +def main() -> 
None: + """Parse command line arguments and run the digest test.""" + parser = argparse.ArgumentParser(description="Test weekly digest signal fetching and HTML generation") + + parser.add_argument( + "--days", + type=int, + default=7, + help="Number of days to look back for signals (default: 7)" + ) + + parser.add_argument( + "--no-save", + action="store_true", + help="Don't save the generated HTML to a file" + ) + + parser.add_argument( + "--output", + type=str, + help="Path to save the output HTML (default: digest_output_TIMESTAMP.html in current directory)" + ) + + args = parser.parse_args() + + # Run the async function + asyncio.run(test_digest_fetching( + days=args.days, + save_to_file=not args.no_save, + output_path=args.output + )) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/scripts/test_direct_email.py b/scripts/test_direct_email.py new file mode 100644 index 0000000..d4dc5af --- /dev/null +++ b/scripts/test_direct_email.py @@ -0,0 +1,151 @@ +#!/usr/bin/env python +""" +Direct test script for sending emails using Graph API. +This bypasses the normal email service for testing purposes. 
+""" + +import os +import sys +import asyncio +import logging +from datetime import datetime +from dotenv import load_dotenv + +# Add the parent directory to sys.path +parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) +sys.path.insert(0, parent_dir) + +# Load environment variables +env_file = os.path.join(parent_dir, '.env.local') +if os.path.exists(env_file): + load_dotenv(env_file) + print(f"Loaded environment from {env_file}") +else: + print(f"Warning: {env_file} not found") + +# Import our direct authentication module +from src.services.graph_direct_auth import GraphDirectAuth + +# Set up logging +logging.basicConfig( + level=logging.DEBUG, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + handlers=[ + logging.StreamHandler() + ] +) +logger = logging.getLogger(__name__) + +async def test_direct_email(to_email: str) -> None: + """Send a test email using direct Graph API authentication""" + try: + print(f"\nSending test email to {to_email}...") + + # Get sender email from environment + from_email = os.getenv('MS_FROM_EMAIL', 'exo.futures.curators@undp.org') + + # Create the GraphDirectAuth client + graph_auth = GraphDirectAuth() + + # Create HTML content with current timestamp + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + + html_content = f""" + + + + + + UNDP Futures - Direct Test Email + + + +
+

UNDP Futures - Direct Test Email

+
+ +
+

Direct Graph API Email Test

+

This is a test email sent using direct Graph API authentication.

+

If you're receiving this, it means the email configuration is working!

+

Sent at: {timestamp}

+

Configuration:

+
    +
  • From Email: {from_email}
  • +
  • To Email: {to_email}
  • +
  • Tenant ID: {os.getenv('TENANT_ID')}
  • +
+
+ + + + + """ + + # Send the email + success = await graph_auth.send_email( + from_email=from_email, + to_emails=[to_email], + subject=f"[TEST] UNDP Futures - Direct Email Test ({timestamp})", + content=html_content, + content_type="HTML" + ) + + if success: + print("\n=====================================================") + print(f"βœ… Test email successfully sent to {to_email}!") + print("=====================================================\n") + else: + print("\n=====================================================") + print(f"❌ Failed to send test email to {to_email}") + print("=====================================================\n") + + except Exception as e: + logger.error(f"Error in test_direct_email: {str(e)}", exc_info=True) + print("\n=====================================================") + print(f"❌ Error sending test email: {str(e)}") + print("=====================================================\n") + +def main(): + """Main entry point""" + if len(sys.argv) < 2: + print("Usage: python test_direct_email.py ") + sys.exit(1) + + recipient_email = sys.argv[1] + asyncio.run(test_direct_email(recipient_email)) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/scripts/test_draft_digest.py b/scripts/test_draft_digest.py new file mode 100644 index 0000000..c98bd35 --- /dev/null +++ b/scripts/test_draft_digest.py @@ -0,0 +1,240 @@ +#!/usr/bin/env python +""" +Script to test fetching draft signals and generating a digest. 
+""" + +import os +import sys +import asyncio +import argparse +import logging +import json +from typing import List +from datetime import datetime +import time + +# Add the parent directory to sys.path to allow importing the app modules +parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) +sys.path.insert(0, parent_dir) + +try: + from dotenv import load_dotenv +except ImportError: + # Define a simple fallback if python-dotenv is not installed + def load_dotenv(path): + print(f"Warning: python-dotenv package not installed, loading environment manually") + if not os.path.exists(path): + return False + with open(path) as f: + for line in f: + line = line.strip() + if not line or line.startswith('#') or '=' not in line: + continue + key, value = line.split('=', 1) + os.environ[key.strip()] = value.strip().strip('"').strip("'") + return True + +# Load environment variables from .env +env_file = os.path.join(parent_dir, '.env') +if os.path.exists(env_file): + load_dotenv(env_file) + print(f"Loaded environment from {env_file}") +else: + print(f"Warning: {env_file} not found") + +# Set up logging +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + handlers=[ + logging.StreamHandler() + ] +) +logger = logging.getLogger(__name__) + +# Import the draft digest service +from src.services.draft_digest import DraftDigestService +from src.services.email_factory import create_email_service +from src.entities import Status + +async def test_draft_digest(days: int = 7, save_to_file: bool = True, output_path: str = None, send_email: bool = False, recipient_email: str = "andrew.maguire@undp.org") -> None: + """ + Test fetching DRAFT signals and generating a digest. + + Parameters + ---------- + days : int, optional + Number of days to look back for signals, defaults to 7. + save_to_file : bool, optional + Whether to save the generated HTML to a file, defaults to True. 
+ output_path : str, optional + Path to save the output HTML, defaults to 'draft_digest_output.html' in the current directory. + send_email : bool, optional + Whether to send the digest via email, defaults to False. + recipient_email : str, optional + Email address to send the digest to, defaults to "andrew.maguire@undp.org". + """ + print("\n=====================================================") + print(f"πŸ” TESTING DRAFT SIGNAL DIGEST") + print(f"Looking back {days} days for draft signals...") + print("=====================================================\n") + + try: + # Create the digest service + digest_service = DraftDigestService() + + # Set title + title = "Draft Signals Digest" + + # Get draft signals + signals_list = await digest_service.get_recent_draft_signals(days) + + # Print signal count and basic info + if signals_list: + print(f"\nβœ… Successfully retrieved {len(signals_list)} DRAFT signals from the last {days} days.") + print("\nSignals Summary:") + print("-" * 60) + + for i, signal in enumerate(signals_list, 1): + print(f"{i}. {signal.headline}") + print(f" Created: {signal.created_at}") + print(f" Status: {signal.status}") + print(f" Created by: {getattr(signal, 'created_by', 'Unknown')}") + print(f" Location: {signal.location or 'Global'}") + if hasattr(signal, 'keywords') and signal.keywords: + print(f" Keywords: {', '.join(signal.keywords)}") + print("-" * 60) + + # Generate HTML content + print(f"\nGenerating HTML digest content for draft signals...") + html_content = digest_service.generate_digest_html( + signals_list, + title=title, + intro_text=f"

Here's a digest of draft signals from the last {days} days:

" + ) + + # Save HTML to file if requested + if save_to_file: + if output_path is None: + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + output_path = os.path.join(os.path.dirname(__file__), f"draft_digest_{timestamp}.html") + + with open(output_path, "w", encoding="utf-8") as f: + f.write(html_content) + + print(f"\nβœ… HTML digest content saved to: {output_path}") + print(" You can open this file in a browser to preview the digest.") + + # Save signals to JSON for debugging + json_path = os.path.join(os.path.dirname(__file__), f"draft_signals_data.json") + + signals_data = [] + for signal in signals_list: + # Convert to dict and handle datetime objects for JSON serialization + signal_dict = signal.model_dump() + + # Convert datetime objects to strings + for key, value in signal_dict.items(): + if isinstance(value, datetime): + signal_dict[key] = value.isoformat() + + signals_data.append(signal_dict) + + with open(json_path, "w", encoding="utf-8") as f: + json.dump(signals_data, f, indent=2) + + print(f"πŸ“Š Signal data saved to: {json_path}") + + # Send email if requested + if send_email and html_content: + print(f"\nπŸ“§ Sending draft digest email to {recipient_email}...") + try: + # Create email service + email_service = create_email_service() + + # Generate subject with timestamp + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M") + subject = f"[TEST] UNDP Future Trends - Draft Signals Digest ({timestamp})" + + # Send the email + success = await email_service.send_email( + to_emails=[recipient_email], + subject=subject, + content=html_content, + content_type="text/html" + ) + + if success: + print(f"βœ… Draft digest email successfully sent to {recipient_email}") + else: + print(f"❌ Failed to send draft digest email to {recipient_email}") + + except Exception as e: + logger.error(f"Error sending email: {str(e)}") + print(f"❌ Error sending email: {str(e)}") + + else: + print(f"\n⚠️ No draft signals found in the last {days} days.") + print(" This 
could be because:") + print(f" - There are no draft signals in the database") + print(" - The signals were created before the specified time period") + print(" - There might be an issue with the database connection") + + if send_email: + print("\nπŸ“§ Not sending email because no signals were found.") + + except Exception as e: + import traceback + logger.error(f"Error while testing draft digest: {str(e)}") + traceback.print_exc() + print(f"\n❌ Error testing draft digest functionality: {str(e)}") + +def main() -> None: + """Parse command line arguments and run the draft digest test.""" + parser = argparse.ArgumentParser(description="Test fetching draft signals and generating a digest") + + parser.add_argument( + "--days", + type=int, + default=7, + help="Number of days to look back for signals (default: 7)" + ) + + parser.add_argument( + "--no-save", + action="store_true", + help="Don't save the generated HTML to a file" + ) + + parser.add_argument( + "--output", + type=str, + help="Path to save the output HTML (default: draft_digest_TIMESTAMP.html in current directory)" + ) + + parser.add_argument( + "--email", + action="store_true", + help="Send the digest via email to the specified recipient" + ) + + parser.add_argument( + "--recipient", + type=str, + default="andrew.maguire@undp.org", + help="Email address to send the digest to (default: andrew.maguire@undp.org)" + ) + + args = parser.parse_args() + + # Run the async function + asyncio.run(test_draft_digest( + days=args.days, + save_to_file=not args.no_save, + output_path=args.output, + send_email=args.email, + recipient_email=args.recipient + )) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/scripts/test_email.sh b/scripts/test_email.sh new file mode 100755 index 0000000..230649a --- /dev/null +++ b/scripts/test_email.sh @@ -0,0 +1,14 @@ +#!/bin/bash +# Test script to run the email digest in the correct virtual environment + +# Change to the project directory +cd "$(dirname 
"$0")/.." + +# Activate the virtual environment +source venv/bin/activate + +# Run the digest script with test parameters +python scripts/send_digest.py --recipients andrew.maguire@undp.org --days 14 --test + +# Deactivate the virtual environment +deactivate \ No newline at end of file diff --git a/scripts/test_email_direct.py b/scripts/test_email_direct.py new file mode 100755 index 0000000..29e1357 --- /dev/null +++ b/scripts/test_email_direct.py @@ -0,0 +1,195 @@ +#!/usr/bin/env python +""" +Script to test email sending directly without database interactions. +This is useful for isolating email configuration issues. +""" + +import os +import sys +import asyncio +import logging +import traceback +from typing import List +try: + from dotenv import load_dotenv +except ImportError: + # Define a simple fallback if python-dotenv is not installed + def load_dotenv(path): + print(f"Warning: python-dotenv package not installed, loading environment manually") + if not os.path.exists(path): + return False + with open(path) as f: + for line in f: + line = line.strip() + if not line or line.startswith('#') or '=' not in line: + continue + key, value = line.split('=', 1) + os.environ[key.strip()] = value.strip().strip('"').strip("'") + return True + +# Load environment variables from .env.local if it exists +env_file = os.path.join(os.path.dirname(os.path.dirname(__file__)), '.env.local') +if os.path.exists(env_file): + load_dotenv(env_file) + print(f"Loaded environment from {env_file}") +else: + print(f"Warning: {env_file} not found") + +# Check if required environment variables are set +required_vars = ["MS_FROM_EMAIL", "EMAIL_SERVICE_TYPE", "TENANT_ID", "CLIENT_ID"] +missing_vars = [var for var in required_vars if not os.getenv(var)] +if missing_vars: + print(f"ERROR: The following required environment variables are not set: {', '.join(missing_vars)}") + print("Please check your .env.local file or set them manually.") + sys.exit(1) + +# Add the parent directory to sys.path 
to allow importing the app modules +parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) +sys.path.insert(0, parent_dir) + +# Set up logging +logging.basicConfig( + level=logging.DEBUG, # Using DEBUG level to see more detailed info + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + handlers=[ + logging.StreamHandler() + ] +) +logger = logging.getLogger(__name__) + +async def test_email_sending(to_email: str) -> None: + """ + Send a test email directly using the configured email service. + + Parameters + ---------- + to_email : str + The email address to send the test email to. + """ + logger.info(f"Starting direct email test to {to_email}") + logger.info(f"Using email service type: {os.getenv('EMAIL_SERVICE_TYPE')}") + logger.info(f"From email: {os.getenv('MS_FROM_EMAIL')}") + + # Create the email service + try: + from src.services.email_factory import create_email_service + email_service = create_email_service() + logger.info(f"Email service created: {type(email_service).__name__}") + except Exception as e: + logger.error(f"Failed to create email service: {e}") + traceback.print_exc() + return + + # Create a simple HTML email with timestamp + from datetime import datetime + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + + html_content = f""" + + + + + + UNDP Futures - Email Test + + + +
+

UNDP Futures - Test Email

+
+ +
+

Email Functionality Test

+

This is a test email to verify that the email sending functionality is working correctly.

+

If you're receiving this, it means the email configuration is properly set up!

+

Sent at: {timestamp}

+

Configuration:

+
    +
  • Email Service: {os.getenv('EMAIL_SERVICE_TYPE')}
  • +
  • From Email: {os.getenv('MS_FROM_EMAIL')}
  • +
  • To Email: {to_email}
  • +
+
+ + + + + """ + + # Send the email + try: + logger.info("Attempting to send email...") + success = await email_service.send_email( + to_emails=[to_email], + subject=f"[TEST] UNDP Futures - Email Configuration Test ({os.getenv('EMAIL_SERVICE_TYPE')})", + content=html_content, + content_type="text/html" + ) + + if success: + logger.info("βœ… Test email sent successfully!") + print("\n=====================================================") + print(f"βœ… Test email sent successfully to {to_email}!") + print("=====================================================\n") + else: + logger.error("❌ Failed to send test email") + print("\n=====================================================") + print(f"❌ Failed to send test email to {to_email}") + print("=====================================================\n") + except Exception as e: + logger.error(f"Error sending test email: {e}") + traceback.print_exc() + print("\n=====================================================") + print(f"❌ Error sending test email to {to_email}: {e}") + print("=====================================================\n") + +def main() -> None: + """Parse command line arguments and run the email test.""" + if len(sys.argv) < 2: + print("Usage: python test_email_direct.py ") + sys.exit(1) + + recipient_email = sys.argv[1] + + # Validate email address (basic check) + if "@" not in recipient_email: + logger.error(f"Invalid email address: {recipient_email}") + sys.exit(1) + + # Run the async function + asyncio.run(test_email_sending(recipient_email)) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/scripts/test_enterprise_email.py b/scripts/test_enterprise_email.py new file mode 100755 index 0000000..654cd6e --- /dev/null +++ b/scripts/test_enterprise_email.py @@ -0,0 +1,220 @@ +#!/usr/bin/env python +""" +Script to test email sending using enterprise application authentication. +This is useful for verifying that the enterprise app credentials are properly set up. 
+""" + +import os +import sys +import asyncio +import logging +import traceback +from typing import List +from datetime import datetime +try: + from dotenv import load_dotenv +except ImportError: + # Define a simple fallback if python-dotenv is not installed + def load_dotenv(path): + print(f"Warning: python-dotenv package not installed, loading environment manually") + if not os.path.exists(path): + return False + with open(path) as f: + for line in f: + line = line.strip() + if not line or line.startswith('#') or '=' not in line: + continue + key, value = line.split('=', 1) + os.environ[key.strip()] = value.strip().strip('"').strip("'") + return True + +# Load environment variables from .env.local if it exists +env_file = os.path.join(os.path.dirname(os.path.dirname(__file__)), '.env.local') +if os.path.exists(env_file): + load_dotenv(env_file) + print(f"Loaded environment from {env_file}") +else: + print(f"Warning: {env_file} not found") + +# Check if required environment variables are set for enterprise application authentication +required_vars = ["MS_FROM_EMAIL", "TENANT_ID", "CLIENT_ID", "CLIENT_SECRET"] +missing_vars = [var for var in required_vars if not os.getenv(var)] +if missing_vars: + print(f"ERROR: The following required environment variables are not set: {', '.join(missing_vars)}") + print("These are required for enterprise application authentication.") + print("Please check your .env.local file and ensure you've generated a CLIENT_SECRET in the Azure Portal.") + sys.exit(1) + +# Set EMAIL_SERVICE_TYPE to ms_graph to force using the enterprise app authentication +os.environ["EMAIL_SERVICE_TYPE"] = "ms_graph" +print("Force setting EMAIL_SERVICE_TYPE to 'ms_graph' for enterprise application authentication") + +# Add the parent directory to sys.path to allow importing the app modules +parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) +sys.path.insert(0, parent_dir) + +# Set up logging +logging.basicConfig( + 
level=logging.DEBUG, # Using DEBUG level to see more detailed info + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + handlers=[ + logging.StreamHandler() + ] +) +logger = logging.getLogger(__name__) + +async def test_enterprise_email(to_email: str) -> None: + """ + Send a test email using the enterprise application authentication. + + Parameters + ---------- + to_email : str + The email address to send the test email to. + """ + logger.info("="*80) + logger.info("ENTERPRISE APPLICATION EMAIL TEST") + logger.info("="*80) + logger.info(f"Starting enterprise authentication email test to {to_email}") + logger.info(f"From email: {os.getenv('MS_FROM_EMAIL')}") + logger.info(f"Tenant ID: {os.getenv('TENANT_ID')}") + logger.info(f"Client ID: {os.getenv('CLIENT_ID')}") + logger.info(f"Client Secret: {'*' * 8} (hidden for security)") + + # Create the email service + try: + from src.services.email_factory import create_email_service + from src.services.msgraph_service import MSGraphEmailService + + email_service = create_email_service() + + # Verify that we got an MSGraphEmailService instance + if not isinstance(email_service, MSGraphEmailService): + logger.error(f"Expected MSGraphEmailService, but got {type(email_service).__name__}") + print("\n=====================================================") + print("❌ Wrong email service type created. Check EMAIL_SERVICE_TYPE setting.") + print("=====================================================\n") + return + + logger.info(f"Email service created: {type(email_service).__name__}") + except Exception as e: + logger.error(f"Failed to create email service: {e}") + traceback.print_exc() + return + + # Create a simple HTML email with timestamp + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + + html_content = f""" + + + + + + UNDP Futures - Enterprise App Email Test + + + +
+

UNDP Futures - Enterprise App Test

+
+ +
+

Enterprise Application Email Test

+

This is a test email to verify that email sending via enterprise application authentication is working correctly.

+

If you're receiving this, it means the enterprise application credentials are properly set up!

+

Sent at: {timestamp}

+

Configuration:

+
    +
  • Authentication: Enterprise Application
  • +
  • App Name: UNDP Future Trends and Signals System
  • +
  • App ID: {os.getenv('CLIENT_ID')}
  • +
  • From Email: {os.getenv('MS_FROM_EMAIL')}
  • +
  • To Email: {to_email}
  • +
+
+ + + + + """ + + # Send the email + try: + logger.info("Attempting to send email using enterprise application authentication...") + success = await email_service.send_email( + to_emails=[to_email], + subject=f"[TEST] UNDP Futures - Enterprise Application Email Test", + content=html_content, + content_type="text/html" + ) + + if success: + logger.info("βœ… Test email sent successfully using enterprise application authentication!") + print("\n=====================================================") + print(f"βœ… Test email sent successfully to {to_email}!") + print("The enterprise application authentication is working correctly.") + print("=====================================================\n") + else: + logger.error("❌ Failed to send test email using enterprise application authentication") + print("\n=====================================================") + print(f"❌ Failed to send test email to {to_email}") + print("Check the logs for more details.") + print("=====================================================\n") + except Exception as e: + logger.error(f"Error sending test email: {e}") + traceback.print_exc() + print("\n=====================================================") + print(f"❌ Error sending test email to {to_email}: {e}") + print("=====================================================\n") + +def main() -> None: + """Parse command line arguments and run the enterprise email test.""" + if len(sys.argv) < 2: + print("Usage: python test_enterprise_email.py ") + sys.exit(1) + + recipient_email = sys.argv[1] + + # Validate email address (basic check) + if "@" not in recipient_email: + logger.error(f"Invalid email address: {recipient_email}") + sys.exit(1) + + # Run the async function + asyncio.run(test_enterprise_email(recipient_email)) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/scripts/test_user_auth_email.py b/scripts/test_user_auth_email.py new file mode 100755 index 0000000..cea77d0 --- /dev/null +++ 
b/scripts/test_user_auth_email.py @@ -0,0 +1,147 @@ +#!/usr/bin/env python +""" +Test script for sending emails using user authentication with Azure CLI. +""" + +import os +import sys +import asyncio +import logging +from datetime import datetime +from dotenv import load_dotenv + +# Add the parent directory to sys.path +parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) +sys.path.insert(0, parent_dir) + +# Load environment variables +env_file = os.path.join(parent_dir, '.env.local') +if os.path.exists(env_file): + load_dotenv(env_file) + print(f"Loaded environment from {env_file}") +else: + print(f"Warning: {env_file} not found") + +# Set up logging +logging.basicConfig( + level=logging.DEBUG, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + handlers=[ + logging.StreamHandler() + ] +) +logger = logging.getLogger(__name__) + +async def test_email(to_email: str) -> None: + """Send a test email using the email service factory""" + try: + from src.services.email_factory import create_email_service + + print(f"\nSending test email to {to_email}...") + + # Create the email service + email_service = create_email_service() + logger.info(f"Email service created: {type(email_service).__name__}") + + # Create HTML content with current timestamp + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + + html_content = f""" + + + + + + UNDP Futures - User Auth Test Email + + + +
+

UNDP Futures - User Auth Test Email

+
+ +
+

User Authentication Email Test

+

This is a test email sent using Azure CLI user authentication.

+

If you're receiving this, it means the email configuration is working!

+

Sent at: {timestamp}

+

Configuration:

+
    +
  • From Email: {os.getenv('MS_FROM_EMAIL')}
  • +
  • User Email: {os.getenv('USER_EMAIL')}
  • +
  • To Email: {to_email}
  • +
  • Tenant ID: {os.getenv('TENANT_ID')}
  • +
+
+ + + + + """ + + # Send the email + success = await email_service.send_email( + to_emails=[to_email], + subject=f"[TEST] UNDP Futures - User Auth Email Test ({timestamp})", + content=html_content, + content_type="text/html" + ) + + if success: + print("\n=====================================================") + print(f"βœ… Test email successfully sent to {to_email}!") + print("=====================================================\n") + else: + print("\n=====================================================") + print(f"❌ Failed to send test email to {to_email}") + print("=====================================================\n") + + except Exception as e: + logger.error(f"Error in test_email: {str(e)}", exc_info=True) + print("\n=====================================================") + print(f"❌ Error sending test email: {str(e)}") + print("=====================================================\n") + +def main(): + """Main entry point""" + if len(sys.argv) < 2: + print("Usage: python test_user_auth_email.py ") + sys.exit(1) + + recipient_email = sys.argv[1] + asyncio.run(test_email(recipient_email)) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/src/authentication.py b/src/authentication.py index 89ba2ea..9ff3a4f 100644 --- a/src/authentication.py +++ b/src/authentication.py @@ -11,6 +11,7 @@ from fastapi import Depends, Security from fastapi.security import APIKeyHeader from psycopg import AsyncCursor +import psycopg.errors from . import database as db from . 
import exceptions @@ -204,5 +205,14 @@ async def authenticate_user( if (user := await db.read_user_by_email(cursor, email_str)) is None: user = User(email=email_str, role=Role.USER, name=name_str) - await db.create_user(cursor, user) + try: + await db.create_user(cursor, user) + except psycopg.errors.UniqueViolation: + # User was created by another request in the meantime, fetch the existing user + logging.info(f"User {email_str} already exists, fetching existing user") + user = await db.read_user_by_email(cursor, email_str) + if user is None: + # This should not happen, but handle it gracefully + logging.error(f"Failed to fetch user {email_str} after UniqueViolation") + raise exceptions.not_authenticated return user diff --git a/src/database/signals.py b/src/database/signals.py index 5a64ba6..44256d9 100644 --- a/src/database/signals.py +++ b/src/database/signals.py @@ -92,12 +92,7 @@ async def search_signals(cursor: AsyncCursor, filters: SignalFilters) -> SignalP AND (%(score)s IS NULL OR score = %(score)s) AND (%(unit)s IS NULL OR unit_region = %(unit)s OR unit_name = %(unit)s) AND (%(query)s IS NULL OR text_search_field @@ websearch_to_tsquery('english', %(query)s)) - AND (%(user_email)s IS NOT NULL AND ( - private = FALSE OR - created_by = %(user_email)s OR - %(is_admin)s = TRUE OR - %(is_staff)s = TRUE - )) + AND private = FALSE ORDER BY {filters.order_by} {filters.direction} OFFSET diff --git a/src/database/trends.py b/src/database/trends.py index 1b914da..a784bb3 100644 --- a/src/database/trends.py +++ b/src/database/trends.py @@ -13,6 +13,7 @@ "read_trend", "update_trend", "delete_trend", + "list_trends", ] @@ -283,3 +284,39 @@ async def delete_trend(cursor: AsyncCursor, uid: int) -> Trend | None: if trend.attachment is not None: await storage.delete_image(entity_id=trend.id, folder_name="trends") return trend + + +async def list_trends(cursor: AsyncCursor) -> list[Trend]: + """ + Retrieve all trends from the database, including connected signals. 
+ + Parameters + ---------- + cursor : AsyncCursor + An async database cursor. + + Returns + ------- + list[Trend] + A list of all trends in the database. + """ + query = """ + SELECT + * + FROM + trends AS t + LEFT OUTER JOIN ( + SELECT + trend_id, array_agg(signal_id) AS connected_signals + FROM + connections + GROUP BY + trend_id + ) AS c + ON + t.id = c.trend_id + ORDER BY t.id; + """ + await cursor.execute(query) + rows = await cursor.fetchall() + return [Trend(**row) for row in rows] diff --git a/src/routers/__init__.py b/src/routers/__init__.py index 78d60dc..47ceee4 100644 --- a/src/routers/__init__.py +++ b/src/routers/__init__.py @@ -8,6 +8,7 @@ from .trends import router as trend_router from .users import router as user_router from .user_groups import router as user_group_router +from .email import router as email_router ALL = [ choice_router, @@ -16,4 +17,5 @@ trend_router, user_router, user_group_router, + email_router, ] diff --git a/src/routers/email.py b/src/routers/email.py new file mode 100644 index 0000000..1fc627d --- /dev/null +++ b/src/routers/email.py @@ -0,0 +1,118 @@ +""" +Router for email-related endpoints. 
+""" + +from typing import List + +from fastapi import APIRouter, Depends, HTTPException +from pydantic import BaseModel, EmailStr + +from ..dependencies import require_admin +from ..entities import User +from ..services.email_factory import create_email_service +from ..authentication import authenticate_user + +router = APIRouter(prefix="/email", tags=["email"]) + +# Request models +class EmailRequest(BaseModel): + to_emails: List[EmailStr] + subject: str + content: str + content_type: str = "text/plain" + +class NotificationRequest(BaseModel): + to_email: EmailStr + subject: str + template_id: str + dynamic_data: dict + +class DigestRequest(BaseModel): + days: int | None = None + status: List[str] | None = None + limit: int | None = None + test: bool = False + +# Lazy email service initialization +def get_email_service(): + """Get email service instance. Created on first use to avoid startup errors.""" + return create_email_service() + +@router.post("/send", dependencies=[Depends(require_admin)]) +async def send_email(request: EmailRequest): + """ + Send an email to multiple recipients. + Only accessible by admin users. + """ + email_service = get_email_service() + success = await email_service.send_email( + to_emails=request.to_emails, + subject=request.subject, + content=request.content, + content_type=request.content_type + ) + + if not success: + raise HTTPException(status_code=500, detail="Failed to send email") + + return {"message": "Email sent successfully"} + +@router.post("/notify", dependencies=[Depends(require_admin)]) +async def send_notification(request: NotificationRequest): + """ + Send a templated notification email. + Only accessible by admin users. 
+ """ + email_service = get_email_service() + success = await email_service.send_notification_email( + to_email=request.to_email, + subject=request.subject, + template_id=request.template_id, + dynamic_data=request.dynamic_data + ) + + if not success: + raise HTTPException(status_code=500, detail="Failed to send notification") + + return {"message": "Notification sent successfully"} + +@router.post("/digest") +async def trigger_digest(request: DigestRequest, user: User = Depends(authenticate_user)): + """ + Trigger the email digest process as the authenticated user (delegated permissions). + Only sends to the hardcoded cdo.curators@undp.org address. + """ + from src.services.weekly_digest import WeeklyDigestService, Status + import logging + import asyncio + + logger = logging.getLogger(__name__) + curator_email = "cdo.curators@undp.org" + logger.info(f"User {user.email} is triggering a digest email to {curator_email}") + + # Map status strings to Status enum if provided + status_enum = None + if request.status: + status_enum = [Status(s) for s in request.status] + + digest_service = WeeklyDigestService() + subject = "UNDP Futures Weekly Digest" + if request.test: + subject = f"[TEST] {subject}" + + # Generate signals and HTML + signals_list = await digest_service.get_recent_signals(days=request.days, status=status_enum, limit=request.limit) + html_content = digest_service.generate_email_html(signals_list) + + # Use user access token for this endpoint + email_service = create_email_service(useUserAccessToken=True) + success = await email_service.send_email( + to_emails=[curator_email], + subject=subject, + content=html_content, + content_type="text/html", + useUserAccessToken=True + ) + if not success: + raise HTTPException(status_code=500, detail="Failed to send digest email") + return {"message": "Digest email sent successfully"} \ No newline at end of file diff --git a/src/routers/trends.py b/src/routers/trends.py index d85bda2..e894d31 100644 --- 
a/src/routers/trends.py +++ b/src/routers/trends.py @@ -17,6 +17,17 @@ router = APIRouter(prefix="/trends", tags=["trends"]) +@router.get("") +async def get_all_trends( + user: User = Depends(authenticate_user), + cursor: AsyncCursor = Depends(db.yield_cursor), +): + """ + Retrieve all trends from the database. Requires authentication. + """ + trends = await db.list_trends(cursor) + return trends + @router.get("/search", response_model=TrendPage) async def search_trends( filters: Annotated[TrendFilters, Query()], @@ -65,6 +76,8 @@ async def create_trend( return await db.read_trend(cursor, trend_id) + + @router.get("/{uid}", response_model=Trend) async def read_trend( uid: Annotated[int, Path(description="The ID of the trend to retrieve")], diff --git a/src/services/draft_digest.py b/src/services/draft_digest.py new file mode 100644 index 0000000..e3109ca --- /dev/null +++ b/src/services/draft_digest.py @@ -0,0 +1,389 @@ +""" +Service for generating digests of draft signals. +This is a specialized version of the weekly digest service that focuses on draft signals. +""" + +import logging +import datetime +from typing import List, Dict, Any, Optional +from datetime import timedelta + +from ..entities import Signal, SignalFilters, Status +from ..database import signals, connection + +logger = logging.getLogger(__name__) + +class DraftDigestService: + """Service class for generating digests of draft signals.""" + + def __init__(self): + """Initialize the draft digest service.""" + pass + + async def get_recent_draft_signals(self, days: int = 7) -> List[Signal]: + """ + Get draft signals created in the last specified number of days. + + Parameters + ---------- + days : int, optional + The number of days to look back, defaults to 7 days. + + Returns + ------- + List[Signal] + A list of draft signals created in the specified period. 
+ """ + logger.info(f"Getting draft signals from the last {days} days") + + # Calculate date range + end_date = datetime.datetime.now() + start_date = end_date - timedelta(days=days) + start_date_str = start_date.strftime("%Y-%m-%d") + + logger.info(f"Date range: {start_date_str} to {end_date.strftime('%Y-%m-%d')}") + + # Create signal filters - specifically for DRAFT status + filters = SignalFilters( + statuses=[Status.DRAFT], # Only draft signals + # We'll filter by created_at in SQL directly + limit=100 # Limit the number of signals + ) + + # Use a DB connection to fetch signals + async with await connection.get_connection() as conn: + async with conn.cursor() as cursor: + # Get signals created after start_date + query = f""" + SELECT + *, COUNT(*) OVER() AS total_count + FROM + signals AS s + LEFT OUTER JOIN ( + SELECT + signal_id, array_agg(trend_id) AS connected_trends + FROM + connections + GROUP BY + signal_id + ) AS c + ON + s.id = c.signal_id + LEFT OUTER JOIN ( + SELECT + name AS unit_name, + region AS unit_region + FROM + units + ) AS u + ON + s.created_unit = u.unit_name + LEFT OUTER JOIN ( + SELECT + name AS location, + region AS location_region, + bureau AS location_bureau + FROM + locations + ) AS l + ON + s.location = l.location + WHERE + status = ANY(%(statuses)s) + AND created_at >= %(start_date)s + ORDER BY + created_at DESC + LIMIT + %(limit)s + ; + """ + + # Add start_date parameter to the filters + filter_params = filters.model_dump() + filter_params['start_date'] = start_date_str + + await cursor.execute(query, filter_params) + rows = await cursor.fetchall() + + signals_list = [Signal(**row) for row in rows] + + logger.info(f"Found {len(signals_list)} draft signals from the last {days} days") + return signals_list + + async def get_signals_by_status(self, statuses: List[Status], days: int = 7) -> List[Signal]: + """ + Get signals with specified statuses created in the last specified number of days. 
+ + Parameters + ---------- + statuses : List[Status] + List of statuses to filter by (e.g., [Status.DRAFT, Status.PENDING]) + days : int, optional + The number of days to look back, defaults to 7 days. + + Returns + ------- + List[Signal] + A list of signals with the specified statuses created in the specified period. + """ + logger.info(f"Getting signals with statuses {statuses} from the last {days} days") + + # Calculate date range + end_date = datetime.datetime.now() + start_date = end_date - timedelta(days=days) + start_date_str = start_date.strftime("%Y-%m-%d") + + logger.info(f"Date range: {start_date_str} to {end_date.strftime('%Y-%m-%d')}") + + # Create signal filters with the specified statuses + filters = SignalFilters( + statuses=statuses, + # We'll filter by created_at in SQL directly + limit=100 # Limit the number of signals + ) + + # Use a DB connection to fetch signals + async with await connection.get_connection() as conn: + async with conn.cursor() as cursor: + # Get signals created after start_date + query = f""" + SELECT + *, COUNT(*) OVER() AS total_count + FROM + signals AS s + LEFT OUTER JOIN ( + SELECT + signal_id, array_agg(trend_id) AS connected_trends + FROM + connections + GROUP BY + signal_id + ) AS c + ON + s.id = c.signal_id + LEFT OUTER JOIN ( + SELECT + name AS unit_name, + region AS unit_region + FROM + units + ) AS u + ON + s.created_unit = u.unit_name + LEFT OUTER JOIN ( + SELECT + name AS location, + region AS location_region, + bureau AS location_bureau + FROM + locations + ) AS l + ON + s.location = l.location + WHERE + status = ANY(%(statuses)s) + AND created_at >= %(start_date)s + ORDER BY + created_at DESC + LIMIT + %(limit)s + ; + """ + + # Add start_date parameter to the filters + filter_params = filters.model_dump() + filter_params['start_date'] = start_date_str + + await cursor.execute(query, filter_params) + rows = await cursor.fetchall() + + signals_list = [Signal(**row) for row in rows] + + status_names = [s.value 
for s in statuses] + logger.info(f"Found {len(signals_list)} signals with statuses {status_names} from the last {days} days") + return signals_list + + def generate_digest_html(self, signals_list: List[Signal], intro_text: Optional[str] = None, title: str = "Signal Digest") -> str: + """ + Generate HTML content for the digest. + + Parameters + ---------- + signals_list : List[Signal] + List of signals to include in the digest. + intro_text : Optional[str] + Optional custom introduction text. + title : str + Title for the digest page. + + Returns + ------- + str + HTML content for the digest. + """ + if not signals_list: + logger.warning("No signals to include in digest") + return "

<p>No signals were found for this period.</p>

" + + default_intro = """ +

<p>Here's a digest of signals from the UNDP Futures platform.
+        Below are the latest signals:</p>

+ """ + + intro = intro_text or default_intro + + html = f""" + + + + + + UNDP Futures - {title} + + + +
+        <div class="container">
+            <h1>UNDP Futures - {title}</h1>
+            <p>Signals from the UNDP Futures platform</p>
+            <p>Generated on {datetime.datetime.now().strftime("%Y-%m-%d %H:%M")}</p>
+        </div>
+ + {intro} + +
+ """ + + # Add each signal to the HTML + for signal in signals_list: + keywords_html = "" + if signal.keywords: + keywords_html = " ".join([f'{k}' for k in signal.keywords]) + + location_text = signal.location or "Global" + + # Add status class + status_class = f"status-{signal.status.lower()}" if hasattr(signal, 'status') else "" + status_text = signal.status.capitalize() if hasattr(signal, 'status') else "Unknown" + + # Format created date + created_date = "" + if hasattr(signal, 'created_at') and signal.created_at: + if isinstance(signal.created_at, str): + created_date = signal.created_at + else: + try: + created_date = signal.created_at.strftime("%Y-%m-%d") + except: + created_date = str(signal.created_at) + + html += f""" +
+

{signal.headline} {status_text}

+
+ Location: {location_text} + {f'β€’ Source: View Source' if hasattr(signal, 'url') and signal.url else ''} + {f'β€’ Created: {created_date}' if created_date else ''} + {f'β€’ Created by: {signal.created_by}' if hasattr(signal, 'created_by') and signal.created_by else ''} +
+

{signal.description}

+
+ {keywords_html} +
+
+ """ + + html += """ +
+ + + + + """ + + return html \ No newline at end of file diff --git a/src/services/email_factory.py b/src/services/email_factory.py new file mode 100644 index 0000000..eac87b7 --- /dev/null +++ b/src/services/email_factory.py @@ -0,0 +1,40 @@ +""" +Factory for creating email service instances. +""" + +import os +import logging +from typing import Optional + +from .email_service import EmailServiceBase +from .msgraph_service import MSGraphEmailService +from .user_auth_service import UserAuthEmailService + +logger = logging.getLogger(__name__) + +# Email service types +MS_GRAPH = "ms_graph" +USER_AUTH = "user_auth" + +# Default to USER_AUTH with Azure CLI authentication +DEFAULT_EMAIL_SERVICE = USER_AUTH + +def create_email_service(useUserAccessToken: bool = False) -> EmailServiceBase: + """ + Factory function to create an email service instance based on configuration. + Accepts useUserAccessToken to control delegated vs app auth. + + Returns: + EmailServiceBase: An instance of the configured email service. + """ + service_type = os.getenv("EMAIL_SERVICE_TYPE", DEFAULT_EMAIL_SERVICE).lower() + + logger.info(f"Creating email service of type: {service_type} (useUserAccessToken={useUserAccessToken})") + + if service_type == MS_GRAPH: + return MSGraphEmailService(useUserAccessToken=useUserAccessToken) + elif service_type == USER_AUTH: + return UserAuthEmailService() + else: + logger.warning(f"Unknown email service type: {service_type}. Defaulting to {DEFAULT_EMAIL_SERVICE}") + return UserAuthEmailService() \ No newline at end of file diff --git a/src/services/email_service.py b/src/services/email_service.py new file mode 100644 index 0000000..9565091 --- /dev/null +++ b/src/services/email_service.py @@ -0,0 +1,36 @@ +""" +Base email service interface. 
+""" + +import abc +import logging +from typing import Any, Dict, List + +logger = logging.getLogger(__name__) + +class EmailServiceBase(abc.ABC): + """Abstract base class for email services""" + + @abc.abstractmethod + async def send_email( + self, + to_emails: List[str], + subject: str, + content: str, + content_type: str = "text/plain", + useUserAccessToken: bool = False + ) -> bool: + """Send an email to multiple recipients""" + pass + + @abc.abstractmethod + async def send_notification_email( + self, + to_email: str, + subject: str, + template_id: str, + dynamic_data: Dict[str, Any], + useUserAccessToken: bool = False + ) -> bool: + """Send a templated notification email""" + pass \ No newline at end of file diff --git a/src/services/graph_direct_auth.py b/src/services/graph_direct_auth.py new file mode 100644 index 0000000..e8c3165 --- /dev/null +++ b/src/services/graph_direct_auth.py @@ -0,0 +1,132 @@ +""" +Direct authentication approach for Graph API using user credentials. +This is a simplified version for testing purposes. +""" + +import os +import httpx +import logging +import asyncio +import json +from typing import Dict, List, Any, Optional + +logger = logging.getLogger(__name__) + +class GraphDirectAuth: + """Direct authentication for Graph API using user credentials""" + + def __init__(self): + self.token = None + self.token_expires = 0 + self.tenant_id = os.getenv('TENANT_ID') + self.client_id = os.getenv('CLIENT_ID') + self.client_secret = os.getenv('CLIENT_SECRET') + + if not all([self.tenant_id, self.client_id, self.client_secret]): + logger.warning("Missing required environment variables for Graph authentication. 
Service will not be available.") + self.configured = False + return + + self.configured = True + + self.token_url = f"https://login.microsoftonline.com/{self.tenant_id}/oauth2/v2.0/token" + self.graph_url = "https://graph.microsoft.com/v1.0" + + async def ensure_token(self) -> str: + """Ensure we have a valid token, refreshing if necessary""" + if not getattr(self, 'configured', False): + raise ValueError("GraphDirectAuth not properly configured - missing environment variables") + + current_time = asyncio.get_event_loop().time() + + # If token is expired or will expire in the next 5 minutes, refresh it + if not self.token or current_time > (self.token_expires - 300): + await self.refresh_token() + + return self.token + + async def refresh_token(self) -> None: + """Get a new access token using client credentials flow""" + try: + data = { + 'grant_type': 'client_credentials', + 'client_id': self.client_id, + 'client_secret': self.client_secret, + 'scope': 'https://graph.microsoft.com/.default' + } + + headers = { + 'Content-Type': 'application/x-www-form-urlencoded' + } + + async with httpx.AsyncClient() as client: + response = await client.post(self.token_url, data=data, headers=headers) + + if response.status_code != 200: + logger.error(f"Failed to get token: {response.status_code}, {response.text}") + raise Exception(f"Failed to get token: {response.status_code}") + + token_data = response.json() + self.token = token_data['access_token'] + + # Calculate token expiration time (convert expires_in from seconds to epoch time) + current_time = asyncio.get_event_loop().time() + self.token_expires = current_time + token_data['expires_in'] + + logger.info(f"Token refreshed, expires in {token_data['expires_in']} seconds") + + except Exception as e: + logger.error(f"Error refreshing token: {str(e)}") + raise + + async def send_email(self, from_email: str, to_emails: List[str], subject: str, + content: str, content_type: str = "HTML") -> bool: + """Send an email using Graph 
API""" + try: + token = await self.ensure_token() + + # Prepare the email message + message = { + "message": { + "subject": subject, + "body": { + "contentType": content_type, + "content": content + }, + "toRecipients": [ + { + "emailAddress": { + "address": email + } + } for email in to_emails + ], + "from": { + "emailAddress": { + "address": from_email + } + } + }, + "saveToSentItems": "true" + } + + headers = { + 'Authorization': f'Bearer {token}', + 'Content-Type': 'application/json' + } + + # Use /users/{from_email} instead of /me to send as that user + endpoint = f"{self.graph_url}/users/{from_email}/sendMail" + + async with httpx.AsyncClient() as client: + response = await client.post(endpoint, json=message, headers=headers) + + if response.status_code in [200, 201, 202, 204]: + logger.info(f"Email sent successfully: {response.status_code}") + return True + else: + logger.error(f"Failed to send email: {response.status_code}, {response.text}") + return False + + except Exception as e: + logger.error(f"Error sending email: {str(e)}") + return False \ No newline at end of file diff --git a/src/services/msgraph_service.py b/src/services/msgraph_service.py new file mode 100644 index 0000000..4e442df --- /dev/null +++ b/src/services/msgraph_service.py @@ -0,0 +1,249 @@ +""" +Microsoft Graph implementation of the email service using Enterprise Application credentials. 
+""" + +import logging +import os +import json +from typing import Any, Dict, List + +from azure.identity import ClientSecretCredential +import httpx + +from .email_service import EmailServiceBase +from .user_auth_service import UserAuthEmailService + +logger = logging.getLogger(__name__) + +# Define the Microsoft Graph scopes +GRAPH_SCOPE = "https://graph.microsoft.com/.default" +GRAPH_ENDPOINT = "https://graph.microsoft.com/v1.0" + +class MSGraphEmailService(EmailServiceBase): + """Service class for handling email operations using Microsoft Graph API""" + + def __init__(self, useUserAccessToken: bool = False): + self.useUserAccessToken = useUserAccessToken + if useUserAccessToken: + self.user_auth_service = UserAuthEmailService() + return + try: + # Get credentials from environment variables + tenant_id = os.getenv('TENANT_ID') + client_id = os.getenv('CLIENT_ID') + client_secret = os.getenv('CLIENT_SECRET') + service_type = os.getenv('EMAIL_SERVICE_TYPE') + self.from_email = os.getenv('MS_FROM_EMAIL') + logger.info(f"MSGraphEmailService config: TENANT_ID={tenant_id}, CLIENT_ID={client_id}, FROM_EMAIL={self.from_email}, EMAIL_SERVICE_TYPE={service_type}") + if not all([tenant_id, client_id, client_secret]): + logger.warning("Missing required environment variables for MSGraph authentication. 
Service will not be available.") + self.credential = None + return + + # Use ClientSecretCredential for app authentication + self.credential = ClientSecretCredential( + tenant_id=tenant_id, + client_id=client_id, + client_secret=client_secret + ) + + if not self.from_email: + logger.error("MS_FROM_EMAIL environment variable is not set") + raise ValueError("Microsoft sender email is required") + + logger.info("MSGraphEmailService initialized successfully with enterprise application credentials") + + except Exception as e: + logger.error(f"Failed to initialize MSGraphEmailService: {str(e)}") + raise + + async def send_email( + self, + to_emails: List[str], + subject: str, + content: str, + content_type: str = "text/plain", + useUserAccessToken: bool = False + ) -> bool: + if getattr(self, 'useUserAccessToken', False): + return await self.user_auth_service.send_email( + to_emails=to_emails, + subject=subject, + content=content, + content_type=content_type, + useUserAccessToken=True + ) + + # Check if service is properly configured + if not hasattr(self, 'credential') or self.credential is None: + logger.error("MSGraphEmailService not properly configured - missing credentials") + return False + + """Send an email using Microsoft Graph API with Mail.Send permission""" + try: + logger.info(f"send_email config: TENANT_ID={os.getenv('TENANT_ID')}, CLIENT_ID={os.getenv('CLIENT_ID')}, FROM_EMAIL={self.from_email}, EMAIL_SERVICE_TYPE={os.getenv('EMAIL_SERVICE_TYPE')}, to_emails={to_emails}, subject={subject}") + logger.info(f"Preparing to send email to {len(to_emails)} recipients") + + # Prepare the email message + message = { + "message": { + "subject": subject, + "body": { + "contentType": "HTML" if content_type.lower() == "text/html" else "Text", + "content": content + }, + "toRecipients": [ + { + "emailAddress": { + "address": email + } + } for email in to_emails + ], + "from": { + "emailAddress": { + "address": self.from_email + } + } + }, + "saveToSentItems": "true" + } 
+ + logger.debug(f"Email content prepared: subject='{subject}', type='{content_type}'") + + # For enterprise applications with app permissions, we send on behalf of a user + # using /users/{user_id}/sendMail instead of /me/sendMail + user_email = self.from_email + + logger.info("Acquiring Microsoft Graph token...") + try: + token = self.credential.get_token(GRAPH_SCOPE) + logger.info("Token acquired successfully.") + except Exception as token_exc: + logger.error(f"Failed to acquire token: {token_exc}", exc_info=True) + return False + + logger.info(f"Sending email via Graph API to /users/{user_email}/sendMail ...") + try: + response = await self._post(f"/users/{user_email}/sendMail", message, token=token) + except Exception as post_exc: + logger.error(f"Exception during HTTP POST to Graph API: {post_exc}", exc_info=True) + return False + + if response.status_code in [200, 201, 202, 204]: + logger.info(f"Email sent successfully: status_code={response.status_code}") + return True + else: + logger.error(f"Failed to send email: status_code={response.status_code}, response={response.text}") + return False + + except Exception as e: + logger.error(f"Error sending email: {str(e)}", exc_info=True) + return False + + async def send_notification_email( + self, + to_email: str, + subject: str, + template_id: str, + dynamic_data: Dict[str, Any], + useUserAccessToken: bool = False + ) -> bool: + if getattr(self, 'useUserAccessToken', False): + return await self.user_auth_service.send_notification_email( + to_email=to_email, + subject=subject, + template_id=template_id, + dynamic_data=dynamic_data, + useUserAccessToken=True + ) + + # Check if service is properly configured + if not hasattr(self, 'credential') or self.credential is None: + logger.error("MSGraphEmailService not properly configured - missing credentials") + return False + + """Send a templated notification email using Microsoft Graph API""" + try: + logger.info(f"Preparing to send notification email to {to_email}") 
+ logger.debug(f"Using template_id: {template_id}") + logger.debug(f"Dynamic data: {dynamic_data}") + + # For Microsoft Graph, we'll need to handle templates differently + # This is a simplified version that just replaces variables in the template + template_content = await self._get_template_content(template_id) + if not template_content: + return False + + # Replace template variables with dynamic data + for key, value in dynamic_data.items(): + template_content = template_content.replace(f"{{{key}}}", str(value)) + + # Send the email using the processed template + message = { + "message": { + "subject": subject, + "body": { + "contentType": "HTML", + "content": template_content + }, + "toRecipients": [ + { + "emailAddress": { + "address": to_email + } + } + ], + "from": { + "emailAddress": { + "address": self.from_email + } + } + }, + "saveToSentItems": "true" + } + + # For enterprise applications with app permissions, we send on behalf of a user + user_email = self.from_email + + # Send the email + response = await self._post(f"/users/{user_email}/sendMail", message) + + if response.status_code in [200, 201, 202, 204]: + logger.info(f"Notification email sent successfully: status_code={response.status_code}") + return True + else: + logger.error(f"Failed to send notification email: status_code={response.status_code}, response={response.text}") + return False + + except Exception as e: + logger.error(f"Error sending notification email: {str(e)}", exc_info=True) + return False + + async def _post(self, endpoint: str, data: dict, token=None) -> httpx.Response: + """Helper method to make a POST request to the Graph API""" + try: + if token is None: + logger.info("Acquiring token inside _post (should be passed from send_email)...") + token = self.credential.get_token(GRAPH_SCOPE) + headers = { + "Authorization": f"Bearer {token.token}", + "Content-Type": "application/json" + } + url = f"{GRAPH_ENDPOINT}{endpoint}" + logger.info(f"Making HTTP POST to {url}") + async 
with httpx.AsyncClient() as client: + response = await client.post(url, headers=headers, json=data) + logger.info(f"HTTP POST completed with status {response.status_code}") + return response + except Exception as e: + logger.error(f"Error in _post method: {str(e)}", exc_info=True) + raise + + async def _get_template_content(self, template_id: str) -> str: + """ + Get the template content from Azure storage or other source. + This is a placeholder - implement based on where templates are stored. + """ + # TODO: Implement template retrieval from Azure storage or other source + logger.warning("Template retrieval not implemented - using placeholder") + return f"

<h1>Template {template_id}</h1>
<p>This is a placeholder template.</p>

" \ No newline at end of file diff --git a/src/services/user_auth_service.py b/src/services/user_auth_service.py new file mode 100644 index 0000000..0646191 --- /dev/null +++ b/src/services/user_auth_service.py @@ -0,0 +1,186 @@ +""" +Microsoft Graph implementation using user authentication. +This leverages the existing Azure CLI authentication. +""" + +import logging +import os +import json +import asyncio +import subprocess +from typing import Any, Dict, List + +import httpx + +from .email_service import EmailServiceBase + +logger = logging.getLogger(__name__) + +class UserAuthEmailService(EmailServiceBase): + """Service class for handling email operations using Microsoft Graph API with user auth""" + + def __init__(self): + try: + self.from_email = os.getenv('MS_FROM_EMAIL') + if not self.from_email: + logger.error("MS_FROM_EMAIL environment variable is not set") + raise ValueError("Microsoft sender email is required") + + self.user_email = os.getenv('USER_EMAIL') + if not self.user_email: + logger.error("USER_EMAIL environment variable is not set") + raise ValueError("User email is required") + + # Token cache + self.token = None + self.token_expires = 0 + + logger.info("UserAuthEmailService initialized successfully") + + except Exception as e: + logger.error(f"Failed to initialize UserAuthEmailService: {str(e)}") + raise + + async def _get_token(self) -> str: + """Get an access token using az cli""" + current_time = asyncio.get_event_loop().time() + + # If we have a valid token that won't expire in the next 5 minutes, use it + if self.token and current_time < (self.token_expires - 300): + return self.token + + try: + # Get token using az cli command + cmd = [ + "az", "account", "get-access-token", + "--resource", "https://graph.microsoft.com" + ] + + # Run the command + result = subprocess.run(cmd, capture_output=True, text=True, check=True) + token_info = json.loads(result.stdout) + + # Extract token and expiration + self.token = token_info["accessToken"] + + 
# Calculate token expiration time (timestamp from Azure CLI is already in seconds) + self.token_expires = token_info["expiresOn"] + + logger.info(f"Successfully got access token using Azure CLI. Expires: {self.token_expires}") + return self.token + + except subprocess.CalledProcessError as e: + logger.error(f"Error executing Azure CLI command: {e.stderr}") + raise + except Exception as e: + logger.error(f"Error getting token: {str(e)}") + raise + + async def send_email( + self, + to_emails: List[str], + subject: str, + content: str, + content_type: str = "text/plain", + useUserAccessToken: bool = False + ) -> bool: + # useUserAccessToken is ignored here, always uses user token + try: + logger.info(f"Preparing to send email to {len(to_emails)} recipients") + + # Prepare the email message + message = { + "message": { + "subject": subject, + "body": { + "contentType": "HTML" if content_type.lower() == "text/html" else "Text", + "content": content + }, + "toRecipients": [ + { + "emailAddress": { + "address": email + } + } for email in to_emails + ], + "from": { + "emailAddress": { + "address": self.from_email + } + } + }, + "saveToSentItems": "true" + } + + logger.debug(f"Email content prepared: subject='{subject}', type='{content_type}'") + + # Get token + token = await self._get_token() + + # Send the email using Microsoft Graph API + headers = { + "Authorization": f"Bearer {token}", + "Content-Type": "application/json" + } + + # Use the /me/sendMail endpoint to send from the authenticated user + url = "https://graph.microsoft.com/v1.0/me/sendMail" + + async with httpx.AsyncClient() as client: + response = await client.post(url, json=message, headers=headers) + + if response.status_code in [200, 201, 202, 204]: + logger.info(f"Email sent successfully: status_code={response.status_code}") + return True + else: + logger.error(f"Failed to send email: status_code={response.status_code}, response={response.text}") + return False + + except Exception as e: + 
logger.error(f"Error sending email: {str(e)}", exc_info=True) + return False + + async def send_notification_email( + self, + to_email: str, + subject: str, + template_id: str, + dynamic_data: Dict[str, Any], + useUserAccessToken: bool = False + ) -> bool: + # useUserAccessToken is ignored here, always uses user token + try: + logger.info(f"Preparing to send notification email to {to_email}") + logger.debug(f"Using template_id: {template_id}") + logger.debug(f"Dynamic data: {dynamic_data}") + + # For Microsoft Graph, we'll need to handle templates differently + # This is a simplified version that just replaces variables in the template + template_content = await self._get_template_content(template_id) + if not template_content: + return False + + # Replace template variables with dynamic data + for key, value in dynamic_data.items(): + template_content = template_content.replace(f"{{{key}}}", str(value)) + + # Send the email using the processed template + return await self.send_email( + to_emails=[to_email], + subject=subject, + content=template_content, + content_type="text/html" + ) + + except Exception as e: + logger.error(f"Error sending notification email: {str(e)}", exc_info=True) + return False + + async def _get_template_content(self, template_id: str) -> str: + """ + Get the template content from Azure storage or other source. + This is a placeholder - implement based on where templates are stored. + """ + # TODO: Implement template retrieval from Azure storage or other source + logger.warning("Template retrieval not implemented - using placeholder") + return f"

Template {template_id}

This is a placeholder template.

" \ No newline at end of file diff --git a/src/services/weekly_digest.py b/src/services/weekly_digest.py new file mode 100644 index 0000000..b499173 --- /dev/null +++ b/src/services/weekly_digest.py @@ -0,0 +1,315 @@ +""" +Service for generating and sending weekly digests of signals. +""" + +import logging +import datetime +from typing import List, Dict, Any, Optional +from datetime import timedelta + +from ..entities import Signal, SignalFilters, Status +from ..database import signals, connection + +logger = logging.getLogger(__name__) + +class WeeklyDigestService: + """Service class for generating and sending weekly digests of signals.""" + + def __init__(self): + """Initialize the weekly digest service.""" + pass + + async def get_recent_signals(self, days: Optional[int] = None, status: Optional[List[Status]] = None, limit: Optional[int] = None) -> List[Signal]: + """ + Get signals filtered by optional days, status, and limit. + If no filters are provided, fetch the last 10 draft signals. + """ + logger.info(f"Getting signals with filters - days: {days}, status: {status}, limit: {limit}") + start_time = datetime.datetime.now() + # Set defaults if not provided + if status is None: + status = [Status.DRAFT] + if limit is None: + limit = 10 + # Calculate date range if days is provided + end_date = datetime.datetime.now() + start_date = end_date - timedelta(days=days) if days is not None else None + start_date_str = start_date.strftime("%Y-%m-%d") if start_date else None + logger.info(f"Date range: {start_date_str} to {end_date.strftime('%Y-%m-%d') if start_date else 'ALL'}") + filters = SignalFilters( + statuses=status, + per_page=limit + ) + logger.debug("Opening database connection for signal fetch...") + async with await connection.get_connection() as conn: + logger.debug("Database connection established.") + async with conn.cursor() as cursor: + logger.debug("Cursor opened. 
Preparing to execute signal fetch query...") + query = f""" + SELECT + *, COUNT(*) OVER() AS total_count + FROM + signals AS s + LEFT OUTER JOIN ( + SELECT + signal_id, array_agg(trend_id) AS connected_trends + FROM + connections + GROUP BY + signal_id + ) AS c + ON + s.id = c.signal_id + LEFT OUTER JOIN ( + SELECT + name AS unit_name, + region AS unit_region + FROM + units + ) AS u + ON + s.created_unit = u.unit_name + LEFT OUTER JOIN ( + SELECT + name AS location, + region AS location_region, + bureau AS location_bureau + FROM + locations + ) AS l + ON + s.location = l.location + WHERE + status = ANY(%(statuses)s) + {f'AND created_at >= %(start_date)s' if start_date_str else ''} + ORDER BY + created_at DESC + LIMIT + %(limit)s + ; + """ + filter_params = filters.model_dump() + filter_params['limit'] = limit + if start_date_str: + filter_params['start_date'] = start_date_str + logger.debug(f"Executing query with params: {filter_params}") + await cursor.execute(query, filter_params) + logger.debug("Query executed. Fetching rows...") + rows = await cursor.fetchall() + logger.debug(f"Fetched {len(rows)} rows from database.") + signals_list = [Signal(**dict(row)) for row in rows] + logger.info(f"Found {len(signals_list)} signals with filters - days: {days}, status: {status}, limit: {limit}") + elapsed = (datetime.datetime.now() - start_time).total_seconds() + logger.info(f"Signal fetch took {elapsed:.2f} seconds.") + return signals_list + + def generate_email_html(self, signals_list: List[Signal], intro_text: Optional[str] = None) -> str: + """ + Generate HTML content for the weekly digest email. + + Parameters + ---------- + signals_list : List[Signal] + List of signals to include in the digest. + intro_text : Optional[str] + Optional custom introduction text. + + Returns + ------- + str + HTML content for the email. + """ + if not signals_list: + logger.warning("No signals to include in digest") + return "

No new signals were found for this period.

" + + default_intro = """ +

Hello,

+

Here's your weekly digest of new signals from the UNDP Futures platform. + Below are the latest signals that might be of interest:

+ """ + + intro = intro_text or default_intro + + html = f""" + + + + + + UNDP Futures - Weekly Signal Digest + + + +
+

UNDP Futures - Weekly Signal Digest

+

Stay updated with the latest signals from around the world

+
+ + {intro} + +
+ """ + + # Add each signal to the HTML + for signal in signals_list: + keywords_html = "" + if signal.keywords: + keywords_html = " ".join([f'{k}' for k in signal.keywords]) + + location_text = signal.location or "Global" + + html += f""" +
+

{signal.headline}

+
+ Location: {location_text} + {f'β€’ Source: View Source' if signal.url else ''} +
+

{signal.description}

+
+ {keywords_html} +
+
+ """ + + html += """ +
+ + + + + """ + + return html + + async def generate_and_send_digest(self, + recipients: List[str], + days: int = 7, + subject: Optional[str] = None, + custom_intro: Optional[str] = None, + status: Optional[List[Status]] = None, + limit: Optional[int] = None, + useUserAccessToken: bool = False) -> bool: + """ + Generate and send a weekly digest email to specified recipients. + + Parameters + ---------- + recipients : List[str] + List of email addresses to send the digest to. + days : int, optional + Number of days to look back for signals, defaults to 7. + subject : Optional[str], optional + Custom email subject, defaults to standard subject with date. + custom_intro : Optional[str], optional + Custom introduction text for the email. + status : Optional[List[Status]], optional + List of signal statuses to filter by. + limit : Optional[int], optional + Maximum number of signals to include. + useUserAccessToken : bool, optional + Whether to use user access token for email sending. + + Returns + ------- + bool + True if the email was sent successfully, False otherwise. 
+ """ + if not recipients: + logger.error("No recipients specified for weekly digest") + return False + logger.info(f"Generating weekly digest email for {len(recipients)} recipients") + step_start = datetime.datetime.now() + logger.info("Fetching recent signals for digest...") + signals_list = await self.get_recent_signals(days=days, status=status, limit=limit) + logger.info(f"Fetched {len(signals_list)} signals for digest.") + logger.info(f"Signal fetch step took {(datetime.datetime.now() - step_start).total_seconds():.2f} seconds.") + if not signals_list: + logger.warning("No signals found for digest, skipping email send") + return False + logger.info("Generating HTML content for digest email...") + html_start = datetime.datetime.now() + html_content = self.generate_email_html(signals_list, custom_intro) + logger.info(f"HTML generation took {(datetime.datetime.now() - html_start).total_seconds():.2f} seconds.") + today = datetime.datetime.now().strftime("%Y-%m-%d") + email_subject = subject or f"UNDP Futures Weekly Digest - {today}" + from .email_factory import create_email_service + logger.info("Creating email service...") + email_service = create_email_service(useUserAccessToken=useUserAccessToken) + logger.info(f"Sending weekly digest email to {recipients} with subject {email_subject}") + send_start = datetime.datetime.now() + try: + success = await email_service.send_email( + to_emails=recipients, + subject=email_subject, + content=html_content, + content_type="text/html", + useUserAccessToken=useUserAccessToken + ) + logger.info(f"Email send step took {(datetime.datetime.now() - send_start).total_seconds():.2f} seconds.") + if success: + logger.info(f"Weekly digest email sent successfully to {len(recipients)} recipients") + else: + logger.error("Failed to send weekly digest email") + return success + except Exception as e: + logger.error(f"Error sending weekly digest email: {e}", exc_info=True) + return False \ No newline at end of file diff --git 
a/test_mail_send.py b/test_mail_send.py new file mode 100644 index 0000000..19c903a --- /dev/null +++ b/test_mail_send.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python +""" +Test script to verify the email service with Mail.Send permission. +""" + +import asyncio +import os +import sys +import logging +import subprocess +import json + +# Add the project root to the Python path +sys.path.insert(0, os.path.abspath(os.path.dirname(__file__))) + +from src.services.email_factory import create_email_service + +# Configure logging +logging.basicConfig( + level=logging.INFO, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" +) + +logger = logging.getLogger(__name__) + +def check_azure_cli_auth(): + """Check Azure CLI authentication and ensure correct scopes are set.""" + try: + # Check if Azure CLI is logged in + process = subprocess.run( + ["az", "account", "show"], + capture_output=True, + text=True, + check=False + ) + + if process.returncode != 0: + print("Azure CLI is not authenticated. Logging in...") + subprocess.run( + ["az", "login", "--scope", "https://graph.microsoft.com/.default"], + check=True + ) + else: + # Check if we need to set the correct scope + print("Azure CLI is authenticated. 
Ensuring correct scope is set...") + subprocess.run( + ["az", "account", "get-access-token", "--scope", "https://graph.microsoft.com/.default"], + check=True + ) + + print("βœ… Azure CLI authentication complete with correct scope") + return True + except Exception as e: + print(f"❌ Failed to authenticate with Azure CLI: {str(e)}") + return False + +async def test_mail_send(): + """Test the email service with Mail.Send permission.""" + + # First check Azure CLI authentication + if not check_azure_cli_auth(): + return + + # Create the email service using the factory + email_service = create_email_service() + + # Define test parameters + recipient_email = "andrew.maguire@undp.org" + subject = "Test Email - UNDP Future Trends and Signals System" + content = """ + This is a test email sent from the UNDP Future Trends and Signals System. + It verifies that the Mail.Send permission is working correctly. + + If you received this email, the Mail.Send permission is properly configured. + """ + + # Send the test email + print(f"Sending test email to {recipient_email}...") + result = await email_service.send_email( + to_emails=[recipient_email], + subject=subject, + content=content, + content_type="text/plain" + ) + + # Check the result + if result: + print("βœ… Test email sent successfully!") + print("The Mail.Send permission is working correctly.") + else: + print("❌ Failed to send test email.") + print("Check logs for more details.") + +if __name__ == "__main__": + asyncio.run(test_mail_send()) \ No newline at end of file diff --git a/tests/test_email.py b/tests/test_email.py new file mode 100644 index 0000000..6a68146 --- /dev/null +++ b/tests/test_email.py @@ -0,0 +1,94 @@ +""" +Tests for email services. 
+""" + +import os +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from fastapi.testclient import TestClient + +from main import app +from src.services.msgraph_service import MSGraphEmailService + +client = TestClient(app) + +@pytest.fixture +def mock_msgraph_client(): + with patch('src.services.msgraph_service.GraphClient') as mock: + mock_instance = MagicMock() + mock_response = MagicMock() + mock_response.status_code = 202 + mock_instance.post = AsyncMock(return_value=mock_response) + mock.return_value = mock_instance + yield mock_instance + + +@pytest.mark.asyncio +async def test_msgraph_send_email(mock_msgraph_client): + """Test sending email via Microsoft Graph API""" + # Setup + service = MSGraphEmailService() + + # Test + result = await service.send_email( + to_emails=["test@example.com"], + subject="Test Subject", + content="Test Content" + ) + + # Assert + assert result is True + mock_msgraph_client.post.assert_called_once() + call_args = mock_msgraph_client.post.call_args[0] + assert call_args[0] == "/me/sendMail" + +@pytest.mark.asyncio +async def test_msgraph_send_notification(mock_msgraph_client): + """Test sending notification via Microsoft Graph API""" + # Setup + service = MSGraphEmailService() + + # Test + result = await service.send_notification_email( + to_email="test@example.com", + subject="Test Notification", + template_id="test-template", + dynamic_data={"name": "Test User"} + ) + + # Assert + assert result is True + mock_msgraph_client.post.assert_called_once() + call_args = mock_msgraph_client.post.call_args[0] + assert call_args[0] == "/me/sendMail" + + + +@pytest.mark.skip(reason="Requires database connection") +def test_email_endpoints(headers: dict): + """Test email endpoints with authentication""" + # Test send email endpoint + response = client.post( + "/email/send", + json={ + "to_emails": ["test@example.com"], + "subject": "Test Subject", + "content": "Test Content" + }, + headers=headers + ) + assert 
response.status_code in [200, 403] # 200 if admin, 403 if not admin + + # Test notification endpoint + response = client.post( + "/email/notify", + json={ + "to_email": "test@example.com", + "subject": "Test Notification", + "template_id": "test-template", + "dynamic_data": {"name": "Test User"} + }, + headers=headers + ) + assert response.status_code in [200, 403] # 200 if admin, 403 if not admin