Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
65 changes: 58 additions & 7 deletions containers/api-proxy/server.js
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,54 @@ const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
const ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY;
const COPILOT_GITHUB_TOKEN = process.env.COPILOT_GITHUB_TOKEN;

/**
 * Parse an API target value that may be a raw hostname, host:port, or full URL.
 *
 * @param {string} envVar - Name of the env var being parsed (used in warning logs).
 * @param {string|undefined} rawValue - Raw target value; may be empty or undefined.
 * @param {string} defaultHostname - Hostname to fall back to on invalid input.
 * @returns {{hostname: string, port: (string|undefined)}} Parsed target; `port`
 *   is undefined when no port was explicitly specified.
 *
 * Invalid values (unparseable URL, bad port) log a warning and fall back to the
 * default hostname. For full URLs, any path component (e.g. "/v1") is discarded —
 * only the hostname and port are used.
 */
function parseApiTarget(envVar, rawValue, defaultHostname) {
  if (!rawValue) return { hostname: defaultHostname, port: undefined };

  // Full URL form (has a scheme): delegate to the WHATWG URL parser.
  if (rawValue.includes('://')) {
    try {
      const parsed = new URL(rawValue);
      // parsed.port is '' when the URL carries no explicit port — normalize to undefined.
      return { hostname: parsed.hostname, port: parsed.port || undefined };
    } catch {
      logRequest('warn', 'startup', { message: `${envVar}: invalid URL "${rawValue}", falling back to ${defaultHostname}` });
      return { hostname: defaultHostname, port: undefined };
    }
  }

  // host:port form — only when there is exactly one colon, so a bare IPv6
  // literal (e.g. "::1") falls through to the plain-hostname case instead of
  // being split into a bogus hostname/port pair.
  const colonIdx = rawValue.lastIndexOf(':');
  if (colonIdx > 0 && !rawValue.slice(0, colonIdx).includes(':')) {
    const hostname = rawValue.slice(0, colonIdx);
    const portStr = rawValue.slice(colonIdx + 1);
    // Require an all-digit port: parseInt alone would silently truncate
    // junk like "80abc" to 80 and accept it.
    const port = /^\d+$/.test(portStr) ? parseInt(portStr, 10) : NaN;
    if (Number.isNaN(port) || port < 1 || port > 65535) {
      logRequest('warn', 'startup', { message: `${envVar}: invalid port in "${rawValue}", falling back to ${defaultHostname}` });
      return { hostname: defaultHostname, port: undefined };
    }
    return { hostname, port: String(port) };
  }

  // Plain hostname (or bare IPv6 literal)
  return { hostname: rawValue, port: undefined };
}

// Configurable OpenAI API target (supports internal LLM routers / Azure OpenAI)
// Priority: OPENAI_API_TARGET env var > default. Accepts hostname, host:port, or full URL.
const _openaiTarget = parseApiTarget('OPENAI_API_TARGET', process.env.OPENAI_API_TARGET, 'api.openai.com');
const OPENAI_API_TARGET = _openaiTarget.hostname;
const OPENAI_API_PORT = _openaiTarget.port;

// Configurable Anthropic API target (supports internal LLM routers)
// Priority: ANTHROPIC_API_TARGET env var > default. Accepts hostname, host:port, or full URL.
const _anthropicTarget = parseApiTarget('ANTHROPIC_API_TARGET', process.env.ANTHROPIC_API_TARGET, 'api.anthropic.com');
const ANTHROPIC_API_TARGET = _anthropicTarget.hostname;
const ANTHROPIC_API_PORT = _anthropicTarget.port;

// Configurable Copilot API target host (supports GHES/GHEC / custom endpoints)
// Priority: COPILOT_API_TARGET env var > auto-derive from GITHUB_SERVER_URL > default
function deriveCopilotApiTarget() {
Expand Down Expand Up @@ -76,6 +124,8 @@ const HTTPS_PROXY = process.env.HTTPS_PROXY || process.env.HTTP_PROXY;
logRequest('info', 'startup', {
message: 'Starting AWF API proxy sidecar',
squid_proxy: HTTPS_PROXY || 'not configured',
openai_api_target: OPENAI_API_PORT ? `${OPENAI_API_TARGET}:${OPENAI_API_PORT}` : OPENAI_API_TARGET,
anthropic_api_target: ANTHROPIC_API_PORT ? `${ANTHROPIC_API_TARGET}:${ANTHROPIC_API_PORT}` : ANTHROPIC_API_TARGET,
copilot_api_target: COPILOT_API_TARGET,
providers: {
openai: !!OPENAI_API_KEY,
Expand Down Expand Up @@ -145,7 +195,7 @@ function isValidRequestId(id) {
return typeof id === 'string' && id.length <= 128 && /^[\w\-\.]+$/.test(id);
}

function proxyRequest(req, res, targetHost, injectHeaders, provider) {
function proxyRequest(req, res, targetHost, injectHeaders, provider, targetPort) {
const clientRequestId = req.headers['x-request-id'];
const requestId = isValidRequestId(clientRequestId) ? clientRequestId : generateRequestId();
const startTime = Date.now();
Expand Down Expand Up @@ -183,8 +233,9 @@ function proxyRequest(req, res, targetHost, injectHeaders, provider) {
return;
}

// Build target URL
const targetUrl = new URL(req.url, `https://${targetHost}`);
// Build target URL — include port if explicitly specified (e.g. for host:port targets)
const targetBase = targetPort ? `https://${targetHost}:${targetPort}` : `https://${targetHost}`;
const targetUrl = new URL(req.url, targetBase);

// Handle client-side errors (e.g. aborted connections)
req.on('error', (err) => {
Expand Down Expand Up @@ -397,9 +448,9 @@ if (OPENAI_API_KEY) {
const contentLength = parseInt(req.headers['content-length'], 10) || 0;
if (checkRateLimit(req, res, 'openai', contentLength)) return;

proxyRequest(req, res, 'api.openai.com', {
proxyRequest(req, res, OPENAI_API_TARGET, {
'Authorization': `Bearer ${OPENAI_API_KEY}`,
}, 'openai');
}, 'openai', OPENAI_API_PORT);
});

server.listen(HEALTH_PORT, '0.0.0.0', () => {
Expand Down Expand Up @@ -436,7 +487,7 @@ if (ANTHROPIC_API_KEY) {
if (!req.headers['anthropic-version']) {
anthropicHeaders['anthropic-version'] = '2023-06-01';
}
proxyRequest(req, res, 'api.anthropic.com', anthropicHeaders, 'anthropic');
proxyRequest(req, res, ANTHROPIC_API_TARGET, anthropicHeaders, 'anthropic', ANTHROPIC_API_PORT);
});

server.listen(10001, '0.0.0.0', () => {
Expand Down Expand Up @@ -488,7 +539,7 @@ if (ANTHROPIC_API_KEY) {
if (!req.headers['anthropic-version']) {
anthropicHeaders['anthropic-version'] = '2023-06-01';
}
proxyRequest(req, res, 'api.anthropic.com', anthropicHeaders);
proxyRequest(req, res, ANTHROPIC_API_TARGET, anthropicHeaders, 'opencode', ANTHROPIC_API_PORT);
});

opencodeServer.listen(10004, '0.0.0.0', () => {
Expand Down
39 changes: 37 additions & 2 deletions docs/api-proxy-sidecar.md
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,39 @@ sudo awf --enable-api-proxy \
-- your-multi-llm-tool
```

### Custom/internal LLM endpoints

Use `--openai-api-target` or `--anthropic-api-target` to route requests to a custom endpoint (e.g., an internal LLM router, Azure OpenAI, or any OpenAI/Anthropic-compatible API) instead of the public defaults.

```bash
# Route OpenAI/Codex requests to an internal LLM router
export OPENAI_API_KEY="your-internal-key"

sudo awf --enable-api-proxy \
--openai-api-target llm-router.internal.example.com \
--allow-domains llm-router.internal.example.com \
-- npx @openai/codex exec "do something"
```

```bash
# Route Anthropic/Claude requests to an internal LLM router
export ANTHROPIC_API_KEY="your-internal-key"

sudo awf --enable-api-proxy \
--anthropic-api-target llm-router.internal.example.com \
--allow-domains llm-router.internal.example.com \
-- claude-code "do something"
```

The target value accepts:
- A plain hostname: `llm-router.internal.example.com`
- A `host:port` pair: `llm-router.internal.example.com:8443`
- A full URL (scheme + host, optionally a port): `https://llm-router.internal.example.com:8443` — note that any path component (such as `/v1`) is ignored; only the hostname and port are used.

Both flags can also be set via environment variables:
- `OPENAI_API_TARGET` — equivalent to `--openai-api-target`
- `ANTHROPIC_API_TARGET` — equivalent to `--anthropic-api-target`

## Environment variables

AWF manages environment variables differently across the three containers (squid, api-proxy, agent) to ensure secure credential isolation.
Expand All @@ -123,6 +156,9 @@ The API proxy sidecar receives **real credentials** and routing configuration:
| `OPENAI_API_KEY` | Real API key | `--enable-api-proxy` and env set | OpenAI API key (injected into requests) |
| `ANTHROPIC_API_KEY` | Real API key | `--enable-api-proxy` and env set | Anthropic API key (injected into requests) |
| `COPILOT_GITHUB_TOKEN` | Real token | `--enable-api-proxy` and env set | GitHub Copilot token (injected into requests) |
| `OPENAI_API_TARGET` | Hostname, host:port, or full URL | `--openai-api-target` or env set | Custom upstream for OpenAI requests (default: `api.openai.com`) |
| `ANTHROPIC_API_TARGET` | Hostname, host:port, or full URL | `--anthropic-api-target` or env set | Custom upstream for Anthropic requests (default: `api.anthropic.com`) |
| `COPILOT_API_TARGET` | Hostname | `--copilot-api-target` or env set | Custom upstream for Copilot requests (default: `api.githubcopilot.com`) |
| `HTTP_PROXY` | `http://172.30.0.10:3128` | Always | Routes through Squid for domain filtering |
| `HTTPS_PROXY` | `http://172.30.0.10:3128` | Always | Routes through Squid for domain filtering |

Expand Down Expand Up @@ -328,9 +364,8 @@ docker exec awf-squid cat /var/log/squid/access.log | grep DENIED

## Limitations

- Only supports OpenAI and Anthropic APIs
- Only supports OpenAI, Anthropic, and GitHub Copilot APIs
- Keys must be set as environment variables (not file-based)
- No support for Azure OpenAI endpoints
- No request/response logging (by design, for security)

## Related documentation
Expand Down
16 changes: 16 additions & 0 deletions src/cli.ts
Original file line number Diff line number Diff line change
Expand Up @@ -847,6 +847,20 @@ program
' Defaults to api.githubcopilot.com. Useful for GHES deployments.\n' +
' Can also be set via COPILOT_API_TARGET env var.',
)
.option(
'--openai-api-target <host>',
'Target hostname for OpenAI API requests in the api-proxy sidecar.\n' +
' Defaults to api.openai.com. Useful for internal LLM routers\n' +
' or Azure OpenAI / OpenAI-compatible endpoints.\n' +
' Can also be set via OPENAI_API_TARGET env var.',
)
.option(
'--anthropic-api-target <host>',
'Target hostname for Anthropic API requests in the api-proxy sidecar.\n' +
' Defaults to api.anthropic.com. Useful for internal LLM routers\n' +
' or Anthropic-compatible endpoints.\n' +
' Can also be set via ANTHROPIC_API_TARGET env var.',
)
.option(
'--rate-limit-rpm <n>',
'Enable rate limiting: max requests per minute per provider (requires --enable-api-proxy)',
Expand Down Expand Up @@ -1136,6 +1150,8 @@ program
anthropicApiKey: process.env.ANTHROPIC_API_KEY,
copilotGithubToken: process.env.COPILOT_GITHUB_TOKEN,
copilotApiTarget: options.copilotApiTarget || process.env.COPILOT_API_TARGET,
openaiApiTarget: options.openaiApiTarget || process.env.OPENAI_API_TARGET,
anthropicApiTarget: options.anthropicApiTarget || process.env.ANTHROPIC_API_TARGET,
};

// Build rate limit config when API proxy is enabled
Expand Down
48 changes: 48 additions & 0 deletions src/docker-manager.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1893,6 +1893,54 @@ describe('docker-manager', () => {
expect(env.AWF_RATE_LIMIT_RPH).toBeUndefined();
expect(env.AWF_RATE_LIMIT_BYTES_PM).toBeUndefined();
});

it('should pass OPENAI_API_TARGET to api-proxy when openaiApiTarget is set', () => {
const configWithTarget = { ...mockConfig, enableApiProxy: true, openaiApiKey: 'sk-test-key', openaiApiTarget: 'llm-router.internal.example.com' };
const result = generateDockerCompose(configWithTarget, mockNetworkConfigWithProxy);
const proxy = result.services['api-proxy'];
const env = proxy.environment as Record<string, string>;
expect(env.OPENAI_API_TARGET).toBe('llm-router.internal.example.com');
});

it('should not pass OPENAI_API_TARGET to api-proxy when openaiApiTarget is not set', () => {
const configWithProxy = { ...mockConfig, enableApiProxy: true, openaiApiKey: 'sk-test-key' };
const result = generateDockerCompose(configWithProxy, mockNetworkConfigWithProxy);
const proxy = result.services['api-proxy'];
const env = proxy.environment as Record<string, string>;
expect(env.OPENAI_API_TARGET).toBeUndefined();
});

it('should pass ANTHROPIC_API_TARGET to api-proxy when anthropicApiTarget is set', () => {
const configWithTarget = { ...mockConfig, enableApiProxy: true, anthropicApiKey: 'sk-ant-test-key', anthropicApiTarget: 'llm-router.internal.example.com' };
const result = generateDockerCompose(configWithTarget, mockNetworkConfigWithProxy);
const proxy = result.services['api-proxy'];
const env = proxy.environment as Record<string, string>;
expect(env.ANTHROPIC_API_TARGET).toBe('llm-router.internal.example.com');
});

it('should not pass ANTHROPIC_API_TARGET to api-proxy when anthropicApiTarget is not set', () => {
const configWithProxy = { ...mockConfig, enableApiProxy: true, anthropicApiKey: 'sk-ant-test-key' };
const result = generateDockerCompose(configWithProxy, mockNetworkConfigWithProxy);
const proxy = result.services['api-proxy'];
const env = proxy.environment as Record<string, string>;
expect(env.ANTHROPIC_API_TARGET).toBeUndefined();
});

it('should pass both OPENAI_API_TARGET and ANTHROPIC_API_TARGET when both are set', () => {
const configWithTargets = {
...mockConfig,
enableApiProxy: true,
openaiApiKey: 'sk-test-key',
anthropicApiKey: 'sk-ant-test-key',
openaiApiTarget: 'openai-router.internal.example.com',
anthropicApiTarget: 'anthropic-router.internal.example.com',
};
const result = generateDockerCompose(configWithTargets, mockNetworkConfigWithProxy);
const proxy = result.services['api-proxy'];
const env = proxy.environment as Record<string, string>;
expect(env.OPENAI_API_TARGET).toBe('openai-router.internal.example.com');
expect(env.ANTHROPIC_API_TARGET).toBe('anthropic-router.internal.example.com');
});
});
});

Expand Down
10 changes: 10 additions & 0 deletions src/docker-manager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1008,6 +1008,10 @@ export function generateDockerCompose(
...(config.copilotGithubToken && { COPILOT_GITHUB_TOKEN: config.copilotGithubToken }),
// Configurable Copilot API target (for GHES/GHEC support)
...(config.copilotApiTarget && { COPILOT_API_TARGET: config.copilotApiTarget }),
// Configurable OpenAI API target (for internal LLM routers / Azure OpenAI)
...(config.openaiApiTarget && { OPENAI_API_TARGET: config.openaiApiTarget }),
// Configurable Anthropic API target (for internal LLM routers)
...(config.anthropicApiTarget && { ANTHROPIC_API_TARGET: config.anthropicApiTarget }),
// Forward GITHUB_SERVER_URL so api-proxy can auto-derive enterprise endpoints
...(process.env.GITHUB_SERVER_URL && { GITHUB_SERVER_URL: process.env.GITHUB_SERVER_URL }),
// Route through Squid to respect domain whitelisting
Expand Down Expand Up @@ -1065,10 +1069,16 @@ export function generateDockerCompose(
if (config.openaiApiKey) {
environment.OPENAI_BASE_URL = `http://${networkConfig.proxyIp}:${API_PROXY_PORTS.OPENAI}/v1`;
logger.debug(`OpenAI API will be proxied through sidecar at http://${networkConfig.proxyIp}:${API_PROXY_PORTS.OPENAI}/v1`);
if (config.openaiApiTarget) {
logger.debug(`OpenAI API target overridden to: ${config.openaiApiTarget}`);
}
}
if (config.anthropicApiKey) {
environment.ANTHROPIC_BASE_URL = `http://${networkConfig.proxyIp}:${API_PROXY_PORTS.ANTHROPIC}`;
logger.debug(`Anthropic API will be proxied through sidecar at http://${networkConfig.proxyIp}:${API_PROXY_PORTS.ANTHROPIC}`);
if (config.anthropicApiTarget) {
logger.debug(`Anthropic API target overridden to: ${config.anthropicApiTarget}`);
}

// Set placeholder token for Claude Code CLI compatibility
// Real authentication happens via ANTHROPIC_BASE_URL pointing to api-proxy
Expand Down
43 changes: 43 additions & 0 deletions src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -506,6 +506,49 @@ export interface WrapperConfig {
* ```
*/
copilotApiTarget?: string;

/**
* Target hostname for OpenAI API requests (used by API proxy sidecar)
*
* When enableApiProxy is true, this hostname is passed to the Node.js sidecar
* as `OPENAI_API_TARGET`. The proxy will forward OpenAI API requests to this host
* instead of the default `api.openai.com`.
*
* Useful for internal LLM routers, Azure OpenAI endpoints, or any
* OpenAI-compatible self-hosted API.
*
* Can be set via:
* - CLI flag: `--openai-api-target <host>`
* - Environment variable: `OPENAI_API_TARGET`
*
* @default 'api.openai.com'
* @example
* ```bash
* awf --enable-api-proxy --openai-api-target llm-router.internal.example.com -- command
* ```
*/
openaiApiTarget?: string;

/**
* Target hostname for Anthropic API requests (used by API proxy sidecar)
*
* When enableApiProxy is true, this hostname is passed to the Node.js sidecar
* as `ANTHROPIC_API_TARGET`. The proxy will forward Anthropic API requests to this host
* instead of the default `api.anthropic.com`.
*
* Useful for internal LLM routers or any Anthropic-compatible self-hosted API.
*
* Can be set via:
* - CLI flag: `--anthropic-api-target <host>`
* - Environment variable: `ANTHROPIC_API_TARGET`
*
* @default 'api.anthropic.com'
* @example
* ```bash
* awf --enable-api-proxy --anthropic-api-target llm-router.internal.example.com -- command
* ```
*/
anthropicApiTarget?: string;
}

/**
Expand Down
Loading