
Commit 69e0302

refactor: remove adaptive rate limiting feature
Remove adaptive rate limiting since the GitHub Copilot API does not provide rate limit headers. The API only returns x-quota-snapshot-* headers, which track quota usage rather than rate limits, and overage is permitted freely.

Removed:
- src/lib/rate-limit-parser.ts
- tests/rate-limit-parser.test.ts
- onHeaders callback from createChatCompletions
- Rate limit header parsing logic from handlers

The opt-in request queue remains functional for users who want to set a fixed rate limit via the --rate-limit flag.

Signed-off-by: leocavalcante <[email protected]>
1 parent 19deaf4 commit 69e0302
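
The queue that remains is a fixed-rate throttle rather than an adaptive one. A minimal sketch of that idea is below; the class and method names are illustrative assumptions, not the project's actual RequestQueue implementation.

// Hypothetical fixed-interval request queue (illustrative only;
// names are assumptions, not the project's RequestQueue).
class FixedRateQueue {
  private delayMs: number
  private chain: Promise<unknown> = Promise.resolve()

  constructor(requestsPerSecond: number) {
    this.delayMs = 1000 / requestsPerSecond
  }

  // Serialize tasks and wait a fixed delay before each one starts.
  run<T>(task: () => Promise<T>): Promise<T> {
    const result = this.chain
      .catch(() => undefined) // a failed task must not stall the queue
      .then(() => new Promise((resolve) => setTimeout(resolve, this.delayMs)))
      .then(task)
    this.chain = result
    return result
  }
}

// Usage: at most roughly one upstream request per second.
const queue = new FixedRateQueue(1)
const response = await queue.run(() => fetch("https://example.com/chat/completions"))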


5 files changed (+2, -371 lines)


src/lib/rate-limit-parser.ts

Lines changed: 0 additions & 145 deletions
This file was deleted.
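
The deleted module's contents are not shown in this view. As a rough, hypothetical sketch of what a parser of this shape typically looks like — the header names and delay formula below are assumptions, not the removed module's actual code:

// Hypothetical sketch — not the removed src/lib/rate-limit-parser.ts.
interface RateLimitInfo {
  remaining?: number // requests left in the current window
  resetSeconds?: number // seconds until the window resets
}

// Read conventional x-ratelimit-* headers, if the upstream API sends them.
export function parseRateLimitHeaders(headers: Headers): RateLimitInfo {
  const remaining = headers.get("x-ratelimit-remaining")
  const reset = headers.get("x-ratelimit-reset")
  return {
    remaining: remaining !== null ? Number(remaining) : undefined,
    resetSeconds: reset !== null ? Number(reset) : undefined,
  }
}

// Spread the remaining requests evenly over the rest of the window.
export function calculateOptimalDelay(info: RateLimitInfo): number | undefined {
  if (info.remaining === undefined || info.resetSeconds === undefined) return undefined
  if (info.remaining <= 0) return info.resetSeconds * 1000
  return (info.resetSeconds * 1000) / info.remaining
}

Because the Copilot API never sends such headers, a parser like this would always come back empty, which is the stated reason for removing the feature.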

src/routes/chat-completions/handler.ts

Lines changed: 1 addition & 15 deletions
@@ -5,11 +5,6 @@ import { streamSSE, type SSEMessage } from "hono/streaming"
 
 import { awaitApproval } from "~/lib/approval"
 import { executeWithRateLimit } from "~/lib/rate-limit"
-import {
-  calculateOptimalDelay,
-  logRateLimitInfo,
-  parseRateLimitHeaders,
-} from "~/lib/rate-limit-parser"
 import { state } from "~/lib/state"
 import { getTokenCount } from "~/lib/tokenizer"
 import { isNullish } from "~/lib/utils"
@@ -51,16 +46,7 @@ export async function handleCompletion(c: Context)
     consola.debug("Set max_tokens to:", JSON.stringify(payload.max_tokens))
   }
 
-  const response = await createChatCompletions(payload, (headers) => {
-    // Parse rate limit headers and update queue if applicable
-    const rateLimitInfo = parseRateLimitHeaders(headers)
-    logRateLimitInfo(rateLimitInfo)
-
-    const optimalDelay = calculateOptimalDelay(rateLimitInfo)
-    if (optimalDelay !== undefined) {
-      state.requestQueue.updateRateLimit(optimalDelay)
-    }
-  })
+  const response = await createChatCompletions(payload)
 
   if (isNonStreaming(response)) {
     consola.debug("Non-streaming response:", JSON.stringify(response))

src/routes/messages/handler.ts

Lines changed: 1 addition & 15 deletions
@@ -5,11 +5,6 @@ import { streamSSE } from "hono/streaming"
 
 import { awaitApproval } from "~/lib/approval"
 import { executeWithRateLimit } from "~/lib/rate-limit"
-import {
-  calculateOptimalDelay,
-  logRateLimitInfo,
-  parseRateLimitHeaders,
-} from "~/lib/rate-limit-parser"
 import { state } from "~/lib/state"
 import {
   createChatCompletions,
@@ -45,16 +40,7 @@ export async function handleCompletion(c: Context)
     await awaitApproval()
   }
 
-  const response = await createChatCompletions(openAIPayload, (headers) => {
-    // Parse rate limit headers and update queue if applicable
-    const rateLimitInfo = parseRateLimitHeaders(headers)
-    logRateLimitInfo(rateLimitInfo)
-
-    const optimalDelay = calculateOptimalDelay(rateLimitInfo)
-    if (optimalDelay !== undefined) {
-      state.requestQueue.updateRateLimit(optimalDelay)
-    }
-  })
+  const response = await createChatCompletions(openAIPayload)
 
   if (isNonStreaming(response)) {
     consola.debug(

src/services/copilot/create-chat-completions.ts

Lines changed: 0 additions & 6 deletions
@@ -7,7 +7,6 @@ import { state } from "~/lib/state"
 
 export const createChatCompletions = async (
   payload: ChatCompletionsPayload,
-  onHeaders?: (headers: Headers) => void,
 ) => {
   if (!state.copilotToken) throw new Error("Copilot token not found")
 
@@ -40,11 +39,6 @@
     throw new HTTPError("Failed to create chat completions", response)
   }
 
-  // Call the headers callback if provided
-  if (onHeaders) {
-    onHeaders(response.headers)
-  }
-
   if (payload.stream) {
     return events(response)
   }
