diff --git a/package.json b/package.json index 17a78b01..02362d37 100644 --- a/package.json +++ b/package.json @@ -96,6 +96,7 @@ "lint:fix": "npx eslint . --fix", "coverage": "vitest run --coverage", "generate": "curl https://raw.githubusercontent.com/google-a2a/A2A/refs/heads/main/specification/json/a2a.json > spec.json && node scripts/generateTypes.js && rm spec.json", + "generate-grpc-types": "cd ./src/grpc && curl -o ./a2a.proto https://raw.githubusercontent.com/a2aproject/A2A/v0.3.0/specification/grpc/a2a.proto && buf generate && rm ./a2a.proto", "test-build": "esbuild ./dist/client/index.js ./dist/server/index.js ./dist/index.js --bundle --platform=neutral --outdir=dist/tmp-checks --outbase=./dist" }, "dependencies": { diff --git a/src/client/transports/grpc_transport.ts b/src/client/transports/grpc_transport.ts new file mode 100644 index 00000000..1b567ac4 --- /dev/null +++ b/src/client/transports/grpc_transport.ts @@ -0,0 +1,354 @@ +import { + CallOptions, + credentials, + ServiceError, + Metadata, + ClientUnaryCall, + ClientReadableStream, + ChannelCredentials, +} from '@grpc/grpc-js'; +import { TransportProtocolName } from '../../core.js'; +import { A2AServiceClient } from '../../grpc/a2a.js'; +import { + MessageSendParams, + TaskPushNotificationConfig, + TaskIdParams, + ListTaskPushNotificationConfigParams, + DeleteTaskPushNotificationConfigParams, + TaskQueryParams, + Task, + AgentCard, + GetTaskPushNotificationConfigParams, +} from '../../types.js'; +import { A2AStreamEventData, SendMessageResult } from '../client.js'; +import { RequestOptions } from '../multitransport-client.js'; +import { Transport, TransportFactory } from './transport.js'; +import { ToProto } from '../../grpc/utils/to_proto.js'; +import { FromProto } from '../../grpc/utils/from_proto.js'; +import { + A2A_ERROR_CODE, + AuthenticatedExtendedCardNotConfiguredError, + ContentTypeNotSupportedError, + InvalidAgentResponseError, + PushNotificationNotSupportedError, + TaskNotFoundError, + 
TaskNotCancelableError, + UnsupportedOperationError, +} from '../../errors.js'; + +type GrpcUnaryCall = ( + request: TReq, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: TRes) => void +) => ClientUnaryCall; + +type GrpcStreamCall = ( + request: TReq, + metadata?: Metadata, + options?: Partial +) => ClientReadableStream; + +export interface GrpcTransportOptions { + endpoint: string; + grpcClient?: A2AServiceClient; + grpcChannelCredentials?: ChannelCredentials; + grpcCallOptions?: Partial; +} + +export class GrpcTransport implements Transport { + private readonly grpcCallOptions?: Partial; + private readonly grpcClient: A2AServiceClient; + + constructor(options: GrpcTransportOptions) { + this.grpcCallOptions = options.grpcCallOptions; + this.grpcClient = + options.grpcClient ?? + new A2AServiceClient( + options.endpoint, + options.grpcChannelCredentials ?? credentials.createInsecure() + ); + } + + async getExtendedAgentCard(options?: RequestOptions): Promise { + const rpcResponse = await this._sendGrpcRequest( + 'getAgentCard', + undefined, + options, + this.grpcClient.getAgentCard.bind(this.grpcClient), + () => {}, + FromProto.agentCard + ); + return rpcResponse; + } + + async sendMessage( + params: MessageSendParams, + options?: RequestOptions + ): Promise { + const rpcResponse = await this._sendGrpcRequest( + 'sendMessage', + params, + options, + this.grpcClient.sendMessage.bind(this.grpcClient), + ToProto.messageSendParams, + FromProto.sendMessageResult + ); + return rpcResponse; + } + + async *sendMessageStream( + params: MessageSendParams, + options?: RequestOptions + ): AsyncGenerator { + yield* this._sendGrpcStreamingRequest( + 'sendStreamingMessage', + params, + options, + this.grpcClient.sendStreamingMessage.bind(this.grpcClient), + ToProto.messageSendParams + ); + } + + async setTaskPushNotificationConfig( + params: TaskPushNotificationConfig, + options?: RequestOptions + ): Promise { + const rpcResponse = 
await this._sendGrpcRequest( + 'createTaskPushNotificationConfig', + params, + options, + this.grpcClient.createTaskPushNotificationConfig.bind(this.grpcClient), + ToProto.taskPushNotificationConfigCreate, + FromProto.getTaskPushNoticationConfig + ); + return rpcResponse; + } + + async getTaskPushNotificationConfig( + params: GetTaskPushNotificationConfigParams, + options?: RequestOptions + ): Promise { + const rpcResponse = await this._sendGrpcRequest( + 'getTaskPushNotificationConfig', + params, + options, + this.grpcClient.getTaskPushNotificationConfig.bind(this.grpcClient), + ToProto.getTaskPushNotificationConfigRequest, + FromProto.getTaskPushNoticationConfig + ); + return rpcResponse; + } + + async listTaskPushNotificationConfig( + params: ListTaskPushNotificationConfigParams, + options?: RequestOptions + ): Promise { + const rpcResponse = await this._sendGrpcRequest( + 'listTaskPushNotificationConfig', + params, + options, + this.grpcClient.listTaskPushNotificationConfig.bind(this.grpcClient), + ToProto.listTaskPushNotificationConfigRequest, + FromProto.listTaskPushNotificationConfig + ); + return rpcResponse; + } + + async deleteTaskPushNotificationConfig( + params: DeleteTaskPushNotificationConfigParams, + options?: RequestOptions + ): Promise { + await this._sendGrpcRequest( + 'deleteTaskPushNotificationConfig', + params, + options, + this.grpcClient.deleteTaskPushNotificationConfig.bind(this.grpcClient), + ToProto.deleteTaskPushNotificationConfigRequest, + () => {} + ); + } + + async getTask(params: TaskQueryParams, options?: RequestOptions): Promise { + const rpcResponse = await this._sendGrpcRequest( + 'getTask', + params, + options, + this.grpcClient.getTask.bind(this.grpcClient), + ToProto.getTaskRequest, + FromProto.task + ); + return rpcResponse; + } + + async cancelTask(params: TaskIdParams, options?: RequestOptions): Promise { + const rpcResponse = await this._sendGrpcRequest( + 'cancelTask', + params, + options, + 
this.grpcClient.cancelTask.bind(this.grpcClient), + ToProto.cancelTaskRequest, + FromProto.task + ); + return rpcResponse; + } + + async *resubscribeTask( + params: TaskIdParams, + options?: RequestOptions + ): AsyncGenerator { + yield* this._sendGrpcStreamingRequest( + 'taskSubscription', + params, + options, + this.grpcClient.taskSubscription.bind(this.grpcClient), + ToProto.taskIdParams + ); + } + + private async _sendGrpcRequest( + method: keyof A2AServiceClient, + params: TParams, + options: RequestOptions | undefined, + call: GrpcUnaryCall, + parser: (req: TParams) => TReq, + converter: (res: TRes) => TResponse + ): Promise { + return new Promise((resolve, reject) => { + call( + parser(params), + this._buildMetadata(options), + this.grpcCallOptions ?? {}, + (error, response) => { + if (error) { + if (this.isA2AServiceError(error)) { + return reject(GrpcTransport.mapToError(error)); + } + const statusInfo = 'code' in error ? `(Status: ${error.code})` : ''; + return reject( + new Error(`GRPC error for ${String(method)}! ${statusInfo} ${error.message}`, { + cause: error, + }) + ); + } + resolve(converter(response)); + } + ); + }); + } + + private async *_sendGrpcStreamingRequest( + method: 'sendStreamingMessage' | 'taskSubscription', + params: TParams, + options: RequestOptions | undefined, + call: GrpcStreamCall, + parser: (req: TParams) => TReq + ): AsyncGenerator { + const streamResponse = call( + parser(params), + this._buildMetadata(options), + this.grpcCallOptions ?? 
{} + ); + try { + for await (const response of streamResponse) { + const payload = response.payload; + switch (payload.$case) { + case 'msg': + yield FromProto.message(payload.value); + break; + case 'task': + yield FromProto.task(payload.value); + break; + case 'statusUpdate': + yield FromProto.taskStatusUpdate(payload.value); + break; + case 'artifactUpdate': + yield FromProto.taskArtifactUpdate(payload.value); + break; + } + } + } catch (error) { + if (this.isServiceError(error)) { + if (this.isA2AServiceError(error)) { + throw GrpcTransport.mapToError(error); + } + throw new Error(`GRPC error for ${String(method)}! ${error.code} ${error.message}`, { + cause: error, + }); + } else { + throw error; + } + } finally { + streamResponse.cancel(); + } + } + + private isA2AServiceError(error: ServiceError): boolean { + return ( + typeof error === 'object' && error !== null && error.metadata?.get('a2a-error').length === 1 + ); + } + + private isServiceError(error: unknown): error is ServiceError { + return typeof error === 'object' && error !== null && 'code' in error; + } + + private _buildMetadata(options?: RequestOptions): Metadata { + const metadata = new Metadata(); + if (options?.serviceParameters) { + for (const [key, value] of Object.entries(options.serviceParameters)) { + metadata.set(key, value); + } + } + return metadata; + } + + private static mapToError(error: ServiceError): Error { + const a2aErrorCode = error.metadata.get('a2a-error'); + switch (Number(a2aErrorCode[0])) { + case A2A_ERROR_CODE.TASK_NOT_FOUND: + return new TaskNotFoundError(error.message); + case A2A_ERROR_CODE.TASK_NOT_CANCELABLE: + return new TaskNotCancelableError(error.message); + case A2A_ERROR_CODE.PUSH_NOTIFICATION_NOT_SUPPORTED: + return new PushNotificationNotSupportedError(error.message); + case A2A_ERROR_CODE.UNSUPPORTED_OPERATION: + return new UnsupportedOperationError(error.message); + case A2A_ERROR_CODE.CONTENT_TYPE_NOT_SUPPORTED: + return new 
ContentTypeNotSupportedError(error.message); + case A2A_ERROR_CODE.INVALID_AGENT_RESPONSE: + return new InvalidAgentResponseError(error.message); + case A2A_ERROR_CODE.AUTHENTICATED_EXTENDED_CARD_NOT_CONFIGURED: + return new AuthenticatedExtendedCardNotConfiguredError(error.message); + default: + return new Error( + `GRPC error: ${error.message} Code: ${error.code} Details: ${error.details}` + ); + } + } +} + +export class GrpcTransportFactoryOptions { + grpcClient?: A2AServiceClient; + grpcChannelCredentials?: ChannelCredentials; + grpcCallOptions?: Partial; +} + +export class GrpcTransportFactory implements TransportFactory { + public static readonly name: TransportProtocolName = 'GRPC'; + + constructor(private readonly options?: GrpcTransportFactoryOptions) {} + + get protocolName(): string { + return GrpcTransportFactory.name; + } + + async create(url: string, _agentCard: AgentCard): Promise { + return new GrpcTransport({ + endpoint: url, + grpcClient: this.options?.grpcClient, + grpcChannelCredentials: this.options?.grpcChannelCredentials, + grpcCallOptions: this.options?.grpcCallOptions, + }); + } +} diff --git a/src/grpc/a2a.ts b/src/grpc/a2a.ts new file mode 100644 index 00000000..26711be5 --- /dev/null +++ b/src/grpc/a2a.ts @@ -0,0 +1,6545 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc unknown +// source: a2a.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; +import { + type CallOptions, + ChannelCredentials, + Client, + type ClientOptions, + type ClientReadableStream, + type ClientUnaryCall, + type handleServerStreamingCall, + type handleUnaryCall, + makeGenericClientConstructor, + Metadata, + type ServiceError, + type UntypedServiceImplementation, +} from "@grpc/grpc-js"; +import { Empty } from "./google/protobuf/empty.js"; +import { Struct } from "./google/protobuf/struct.js"; +import { Timestamp } from "./google/protobuf/timestamp.js"; + +export const protobufPackage = "a2a.v1"; + +/** Older protoc compilers don't understand edition yet. */ + +/** The set of states a Task can be in. */ +export enum TaskState { + TASK_STATE_UNSPECIFIED = 0, + /** TASK_STATE_SUBMITTED - Represents the status that acknowledges a task is created */ + TASK_STATE_SUBMITTED = 1, + /** TASK_STATE_WORKING - Represents the status that a task is actively being processed */ + TASK_STATE_WORKING = 2, + /** TASK_STATE_COMPLETED - Represents the status a task is finished. This is a terminal state */ + TASK_STATE_COMPLETED = 3, + /** TASK_STATE_FAILED - Represents the status a task is done but failed. This is a terminal state */ + TASK_STATE_FAILED = 4, + /** + * TASK_STATE_CANCELLED - Represents the status a task was cancelled before it finished. + * This is a terminal state. + */ + TASK_STATE_CANCELLED = 5, + /** + * TASK_STATE_INPUT_REQUIRED - Represents the status that the task requires information to complete. + * This is an interrupted state. + */ + TASK_STATE_INPUT_REQUIRED = 6, + /** + * TASK_STATE_REJECTED - Represents the status that the agent has decided to not perform the task. + * This may be done during initial task creation or later once an agent + * has determined it can't or won't proceed. This is a terminal state. 
+ */ + TASK_STATE_REJECTED = 7, + /** + * TASK_STATE_AUTH_REQUIRED - Represents the state that some authentication is needed from the upstream + * client. Authentication is expected to come out-of-band thus this is not + * an interrupted or terminal state. + */ + TASK_STATE_AUTH_REQUIRED = 8, + UNRECOGNIZED = -1, +} + +export function taskStateFromJSON(object: any): TaskState { + switch (object) { + case 0: + case "TASK_STATE_UNSPECIFIED": + return TaskState.TASK_STATE_UNSPECIFIED; + case 1: + case "TASK_STATE_SUBMITTED": + return TaskState.TASK_STATE_SUBMITTED; + case 2: + case "TASK_STATE_WORKING": + return TaskState.TASK_STATE_WORKING; + case 3: + case "TASK_STATE_COMPLETED": + return TaskState.TASK_STATE_COMPLETED; + case 4: + case "TASK_STATE_FAILED": + return TaskState.TASK_STATE_FAILED; + case 5: + case "TASK_STATE_CANCELLED": + return TaskState.TASK_STATE_CANCELLED; + case 6: + case "TASK_STATE_INPUT_REQUIRED": + return TaskState.TASK_STATE_INPUT_REQUIRED; + case 7: + case "TASK_STATE_REJECTED": + return TaskState.TASK_STATE_REJECTED; + case 8: + case "TASK_STATE_AUTH_REQUIRED": + return TaskState.TASK_STATE_AUTH_REQUIRED; + case -1: + case "UNRECOGNIZED": + default: + return TaskState.UNRECOGNIZED; + } +} + +export function taskStateToJSON(object: TaskState): string { + switch (object) { + case TaskState.TASK_STATE_UNSPECIFIED: + return "TASK_STATE_UNSPECIFIED"; + case TaskState.TASK_STATE_SUBMITTED: + return "TASK_STATE_SUBMITTED"; + case TaskState.TASK_STATE_WORKING: + return "TASK_STATE_WORKING"; + case TaskState.TASK_STATE_COMPLETED: + return "TASK_STATE_COMPLETED"; + case TaskState.TASK_STATE_FAILED: + return "TASK_STATE_FAILED"; + case TaskState.TASK_STATE_CANCELLED: + return "TASK_STATE_CANCELLED"; + case TaskState.TASK_STATE_INPUT_REQUIRED: + return "TASK_STATE_INPUT_REQUIRED"; + case TaskState.TASK_STATE_REJECTED: + return "TASK_STATE_REJECTED"; + case TaskState.TASK_STATE_AUTH_REQUIRED: + return "TASK_STATE_AUTH_REQUIRED"; + case 
TaskState.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum Role { + ROLE_UNSPECIFIED = 0, + /** ROLE_USER - USER role refers to communication from the client to the server. */ + ROLE_USER = 1, + /** ROLE_AGENT - AGENT role refers to communication from the server to the client. */ + ROLE_AGENT = 2, + UNRECOGNIZED = -1, +} + +export function roleFromJSON(object: any): Role { + switch (object) { + case 0: + case "ROLE_UNSPECIFIED": + return Role.ROLE_UNSPECIFIED; + case 1: + case "ROLE_USER": + return Role.ROLE_USER; + case 2: + case "ROLE_AGENT": + return Role.ROLE_AGENT; + case -1: + case "UNRECOGNIZED": + default: + return Role.UNRECOGNIZED; + } +} + +export function roleToJSON(object: Role): string { + switch (object) { + case Role.ROLE_UNSPECIFIED: + return "ROLE_UNSPECIFIED"; + case Role.ROLE_USER: + return "ROLE_USER"; + case Role.ROLE_AGENT: + return "ROLE_AGENT"; + case Role.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Configuration of a send message request. */ +export interface SendMessageConfiguration { + /** The output modes that the agent is expected to respond with. */ + acceptedOutputModes: string[]; + /** A configuration of a webhook that can be used to receive updates */ + pushNotification: + | PushNotificationConfig + | undefined; + /** + * The maximum number of messages to include in the history. if 0, the + * history will be unlimited. + */ + historyLength: number; + /** + * If true, the message will be blocking until the task is completed. If + * false, the message will be non-blocking and the task will be returned + * immediately. It is the caller's responsibility to check for any task + * updates. + */ + blocking: boolean; +} + +/** + * Task is the core unit of action for A2A. It has a current status + * and when results are created for the task they are stored in the + * artifact. If there are multiple turns for a task, these are stored in + * history. 
+ */ +export interface Task { + /** Unique identifier for a task, created by the A2A server. */ + id: string; + /** + * Unique identifier for the contextual collection of interactions (tasks + * and messages). Created by the A2A server. + */ + contextId: string; + /** The current status of a Task, including state and a message. */ + status: + | TaskStatus + | undefined; + /** A set of output artifacts for a Task. */ + artifacts: Artifact[]; + /** + * protolint:disable REPEATED_FIELD_NAMES_PLURALIZED + * The history of interactions from a task. + */ + history: Message[]; + /** + * protolint:enable REPEATED_FIELD_NAMES_PLURALIZED + * A key/value object to store custom metadata about a task. + */ + metadata: { [key: string]: any } | undefined; +} + +/** A container for the status of a task */ +export interface TaskStatus { + /** The current state of this task */ + state: TaskState; + /** A message associated with the status. */ + update: + | Message + | undefined; + /** + * Timestamp when the status was recorded. + * Example: "2023-10-27T10:00:00Z" + */ + timestamp: Date | undefined; +} + +/** + * Part represents a container for a section of communication content. + * Parts can be purely textual, some sort of file (image, video, etc) or + * a structured data blob (i.e. JSON). + */ +export interface Part { + part?: + | { $case: "text"; value: string } + | { $case: "file"; value: FilePart } + | { $case: "data"; value: DataPart } + | undefined; +} + +/** + * FilePart represents the different ways files can be provided. If files are + * small, directly feeding the bytes is supported via file_with_bytes. If the + * file is large, the agent should read the content as appropriate directly + * from the file_with_uri source. + */ +export interface FilePart { + file?: { $case: "fileWithUri"; value: string } | { $case: "fileWithBytes"; value: Buffer } | undefined; + mimeType: string; +} + +/** DataPart represents a structured blob. This is most commonly a JSON payload. 
*/ +export interface DataPart { + data: { [key: string]: any } | undefined; +} + +/** + * Message is one unit of communication between client and server. It is + * associated with a context and optionally a task. Since the server is + * responsible for the context definition, it must always provide a context_id + * in its messages. The client can optionally provide the context_id if it + * knows the context to associate the message to. Similarly for task_id, + * except the server decides if a task is created and whether to include the + * task_id. + */ +export interface Message { + /** + * The message id of the message. This is required and created by the + * message creator. + */ + messageId: string; + /** + * The context id of the message. This is optional and if set, the message + * will be associated with the given context. + */ + contextId: string; + /** + * The task id of the message. This is optional and if set, the message + * will be associated with the given task. + */ + taskId: string; + /** A role for the message. */ + role: Role; + /** + * protolint:disable REPEATED_FIELD_NAMES_PLURALIZED + * Content is the container of the message content. + */ + content: Part[]; + /** + * protolint:enable REPEATED_FIELD_NAMES_PLURALIZED + * Any optional metadata to provide along with the message. + */ + metadata: + | { [key: string]: any } + | undefined; + /** The URIs of extensions that are present or contributed to this Message. */ + extensions: string[]; +} + +/** + * Artifacts are the container for task completed results. These are similar + * to Messages but are intended to be the product of a task, as opposed to + * point-to-point communication. + */ +export interface Artifact { + /** Unique id for the artifact. It must be at least unique within a task. */ + artifactId: string; + /** A human readable name for the artifact. */ + name: string; + /** A human readable description of the artifact, optional. 
*/ + description: string; + /** The content of the artifact. */ + parts: Part[]; + /** Optional metadata included with the artifact. */ + metadata: + | { [key: string]: any } + | undefined; + /** The URIs of extensions that are present or contributed to this Artifact. */ + extensions: string[]; +} + +/** + * TaskStatusUpdateEvent is a delta even on a task indicating that a task + * has changed. + */ +export interface TaskStatusUpdateEvent { + /** The id of the task that is changed */ + taskId: string; + /** The id of the context that the task belongs to */ + contextId: string; + /** The new status of the task. */ + status: + | TaskStatus + | undefined; + /** Whether this is the last status update expected for this task. */ + final: boolean; + /** Optional metadata to associate with the task update. */ + metadata: { [key: string]: any } | undefined; +} + +/** + * TaskArtifactUpdateEvent represents a task delta where an artifact has + * been generated. + */ +export interface TaskArtifactUpdateEvent { + /** The id of the task for this artifact */ + taskId: string; + /** The id of the context that this task belongs too */ + contextId: string; + /** The artifact itself */ + artifact: + | Artifact + | undefined; + /** Whether this should be appended to a prior one produced */ + append: boolean; + /** Whether this represents the last part of an artifact */ + lastChunk: boolean; + /** Optional metadata associated with the artifact update. */ + metadata: { [key: string]: any } | undefined; +} + +/** Configuration for setting up push notifications for task updates. */ +export interface PushNotificationConfig { + /** A unique id for this push notification. 
*/ + id: string; + /** Url to send the notification too */ + url: string; + /** Token unique for this task/session */ + token: string; + /** Information about the authentication to sent with the notification */ + authentication: AuthenticationInfo | undefined; +} + +/** Defines authentication details, used for push notifications. */ +export interface AuthenticationInfo { + /** Supported authentication schemes - e.g. Basic, Bearer, etc */ + schemes: string[]; + /** Optional credentials */ + credentials: string; +} + +/** Defines additional transport information for the agent. */ +export interface AgentInterface { + /** The url this interface is found at. */ + url: string; + /** + * The transport supported this url. This is an open form string, to be + * easily extended for many transport protocols. The core ones officially + * supported are JSONRPC, GRPC and HTTP+JSON. + */ + transport: string; +} + +/** + * AgentCard conveys key information: + * - Overall details (version, name, description, uses) + * - Skills; a set of actions/solutions the agent can perform + * - Default modalities/content types supported by the agent. + * - Authentication requirements + * Next ID: 18 + */ +export interface AgentCard { + /** The version of the A2A protocol this agent supports. */ + protocolVersion: string; + /** + * A human readable name for the agent. + * Example: "Recipe Agent" + */ + name: string; + /** + * A description of the agent's domain of action/solution space. + * Example: "Agent that helps users with recipes and cooking." + */ + description: string; + /** + * A URL to the address the agent is hosted at. This represents the + * preferred endpoint as declared by the agent. + */ + url: string; + /** The transport of the preferred endpoint. If empty, defaults to JSONRPC. */ + preferredTransport: string; + /** + * Announcement of additional supported transports. Client can use any of + * the supported transports. 
+ */ + additionalInterfaces: AgentInterface[]; + /** The service provider of the agent. */ + provider: + | AgentProvider + | undefined; + /** + * The version of the agent. + * Example: "1.0.0" + */ + version: string; + /** A url to provide additional documentation about the agent. */ + documentationUrl: string; + /** A2A Capability set supported by the agent. */ + capabilities: + | AgentCapabilities + | undefined; + /** The security scheme details used for authenticating with this agent. */ + securitySchemes: { [key: string]: SecurityScheme }; + /** + * protolint:disable REPEATED_FIELD_NAMES_PLURALIZED + * Security requirements for contacting the agent. + * This list can be seen as an OR of ANDs. Each object in the list describes + * one possible set of security requirements that must be present on a + * request. This allows specifying, for example, "callers must either use + * OAuth OR an API Key AND mTLS." + * Example: + * security { + * schemes { key: "oauth" value { list: ["read"] } } + * } + * security { + * schemes { key: "api-key" } + * schemes { key: "mtls" } + * } + */ + security: Security[]; + /** + * protolint:enable REPEATED_FIELD_NAMES_PLURALIZED + * The set of interaction modes that the agent supports across all skills. + * This can be overridden per skill. Defined as mime types. + */ + defaultInputModes: string[]; + /** The mime types supported as outputs from this agent. */ + defaultOutputModes: string[]; + /** + * Skills represent a unit of ability an agent can perform. This may + * somewhat abstract but represents a more focused set of actions that the + * agent is highly likely to succeed at. + */ + skills: AgentSkill[]; + /** + * Whether the agent supports providing an extended agent card when + * the user is authenticated, i.e. is the card from .well-known + * different than the card from GetAgentCard. + */ + supportsAuthenticatedExtendedCard: boolean; + /** JSON Web Signatures computed for this AgentCard. 
*/ + signatures: AgentCardSignature[]; +} + +export interface AgentCard_SecuritySchemesEntry { + key: string; + value: SecurityScheme | undefined; +} + +/** Represents information about the service provider of an agent. */ +export interface AgentProvider { + /** + * The providers reference url + * Example: "https://ai.google.dev" + */ + url: string; + /** + * The providers organization name + * Example: "Google" + */ + organization: string; +} + +/** Defines the A2A feature set supported by the agent */ +export interface AgentCapabilities { + /** If the agent will support streaming responses */ + streaming: boolean; + /** If the agent can send push notifications to the clients webhook */ + pushNotifications: boolean; + /** Extensions supported by this agent. */ + extensions: AgentExtension[]; +} + +/** A declaration of an extension supported by an Agent. */ +export interface AgentExtension { + /** + * The URI of the extension. + * Example: "https://developers.google.com/identity/protocols/oauth2" + */ + uri: string; + /** + * A description of how this agent uses this extension. + * Example: "Google OAuth 2.0 authentication" + */ + description: string; + /** + * Whether the client must follow specific requirements of the extension. + * Example: false + */ + required: boolean; + /** Optional configuration for the extension. */ + params: { [key: string]: any } | undefined; +} + +/** + * AgentSkill represents a unit of action/solution that the agent can perform. + * One can think of this as a type of highly reliable solution that an agent + * can be tasked to provide. Agents have the autonomy to choose how and when + * to use specific skills, but clients should have confidence that if the + * skill is defined that unit of action can be reliably performed. + */ +export interface AgentSkill { + /** Unique id of the skill within this agent. */ + id: string; + /** A human readable name for the skill. 
*/ + name: string; + /** + * A human (or llm) readable description of the skill + * details and behaviors. + */ + description: string; + /** + * A set of tags for the skill to enhance categorization/utilization. + * Example: ["cooking", "customer support", "billing"] + */ + tags: string[]; + /** + * A set of example queries that this skill is designed to address. + * These examples should help the caller to understand how to craft requests + * to the agent to achieve specific goals. + * Example: ["I need a recipe for bread"] + */ + examples: string[]; + /** Possible input modalities supported. */ + inputModes: string[]; + /** Possible output modalities produced */ + outputModes: string[]; + /** + * protolint:disable REPEATED_FIELD_NAMES_PLURALIZED + * Security schemes necessary for the agent to leverage this skill. + * As in the overall AgentCard.security, this list represents a logical OR of + * security requirement objects. Each object is a set of security schemes + * that must be used together (a logical AND). + */ + security: Security[]; +} + +/** + * AgentCardSignature represents a JWS signature of an AgentCard. + * This follows the JSON format of an RFC 7515 JSON Web Signature (JWS). + */ +export interface AgentCardSignature { + /** + * The protected JWS header for the signature. This is always a + * base64url-encoded JSON object. Required. + */ + protected: string; + /** The computed signature, base64url-encoded. Required. */ + signature: string; + /** The unprotected JWS header values. 
*/ + header: { [key: string]: any } | undefined; +} + +export interface TaskPushNotificationConfig { + /** name=tasks/{id}/pushNotificationConfigs/{id} */ + name: string; + pushNotificationConfig: PushNotificationConfig | undefined; +} + +/** protolint:disable REPEATED_FIELD_NAMES_PLURALIZED */ +export interface StringList { + list: string[]; +} + +export interface Security { + schemes: { [key: string]: StringList }; +} + +export interface Security_SchemesEntry { + key: string; + value: StringList | undefined; +} + +export interface SecurityScheme { + scheme?: + | { $case: "apiKeySecurityScheme"; value: APIKeySecurityScheme } + | { $case: "httpAuthSecurityScheme"; value: HTTPAuthSecurityScheme } + | { $case: "oauth2SecurityScheme"; value: OAuth2SecurityScheme } + | { $case: "openIdConnectSecurityScheme"; value: OpenIdConnectSecurityScheme } + | { $case: "mtlsSecurityScheme"; value: MutualTlsSecurityScheme } + | undefined; +} + +export interface APIKeySecurityScheme { + /** Description of this security scheme. */ + description: string; + /** Location of the API key, valid values are "query", "header", or "cookie" */ + location: string; + /** Name of the header, query or cookie parameter to be used. */ + name: string; +} + +export interface HTTPAuthSecurityScheme { + /** Description of this security scheme. */ + description: string; + /** + * The name of the HTTP Authentication scheme to be used in the + * Authorization header as defined in RFC7235. The values used SHOULD be + * registered in the IANA Authentication Scheme registry. + * The value is case-insensitive, as defined in RFC7235. + */ + scheme: string; + /** + * A hint to the client to identify how the bearer token is formatted. + * Bearer tokens are usually generated by an authorization server, so + * this information is primarily for documentation purposes. + */ + bearerFormat: string; +} + +export interface OAuth2SecurityScheme { + /** Description of this security scheme. 
*/ + description: string; + /** An object containing configuration information for the flow types supported */ + flows: + | OAuthFlows + | undefined; + /** + * URL to the oauth2 authorization server metadata + * [RFC8414](https://datatracker.ietf.org/doc/html/rfc8414). TLS is required. + */ + oauth2MetadataUrl: string; +} + +export interface OpenIdConnectSecurityScheme { + /** Description of this security scheme. */ + description: string; + /** + * Well-known URL to discover the [[OpenID-Connect-Discovery]] provider + * metadata. + */ + openIdConnectUrl: string; +} + +export interface MutualTlsSecurityScheme { + /** Description of this security scheme. */ + description: string; +} + +export interface OAuthFlows { + flow?: + | { $case: "authorizationCode"; value: AuthorizationCodeOAuthFlow } + | { $case: "clientCredentials"; value: ClientCredentialsOAuthFlow } + | { $case: "implicit"; value: ImplicitOAuthFlow } + | { $case: "password"; value: PasswordOAuthFlow } + | undefined; +} + +export interface AuthorizationCodeOAuthFlow { + /** + * The authorization URL to be used for this flow. This MUST be in the + * form of a URL. The OAuth2 standard requires the use of TLS + */ + authorizationUrl: string; + /** + * The token URL to be used for this flow. This MUST be in the form of a URL. + * The OAuth2 standard requires the use of TLS. + */ + tokenUrl: string; + /** + * The URL to be used for obtaining refresh tokens. This MUST be in the + * form of a URL. The OAuth2 standard requires the use of TLS. + */ + refreshUrl: string; + /** + * The available scopes for the OAuth2 security scheme. A map between the + * scope name and a short description for it. The map MAY be empty. + */ + scopes: { [key: string]: string }; +} + +export interface AuthorizationCodeOAuthFlow_ScopesEntry { + key: string; + value: string; +} + +export interface ClientCredentialsOAuthFlow { + /** + * The token URL to be used for this flow. This MUST be in the form of a URL. 
+ * The OAuth2 standard requires the use of TLS. + */ + tokenUrl: string; + /** + * The URL to be used for obtaining refresh tokens. This MUST be in the + * form of a URL. The OAuth2 standard requires the use of TLS. + */ + refreshUrl: string; + /** + * The available scopes for the OAuth2 security scheme. A map between the + * scope name and a short description for it. The map MAY be empty. + */ + scopes: { [key: string]: string }; +} + +export interface ClientCredentialsOAuthFlow_ScopesEntry { + key: string; + value: string; +} + +export interface ImplicitOAuthFlow { + /** + * The authorization URL to be used for this flow. This MUST be in the + * form of a URL. The OAuth2 standard requires the use of TLS + */ + authorizationUrl: string; + /** + * The URL to be used for obtaining refresh tokens. This MUST be in the + * form of a URL. The OAuth2 standard requires the use of TLS. + */ + refreshUrl: string; + /** + * The available scopes for the OAuth2 security scheme. A map between the + * scope name and a short description for it. The map MAY be empty. + */ + scopes: { [key: string]: string }; +} + +export interface ImplicitOAuthFlow_ScopesEntry { + key: string; + value: string; +} + +export interface PasswordOAuthFlow { + /** + * The token URL to be used for this flow. This MUST be in the form of a URL. + * The OAuth2 standard requires the use of TLS. + */ + tokenUrl: string; + /** + * The URL to be used for obtaining refresh tokens. This MUST be in the + * form of a URL. The OAuth2 standard requires the use of TLS. + */ + refreshUrl: string; + /** + * The available scopes for the OAuth2 security scheme. A map between the + * scope name and a short description for it. The map MAY be empty. 
+ */ + scopes: { [key: string]: string }; +} + +export interface PasswordOAuthFlow_ScopesEntry { + key: string; + value: string; +} + +/** /////////// Request Messages /////////// */ +export interface SendMessageRequest { + request: Message | undefined; + configuration: SendMessageConfiguration | undefined; + metadata: { [key: string]: any } | undefined; +} + +export interface GetTaskRequest { + /** name=tasks/{id} */ + name: string; + historyLength: number; +} + +export interface CancelTaskRequest { + /** name=tasks/{id} */ + name: string; +} + +export interface GetTaskPushNotificationConfigRequest { + /** name=tasks/{id}/pushNotificationConfigs/{push_id} */ + name: string; +} + +export interface DeleteTaskPushNotificationConfigRequest { + /** name=tasks/{id}/pushNotificationConfigs/{push_id} */ + name: string; +} + +export interface CreateTaskPushNotificationConfigRequest { + /** + * The task resource for this config. + * Format: tasks/{id} + */ + parent: string; + configId: string; + config: TaskPushNotificationConfig | undefined; +} + +export interface TaskSubscriptionRequest { + /** name=tasks/{id} */ + name: string; +} + +export interface ListTaskPushNotificationConfigRequest { + /** parent=tasks/{id} */ + parent: string; + /** + * For AIP-158 these fields are present. Usually not used/needed. + * The maximum number of configurations to return. + * If unspecified, all configs will be returned. + */ + pageSize: number; + /** + * A page token received from a previous + * ListTaskPushNotificationConfigRequest call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to + * `ListTaskPushNotificationConfigRequest` must match the call that provided + * the page token. + */ + pageToken: string; +} + +/** Empty. Added to fix linter violation. 
*/ +export interface GetAgentCardRequest { +} + +/** ////// Response Messages /////////// */ +export interface SendMessageResponse { + payload?: { $case: "task"; value: Task } | { $case: "msg"; value: Message } | undefined; +} + +/** + * The stream response for a message. The stream should be one of the following + * sequences: + * If the response is a message, the stream should contain one, and only one, + * message and then close + * If the response is a task lifecycle, the first response should be a Task + * object followed by zero or more TaskStatusUpdateEvents and + * TaskArtifactUpdateEvents. The stream should complete when the Task + * if in an interrupted or terminal state. A stream that ends before these + * conditions are met are + */ +export interface StreamResponse { + payload?: + | { $case: "task"; value: Task } + | { $case: "msg"; value: Message } + | { $case: "statusUpdate"; value: TaskStatusUpdateEvent } + | { $case: "artifactUpdate"; value: TaskArtifactUpdateEvent } + | undefined; +} + +export interface ListTaskPushNotificationConfigResponse { + configs: TaskPushNotificationConfig[]; + /** + * A token, which can be sent as `page_token` to retrieve the next page. + * If this field is omitted, there are no subsequent pages. 
+ */ + nextPageToken: string; +} + +function createBaseSendMessageConfiguration(): SendMessageConfiguration { + return { acceptedOutputModes: [], pushNotification: undefined, historyLength: 0, blocking: false }; +} + +export const SendMessageConfiguration: MessageFns = { + encode(message: SendMessageConfiguration, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.acceptedOutputModes) { + writer.uint32(10).string(v!); + } + if (message.pushNotification !== undefined) { + PushNotificationConfig.encode(message.pushNotification, writer.uint32(18).fork()).join(); + } + if (message.historyLength !== 0) { + writer.uint32(24).int32(message.historyLength); + } + if (message.blocking !== false) { + writer.uint32(32).bool(message.blocking); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SendMessageConfiguration { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSendMessageConfiguration(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.acceptedOutputModes.push(reader.string()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.pushNotification = PushNotificationConfig.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.historyLength = reader.int32(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.blocking = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): SendMessageConfiguration { + return { + acceptedOutputModes: globalThis.Array.isArray(object?.acceptedOutputModes) + ? 
object.acceptedOutputModes.map((e: any) => globalThis.String(e)) + : [], + pushNotification: isSet(object.pushNotification) + ? PushNotificationConfig.fromJSON(object.pushNotification) + : undefined, + historyLength: isSet(object.historyLength) ? globalThis.Number(object.historyLength) : 0, + blocking: isSet(object.blocking) ? globalThis.Boolean(object.blocking) : false, + }; + }, + + toJSON(message: SendMessageConfiguration): unknown { + const obj: any = {}; + if (message.acceptedOutputModes?.length) { + obj.acceptedOutputModes = message.acceptedOutputModes; + } + if (message.pushNotification !== undefined) { + obj.pushNotification = PushNotificationConfig.toJSON(message.pushNotification); + } + if (message.historyLength !== 0) { + obj.historyLength = Math.round(message.historyLength); + } + if (message.blocking !== false) { + obj.blocking = message.blocking; + } + return obj; + }, + + create, I>>(base?: I): SendMessageConfiguration { + return SendMessageConfiguration.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): SendMessageConfiguration { + const message = createBaseSendMessageConfiguration(); + message.acceptedOutputModes = object.acceptedOutputModes?.map((e) => e) || []; + message.pushNotification = (object.pushNotification !== undefined && object.pushNotification !== null) + ? PushNotificationConfig.fromPartial(object.pushNotification) + : undefined; + message.historyLength = object.historyLength ?? 0; + message.blocking = object.blocking ?? 
false; + return message; + }, +}; + +function createBaseTask(): Task { + return { id: "", contextId: "", status: undefined, artifacts: [], history: [], metadata: undefined }; +} + +export const Task: MessageFns = { + encode(message: Task, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.contextId !== "") { + writer.uint32(18).string(message.contextId); + } + if (message.status !== undefined) { + TaskStatus.encode(message.status, writer.uint32(26).fork()).join(); + } + for (const v of message.artifacts) { + Artifact.encode(v!, writer.uint32(34).fork()).join(); + } + for (const v of message.history) { + Message.encode(v!, writer.uint32(42).fork()).join(); + } + if (message.metadata !== undefined) { + Struct.encode(Struct.wrap(message.metadata), writer.uint32(50).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Task { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseTask(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.contextId = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.status = TaskStatus.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.artifacts.push(Artifact.decode(reader, reader.uint32())); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.history.push(Message.decode(reader, reader.uint32())); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.metadata = Struct.unwrap(Struct.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Task { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + contextId: isSet(object.contextId) ? globalThis.String(object.contextId) : "", + status: isSet(object.status) ? TaskStatus.fromJSON(object.status) : undefined, + artifacts: globalThis.Array.isArray(object?.artifacts) + ? object.artifacts.map((e: any) => Artifact.fromJSON(e)) + : [], + history: globalThis.Array.isArray(object?.history) ? object.history.map((e: any) => Message.fromJSON(e)) : [], + metadata: isObject(object.metadata) ? 
object.metadata : undefined, + }; + }, + + toJSON(message: Task): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.contextId !== "") { + obj.contextId = message.contextId; + } + if (message.status !== undefined) { + obj.status = TaskStatus.toJSON(message.status); + } + if (message.artifacts?.length) { + obj.artifacts = message.artifacts.map((e) => Artifact.toJSON(e)); + } + if (message.history?.length) { + obj.history = message.history.map((e) => Message.toJSON(e)); + } + if (message.metadata !== undefined) { + obj.metadata = message.metadata; + } + return obj; + }, + + create, I>>(base?: I): Task { + return Task.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Task { + const message = createBaseTask(); + message.id = object.id ?? ""; + message.contextId = object.contextId ?? ""; + message.status = (object.status !== undefined && object.status !== null) + ? TaskStatus.fromPartial(object.status) + : undefined; + message.artifacts = object.artifacts?.map((e) => Artifact.fromPartial(e)) || []; + message.history = object.history?.map((e) => Message.fromPartial(e)) || []; + message.metadata = object.metadata ?? undefined; + return message; + }, +}; + +function createBaseTaskStatus(): TaskStatus { + return { state: 0, update: undefined, timestamp: undefined }; +} + +export const TaskStatus: MessageFns = { + encode(message: TaskStatus, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.state !== 0) { + writer.uint32(8).int32(message.state); + } + if (message.update !== undefined) { + Message.encode(message.update, writer.uint32(18).fork()).join(); + } + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): TaskStatus { + const reader = input instanceof BinaryReader ? 
input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTaskStatus(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.state = reader.int32() as any; + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.update = Message.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): TaskStatus { + return { + state: isSet(object.state) ? taskStateFromJSON(object.state) : 0, + update: isSet(object.message) ? Message.fromJSON(object.message) : undefined, + timestamp: isSet(object.timestamp) ? fromJsonTimestamp(object.timestamp) : undefined, + }; + }, + + toJSON(message: TaskStatus): unknown { + const obj: any = {}; + if (message.state !== 0) { + obj.state = taskStateToJSON(message.state); + } + if (message.update !== undefined) { + obj.message = Message.toJSON(message.update); + } + if (message.timestamp !== undefined) { + obj.timestamp = message.timestamp.toISOString(); + } + return obj; + }, + + create, I>>(base?: I): TaskStatus { + return TaskStatus.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): TaskStatus { + const message = createBaseTaskStatus(); + message.state = object.state ?? 0; + message.update = (object.update !== undefined && object.update !== null) + ? Message.fromPartial(object.update) + : undefined; + message.timestamp = object.timestamp ?? 
undefined; + return message; + }, +}; + +function createBasePart(): Part { + return { part: undefined }; +} + +export const Part: MessageFns = { + encode(message: Part, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + switch (message.part?.$case) { + case "text": + writer.uint32(10).string(message.part.value); + break; + case "file": + FilePart.encode(message.part.value, writer.uint32(18).fork()).join(); + break; + case "data": + DataPart.encode(message.part.value, writer.uint32(26).fork()).join(); + break; + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Part { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.part = { $case: "text", value: reader.string() }; + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.part = { $case: "file", value: FilePart.decode(reader, reader.uint32()) }; + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.part = { $case: "data", value: DataPart.decode(reader, reader.uint32()) }; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Part { + return { + part: isSet(object.text) + ? { $case: "text", value: globalThis.String(object.text) } + : isSet(object.file) + ? { $case: "file", value: FilePart.fromJSON(object.file) } + : isSet(object.data) + ? 
{ $case: "data", value: DataPart.fromJSON(object.data) } + : undefined, + }; + }, + + toJSON(message: Part): unknown { + const obj: any = {}; + if (message.part?.$case === "text") { + obj.text = message.part.value; + } else if (message.part?.$case === "file") { + obj.file = FilePart.toJSON(message.part.value); + } else if (message.part?.$case === "data") { + obj.data = DataPart.toJSON(message.part.value); + } + return obj; + }, + + create, I>>(base?: I): Part { + return Part.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Part { + const message = createBasePart(); + switch (object.part?.$case) { + case "text": { + if (object.part?.value !== undefined && object.part?.value !== null) { + message.part = { $case: "text", value: object.part.value }; + } + break; + } + case "file": { + if (object.part?.value !== undefined && object.part?.value !== null) { + message.part = { $case: "file", value: FilePart.fromPartial(object.part.value) }; + } + break; + } + case "data": { + if (object.part?.value !== undefined && object.part?.value !== null) { + message.part = { $case: "data", value: DataPart.fromPartial(object.part.value) }; + } + break; + } + } + return message; + }, +}; + +function createBaseFilePart(): FilePart { + return { file: undefined, mimeType: "" }; +} + +export const FilePart: MessageFns = { + encode(message: FilePart, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + switch (message.file?.$case) { + case "fileWithUri": + writer.uint32(10).string(message.file.value); + break; + case "fileWithBytes": + writer.uint32(18).bytes(message.file.value); + break; + } + if (message.mimeType !== "") { + writer.uint32(26).string(message.mimeType); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FilePart { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFilePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.file = { $case: "fileWithUri", value: reader.string() }; + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.file = { $case: "fileWithBytes", value: Buffer.from(reader.bytes()) }; + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.mimeType = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FilePart { + return { + file: isSet(object.fileWithUri) + ? { $case: "fileWithUri", value: globalThis.String(object.fileWithUri) } + : isSet(object.fileWithBytes) + ? { $case: "fileWithBytes", value: Buffer.from(bytesFromBase64(object.fileWithBytes)) } + : undefined, + mimeType: isSet(object.mimeType) ? globalThis.String(object.mimeType) : "", + }; + }, + + toJSON(message: FilePart): unknown { + const obj: any = {}; + if (message.file?.$case === "fileWithUri") { + obj.fileWithUri = message.file.value; + } else if (message.file?.$case === "fileWithBytes") { + obj.fileWithBytes = base64FromBytes(message.file.value); + } + if (message.mimeType !== "") { + obj.mimeType = message.mimeType; + } + return obj; + }, + + create, I>>(base?: I): FilePart { + return FilePart.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): FilePart { + const message = createBaseFilePart(); + switch (object.file?.$case) { + case "fileWithUri": { + if (object.file?.value !== undefined && object.file?.value !== null) { + message.file = { $case: "fileWithUri", value: object.file.value }; + } + break; + } + case "fileWithBytes": { + if (object.file?.value !== undefined && object.file?.value !== null) { + message.file = { $case: "fileWithBytes", value: object.file.value }; + } + break; + } + } + message.mimeType = object.mimeType ?? ""; + return message; + }, +}; + +function createBaseDataPart(): DataPart { + return { data: undefined }; +} + +export const DataPart: MessageFns = { + encode(message: DataPart, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.data !== undefined) { + Struct.encode(Struct.wrap(message.data), writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DataPart { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDataPart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.data = Struct.unwrap(Struct.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DataPart { + return { data: isObject(object.data) ? object.data : undefined }; + }, + + toJSON(message: DataPart): unknown { + const obj: any = {}; + if (message.data !== undefined) { + obj.data = message.data; + } + return obj; + }, + + create, I>>(base?: I): DataPart { + return DataPart.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): DataPart { + const message = createBaseDataPart(); + message.data = object.data ?? 
undefined; + return message; + }, +}; + +function createBaseMessage(): Message { + return { messageId: "", contextId: "", taskId: "", role: 0, content: [], metadata: undefined, extensions: [] }; +} + +export const Message: MessageFns = { + encode(message: Message, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.messageId !== "") { + writer.uint32(10).string(message.messageId); + } + if (message.contextId !== "") { + writer.uint32(18).string(message.contextId); + } + if (message.taskId !== "") { + writer.uint32(26).string(message.taskId); + } + if (message.role !== 0) { + writer.uint32(32).int32(message.role); + } + for (const v of message.content) { + Part.encode(v!, writer.uint32(42).fork()).join(); + } + if (message.metadata !== undefined) { + Struct.encode(Struct.wrap(message.metadata), writer.uint32(50).fork()).join(); + } + for (const v of message.extensions) { + writer.uint32(58).string(v!); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Message { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMessage(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.messageId = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.contextId = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.taskId = reader.string(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.role = reader.int32() as any; + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.content.push(Part.decode(reader, reader.uint32())); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.metadata = Struct.unwrap(Struct.decode(reader, reader.uint32())); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.extensions.push(reader.string()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Message { + return { + messageId: isSet(object.messageId) ? globalThis.String(object.messageId) : "", + contextId: isSet(object.contextId) ? globalThis.String(object.contextId) : "", + taskId: isSet(object.taskId) ? globalThis.String(object.taskId) : "", + role: isSet(object.role) ? roleFromJSON(object.role) : 0, + content: globalThis.Array.isArray(object?.content) ? object.content.map((e: any) => Part.fromJSON(e)) : [], + metadata: isObject(object.metadata) ? object.metadata : undefined, + extensions: globalThis.Array.isArray(object?.extensions) + ? 
object.extensions.map((e: any) => globalThis.String(e)) + : [], + }; + }, + + toJSON(message: Message): unknown { + const obj: any = {}; + if (message.messageId !== "") { + obj.messageId = message.messageId; + } + if (message.contextId !== "") { + obj.contextId = message.contextId; + } + if (message.taskId !== "") { + obj.taskId = message.taskId; + } + if (message.role !== 0) { + obj.role = roleToJSON(message.role); + } + if (message.content?.length) { + obj.content = message.content.map((e) => Part.toJSON(e)); + } + if (message.metadata !== undefined) { + obj.metadata = message.metadata; + } + if (message.extensions?.length) { + obj.extensions = message.extensions; + } + return obj; + }, + + create, I>>(base?: I): Message { + return Message.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Message { + const message = createBaseMessage(); + message.messageId = object.messageId ?? ""; + message.contextId = object.contextId ?? ""; + message.taskId = object.taskId ?? ""; + message.role = object.role ?? 0; + message.content = object.content?.map((e) => Part.fromPartial(e)) || []; + message.metadata = object.metadata ?? 
undefined; + message.extensions = object.extensions?.map((e) => e) || []; + return message; + }, +}; + +function createBaseArtifact(): Artifact { + return { artifactId: "", name: "", description: "", parts: [], metadata: undefined, extensions: [] }; +} + +export const Artifact: MessageFns = { + encode(message: Artifact, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.artifactId !== "") { + writer.uint32(10).string(message.artifactId); + } + if (message.name !== "") { + writer.uint32(26).string(message.name); + } + if (message.description !== "") { + writer.uint32(34).string(message.description); + } + for (const v of message.parts) { + Part.encode(v!, writer.uint32(42).fork()).join(); + } + if (message.metadata !== undefined) { + Struct.encode(Struct.wrap(message.metadata), writer.uint32(50).fork()).join(); + } + for (const v of message.extensions) { + writer.uint32(58).string(v!); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Artifact { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseArtifact(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.artifactId = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.name = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.description = reader.string(); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.parts.push(Part.decode(reader, reader.uint32())); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.metadata = Struct.unwrap(Struct.decode(reader, reader.uint32())); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.extensions.push(reader.string()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Artifact { + return { + artifactId: isSet(object.artifactId) ? globalThis.String(object.artifactId) : "", + name: isSet(object.name) ? globalThis.String(object.name) : "", + description: isSet(object.description) ? globalThis.String(object.description) : "", + parts: globalThis.Array.isArray(object?.parts) ? object.parts.map((e: any) => Part.fromJSON(e)) : [], + metadata: isObject(object.metadata) ? object.metadata : undefined, + extensions: globalThis.Array.isArray(object?.extensions) + ? 
object.extensions.map((e: any) => globalThis.String(e)) + : [], + }; + }, + + toJSON(message: Artifact): unknown { + const obj: any = {}; + if (message.artifactId !== "") { + obj.artifactId = message.artifactId; + } + if (message.name !== "") { + obj.name = message.name; + } + if (message.description !== "") { + obj.description = message.description; + } + if (message.parts?.length) { + obj.parts = message.parts.map((e) => Part.toJSON(e)); + } + if (message.metadata !== undefined) { + obj.metadata = message.metadata; + } + if (message.extensions?.length) { + obj.extensions = message.extensions; + } + return obj; + }, + + create, I>>(base?: I): Artifact { + return Artifact.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Artifact { + const message = createBaseArtifact(); + message.artifactId = object.artifactId ?? ""; + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.parts = object.parts?.map((e) => Part.fromPartial(e)) || []; + message.metadata = object.metadata ?? 
undefined; + message.extensions = object.extensions?.map((e) => e) || []; + return message; + }, +}; + +function createBaseTaskStatusUpdateEvent(): TaskStatusUpdateEvent { + return { taskId: "", contextId: "", status: undefined, final: false, metadata: undefined }; +} + +export const TaskStatusUpdateEvent: MessageFns = { + encode(message: TaskStatusUpdateEvent, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.taskId !== "") { + writer.uint32(10).string(message.taskId); + } + if (message.contextId !== "") { + writer.uint32(18).string(message.contextId); + } + if (message.status !== undefined) { + TaskStatus.encode(message.status, writer.uint32(26).fork()).join(); + } + if (message.final !== false) { + writer.uint32(32).bool(message.final); + } + if (message.metadata !== undefined) { + Struct.encode(Struct.wrap(message.metadata), writer.uint32(42).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): TaskStatusUpdateEvent { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseTaskStatusUpdateEvent(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.taskId = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.contextId = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.status = TaskStatus.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.final = reader.bool(); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.metadata = Struct.unwrap(Struct.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): TaskStatusUpdateEvent { + return { + taskId: isSet(object.taskId) ? globalThis.String(object.taskId) : "", + contextId: isSet(object.contextId) ? globalThis.String(object.contextId) : "", + status: isSet(object.status) ? TaskStatus.fromJSON(object.status) : undefined, + final: isSet(object.final) ? globalThis.Boolean(object.final) : false, + metadata: isObject(object.metadata) ? object.metadata : undefined, + }; + }, + + toJSON(message: TaskStatusUpdateEvent): unknown { + const obj: any = {}; + if (message.taskId !== "") { + obj.taskId = message.taskId; + } + if (message.contextId !== "") { + obj.contextId = message.contextId; + } + if (message.status !== undefined) { + obj.status = TaskStatus.toJSON(message.status); + } + if (message.final !== false) { + obj.final = message.final; + } + if (message.metadata !== undefined) { + obj.metadata = message.metadata; + } + return obj; + }, + + create, I>>(base?: I): TaskStatusUpdateEvent { + return TaskStatusUpdateEvent.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): TaskStatusUpdateEvent { + const message = createBaseTaskStatusUpdateEvent(); + message.taskId = object.taskId ?? ""; + message.contextId = object.contextId ?? ""; + message.status = (object.status !== undefined && object.status !== null) + ? TaskStatus.fromPartial(object.status) + : undefined; + message.final = object.final ?? false; + message.metadata = object.metadata ?? undefined; + return message; + }, +}; + +function createBaseTaskArtifactUpdateEvent(): TaskArtifactUpdateEvent { + return { taskId: "", contextId: "", artifact: undefined, append: false, lastChunk: false, metadata: undefined }; +} + +export const TaskArtifactUpdateEvent: MessageFns = { + encode(message: TaskArtifactUpdateEvent, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.taskId !== "") { + writer.uint32(10).string(message.taskId); + } + if (message.contextId !== "") { + writer.uint32(18).string(message.contextId); + } + if (message.artifact !== undefined) { + Artifact.encode(message.artifact, writer.uint32(26).fork()).join(); + } + if (message.append !== false) { + writer.uint32(32).bool(message.append); + } + if (message.lastChunk !== false) { + writer.uint32(40).bool(message.lastChunk); + } + if (message.metadata !== undefined) { + Struct.encode(Struct.wrap(message.metadata), writer.uint32(50).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): TaskArtifactUpdateEvent { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseTaskArtifactUpdateEvent(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.taskId = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.contextId = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.artifact = Artifact.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.append = reader.bool(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.lastChunk = reader.bool(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.metadata = Struct.unwrap(Struct.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): TaskArtifactUpdateEvent { + return { + taskId: isSet(object.taskId) ? globalThis.String(object.taskId) : "", + contextId: isSet(object.contextId) ? globalThis.String(object.contextId) : "", + artifact: isSet(object.artifact) ? Artifact.fromJSON(object.artifact) : undefined, + append: isSet(object.append) ? globalThis.Boolean(object.append) : false, + lastChunk: isSet(object.lastChunk) ? globalThis.Boolean(object.lastChunk) : false, + metadata: isObject(object.metadata) ? 
object.metadata : undefined, + }; + }, + + toJSON(message: TaskArtifactUpdateEvent): unknown { + const obj: any = {}; + if (message.taskId !== "") { + obj.taskId = message.taskId; + } + if (message.contextId !== "") { + obj.contextId = message.contextId; + } + if (message.artifact !== undefined) { + obj.artifact = Artifact.toJSON(message.artifact); + } + if (message.append !== false) { + obj.append = message.append; + } + if (message.lastChunk !== false) { + obj.lastChunk = message.lastChunk; + } + if (message.metadata !== undefined) { + obj.metadata = message.metadata; + } + return obj; + }, + + create, I>>(base?: I): TaskArtifactUpdateEvent { + return TaskArtifactUpdateEvent.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): TaskArtifactUpdateEvent { + const message = createBaseTaskArtifactUpdateEvent(); + message.taskId = object.taskId ?? ""; + message.contextId = object.contextId ?? ""; + message.artifact = (object.artifact !== undefined && object.artifact !== null) + ? Artifact.fromPartial(object.artifact) + : undefined; + message.append = object.append ?? false; + message.lastChunk = object.lastChunk ?? false; + message.metadata = object.metadata ?? 
undefined; + return message; + }, +}; + +function createBasePushNotificationConfig(): PushNotificationConfig { + return { id: "", url: "", token: "", authentication: undefined }; +} + +export const PushNotificationConfig: MessageFns = { + encode(message: PushNotificationConfig, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.url !== "") { + writer.uint32(18).string(message.url); + } + if (message.token !== "") { + writer.uint32(26).string(message.token); + } + if (message.authentication !== undefined) { + AuthenticationInfo.encode(message.authentication, writer.uint32(34).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PushNotificationConfig { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePushNotificationConfig(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.url = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.token = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.authentication = AuthenticationInfo.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PushNotificationConfig { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + url: isSet(object.url) ? globalThis.String(object.url) : "", + token: isSet(object.token) ? globalThis.String(object.token) : "", + authentication: isSet(object.authentication) ? 
AuthenticationInfo.fromJSON(object.authentication) : undefined, + }; + }, + + toJSON(message: PushNotificationConfig): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.url !== "") { + obj.url = message.url; + } + if (message.token !== "") { + obj.token = message.token; + } + if (message.authentication !== undefined) { + obj.authentication = AuthenticationInfo.toJSON(message.authentication); + } + return obj; + }, + + create, I>>(base?: I): PushNotificationConfig { + return PushNotificationConfig.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): PushNotificationConfig { + const message = createBasePushNotificationConfig(); + message.id = object.id ?? ""; + message.url = object.url ?? ""; + message.token = object.token ?? ""; + message.authentication = (object.authentication !== undefined && object.authentication !== null) + ? AuthenticationInfo.fromPartial(object.authentication) + : undefined; + return message; + }, +}; + +function createBaseAuthenticationInfo(): AuthenticationInfo { + return { schemes: [], credentials: "" }; +} + +export const AuthenticationInfo: MessageFns = { + encode(message: AuthenticationInfo, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.schemes) { + writer.uint32(10).string(v!); + } + if (message.credentials !== "") { + writer.uint32(18).string(message.credentials); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): AuthenticationInfo { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseAuthenticationInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.schemes.push(reader.string()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.credentials = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): AuthenticationInfo { + return { + schemes: globalThis.Array.isArray(object?.schemes) ? object.schemes.map((e: any) => globalThis.String(e)) : [], + credentials: isSet(object.credentials) ? globalThis.String(object.credentials) : "", + }; + }, + + toJSON(message: AuthenticationInfo): unknown { + const obj: any = {}; + if (message.schemes?.length) { + obj.schemes = message.schemes; + } + if (message.credentials !== "") { + obj.credentials = message.credentials; + } + return obj; + }, + + create, I>>(base?: I): AuthenticationInfo { + return AuthenticationInfo.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): AuthenticationInfo { + const message = createBaseAuthenticationInfo(); + message.schemes = object.schemes?.map((e) => e) || []; + message.credentials = object.credentials ?? ""; + return message; + }, +}; + +function createBaseAgentInterface(): AgentInterface { + return { url: "", transport: "" }; +} + +export const AgentInterface: MessageFns = { + encode(message: AgentInterface, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.url !== "") { + writer.uint32(10).string(message.url); + } + if (message.transport !== "") { + writer.uint32(18).string(message.transport); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): AgentInterface { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseAgentInterface(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.url = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.transport = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): AgentInterface { + return { + url: isSet(object.url) ? globalThis.String(object.url) : "", + transport: isSet(object.transport) ? globalThis.String(object.transport) : "", + }; + }, + + toJSON(message: AgentInterface): unknown { + const obj: any = {}; + if (message.url !== "") { + obj.url = message.url; + } + if (message.transport !== "") { + obj.transport = message.transport; + } + return obj; + }, + + create, I>>(base?: I): AgentInterface { + return AgentInterface.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): AgentInterface { + const message = createBaseAgentInterface(); + message.url = object.url ?? ""; + message.transport = object.transport ?? 
""; + return message; + }, +}; + +function createBaseAgentCard(): AgentCard { + return { + protocolVersion: "", + name: "", + description: "", + url: "", + preferredTransport: "", + additionalInterfaces: [], + provider: undefined, + version: "", + documentationUrl: "", + capabilities: undefined, + securitySchemes: {}, + security: [], + defaultInputModes: [], + defaultOutputModes: [], + skills: [], + supportsAuthenticatedExtendedCard: false, + signatures: [], + }; +} + +export const AgentCard: MessageFns = { + encode(message: AgentCard, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.protocolVersion !== "") { + writer.uint32(130).string(message.protocolVersion); + } + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + if (message.url !== "") { + writer.uint32(26).string(message.url); + } + if (message.preferredTransport !== "") { + writer.uint32(114).string(message.preferredTransport); + } + for (const v of message.additionalInterfaces) { + AgentInterface.encode(v!, writer.uint32(122).fork()).join(); + } + if (message.provider !== undefined) { + AgentProvider.encode(message.provider, writer.uint32(34).fork()).join(); + } + if (message.version !== "") { + writer.uint32(42).string(message.version); + } + if (message.documentationUrl !== "") { + writer.uint32(50).string(message.documentationUrl); + } + if (message.capabilities !== undefined) { + AgentCapabilities.encode(message.capabilities, writer.uint32(58).fork()).join(); + } + Object.entries(message.securitySchemes).forEach(([key, value]) => { + AgentCard_SecuritySchemesEntry.encode({ key: key as any, value }, writer.uint32(66).fork()).join(); + }); + for (const v of message.security) { + Security.encode(v!, writer.uint32(74).fork()).join(); + } + for (const v of message.defaultInputModes) { + writer.uint32(82).string(v!); + } + for (const v of message.defaultOutputModes) { + 
writer.uint32(90).string(v!); + } + for (const v of message.skills) { + AgentSkill.encode(v!, writer.uint32(98).fork()).join(); + } + if (message.supportsAuthenticatedExtendedCard !== false) { + writer.uint32(104).bool(message.supportsAuthenticatedExtendedCard); + } + for (const v of message.signatures) { + AgentCardSignature.encode(v!, writer.uint32(138).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): AgentCard { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAgentCard(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 16: { + if (tag !== 130) { + break; + } + + message.protocolVersion = reader.string(); + continue; + } + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.description = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.url = reader.string(); + continue; + } + case 14: { + if (tag !== 114) { + break; + } + + message.preferredTransport = reader.string(); + continue; + } + case 15: { + if (tag !== 122) { + break; + } + + message.additionalInterfaces.push(AgentInterface.decode(reader, reader.uint32())); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.provider = AgentProvider.decode(reader, reader.uint32()); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.version = reader.string(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.documentationUrl = reader.string(); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.capabilities = AgentCapabilities.decode(reader, reader.uint32()); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + const entry8 = 
AgentCard_SecuritySchemesEntry.decode(reader, reader.uint32()); + if (entry8.value !== undefined) { + message.securitySchemes[entry8.key] = entry8.value; + } + continue; + } + case 9: { + if (tag !== 74) { + break; + } + + message.security.push(Security.decode(reader, reader.uint32())); + continue; + } + case 10: { + if (tag !== 82) { + break; + } + + message.defaultInputModes.push(reader.string()); + continue; + } + case 11: { + if (tag !== 90) { + break; + } + + message.defaultOutputModes.push(reader.string()); + continue; + } + case 12: { + if (tag !== 98) { + break; + } + + message.skills.push(AgentSkill.decode(reader, reader.uint32())); + continue; + } + case 13: { + if (tag !== 104) { + break; + } + + message.supportsAuthenticatedExtendedCard = reader.bool(); + continue; + } + case 17: { + if (tag !== 138) { + break; + } + + message.signatures.push(AgentCardSignature.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): AgentCard { + return { + protocolVersion: isSet(object.protocolVersion) ? globalThis.String(object.protocolVersion) : "", + name: isSet(object.name) ? globalThis.String(object.name) : "", + description: isSet(object.description) ? globalThis.String(object.description) : "", + url: isSet(object.url) ? globalThis.String(object.url) : "", + preferredTransport: isSet(object.preferredTransport) ? globalThis.String(object.preferredTransport) : "", + additionalInterfaces: globalThis.Array.isArray(object?.additionalInterfaces) + ? object.additionalInterfaces.map((e: any) => AgentInterface.fromJSON(e)) + : [], + provider: isSet(object.provider) ? AgentProvider.fromJSON(object.provider) : undefined, + version: isSet(object.version) ? globalThis.String(object.version) : "", + documentationUrl: isSet(object.documentationUrl) ? globalThis.String(object.documentationUrl) : "", + capabilities: isSet(object.capabilities) ? 
AgentCapabilities.fromJSON(object.capabilities) : undefined, + securitySchemes: isObject(object.securitySchemes) + ? Object.entries(object.securitySchemes).reduce<{ [key: string]: SecurityScheme }>((acc, [key, value]) => { + acc[key] = SecurityScheme.fromJSON(value); + return acc; + }, {}) + : {}, + security: globalThis.Array.isArray(object?.security) ? object.security.map((e: any) => Security.fromJSON(e)) : [], + defaultInputModes: globalThis.Array.isArray(object?.defaultInputModes) + ? object.defaultInputModes.map((e: any) => globalThis.String(e)) + : [], + defaultOutputModes: globalThis.Array.isArray(object?.defaultOutputModes) + ? object.defaultOutputModes.map((e: any) => globalThis.String(e)) + : [], + skills: globalThis.Array.isArray(object?.skills) ? object.skills.map((e: any) => AgentSkill.fromJSON(e)) : [], + supportsAuthenticatedExtendedCard: isSet(object.supportsAuthenticatedExtendedCard) + ? globalThis.Boolean(object.supportsAuthenticatedExtendedCard) + : false, + signatures: globalThis.Array.isArray(object?.signatures) + ? 
object.signatures.map((e: any) => AgentCardSignature.fromJSON(e)) + : [], + }; + }, + + toJSON(message: AgentCard): unknown { + const obj: any = {}; + if (message.protocolVersion !== "") { + obj.protocolVersion = message.protocolVersion; + } + if (message.name !== "") { + obj.name = message.name; + } + if (message.description !== "") { + obj.description = message.description; + } + if (message.url !== "") { + obj.url = message.url; + } + if (message.preferredTransport !== "") { + obj.preferredTransport = message.preferredTransport; + } + if (message.additionalInterfaces?.length) { + obj.additionalInterfaces = message.additionalInterfaces.map((e) => AgentInterface.toJSON(e)); + } + if (message.provider !== undefined) { + obj.provider = AgentProvider.toJSON(message.provider); + } + if (message.version !== "") { + obj.version = message.version; + } + if (message.documentationUrl !== "") { + obj.documentationUrl = message.documentationUrl; + } + if (message.capabilities !== undefined) { + obj.capabilities = AgentCapabilities.toJSON(message.capabilities); + } + if (message.securitySchemes) { + const entries = Object.entries(message.securitySchemes); + if (entries.length > 0) { + obj.securitySchemes = {}; + entries.forEach(([k, v]) => { + obj.securitySchemes[k] = SecurityScheme.toJSON(v); + }); + } + } + if (message.security?.length) { + obj.security = message.security.map((e) => Security.toJSON(e)); + } + if (message.defaultInputModes?.length) { + obj.defaultInputModes = message.defaultInputModes; + } + if (message.defaultOutputModes?.length) { + obj.defaultOutputModes = message.defaultOutputModes; + } + if (message.skills?.length) { + obj.skills = message.skills.map((e) => AgentSkill.toJSON(e)); + } + if (message.supportsAuthenticatedExtendedCard !== false) { + obj.supportsAuthenticatedExtendedCard = message.supportsAuthenticatedExtendedCard; + } + if (message.signatures?.length) { + obj.signatures = message.signatures.map((e) => AgentCardSignature.toJSON(e)); + } + 
return obj; + }, + + create, I>>(base?: I): AgentCard { + return AgentCard.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): AgentCard { + const message = createBaseAgentCard(); + message.protocolVersion = object.protocolVersion ?? ""; + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.url = object.url ?? ""; + message.preferredTransport = object.preferredTransport ?? ""; + message.additionalInterfaces = object.additionalInterfaces?.map((e) => AgentInterface.fromPartial(e)) || []; + message.provider = (object.provider !== undefined && object.provider !== null) + ? AgentProvider.fromPartial(object.provider) + : undefined; + message.version = object.version ?? ""; + message.documentationUrl = object.documentationUrl ?? ""; + message.capabilities = (object.capabilities !== undefined && object.capabilities !== null) + ? AgentCapabilities.fromPartial(object.capabilities) + : undefined; + message.securitySchemes = Object.entries(object.securitySchemes ?? {}).reduce<{ [key: string]: SecurityScheme }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[key] = SecurityScheme.fromPartial(value); + } + return acc; + }, + {}, + ); + message.security = object.security?.map((e) => Security.fromPartial(e)) || []; + message.defaultInputModes = object.defaultInputModes?.map((e) => e) || []; + message.defaultOutputModes = object.defaultOutputModes?.map((e) => e) || []; + message.skills = object.skills?.map((e) => AgentSkill.fromPartial(e)) || []; + message.supportsAuthenticatedExtendedCard = object.supportsAuthenticatedExtendedCard ?? 
false; + message.signatures = object.signatures?.map((e) => AgentCardSignature.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseAgentCard_SecuritySchemesEntry(): AgentCard_SecuritySchemesEntry { + return { key: "", value: undefined }; +} + +export const AgentCard_SecuritySchemesEntry: MessageFns = { + encode(message: AgentCard_SecuritySchemesEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + SecurityScheme.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): AgentCard_SecuritySchemesEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAgentCard_SecuritySchemesEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = SecurityScheme.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): AgentCard_SecuritySchemesEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? SecurityScheme.fromJSON(object.value) : undefined, + }; + }, + + toJSON(message: AgentCard_SecuritySchemesEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = SecurityScheme.toJSON(message.value); + } + return obj; + }, + + create, I>>(base?: I): AgentCard_SecuritySchemesEntry { + return AgentCard_SecuritySchemesEntry.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>( + object: I, + ): AgentCard_SecuritySchemesEntry { + const message = createBaseAgentCard_SecuritySchemesEntry(); + message.key = object.key ?? ""; + message.value = (object.value !== undefined && object.value !== null) + ? SecurityScheme.fromPartial(object.value) + : undefined; + return message; + }, +}; + +function createBaseAgentProvider(): AgentProvider { + return { url: "", organization: "" }; +} + +export const AgentProvider: MessageFns = { + encode(message: AgentProvider, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.url !== "") { + writer.uint32(10).string(message.url); + } + if (message.organization !== "") { + writer.uint32(18).string(message.organization); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): AgentProvider { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAgentProvider(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.url = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.organization = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): AgentProvider { + return { + url: isSet(object.url) ? globalThis.String(object.url) : "", + organization: isSet(object.organization) ? globalThis.String(object.organization) : "", + }; + }, + + toJSON(message: AgentProvider): unknown { + const obj: any = {}; + if (message.url !== "") { + obj.url = message.url; + } + if (message.organization !== "") { + obj.organization = message.organization; + } + return obj; + }, + + create, I>>(base?: I): AgentProvider { + return AgentProvider.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): AgentProvider { + const message = createBaseAgentProvider(); + message.url = object.url ?? ""; + message.organization = object.organization ?? ""; + return message; + }, +}; + +function createBaseAgentCapabilities(): AgentCapabilities { + return { streaming: false, pushNotifications: false, extensions: [] }; +} + +export const AgentCapabilities: MessageFns = { + encode(message: AgentCapabilities, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.streaming !== false) { + writer.uint32(8).bool(message.streaming); + } + if (message.pushNotifications !== false) { + writer.uint32(16).bool(message.pushNotifications); + } + for (const v of message.extensions) { + AgentExtension.encode(v!, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): AgentCapabilities { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAgentCapabilities(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.streaming = reader.bool(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.pushNotifications = reader.bool(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.extensions.push(AgentExtension.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): AgentCapabilities { + return { + streaming: isSet(object.streaming) ? globalThis.Boolean(object.streaming) : false, + pushNotifications: isSet(object.pushNotifications) ? globalThis.Boolean(object.pushNotifications) : false, + extensions: globalThis.Array.isArray(object?.extensions) + ? 
object.extensions.map((e: any) => AgentExtension.fromJSON(e)) + : [], + }; + }, + + toJSON(message: AgentCapabilities): unknown { + const obj: any = {}; + if (message.streaming !== false) { + obj.streaming = message.streaming; + } + if (message.pushNotifications !== false) { + obj.pushNotifications = message.pushNotifications; + } + if (message.extensions?.length) { + obj.extensions = message.extensions.map((e) => AgentExtension.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): AgentCapabilities { + return AgentCapabilities.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): AgentCapabilities { + const message = createBaseAgentCapabilities(); + message.streaming = object.streaming ?? false; + message.pushNotifications = object.pushNotifications ?? false; + message.extensions = object.extensions?.map((e) => AgentExtension.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseAgentExtension(): AgentExtension { + return { uri: "", description: "", required: false, params: undefined }; +} + +export const AgentExtension: MessageFns = { + encode(message: AgentExtension, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.uri !== "") { + writer.uint32(10).string(message.uri); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + if (message.required !== false) { + writer.uint32(24).bool(message.required); + } + if (message.params !== undefined) { + Struct.encode(Struct.wrap(message.params), writer.uint32(34).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): AgentExtension { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseAgentExtension(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.uri = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.description = reader.string(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.required = reader.bool(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.params = Struct.unwrap(Struct.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): AgentExtension { + return { + uri: isSet(object.uri) ? globalThis.String(object.uri) : "", + description: isSet(object.description) ? globalThis.String(object.description) : "", + required: isSet(object.required) ? globalThis.Boolean(object.required) : false, + params: isObject(object.params) ? object.params : undefined, + }; + }, + + toJSON(message: AgentExtension): unknown { + const obj: any = {}; + if (message.uri !== "") { + obj.uri = message.uri; + } + if (message.description !== "") { + obj.description = message.description; + } + if (message.required !== false) { + obj.required = message.required; + } + if (message.params !== undefined) { + obj.params = message.params; + } + return obj; + }, + + create, I>>(base?: I): AgentExtension { + return AgentExtension.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): AgentExtension { + const message = createBaseAgentExtension(); + message.uri = object.uri ?? ""; + message.description = object.description ?? ""; + message.required = object.required ?? false; + message.params = object.params ?? 
undefined; + return message; + }, +}; + +function createBaseAgentSkill(): AgentSkill { + return { id: "", name: "", description: "", tags: [], examples: [], inputModes: [], outputModes: [], security: [] }; +} + +export const AgentSkill: MessageFns = { + encode(message: AgentSkill, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.name !== "") { + writer.uint32(18).string(message.name); + } + if (message.description !== "") { + writer.uint32(26).string(message.description); + } + for (const v of message.tags) { + writer.uint32(34).string(v!); + } + for (const v of message.examples) { + writer.uint32(42).string(v!); + } + for (const v of message.inputModes) { + writer.uint32(50).string(v!); + } + for (const v of message.outputModes) { + writer.uint32(58).string(v!); + } + for (const v of message.security) { + Security.encode(v!, writer.uint32(66).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): AgentSkill { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseAgentSkill(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.name = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.description = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.tags.push(reader.string()); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.examples.push(reader.string()); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.inputModes.push(reader.string()); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.outputModes.push(reader.string()); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.security.push(Security.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): AgentSkill { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + name: isSet(object.name) ? globalThis.String(object.name) : "", + description: isSet(object.description) ? globalThis.String(object.description) : "", + tags: globalThis.Array.isArray(object?.tags) ? object.tags.map((e: any) => globalThis.String(e)) : [], + examples: globalThis.Array.isArray(object?.examples) ? object.examples.map((e: any) => globalThis.String(e)) : [], + inputModes: globalThis.Array.isArray(object?.inputModes) + ? object.inputModes.map((e: any) => globalThis.String(e)) + : [], + outputModes: globalThis.Array.isArray(object?.outputModes) + ? object.outputModes.map((e: any) => globalThis.String(e)) + : [], + security: globalThis.Array.isArray(object?.security) ? 
object.security.map((e: any) => Security.fromJSON(e)) : [], + }; + }, + + toJSON(message: AgentSkill): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.name !== "") { + obj.name = message.name; + } + if (message.description !== "") { + obj.description = message.description; + } + if (message.tags?.length) { + obj.tags = message.tags; + } + if (message.examples?.length) { + obj.examples = message.examples; + } + if (message.inputModes?.length) { + obj.inputModes = message.inputModes; + } + if (message.outputModes?.length) { + obj.outputModes = message.outputModes; + } + if (message.security?.length) { + obj.security = message.security.map((e) => Security.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): AgentSkill { + return AgentSkill.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): AgentSkill { + const message = createBaseAgentSkill(); + message.id = object.id ?? ""; + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + message.tags = object.tags?.map((e) => e) || []; + message.examples = object.examples?.map((e) => e) || []; + message.inputModes = object.inputModes?.map((e) => e) || []; + message.outputModes = object.outputModes?.map((e) => e) || []; + message.security = object.security?.map((e) => Security.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseAgentCardSignature(): AgentCardSignature { + return { protected: "", signature: "", header: undefined }; +} + +export const AgentCardSignature: MessageFns = { + encode(message: AgentCardSignature, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.protected !== "") { + writer.uint32(10).string(message.protected); + } + if (message.signature !== "") { + writer.uint32(18).string(message.signature); + } + if (message.header !== undefined) { + Struct.encode(Struct.wrap(message.header), writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): AgentCardSignature { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAgentCardSignature(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.protected = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.signature = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.header = Struct.unwrap(Struct.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): AgentCardSignature { + return { + protected: isSet(object.protected) ? globalThis.String(object.protected) : "", + signature: isSet(object.signature) ? 
globalThis.String(object.signature) : "", + header: isObject(object.header) ? object.header : undefined, + }; + }, + + toJSON(message: AgentCardSignature): unknown { + const obj: any = {}; + if (message.protected !== "") { + obj.protected = message.protected; + } + if (message.signature !== "") { + obj.signature = message.signature; + } + if (message.header !== undefined) { + obj.header = message.header; + } + return obj; + }, + + create, I>>(base?: I): AgentCardSignature { + return AgentCardSignature.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): AgentCardSignature { + const message = createBaseAgentCardSignature(); + message.protected = object.protected ?? ""; + message.signature = object.signature ?? ""; + message.header = object.header ?? undefined; + return message; + }, +}; + +function createBaseTaskPushNotificationConfig(): TaskPushNotificationConfig { + return { name: "", pushNotificationConfig: undefined }; +} + +export const TaskPushNotificationConfig: MessageFns = { + encode(message: TaskPushNotificationConfig, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.pushNotificationConfig !== undefined) { + PushNotificationConfig.encode(message.pushNotificationConfig, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): TaskPushNotificationConfig { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseTaskPushNotificationConfig(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.pushNotificationConfig = PushNotificationConfig.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): TaskPushNotificationConfig { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + pushNotificationConfig: isSet(object.pushNotificationConfig) + ? PushNotificationConfig.fromJSON(object.pushNotificationConfig) + : undefined, + }; + }, + + toJSON(message: TaskPushNotificationConfig): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.pushNotificationConfig !== undefined) { + obj.pushNotificationConfig = PushNotificationConfig.toJSON(message.pushNotificationConfig); + } + return obj; + }, + + create, I>>(base?: I): TaskPushNotificationConfig { + return TaskPushNotificationConfig.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): TaskPushNotificationConfig { + const message = createBaseTaskPushNotificationConfig(); + message.name = object.name ?? ""; + message.pushNotificationConfig = + (object.pushNotificationConfig !== undefined && object.pushNotificationConfig !== null) + ? 
PushNotificationConfig.fromPartial(object.pushNotificationConfig) + : undefined; + return message; + }, +}; + +function createBaseStringList(): StringList { + return { list: [] }; +} + +export const StringList: MessageFns = { + encode(message: StringList, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.list) { + writer.uint32(10).string(v!); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): StringList { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStringList(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.list.push(reader.string()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): StringList { + return { list: globalThis.Array.isArray(object?.list) ? object.list.map((e: any) => globalThis.String(e)) : [] }; + }, + + toJSON(message: StringList): unknown { + const obj: any = {}; + if (message.list?.length) { + obj.list = message.list; + } + return obj; + }, + + create, I>>(base?: I): StringList { + return StringList.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): StringList { + const message = createBaseStringList(); + message.list = object.list?.map((e) => e) || []; + return message; + }, +}; + +function createBaseSecurity(): Security { + return { schemes: {} }; +} + +export const Security: MessageFns = { + encode(message: Security, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + Object.entries(message.schemes).forEach(([key, value]) => { + Security_SchemesEntry.encode({ key: key as any, value }, writer.uint32(10).fork()).join(); + }); + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Security { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSecurity(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + const entry1 = Security_SchemesEntry.decode(reader, reader.uint32()); + if (entry1.value !== undefined) { + message.schemes[entry1.key] = entry1.value; + } + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Security { + return { + schemes: isObject(object.schemes) + ? Object.entries(object.schemes).reduce<{ [key: string]: StringList }>((acc, [key, value]) => { + acc[key] = StringList.fromJSON(value); + return acc; + }, {}) + : {}, + }; + }, + + toJSON(message: Security): unknown { + const obj: any = {}; + if (message.schemes) { + const entries = Object.entries(message.schemes); + if (entries.length > 0) { + obj.schemes = {}; + entries.forEach(([k, v]) => { + obj.schemes[k] = StringList.toJSON(v); + }); + } + } + return obj; + }, + + create, I>>(base?: I): Security { + return Security.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): Security { + const message = createBaseSecurity(); + message.schemes = Object.entries(object.schemes ?? {}).reduce<{ [key: string]: StringList }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[key] = StringList.fromPartial(value); + } + return acc; + }, + {}, + ); + return message; + }, +}; + +function createBaseSecurity_SchemesEntry(): Security_SchemesEntry { + return { key: "", value: undefined }; +} + +export const Security_SchemesEntry: MessageFns = { + encode(message: Security_SchemesEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + StringList.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Security_SchemesEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSecurity_SchemesEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = StringList.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Security_SchemesEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? 
StringList.fromJSON(object.value) : undefined, + }; + }, + + toJSON(message: Security_SchemesEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = StringList.toJSON(message.value); + } + return obj; + }, + + create, I>>(base?: I): Security_SchemesEntry { + return Security_SchemesEntry.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Security_SchemesEntry { + const message = createBaseSecurity_SchemesEntry(); + message.key = object.key ?? ""; + message.value = (object.value !== undefined && object.value !== null) + ? StringList.fromPartial(object.value) + : undefined; + return message; + }, +}; + +function createBaseSecurityScheme(): SecurityScheme { + return { scheme: undefined }; +} + +export const SecurityScheme: MessageFns = { + encode(message: SecurityScheme, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + switch (message.scheme?.$case) { + case "apiKeySecurityScheme": + APIKeySecurityScheme.encode(message.scheme.value, writer.uint32(10).fork()).join(); + break; + case "httpAuthSecurityScheme": + HTTPAuthSecurityScheme.encode(message.scheme.value, writer.uint32(18).fork()).join(); + break; + case "oauth2SecurityScheme": + OAuth2SecurityScheme.encode(message.scheme.value, writer.uint32(26).fork()).join(); + break; + case "openIdConnectSecurityScheme": + OpenIdConnectSecurityScheme.encode(message.scheme.value, writer.uint32(34).fork()).join(); + break; + case "mtlsSecurityScheme": + MutualTlsSecurityScheme.encode(message.scheme.value, writer.uint32(42).fork()).join(); + break; + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SecurityScheme { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSecurityScheme(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.scheme = { + $case: "apiKeySecurityScheme", + value: APIKeySecurityScheme.decode(reader, reader.uint32()), + }; + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.scheme = { + $case: "httpAuthSecurityScheme", + value: HTTPAuthSecurityScheme.decode(reader, reader.uint32()), + }; + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.scheme = { + $case: "oauth2SecurityScheme", + value: OAuth2SecurityScheme.decode(reader, reader.uint32()), + }; + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.scheme = { + $case: "openIdConnectSecurityScheme", + value: OpenIdConnectSecurityScheme.decode(reader, reader.uint32()), + }; + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.scheme = { + $case: "mtlsSecurityScheme", + value: MutualTlsSecurityScheme.decode(reader, reader.uint32()), + }; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): SecurityScheme { + return { + scheme: isSet(object.apiKeySecurityScheme) + ? { $case: "apiKeySecurityScheme", value: APIKeySecurityScheme.fromJSON(object.apiKeySecurityScheme) } + : isSet(object.httpAuthSecurityScheme) + ? { $case: "httpAuthSecurityScheme", value: HTTPAuthSecurityScheme.fromJSON(object.httpAuthSecurityScheme) } + : isSet(object.oauth2SecurityScheme) + ? { $case: "oauth2SecurityScheme", value: OAuth2SecurityScheme.fromJSON(object.oauth2SecurityScheme) } + : isSet(object.openIdConnectSecurityScheme) + ? { + $case: "openIdConnectSecurityScheme", + value: OpenIdConnectSecurityScheme.fromJSON(object.openIdConnectSecurityScheme), + } + : isSet(object.mtlsSecurityScheme) + ? 
{ $case: "mtlsSecurityScheme", value: MutualTlsSecurityScheme.fromJSON(object.mtlsSecurityScheme) } + : undefined, + }; + }, + + toJSON(message: SecurityScheme): unknown { + const obj: any = {}; + if (message.scheme?.$case === "apiKeySecurityScheme") { + obj.apiKeySecurityScheme = APIKeySecurityScheme.toJSON(message.scheme.value); + } else if (message.scheme?.$case === "httpAuthSecurityScheme") { + obj.httpAuthSecurityScheme = HTTPAuthSecurityScheme.toJSON(message.scheme.value); + } else if (message.scheme?.$case === "oauth2SecurityScheme") { + obj.oauth2SecurityScheme = OAuth2SecurityScheme.toJSON(message.scheme.value); + } else if (message.scheme?.$case === "openIdConnectSecurityScheme") { + obj.openIdConnectSecurityScheme = OpenIdConnectSecurityScheme.toJSON(message.scheme.value); + } else if (message.scheme?.$case === "mtlsSecurityScheme") { + obj.mtlsSecurityScheme = MutualTlsSecurityScheme.toJSON(message.scheme.value); + } + return obj; + }, + + create, I>>(base?: I): SecurityScheme { + return SecurityScheme.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): SecurityScheme { + const message = createBaseSecurityScheme(); + switch (object.scheme?.$case) { + case "apiKeySecurityScheme": { + if (object.scheme?.value !== undefined && object.scheme?.value !== null) { + message.scheme = { + $case: "apiKeySecurityScheme", + value: APIKeySecurityScheme.fromPartial(object.scheme.value), + }; + } + break; + } + case "httpAuthSecurityScheme": { + if (object.scheme?.value !== undefined && object.scheme?.value !== null) { + message.scheme = { + $case: "httpAuthSecurityScheme", + value: HTTPAuthSecurityScheme.fromPartial(object.scheme.value), + }; + } + break; + } + case "oauth2SecurityScheme": { + if (object.scheme?.value !== undefined && object.scheme?.value !== null) { + message.scheme = { + $case: "oauth2SecurityScheme", + value: OAuth2SecurityScheme.fromPartial(object.scheme.value), + }; + } + break; + } + case "openIdConnectSecurityScheme": { + if (object.scheme?.value !== undefined && object.scheme?.value !== null) { + message.scheme = { + $case: "openIdConnectSecurityScheme", + value: OpenIdConnectSecurityScheme.fromPartial(object.scheme.value), + }; + } + break; + } + case "mtlsSecurityScheme": { + if (object.scheme?.value !== undefined && object.scheme?.value !== null) { + message.scheme = { + $case: "mtlsSecurityScheme", + value: MutualTlsSecurityScheme.fromPartial(object.scheme.value), + }; + } + break; + } + } + return message; + }, +}; + +function createBaseAPIKeySecurityScheme(): APIKeySecurityScheme { + return { description: "", location: "", name: "" }; +} + +export const APIKeySecurityScheme: MessageFns = { + encode(message: APIKeySecurityScheme, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.description !== "") { + writer.uint32(10).string(message.description); + } + if (message.location !== "") { + writer.uint32(18).string(message.location); + } + if (message.name !== "") { + writer.uint32(26).string(message.name); + } + return writer; + 
}, + + decode(input: BinaryReader | Uint8Array, length?: number): APIKeySecurityScheme { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAPIKeySecurityScheme(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.description = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.location = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.name = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): APIKeySecurityScheme { + return { + description: isSet(object.description) ? globalThis.String(object.description) : "", + location: isSet(object.location) ? globalThis.String(object.location) : "", + name: isSet(object.name) ? globalThis.String(object.name) : "", + }; + }, + + toJSON(message: APIKeySecurityScheme): unknown { + const obj: any = {}; + if (message.description !== "") { + obj.description = message.description; + } + if (message.location !== "") { + obj.location = message.location; + } + if (message.name !== "") { + obj.name = message.name; + } + return obj; + }, + + create, I>>(base?: I): APIKeySecurityScheme { + return APIKeySecurityScheme.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): APIKeySecurityScheme { + const message = createBaseAPIKeySecurityScheme(); + message.description = object.description ?? ""; + message.location = object.location ?? ""; + message.name = object.name ?? 
""; + return message; + }, +}; + +function createBaseHTTPAuthSecurityScheme(): HTTPAuthSecurityScheme { + return { description: "", scheme: "", bearerFormat: "" }; +} + +export const HTTPAuthSecurityScheme: MessageFns = { + encode(message: HTTPAuthSecurityScheme, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.description !== "") { + writer.uint32(10).string(message.description); + } + if (message.scheme !== "") { + writer.uint32(18).string(message.scheme); + } + if (message.bearerFormat !== "") { + writer.uint32(26).string(message.bearerFormat); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): HTTPAuthSecurityScheme { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHTTPAuthSecurityScheme(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.description = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.scheme = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.bearerFormat = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): HTTPAuthSecurityScheme { + return { + description: isSet(object.description) ? globalThis.String(object.description) : "", + scheme: isSet(object.scheme) ? globalThis.String(object.scheme) : "", + bearerFormat: isSet(object.bearerFormat) ? 
globalThis.String(object.bearerFormat) : "", + }; + }, + + toJSON(message: HTTPAuthSecurityScheme): unknown { + const obj: any = {}; + if (message.description !== "") { + obj.description = message.description; + } + if (message.scheme !== "") { + obj.scheme = message.scheme; + } + if (message.bearerFormat !== "") { + obj.bearerFormat = message.bearerFormat; + } + return obj; + }, + + create, I>>(base?: I): HTTPAuthSecurityScheme { + return HTTPAuthSecurityScheme.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): HTTPAuthSecurityScheme { + const message = createBaseHTTPAuthSecurityScheme(); + message.description = object.description ?? ""; + message.scheme = object.scheme ?? ""; + message.bearerFormat = object.bearerFormat ?? ""; + return message; + }, +}; + +function createBaseOAuth2SecurityScheme(): OAuth2SecurityScheme { + return { description: "", flows: undefined, oauth2MetadataUrl: "" }; +} + +export const OAuth2SecurityScheme: MessageFns = { + encode(message: OAuth2SecurityScheme, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.description !== "") { + writer.uint32(10).string(message.description); + } + if (message.flows !== undefined) { + OAuthFlows.encode(message.flows, writer.uint32(18).fork()).join(); + } + if (message.oauth2MetadataUrl !== "") { + writer.uint32(26).string(message.oauth2MetadataUrl); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): OAuth2SecurityScheme { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseOAuth2SecurityScheme(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.description = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.flows = OAuthFlows.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.oauth2MetadataUrl = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): OAuth2SecurityScheme { + return { + description: isSet(object.description) ? globalThis.String(object.description) : "", + flows: isSet(object.flows) ? OAuthFlows.fromJSON(object.flows) : undefined, + oauth2MetadataUrl: isSet(object.oauth2MetadataUrl) ? globalThis.String(object.oauth2MetadataUrl) : "", + }; + }, + + toJSON(message: OAuth2SecurityScheme): unknown { + const obj: any = {}; + if (message.description !== "") { + obj.description = message.description; + } + if (message.flows !== undefined) { + obj.flows = OAuthFlows.toJSON(message.flows); + } + if (message.oauth2MetadataUrl !== "") { + obj.oauth2MetadataUrl = message.oauth2MetadataUrl; + } + return obj; + }, + + create, I>>(base?: I): OAuth2SecurityScheme { + return OAuth2SecurityScheme.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): OAuth2SecurityScheme { + const message = createBaseOAuth2SecurityScheme(); + message.description = object.description ?? ""; + message.flows = (object.flows !== undefined && object.flows !== null) + ? OAuthFlows.fromPartial(object.flows) + : undefined; + message.oauth2MetadataUrl = object.oauth2MetadataUrl ?? 
""; + return message; + }, +}; + +function createBaseOpenIdConnectSecurityScheme(): OpenIdConnectSecurityScheme { + return { description: "", openIdConnectUrl: "" }; +} + +export const OpenIdConnectSecurityScheme: MessageFns = { + encode(message: OpenIdConnectSecurityScheme, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.description !== "") { + writer.uint32(10).string(message.description); + } + if (message.openIdConnectUrl !== "") { + writer.uint32(18).string(message.openIdConnectUrl); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): OpenIdConnectSecurityScheme { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOpenIdConnectSecurityScheme(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.description = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.openIdConnectUrl = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): OpenIdConnectSecurityScheme { + return { + description: isSet(object.description) ? globalThis.String(object.description) : "", + openIdConnectUrl: isSet(object.openIdConnectUrl) ? globalThis.String(object.openIdConnectUrl) : "", + }; + }, + + toJSON(message: OpenIdConnectSecurityScheme): unknown { + const obj: any = {}; + if (message.description !== "") { + obj.description = message.description; + } + if (message.openIdConnectUrl !== "") { + obj.openIdConnectUrl = message.openIdConnectUrl; + } + return obj; + }, + + create, I>>(base?: I): OpenIdConnectSecurityScheme { + return OpenIdConnectSecurityScheme.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): OpenIdConnectSecurityScheme { + const message = createBaseOpenIdConnectSecurityScheme(); + message.description = object.description ?? ""; + message.openIdConnectUrl = object.openIdConnectUrl ?? ""; + return message; + }, +}; + +function createBaseMutualTlsSecurityScheme(): MutualTlsSecurityScheme { + return { description: "" }; +} + +export const MutualTlsSecurityScheme: MessageFns = { + encode(message: MutualTlsSecurityScheme, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.description !== "") { + writer.uint32(10).string(message.description); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MutualTlsSecurityScheme { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMutualTlsSecurityScheme(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.description = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MutualTlsSecurityScheme { + return { description: isSet(object.description) ? globalThis.String(object.description) : "" }; + }, + + toJSON(message: MutualTlsSecurityScheme): unknown { + const obj: any = {}; + if (message.description !== "") { + obj.description = message.description; + } + return obj; + }, + + create, I>>(base?: I): MutualTlsSecurityScheme { + return MutualTlsSecurityScheme.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MutualTlsSecurityScheme { + const message = createBaseMutualTlsSecurityScheme(); + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseOAuthFlows(): OAuthFlows { + return { flow: undefined }; +} + +export const OAuthFlows: MessageFns = { + encode(message: OAuthFlows, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + switch (message.flow?.$case) { + case "authorizationCode": + AuthorizationCodeOAuthFlow.encode(message.flow.value, writer.uint32(10).fork()).join(); + break; + case "clientCredentials": + ClientCredentialsOAuthFlow.encode(message.flow.value, writer.uint32(18).fork()).join(); + break; + case "implicit": + ImplicitOAuthFlow.encode(message.flow.value, writer.uint32(26).fork()).join(); + break; + case "password": + PasswordOAuthFlow.encode(message.flow.value, writer.uint32(34).fork()).join(); + break; + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): OAuthFlows { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOAuthFlows(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.flow = { + $case: "authorizationCode", + value: AuthorizationCodeOAuthFlow.decode(reader, reader.uint32()), + }; + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.flow = { + $case: "clientCredentials", + value: ClientCredentialsOAuthFlow.decode(reader, reader.uint32()), + }; + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.flow = { $case: "implicit", value: ImplicitOAuthFlow.decode(reader, reader.uint32()) }; + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.flow = { $case: "password", value: PasswordOAuthFlow.decode(reader, reader.uint32()) }; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): OAuthFlows { + return { + flow: 
isSet(object.authorizationCode) + ? { $case: "authorizationCode", value: AuthorizationCodeOAuthFlow.fromJSON(object.authorizationCode) } + : isSet(object.clientCredentials) + ? { $case: "clientCredentials", value: ClientCredentialsOAuthFlow.fromJSON(object.clientCredentials) } + : isSet(object.implicit) + ? { $case: "implicit", value: ImplicitOAuthFlow.fromJSON(object.implicit) } + : isSet(object.password) + ? { $case: "password", value: PasswordOAuthFlow.fromJSON(object.password) } + : undefined, + }; + }, + + toJSON(message: OAuthFlows): unknown { + const obj: any = {}; + if (message.flow?.$case === "authorizationCode") { + obj.authorizationCode = AuthorizationCodeOAuthFlow.toJSON(message.flow.value); + } else if (message.flow?.$case === "clientCredentials") { + obj.clientCredentials = ClientCredentialsOAuthFlow.toJSON(message.flow.value); + } else if (message.flow?.$case === "implicit") { + obj.implicit = ImplicitOAuthFlow.toJSON(message.flow.value); + } else if (message.flow?.$case === "password") { + obj.password = PasswordOAuthFlow.toJSON(message.flow.value); + } + return obj; + }, + + create, I>>(base?: I): OAuthFlows { + return OAuthFlows.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): OAuthFlows { + const message = createBaseOAuthFlows(); + switch (object.flow?.$case) { + case "authorizationCode": { + if (object.flow?.value !== undefined && object.flow?.value !== null) { + message.flow = { + $case: "authorizationCode", + value: AuthorizationCodeOAuthFlow.fromPartial(object.flow.value), + }; + } + break; + } + case "clientCredentials": { + if (object.flow?.value !== undefined && object.flow?.value !== null) { + message.flow = { + $case: "clientCredentials", + value: ClientCredentialsOAuthFlow.fromPartial(object.flow.value), + }; + } + break; + } + case "implicit": { + if (object.flow?.value !== undefined && object.flow?.value !== null) { + message.flow = { $case: "implicit", value: ImplicitOAuthFlow.fromPartial(object.flow.value) }; + } + break; + } + case "password": { + if (object.flow?.value !== undefined && object.flow?.value !== null) { + message.flow = { $case: "password", value: PasswordOAuthFlow.fromPartial(object.flow.value) }; + } + break; + } + } + return message; + }, +}; + +function createBaseAuthorizationCodeOAuthFlow(): AuthorizationCodeOAuthFlow { + return { authorizationUrl: "", tokenUrl: "", refreshUrl: "", scopes: {} }; +} + +export const AuthorizationCodeOAuthFlow: MessageFns = { + encode(message: AuthorizationCodeOAuthFlow, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.authorizationUrl !== "") { + writer.uint32(10).string(message.authorizationUrl); + } + if (message.tokenUrl !== "") { + writer.uint32(18).string(message.tokenUrl); + } + if (message.refreshUrl !== "") { + writer.uint32(26).string(message.refreshUrl); + } + Object.entries(message.scopes).forEach(([key, value]) => { + AuthorizationCodeOAuthFlow_ScopesEntry.encode({ key: key as any, value }, writer.uint32(34).fork()).join(); + }); + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): AuthorizationCodeOAuthFlow { + const reader = input instanceof BinaryReader 
? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAuthorizationCodeOAuthFlow(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.authorizationUrl = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.tokenUrl = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.refreshUrl = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + const entry4 = AuthorizationCodeOAuthFlow_ScopesEntry.decode(reader, reader.uint32()); + if (entry4.value !== undefined) { + message.scopes[entry4.key] = entry4.value; + } + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): AuthorizationCodeOAuthFlow { + return { + authorizationUrl: isSet(object.authorizationUrl) ? globalThis.String(object.authorizationUrl) : "", + tokenUrl: isSet(object.tokenUrl) ? globalThis.String(object.tokenUrl) : "", + refreshUrl: isSet(object.refreshUrl) ? globalThis.String(object.refreshUrl) : "", + scopes: isObject(object.scopes) + ? 
Object.entries(object.scopes).reduce<{ [key: string]: string }>((acc, [key, value]) => { + acc[key] = String(value); + return acc; + }, {}) + : {}, + }; + }, + + toJSON(message: AuthorizationCodeOAuthFlow): unknown { + const obj: any = {}; + if (message.authorizationUrl !== "") { + obj.authorizationUrl = message.authorizationUrl; + } + if (message.tokenUrl !== "") { + obj.tokenUrl = message.tokenUrl; + } + if (message.refreshUrl !== "") { + obj.refreshUrl = message.refreshUrl; + } + if (message.scopes) { + const entries = Object.entries(message.scopes); + if (entries.length > 0) { + obj.scopes = {}; + entries.forEach(([k, v]) => { + obj.scopes[k] = v; + }); + } + } + return obj; + }, + + create, I>>(base?: I): AuthorizationCodeOAuthFlow { + return AuthorizationCodeOAuthFlow.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): AuthorizationCodeOAuthFlow { + const message = createBaseAuthorizationCodeOAuthFlow(); + message.authorizationUrl = object.authorizationUrl ?? ""; + message.tokenUrl = object.tokenUrl ?? ""; + message.refreshUrl = object.refreshUrl ?? ""; + message.scopes = Object.entries(object.scopes ?? {}).reduce<{ [key: string]: string }>((acc, [key, value]) => { + if (value !== undefined) { + acc[key] = globalThis.String(value); + } + return acc; + }, {}); + return message; + }, +}; + +function createBaseAuthorizationCodeOAuthFlow_ScopesEntry(): AuthorizationCodeOAuthFlow_ScopesEntry { + return { key: "", value: "" }; +} + +export const AuthorizationCodeOAuthFlow_ScopesEntry: MessageFns = { + encode(message: AuthorizationCodeOAuthFlow_ScopesEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): AuthorizationCodeOAuthFlow_ScopesEntry { + const reader = input instanceof BinaryReader ? 
input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAuthorizationCodeOAuthFlow_ScopesEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): AuthorizationCodeOAuthFlow_ScopesEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? globalThis.String(object.value) : "", + }; + }, + + toJSON(message: AuthorizationCodeOAuthFlow_ScopesEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create, I>>( + base?: I, + ): AuthorizationCodeOAuthFlow_ScopesEntry { + return AuthorizationCodeOAuthFlow_ScopesEntry.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): AuthorizationCodeOAuthFlow_ScopesEntry { + const message = createBaseAuthorizationCodeOAuthFlow_ScopesEntry(); + message.key = object.key ?? ""; + message.value = object.value ?? 
""; + return message; + }, +}; + +function createBaseClientCredentialsOAuthFlow(): ClientCredentialsOAuthFlow { + return { tokenUrl: "", refreshUrl: "", scopes: {} }; +} + +export const ClientCredentialsOAuthFlow: MessageFns = { + encode(message: ClientCredentialsOAuthFlow, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.tokenUrl !== "") { + writer.uint32(10).string(message.tokenUrl); + } + if (message.refreshUrl !== "") { + writer.uint32(18).string(message.refreshUrl); + } + Object.entries(message.scopes).forEach(([key, value]) => { + ClientCredentialsOAuthFlow_ScopesEntry.encode({ key: key as any, value }, writer.uint32(26).fork()).join(); + }); + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ClientCredentialsOAuthFlow { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseClientCredentialsOAuthFlow(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.tokenUrl = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.refreshUrl = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + const entry3 = ClientCredentialsOAuthFlow_ScopesEntry.decode(reader, reader.uint32()); + if (entry3.value !== undefined) { + message.scopes[entry3.key] = entry3.value; + } + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ClientCredentialsOAuthFlow { + return { + tokenUrl: isSet(object.tokenUrl) ? globalThis.String(object.tokenUrl) : "", + refreshUrl: isSet(object.refreshUrl) ? globalThis.String(object.refreshUrl) : "", + scopes: isObject(object.scopes) + ? 
Object.entries(object.scopes).reduce<{ [key: string]: string }>((acc, [key, value]) => { + acc[key] = String(value); + return acc; + }, {}) + : {}, + }; + }, + + toJSON(message: ClientCredentialsOAuthFlow): unknown { + const obj: any = {}; + if (message.tokenUrl !== "") { + obj.tokenUrl = message.tokenUrl; + } + if (message.refreshUrl !== "") { + obj.refreshUrl = message.refreshUrl; + } + if (message.scopes) { + const entries = Object.entries(message.scopes); + if (entries.length > 0) { + obj.scopes = {}; + entries.forEach(([k, v]) => { + obj.scopes[k] = v; + }); + } + } + return obj; + }, + + create, I>>(base?: I): ClientCredentialsOAuthFlow { + return ClientCredentialsOAuthFlow.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ClientCredentialsOAuthFlow { + const message = createBaseClientCredentialsOAuthFlow(); + message.tokenUrl = object.tokenUrl ?? ""; + message.refreshUrl = object.refreshUrl ?? ""; + message.scopes = Object.entries(object.scopes ?? {}).reduce<{ [key: string]: string }>((acc, [key, value]) => { + if (value !== undefined) { + acc[key] = globalThis.String(value); + } + return acc; + }, {}); + return message; + }, +}; + +function createBaseClientCredentialsOAuthFlow_ScopesEntry(): ClientCredentialsOAuthFlow_ScopesEntry { + return { key: "", value: "" }; +} + +export const ClientCredentialsOAuthFlow_ScopesEntry: MessageFns = { + encode(message: ClientCredentialsOAuthFlow_ScopesEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ClientCredentialsOAuthFlow_ScopesEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseClientCredentialsOAuthFlow_ScopesEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ClientCredentialsOAuthFlow_ScopesEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? globalThis.String(object.value) : "", + }; + }, + + toJSON(message: ClientCredentialsOAuthFlow_ScopesEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create, I>>( + base?: I, + ): ClientCredentialsOAuthFlow_ScopesEntry { + return ClientCredentialsOAuthFlow_ScopesEntry.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): ClientCredentialsOAuthFlow_ScopesEntry { + const message = createBaseClientCredentialsOAuthFlow_ScopesEntry(); + message.key = object.key ?? ""; + message.value = object.value ?? 
""; + return message; + }, +}; + +function createBaseImplicitOAuthFlow(): ImplicitOAuthFlow { + return { authorizationUrl: "", refreshUrl: "", scopes: {} }; +} + +export const ImplicitOAuthFlow: MessageFns = { + encode(message: ImplicitOAuthFlow, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.authorizationUrl !== "") { + writer.uint32(10).string(message.authorizationUrl); + } + if (message.refreshUrl !== "") { + writer.uint32(18).string(message.refreshUrl); + } + Object.entries(message.scopes).forEach(([key, value]) => { + ImplicitOAuthFlow_ScopesEntry.encode({ key: key as any, value }, writer.uint32(26).fork()).join(); + }); + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ImplicitOAuthFlow { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseImplicitOAuthFlow(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.authorizationUrl = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.refreshUrl = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + const entry3 = ImplicitOAuthFlow_ScopesEntry.decode(reader, reader.uint32()); + if (entry3.value !== undefined) { + message.scopes[entry3.key] = entry3.value; + } + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ImplicitOAuthFlow { + return { + authorizationUrl: isSet(object.authorizationUrl) ? globalThis.String(object.authorizationUrl) : "", + refreshUrl: isSet(object.refreshUrl) ? globalThis.String(object.refreshUrl) : "", + scopes: isObject(object.scopes) + ? 
Object.entries(object.scopes).reduce<{ [key: string]: string }>((acc, [key, value]) => { + acc[key] = String(value); + return acc; + }, {}) + : {}, + }; + }, + + toJSON(message: ImplicitOAuthFlow): unknown { + const obj: any = {}; + if (message.authorizationUrl !== "") { + obj.authorizationUrl = message.authorizationUrl; + } + if (message.refreshUrl !== "") { + obj.refreshUrl = message.refreshUrl; + } + if (message.scopes) { + const entries = Object.entries(message.scopes); + if (entries.length > 0) { + obj.scopes = {}; + entries.forEach(([k, v]) => { + obj.scopes[k] = v; + }); + } + } + return obj; + }, + + create, I>>(base?: I): ImplicitOAuthFlow { + return ImplicitOAuthFlow.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ImplicitOAuthFlow { + const message = createBaseImplicitOAuthFlow(); + message.authorizationUrl = object.authorizationUrl ?? ""; + message.refreshUrl = object.refreshUrl ?? ""; + message.scopes = Object.entries(object.scopes ?? {}).reduce<{ [key: string]: string }>((acc, [key, value]) => { + if (value !== undefined) { + acc[key] = globalThis.String(value); + } + return acc; + }, {}); + return message; + }, +}; + +function createBaseImplicitOAuthFlow_ScopesEntry(): ImplicitOAuthFlow_ScopesEntry { + return { key: "", value: "" }; +} + +export const ImplicitOAuthFlow_ScopesEntry: MessageFns = { + encode(message: ImplicitOAuthFlow_ScopesEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ImplicitOAuthFlow_ScopesEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseImplicitOAuthFlow_ScopesEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ImplicitOAuthFlow_ScopesEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? globalThis.String(object.value) : "", + }; + }, + + toJSON(message: ImplicitOAuthFlow_ScopesEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create, I>>(base?: I): ImplicitOAuthFlow_ScopesEntry { + return ImplicitOAuthFlow_ScopesEntry.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): ImplicitOAuthFlow_ScopesEntry { + const message = createBaseImplicitOAuthFlow_ScopesEntry(); + message.key = object.key ?? ""; + message.value = object.value ?? 
""; + return message; + }, +}; + +function createBasePasswordOAuthFlow(): PasswordOAuthFlow { + return { tokenUrl: "", refreshUrl: "", scopes: {} }; +} + +export const PasswordOAuthFlow: MessageFns = { + encode(message: PasswordOAuthFlow, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.tokenUrl !== "") { + writer.uint32(10).string(message.tokenUrl); + } + if (message.refreshUrl !== "") { + writer.uint32(18).string(message.refreshUrl); + } + Object.entries(message.scopes).forEach(([key, value]) => { + PasswordOAuthFlow_ScopesEntry.encode({ key: key as any, value }, writer.uint32(26).fork()).join(); + }); + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PasswordOAuthFlow { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePasswordOAuthFlow(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.tokenUrl = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.refreshUrl = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + const entry3 = PasswordOAuthFlow_ScopesEntry.decode(reader, reader.uint32()); + if (entry3.value !== undefined) { + message.scopes[entry3.key] = entry3.value; + } + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PasswordOAuthFlow { + return { + tokenUrl: isSet(object.tokenUrl) ? globalThis.String(object.tokenUrl) : "", + refreshUrl: isSet(object.refreshUrl) ? globalThis.String(object.refreshUrl) : "", + scopes: isObject(object.scopes) + ? 
Object.entries(object.scopes).reduce<{ [key: string]: string }>((acc, [key, value]) => { + acc[key] = String(value); + return acc; + }, {}) + : {}, + }; + }, + + toJSON(message: PasswordOAuthFlow): unknown { + const obj: any = {}; + if (message.tokenUrl !== "") { + obj.tokenUrl = message.tokenUrl; + } + if (message.refreshUrl !== "") { + obj.refreshUrl = message.refreshUrl; + } + if (message.scopes) { + const entries = Object.entries(message.scopes); + if (entries.length > 0) { + obj.scopes = {}; + entries.forEach(([k, v]) => { + obj.scopes[k] = v; + }); + } + } + return obj; + }, + + create, I>>(base?: I): PasswordOAuthFlow { + return PasswordOAuthFlow.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): PasswordOAuthFlow { + const message = createBasePasswordOAuthFlow(); + message.tokenUrl = object.tokenUrl ?? ""; + message.refreshUrl = object.refreshUrl ?? ""; + message.scopes = Object.entries(object.scopes ?? {}).reduce<{ [key: string]: string }>((acc, [key, value]) => { + if (value !== undefined) { + acc[key] = globalThis.String(value); + } + return acc; + }, {}); + return message; + }, +}; + +function createBasePasswordOAuthFlow_ScopesEntry(): PasswordOAuthFlow_ScopesEntry { + return { key: "", value: "" }; +} + +export const PasswordOAuthFlow_ScopesEntry: MessageFns = { + encode(message: PasswordOAuthFlow_ScopesEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PasswordOAuthFlow_ScopesEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePasswordOAuthFlow_ScopesEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PasswordOAuthFlow_ScopesEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? globalThis.String(object.value) : "", + }; + }, + + toJSON(message: PasswordOAuthFlow_ScopesEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create, I>>(base?: I): PasswordOAuthFlow_ScopesEntry { + return PasswordOAuthFlow_ScopesEntry.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): PasswordOAuthFlow_ScopesEntry { + const message = createBasePasswordOAuthFlow_ScopesEntry(); + message.key = object.key ?? ""; + message.value = object.value ?? 
""; + return message; + }, +}; + +function createBaseSendMessageRequest(): SendMessageRequest { + return { request: undefined, configuration: undefined, metadata: undefined }; +} + +export const SendMessageRequest: MessageFns = { + encode(message: SendMessageRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.request !== undefined) { + Message.encode(message.request, writer.uint32(10).fork()).join(); + } + if (message.configuration !== undefined) { + SendMessageConfiguration.encode(message.configuration, writer.uint32(18).fork()).join(); + } + if (message.metadata !== undefined) { + Struct.encode(Struct.wrap(message.metadata), writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SendMessageRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSendMessageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.request = Message.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.configuration = SendMessageConfiguration.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.metadata = Struct.unwrap(Struct.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): SendMessageRequest { + return { + request: isSet(object.message) ? Message.fromJSON(object.message) : undefined, + configuration: isSet(object.configuration) ? SendMessageConfiguration.fromJSON(object.configuration) : undefined, + metadata: isObject(object.metadata) ? 
object.metadata : undefined, + }; + }, + + toJSON(message: SendMessageRequest): unknown { + const obj: any = {}; + if (message.request !== undefined) { + obj.message = Message.toJSON(message.request); + } + if (message.configuration !== undefined) { + obj.configuration = SendMessageConfiguration.toJSON(message.configuration); + } + if (message.metadata !== undefined) { + obj.metadata = message.metadata; + } + return obj; + }, + + create, I>>(base?: I): SendMessageRequest { + return SendMessageRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): SendMessageRequest { + const message = createBaseSendMessageRequest(); + message.request = (object.request !== undefined && object.request !== null) + ? Message.fromPartial(object.request) + : undefined; + message.configuration = (object.configuration !== undefined && object.configuration !== null) + ? SendMessageConfiguration.fromPartial(object.configuration) + : undefined; + message.metadata = object.metadata ?? undefined; + return message; + }, +}; + +function createBaseGetTaskRequest(): GetTaskRequest { + return { name: "", historyLength: 0 }; +} + +export const GetTaskRequest: MessageFns = { + encode(message: GetTaskRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.historyLength !== 0) { + writer.uint32(16).int32(message.historyLength); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GetTaskRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetTaskRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.historyLength = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GetTaskRequest { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + historyLength: isSet(object.historyLength) ? globalThis.Number(object.historyLength) : 0, + }; + }, + + toJSON(message: GetTaskRequest): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.historyLength !== 0) { + obj.historyLength = Math.round(message.historyLength); + } + return obj; + }, + + create, I>>(base?: I): GetTaskRequest { + return GetTaskRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): GetTaskRequest { + const message = createBaseGetTaskRequest(); + message.name = object.name ?? ""; + message.historyLength = object.historyLength ?? 0; + return message; + }, +}; + +function createBaseCancelTaskRequest(): CancelTaskRequest { + return { name: "" }; +} + +export const CancelTaskRequest: MessageFns = { + encode(message: CancelTaskRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): CancelTaskRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCancelTaskRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): CancelTaskRequest { + return { name: isSet(object.name) ? globalThis.String(object.name) : "" }; + }, + + toJSON(message: CancelTaskRequest): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + return obj; + }, + + create, I>>(base?: I): CancelTaskRequest { + return CancelTaskRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): CancelTaskRequest { + const message = createBaseCancelTaskRequest(); + message.name = object.name ?? ""; + return message; + }, +}; + +function createBaseGetTaskPushNotificationConfigRequest(): GetTaskPushNotificationConfigRequest { + return { name: "" }; +} + +export const GetTaskPushNotificationConfigRequest: MessageFns = { + encode(message: GetTaskPushNotificationConfigRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GetTaskPushNotificationConfigRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetTaskPushNotificationConfigRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GetTaskPushNotificationConfigRequest { + return { name: isSet(object.name) ? globalThis.String(object.name) : "" }; + }, + + toJSON(message: GetTaskPushNotificationConfigRequest): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + return obj; + }, + + create, I>>( + base?: I, + ): GetTaskPushNotificationConfigRequest { + return GetTaskPushNotificationConfigRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): GetTaskPushNotificationConfigRequest { + const message = createBaseGetTaskPushNotificationConfigRequest(); + message.name = object.name ?? ""; + return message; + }, +}; + +function createBaseDeleteTaskPushNotificationConfigRequest(): DeleteTaskPushNotificationConfigRequest { + return { name: "" }; +} + +export const DeleteTaskPushNotificationConfigRequest: MessageFns = { + encode(message: DeleteTaskPushNotificationConfigRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DeleteTaskPushNotificationConfigRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDeleteTaskPushNotificationConfigRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DeleteTaskPushNotificationConfigRequest { + return { name: isSet(object.name) ? globalThis.String(object.name) : "" }; + }, + + toJSON(message: DeleteTaskPushNotificationConfigRequest): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + return obj; + }, + + create, I>>( + base?: I, + ): DeleteTaskPushNotificationConfigRequest { + return DeleteTaskPushNotificationConfigRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): DeleteTaskPushNotificationConfigRequest { + const message = createBaseDeleteTaskPushNotificationConfigRequest(); + message.name = object.name ?? ""; + return message; + }, +}; + +function createBaseCreateTaskPushNotificationConfigRequest(): CreateTaskPushNotificationConfigRequest { + return { parent: "", configId: "", config: undefined }; +} + +export const CreateTaskPushNotificationConfigRequest: MessageFns = { + encode(message: CreateTaskPushNotificationConfigRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.parent !== "") { + writer.uint32(10).string(message.parent); + } + if (message.configId !== "") { + writer.uint32(18).string(message.configId); + } + if (message.config !== undefined) { + TaskPushNotificationConfig.encode(message.config, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): CreateTaskPushNotificationConfigRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCreateTaskPushNotificationConfigRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.parent = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.configId = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.config = TaskPushNotificationConfig.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): CreateTaskPushNotificationConfigRequest { + return { + parent: isSet(object.parent) ? globalThis.String(object.parent) : "", + configId: isSet(object.configId) ? globalThis.String(object.configId) : "", + config: isSet(object.config) ? TaskPushNotificationConfig.fromJSON(object.config) : undefined, + }; + }, + + toJSON(message: CreateTaskPushNotificationConfigRequest): unknown { + const obj: any = {}; + if (message.parent !== "") { + obj.parent = message.parent; + } + if (message.configId !== "") { + obj.configId = message.configId; + } + if (message.config !== undefined) { + obj.config = TaskPushNotificationConfig.toJSON(message.config); + } + return obj; + }, + + create, I>>( + base?: I, + ): CreateTaskPushNotificationConfigRequest { + return CreateTaskPushNotificationConfigRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): CreateTaskPushNotificationConfigRequest { + const message = createBaseCreateTaskPushNotificationConfigRequest(); + message.parent = object.parent ?? ""; + message.configId = object.configId ?? ""; + message.config = (object.config !== undefined && object.config !== null) + ? 
TaskPushNotificationConfig.fromPartial(object.config) + : undefined; + return message; + }, +}; + +function createBaseTaskSubscriptionRequest(): TaskSubscriptionRequest { + return { name: "" }; +} + +export const TaskSubscriptionRequest: MessageFns = { + encode(message: TaskSubscriptionRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): TaskSubscriptionRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTaskSubscriptionRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): TaskSubscriptionRequest { + return { name: isSet(object.name) ? globalThis.String(object.name) : "" }; + }, + + toJSON(message: TaskSubscriptionRequest): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + return obj; + }, + + create, I>>(base?: I): TaskSubscriptionRequest { + return TaskSubscriptionRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): TaskSubscriptionRequest { + const message = createBaseTaskSubscriptionRequest(); + message.name = object.name ?? 
""; + return message; + }, +}; + +function createBaseListTaskPushNotificationConfigRequest(): ListTaskPushNotificationConfigRequest { + return { parent: "", pageSize: 0, pageToken: "" }; +} + +export const ListTaskPushNotificationConfigRequest: MessageFns = { + encode(message: ListTaskPushNotificationConfigRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.parent !== "") { + writer.uint32(10).string(message.parent); + } + if (message.pageSize !== 0) { + writer.uint32(16).int32(message.pageSize); + } + if (message.pageToken !== "") { + writer.uint32(26).string(message.pageToken); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ListTaskPushNotificationConfigRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseListTaskPushNotificationConfigRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.parent = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.pageSize = reader.int32(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.pageToken = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ListTaskPushNotificationConfigRequest { + return { + parent: isSet(object.parent) ? globalThis.String(object.parent) : "", + pageSize: isSet(object.pageSize) ? globalThis.Number(object.pageSize) : 0, + pageToken: isSet(object.pageToken) ? 
globalThis.String(object.pageToken) : "", + }; + }, + + toJSON(message: ListTaskPushNotificationConfigRequest): unknown { + const obj: any = {}; + if (message.parent !== "") { + obj.parent = message.parent; + } + if (message.pageSize !== 0) { + obj.pageSize = Math.round(message.pageSize); + } + if (message.pageToken !== "") { + obj.pageToken = message.pageToken; + } + return obj; + }, + + create, I>>( + base?: I, + ): ListTaskPushNotificationConfigRequest { + return ListTaskPushNotificationConfigRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): ListTaskPushNotificationConfigRequest { + const message = createBaseListTaskPushNotificationConfigRequest(); + message.parent = object.parent ?? ""; + message.pageSize = object.pageSize ?? 0; + message.pageToken = object.pageToken ?? ""; + return message; + }, +}; + +function createBaseGetAgentCardRequest(): GetAgentCardRequest { + return {}; +} + +export const GetAgentCardRequest: MessageFns = { + encode(_: GetAgentCardRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GetAgentCardRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetAgentCardRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(_: any): GetAgentCardRequest { + return {}; + }, + + toJSON(_: GetAgentCardRequest): unknown { + const obj: any = {}; + return obj; + }, + + create, I>>(base?: I): GetAgentCardRequest { + return GetAgentCardRequest.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(_: I): GetAgentCardRequest { + const message = createBaseGetAgentCardRequest(); + return message; + }, +}; + +function createBaseSendMessageResponse(): SendMessageResponse { + return { payload: undefined }; +} + +export const SendMessageResponse: MessageFns = { + encode(message: SendMessageResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + switch (message.payload?.$case) { + case "task": + Task.encode(message.payload.value, writer.uint32(10).fork()).join(); + break; + case "msg": + Message.encode(message.payload.value, writer.uint32(18).fork()).join(); + break; + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SendMessageResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSendMessageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.payload = { $case: "task", value: Task.decode(reader, reader.uint32()) }; + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.payload = { $case: "msg", value: Message.decode(reader, reader.uint32()) }; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): SendMessageResponse { + return { + payload: isSet(object.task) + ? { $case: "task", value: Task.fromJSON(object.task) } + : isSet(object.message) + ? 
{ $case: "msg", value: Message.fromJSON(object.message) } + : undefined, + }; + }, + + toJSON(message: SendMessageResponse): unknown { + const obj: any = {}; + if (message.payload?.$case === "task") { + obj.task = Task.toJSON(message.payload.value); + } else if (message.payload?.$case === "msg") { + obj.message = Message.toJSON(message.payload.value); + } + return obj; + }, + + create, I>>(base?: I): SendMessageResponse { + return SendMessageResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): SendMessageResponse { + const message = createBaseSendMessageResponse(); + switch (object.payload?.$case) { + case "task": { + if (object.payload?.value !== undefined && object.payload?.value !== null) { + message.payload = { $case: "task", value: Task.fromPartial(object.payload.value) }; + } + break; + } + case "msg": { + if (object.payload?.value !== undefined && object.payload?.value !== null) { + message.payload = { $case: "msg", value: Message.fromPartial(object.payload.value) }; + } + break; + } + } + return message; + }, +}; + +function createBaseStreamResponse(): StreamResponse { + return { payload: undefined }; +} + +export const StreamResponse: MessageFns = { + encode(message: StreamResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + switch (message.payload?.$case) { + case "task": + Task.encode(message.payload.value, writer.uint32(10).fork()).join(); + break; + case "msg": + Message.encode(message.payload.value, writer.uint32(18).fork()).join(); + break; + case "statusUpdate": + TaskStatusUpdateEvent.encode(message.payload.value, writer.uint32(26).fork()).join(); + break; + case "artifactUpdate": + TaskArtifactUpdateEvent.encode(message.payload.value, writer.uint32(34).fork()).join(); + break; + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): StreamResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseStreamResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.payload = { $case: "task", value: Task.decode(reader, reader.uint32()) }; + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.payload = { $case: "msg", value: Message.decode(reader, reader.uint32()) }; + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.payload = { $case: "statusUpdate", value: TaskStatusUpdateEvent.decode(reader, reader.uint32()) }; + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.payload = { $case: "artifactUpdate", value: TaskArtifactUpdateEvent.decode(reader, reader.uint32()) }; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): StreamResponse { + return { + payload: isSet(object.task) + ? { $case: "task", value: Task.fromJSON(object.task) } + : isSet(object.message) + ? { $case: "msg", value: Message.fromJSON(object.message) } + : isSet(object.statusUpdate) + ? { $case: "statusUpdate", value: TaskStatusUpdateEvent.fromJSON(object.statusUpdate) } + : isSet(object.artifactUpdate) + ? 
{ $case: "artifactUpdate", value: TaskArtifactUpdateEvent.fromJSON(object.artifactUpdate) } + : undefined, + }; + }, + + toJSON(message: StreamResponse): unknown { + const obj: any = {}; + if (message.payload?.$case === "task") { + obj.task = Task.toJSON(message.payload.value); + } else if (message.payload?.$case === "msg") { + obj.message = Message.toJSON(message.payload.value); + } else if (message.payload?.$case === "statusUpdate") { + obj.statusUpdate = TaskStatusUpdateEvent.toJSON(message.payload.value); + } else if (message.payload?.$case === "artifactUpdate") { + obj.artifactUpdate = TaskArtifactUpdateEvent.toJSON(message.payload.value); + } + return obj; + }, + + create, I>>(base?: I): StreamResponse { + return StreamResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): StreamResponse { + const message = createBaseStreamResponse(); + switch (object.payload?.$case) { + case "task": { + if (object.payload?.value !== undefined && object.payload?.value !== null) { + message.payload = { $case: "task", value: Task.fromPartial(object.payload.value) }; + } + break; + } + case "msg": { + if (object.payload?.value !== undefined && object.payload?.value !== null) { + message.payload = { $case: "msg", value: Message.fromPartial(object.payload.value) }; + } + break; + } + case "statusUpdate": { + if (object.payload?.value !== undefined && object.payload?.value !== null) { + message.payload = { $case: "statusUpdate", value: TaskStatusUpdateEvent.fromPartial(object.payload.value) }; + } + break; + } + case "artifactUpdate": { + if (object.payload?.value !== undefined && object.payload?.value !== null) { + message.payload = { + $case: "artifactUpdate", + value: TaskArtifactUpdateEvent.fromPartial(object.payload.value), + }; + } + break; + } + } + return message; + }, +}; + +function createBaseListTaskPushNotificationConfigResponse(): ListTaskPushNotificationConfigResponse { + return { configs: [], nextPageToken: "" }; +} + +export const 
ListTaskPushNotificationConfigResponse: MessageFns = { + encode(message: ListTaskPushNotificationConfigResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.configs) { + TaskPushNotificationConfig.encode(v!, writer.uint32(10).fork()).join(); + } + if (message.nextPageToken !== "") { + writer.uint32(18).string(message.nextPageToken); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ListTaskPushNotificationConfigResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseListTaskPushNotificationConfigResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.configs.push(TaskPushNotificationConfig.decode(reader, reader.uint32())); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.nextPageToken = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ListTaskPushNotificationConfigResponse { + return { + configs: globalThis.Array.isArray(object?.configs) + ? object.configs.map((e: any) => TaskPushNotificationConfig.fromJSON(e)) + : [], + nextPageToken: isSet(object.nextPageToken) ? globalThis.String(object.nextPageToken) : "", + }; + }, + + toJSON(message: ListTaskPushNotificationConfigResponse): unknown { + const obj: any = {}; + if (message.configs?.length) { + obj.configs = message.configs.map((e) => TaskPushNotificationConfig.toJSON(e)); + } + if (message.nextPageToken !== "") { + obj.nextPageToken = message.nextPageToken; + } + return obj; + }, + + create, I>>( + base?: I, + ): ListTaskPushNotificationConfigResponse { + return ListTaskPushNotificationConfigResponse.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>( + object: I, + ): ListTaskPushNotificationConfigResponse { + const message = createBaseListTaskPushNotificationConfigResponse(); + message.configs = object.configs?.map((e) => TaskPushNotificationConfig.fromPartial(e)) || []; + message.nextPageToken = object.nextPageToken ?? ""; + return message; + }, +}; + +/** + * A2AService defines the gRPC version of the A2A protocol. This has a slightly + * different shape than the JSONRPC version to better conform to AIP-127, + * where appropriate. The nouns are AgentCard, Message, Task and + * TaskPushNotificationConfig. + * - Messages are not a standard resource so there is no get/delete/update/list + * interface, only a send and stream custom methods. + * - Tasks have a get interface and custom cancel and subscribe methods. + * - TaskPushNotificationConfig are a resource whose parent is a task. + * They have get, list and create methods. + * - AgentCard is a static resource with only a get method. + * fields are not present as they don't comply with AIP rules, and the + * optional history_length on the get task method is not present as it also + * violates AIP-127 and AIP-131. + */ +export type A2AServiceService = typeof A2AServiceService; +export const A2AServiceService = { + /** + * Send a message to the agent. This is a blocking call that will return the + * task once it is completed, or a LRO if requested. 
+ */ + sendMessage: { + path: "/a2a.v1.A2AService/SendMessage", + requestStream: false, + responseStream: false, + requestSerialize: (value: SendMessageRequest) => Buffer.from(SendMessageRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => SendMessageRequest.decode(value), + responseSerialize: (value: SendMessageResponse) => Buffer.from(SendMessageResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => SendMessageResponse.decode(value), + }, + /** + * SendStreamingMessage is a streaming call that will return a stream of + * task update events until the Task is in an interrupted or terminal state. + */ + sendStreamingMessage: { + path: "/a2a.v1.A2AService/SendStreamingMessage", + requestStream: false, + responseStream: true, + requestSerialize: (value: SendMessageRequest) => Buffer.from(SendMessageRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => SendMessageRequest.decode(value), + responseSerialize: (value: StreamResponse) => Buffer.from(StreamResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => StreamResponse.decode(value), + }, + /** Get the current state of a task from the agent. */ + getTask: { + path: "/a2a.v1.A2AService/GetTask", + requestStream: false, + responseStream: false, + requestSerialize: (value: GetTaskRequest) => Buffer.from(GetTaskRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => GetTaskRequest.decode(value), + responseSerialize: (value: Task) => Buffer.from(Task.encode(value).finish()), + responseDeserialize: (value: Buffer) => Task.decode(value), + }, + /** + * Cancel a task from the agent. If supported one should expect no + * more task updates for the task. 
+ */ + cancelTask: { + path: "/a2a.v1.A2AService/CancelTask", + requestStream: false, + responseStream: false, + requestSerialize: (value: CancelTaskRequest) => Buffer.from(CancelTaskRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => CancelTaskRequest.decode(value), + responseSerialize: (value: Task) => Buffer.from(Task.encode(value).finish()), + responseDeserialize: (value: Buffer) => Task.decode(value), + }, + /** + * TaskSubscription is a streaming call that will return a stream of task + * update events. This attaches the stream to an existing in process task. + * If the task is complete the stream will return the completed task (like + * GetTask) and close the stream. + */ + taskSubscription: { + path: "/a2a.v1.A2AService/TaskSubscription", + requestStream: false, + responseStream: true, + requestSerialize: (value: TaskSubscriptionRequest) => Buffer.from(TaskSubscriptionRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => TaskSubscriptionRequest.decode(value), + responseSerialize: (value: StreamResponse) => Buffer.from(StreamResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => StreamResponse.decode(value), + }, + /** Set a push notification config for a task. */ + createTaskPushNotificationConfig: { + path: "/a2a.v1.A2AService/CreateTaskPushNotificationConfig", + requestStream: false, + responseStream: false, + requestSerialize: (value: CreateTaskPushNotificationConfigRequest) => + Buffer.from(CreateTaskPushNotificationConfigRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => CreateTaskPushNotificationConfigRequest.decode(value), + responseSerialize: (value: TaskPushNotificationConfig) => + Buffer.from(TaskPushNotificationConfig.encode(value).finish()), + responseDeserialize: (value: Buffer) => TaskPushNotificationConfig.decode(value), + }, + /** Get a push notification config for a task. 
*/ + getTaskPushNotificationConfig: { + path: "/a2a.v1.A2AService/GetTaskPushNotificationConfig", + requestStream: false, + responseStream: false, + requestSerialize: (value: GetTaskPushNotificationConfigRequest) => + Buffer.from(GetTaskPushNotificationConfigRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => GetTaskPushNotificationConfigRequest.decode(value), + responseSerialize: (value: TaskPushNotificationConfig) => + Buffer.from(TaskPushNotificationConfig.encode(value).finish()), + responseDeserialize: (value: Buffer) => TaskPushNotificationConfig.decode(value), + }, + /** Get a list of push notifications configured for a task. */ + listTaskPushNotificationConfig: { + path: "/a2a.v1.A2AService/ListTaskPushNotificationConfig", + requestStream: false, + responseStream: false, + requestSerialize: (value: ListTaskPushNotificationConfigRequest) => + Buffer.from(ListTaskPushNotificationConfigRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => ListTaskPushNotificationConfigRequest.decode(value), + responseSerialize: (value: ListTaskPushNotificationConfigResponse) => + Buffer.from(ListTaskPushNotificationConfigResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => ListTaskPushNotificationConfigResponse.decode(value), + }, + /** GetAgentCard returns the agent card for the agent. */ + getAgentCard: { + path: "/a2a.v1.A2AService/GetAgentCard", + requestStream: false, + responseStream: false, + requestSerialize: (value: GetAgentCardRequest) => Buffer.from(GetAgentCardRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => GetAgentCardRequest.decode(value), + responseSerialize: (value: AgentCard) => Buffer.from(AgentCard.encode(value).finish()), + responseDeserialize: (value: Buffer) => AgentCard.decode(value), + }, + /** Delete a push notification config for a task. 
*/ + deleteTaskPushNotificationConfig: { + path: "/a2a.v1.A2AService/DeleteTaskPushNotificationConfig", + requestStream: false, + responseStream: false, + requestSerialize: (value: DeleteTaskPushNotificationConfigRequest) => + Buffer.from(DeleteTaskPushNotificationConfigRequest.encode(value).finish()), + requestDeserialize: (value: Buffer) => DeleteTaskPushNotificationConfigRequest.decode(value), + responseSerialize: (value: Empty) => Buffer.from(Empty.encode(value).finish()), + responseDeserialize: (value: Buffer) => Empty.decode(value), + }, +} as const; + +export interface A2AServiceServer extends UntypedServiceImplementation { + /** + * Send a message to the agent. This is a blocking call that will return the + * task once it is completed, or a LRO if requested. + */ + sendMessage: handleUnaryCall; + /** + * SendStreamingMessage is a streaming call that will return a stream of + * task update events until the Task is in an interrupted or terminal state. + */ + sendStreamingMessage: handleServerStreamingCall; + /** Get the current state of a task from the agent. */ + getTask: handleUnaryCall; + /** + * Cancel a task from the agent. If supported one should expect no + * more task updates for the task. + */ + cancelTask: handleUnaryCall; + /** + * TaskSubscription is a streaming call that will return a stream of task + * update events. This attaches the stream to an existing in process task. + * If the task is complete the stream will return the completed task (like + * GetTask) and close the stream. + */ + taskSubscription: handleServerStreamingCall; + /** Set a push notification config for a task. */ + createTaskPushNotificationConfig: handleUnaryCall< + CreateTaskPushNotificationConfigRequest, + TaskPushNotificationConfig + >; + /** Get a push notification config for a task. */ + getTaskPushNotificationConfig: handleUnaryCall; + /** Get a list of push notifications configured for a task. 
*/ + listTaskPushNotificationConfig: handleUnaryCall< + ListTaskPushNotificationConfigRequest, + ListTaskPushNotificationConfigResponse + >; + /** GetAgentCard returns the agent card for the agent. */ + getAgentCard: handleUnaryCall; + /** Delete a push notification config for a task. */ + deleteTaskPushNotificationConfig: handleUnaryCall; +} + +export interface A2AServiceClient extends Client { + /** + * Send a message to the agent. This is a blocking call that will return the + * task once it is completed, or a LRO if requested. + */ + sendMessage( + request: SendMessageRequest, + callback: (error: ServiceError | null, response: SendMessageResponse) => void, + ): ClientUnaryCall; + sendMessage( + request: SendMessageRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: SendMessageResponse) => void, + ): ClientUnaryCall; + sendMessage( + request: SendMessageRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: SendMessageResponse) => void, + ): ClientUnaryCall; + /** + * SendStreamingMessage is a streaming call that will return a stream of + * task update events until the Task is in an interrupted or terminal state. + */ + sendStreamingMessage( + request: SendMessageRequest, + options?: Partial, + ): ClientReadableStream; + sendStreamingMessage( + request: SendMessageRequest, + metadata?: Metadata, + options?: Partial, + ): ClientReadableStream; + /** Get the current state of a task from the agent. */ + getTask(request: GetTaskRequest, callback: (error: ServiceError | null, response: Task) => void): ClientUnaryCall; + getTask( + request: GetTaskRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: Task) => void, + ): ClientUnaryCall; + getTask( + request: GetTaskRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: Task) => void, + ): ClientUnaryCall; + /** + * Cancel a task from the agent. 
If supported one should expect no + * more task updates for the task. + */ + cancelTask( + request: CancelTaskRequest, + callback: (error: ServiceError | null, response: Task) => void, + ): ClientUnaryCall; + cancelTask( + request: CancelTaskRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: Task) => void, + ): ClientUnaryCall; + cancelTask( + request: CancelTaskRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: Task) => void, + ): ClientUnaryCall; + /** + * TaskSubscription is a streaming call that will return a stream of task + * update events. This attaches the stream to an existing in process task. + * If the task is complete the stream will return the completed task (like + * GetTask) and close the stream. + */ + taskSubscription( + request: TaskSubscriptionRequest, + options?: Partial, + ): ClientReadableStream; + taskSubscription( + request: TaskSubscriptionRequest, + metadata?: Metadata, + options?: Partial, + ): ClientReadableStream; + /** Set a push notification config for a task. */ + createTaskPushNotificationConfig( + request: CreateTaskPushNotificationConfigRequest, + callback: (error: ServiceError | null, response: TaskPushNotificationConfig) => void, + ): ClientUnaryCall; + createTaskPushNotificationConfig( + request: CreateTaskPushNotificationConfigRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: TaskPushNotificationConfig) => void, + ): ClientUnaryCall; + createTaskPushNotificationConfig( + request: CreateTaskPushNotificationConfigRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: TaskPushNotificationConfig) => void, + ): ClientUnaryCall; + /** Get a push notification config for a task. 
*/ + getTaskPushNotificationConfig( + request: GetTaskPushNotificationConfigRequest, + callback: (error: ServiceError | null, response: TaskPushNotificationConfig) => void, + ): ClientUnaryCall; + getTaskPushNotificationConfig( + request: GetTaskPushNotificationConfigRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: TaskPushNotificationConfig) => void, + ): ClientUnaryCall; + getTaskPushNotificationConfig( + request: GetTaskPushNotificationConfigRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: TaskPushNotificationConfig) => void, + ): ClientUnaryCall; + /** Get a list of push notifications configured for a task. */ + listTaskPushNotificationConfig( + request: ListTaskPushNotificationConfigRequest, + callback: (error: ServiceError | null, response: ListTaskPushNotificationConfigResponse) => void, + ): ClientUnaryCall; + listTaskPushNotificationConfig( + request: ListTaskPushNotificationConfigRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: ListTaskPushNotificationConfigResponse) => void, + ): ClientUnaryCall; + listTaskPushNotificationConfig( + request: ListTaskPushNotificationConfigRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: ListTaskPushNotificationConfigResponse) => void, + ): ClientUnaryCall; + /** GetAgentCard returns the agent card for the agent. */ + getAgentCard( + request: GetAgentCardRequest, + callback: (error: ServiceError | null, response: AgentCard) => void, + ): ClientUnaryCall; + getAgentCard( + request: GetAgentCardRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: AgentCard) => void, + ): ClientUnaryCall; + getAgentCard( + request: GetAgentCardRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: AgentCard) => void, + ): ClientUnaryCall; + /** Delete a push notification config for a task. 
*/ + deleteTaskPushNotificationConfig( + request: DeleteTaskPushNotificationConfigRequest, + callback: (error: ServiceError | null, response: Empty) => void, + ): ClientUnaryCall; + deleteTaskPushNotificationConfig( + request: DeleteTaskPushNotificationConfigRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: Empty) => void, + ): ClientUnaryCall; + deleteTaskPushNotificationConfig( + request: DeleteTaskPushNotificationConfigRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: Empty) => void, + ): ClientUnaryCall; +} + +export const A2AServiceClient = makeGenericClientConstructor(A2AServiceService, "a2a.v1.A2AService") as unknown as { + new (address: string, credentials: ChannelCredentials, options?: Partial): A2AServiceClient; + service: typeof A2AServiceService; + serviceName: string; +}; + +function bytesFromBase64(b64: string): Uint8Array { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); +} + +function base64FromBytes(arr: Uint8Array): string { + return globalThis.Buffer.from(arr).toString("base64"); +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends { $case: string; value: unknown } ? { $case: T["$case"]; value?: DeepPartial } + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function toTimestamp(date: Date): Timestamp { + const seconds = Math.trunc(date.getTime() / 1_000); + const nanos = (date.getTime() % 1_000) * 1_000_000; + return { seconds, nanos }; +} + +function fromTimestamp(t: Timestamp): Date { + let millis = (t.seconds || 0) * 1_000; + millis += (t.nanos || 0) / 1_000_000; + return new globalThis.Date(millis); +} + +function fromJsonTimestamp(o: any): Date { + if (o instanceof globalThis.Date) { + return o; + } else if (typeof o === "string") { + return new globalThis.Date(o); + } else { + return fromTimestamp(Timestamp.fromJSON(o)); + } +} + +function isObject(value: any): boolean { + return typeof value === "object" && value !== null; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create, I>>(base?: I): T; + fromPartial, I>>(object: I): T; +} diff --git a/src/grpc/buf.gen.yaml b/src/grpc/buf.gen.yaml index c7a42db2..ff7c99ec 100644 --- a/src/grpc/buf.gen.yaml +++ b/src/grpc/buf.gen.yaml @@ -14,4 +14,4 @@ plugins: - esModuleInterop=true - outputJsonMethods=false - outputPartialMethods=false - out: ./pb \ No newline at end of file + out: ./pb diff --git a/src/grpc/google/api/annotations.ts b/src/grpc/google/api/annotations.ts new file mode 100644 index 00000000..60211996 --- /dev/null +++ b/src/grpc/google/api/annotations.ts @@ -0,0 +1,9 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc unknown +// source: google/api/annotations.proto + +/* eslint-disable */ + +export const protobufPackage = "google.api"; diff --git a/src/grpc/google/api/client.ts b/src/grpc/google/api/client.ts new file mode 100644 index 00000000..7e686fe8 --- /dev/null +++ b/src/grpc/google/api/client.ts @@ -0,0 +1,2624 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc unknown +// source: google/api/client.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; +import { Duration } from "../protobuf/duration.js"; +import { LaunchStage, launchStageFromJSON, launchStageToJSON } from "./launch_stage.js"; + +export const protobufPackage = "google.api"; + +/** + * The organization for which the client libraries are being published. + * Affects the url where generated docs are published, etc. + */ +export enum ClientLibraryOrganization { + /** CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED - Not useful. */ + CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED = 0, + /** CLOUD - Google Cloud Platform Org. */ + CLOUD = 1, + /** ADS - Ads (Advertising) Org. */ + ADS = 2, + /** PHOTOS - Photos Org. */ + PHOTOS = 3, + /** STREET_VIEW - Street View Org. */ + STREET_VIEW = 4, + /** SHOPPING - Shopping Org. */ + SHOPPING = 5, + /** GEO - Geo Org. 
*/ + GEO = 6, + /** GENERATIVE_AI - Generative AI - https://developers.generativeai.google */ + GENERATIVE_AI = 7, + UNRECOGNIZED = -1, +} + +export function clientLibraryOrganizationFromJSON(object: any): ClientLibraryOrganization { + switch (object) { + case 0: + case "CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED": + return ClientLibraryOrganization.CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED; + case 1: + case "CLOUD": + return ClientLibraryOrganization.CLOUD; + case 2: + case "ADS": + return ClientLibraryOrganization.ADS; + case 3: + case "PHOTOS": + return ClientLibraryOrganization.PHOTOS; + case 4: + case "STREET_VIEW": + return ClientLibraryOrganization.STREET_VIEW; + case 5: + case "SHOPPING": + return ClientLibraryOrganization.SHOPPING; + case 6: + case "GEO": + return ClientLibraryOrganization.GEO; + case 7: + case "GENERATIVE_AI": + return ClientLibraryOrganization.GENERATIVE_AI; + case -1: + case "UNRECOGNIZED": + default: + return ClientLibraryOrganization.UNRECOGNIZED; + } +} + +export function clientLibraryOrganizationToJSON(object: ClientLibraryOrganization): string { + switch (object) { + case ClientLibraryOrganization.CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED: + return "CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED"; + case ClientLibraryOrganization.CLOUD: + return "CLOUD"; + case ClientLibraryOrganization.ADS: + return "ADS"; + case ClientLibraryOrganization.PHOTOS: + return "PHOTOS"; + case ClientLibraryOrganization.STREET_VIEW: + return "STREET_VIEW"; + case ClientLibraryOrganization.SHOPPING: + return "SHOPPING"; + case ClientLibraryOrganization.GEO: + return "GEO"; + case ClientLibraryOrganization.GENERATIVE_AI: + return "GENERATIVE_AI"; + case ClientLibraryOrganization.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** To where should client libraries be published? */ +export enum ClientLibraryDestination { + /** + * CLIENT_LIBRARY_DESTINATION_UNSPECIFIED - Client libraries will neither be generated nor published to package + * managers. 
+ */ + CLIENT_LIBRARY_DESTINATION_UNSPECIFIED = 0, + /** + * GITHUB - Generate the client library in a repo under github.com/googleapis, + * but don't publish it to package managers. + */ + GITHUB = 10, + /** PACKAGE_MANAGER - Publish the library to package managers like nuget.org and npmjs.com. */ + PACKAGE_MANAGER = 20, + UNRECOGNIZED = -1, +} + +export function clientLibraryDestinationFromJSON(object: any): ClientLibraryDestination { + switch (object) { + case 0: + case "CLIENT_LIBRARY_DESTINATION_UNSPECIFIED": + return ClientLibraryDestination.CLIENT_LIBRARY_DESTINATION_UNSPECIFIED; + case 10: + case "GITHUB": + return ClientLibraryDestination.GITHUB; + case 20: + case "PACKAGE_MANAGER": + return ClientLibraryDestination.PACKAGE_MANAGER; + case -1: + case "UNRECOGNIZED": + default: + return ClientLibraryDestination.UNRECOGNIZED; + } +} + +export function clientLibraryDestinationToJSON(object: ClientLibraryDestination): string { + switch (object) { + case ClientLibraryDestination.CLIENT_LIBRARY_DESTINATION_UNSPECIFIED: + return "CLIENT_LIBRARY_DESTINATION_UNSPECIFIED"; + case ClientLibraryDestination.GITHUB: + return "GITHUB"; + case ClientLibraryDestination.PACKAGE_MANAGER: + return "PACKAGE_MANAGER"; + case ClientLibraryDestination.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Required information for every language. */ +export interface CommonLanguageSettings { + /** + * Link to automatically generated reference documentation. Example: + * https://cloud.google.com/nodejs/docs/reference/asset/latest + * + * @deprecated + */ + referenceDocsUri: string; + /** The destination where API teams want this client library to be published. */ + destinations: ClientLibraryDestination[]; + /** Configuration for which RPCs should be generated in the GAPIC client. */ + selectiveGapicGeneration: SelectiveGapicGeneration | undefined; +} + +/** Details about how and where to publish client libraries. 
*/ +export interface ClientLibrarySettings { + /** + * Version of the API to apply these settings to. This is the full protobuf + * package for the API, ending in the version element. + * Examples: "google.cloud.speech.v1" and "google.spanner.admin.database.v1". + */ + version: string; + /** Launch stage of this version of the API. */ + launchStage: LaunchStage; + /** + * When using transport=rest, the client request will encode enums as + * numbers rather than strings. + */ + restNumericEnums: boolean; + /** Settings for legacy Java features, supported in the Service YAML. */ + javaSettings: + | JavaSettings + | undefined; + /** Settings for C++ client libraries. */ + cppSettings: + | CppSettings + | undefined; + /** Settings for PHP client libraries. */ + phpSettings: + | PhpSettings + | undefined; + /** Settings for Python client libraries. */ + pythonSettings: + | PythonSettings + | undefined; + /** Settings for Node client libraries. */ + nodeSettings: + | NodeSettings + | undefined; + /** Settings for .NET client libraries. */ + dotnetSettings: + | DotnetSettings + | undefined; + /** Settings for Ruby client libraries. */ + rubySettings: + | RubySettings + | undefined; + /** Settings for Go client libraries. */ + goSettings: GoSettings | undefined; +} + +/** + * This message configures the settings for publishing [Google Cloud Client + * libraries](https://cloud.google.com/apis/docs/cloud-client-libraries) + * generated from the service config. + */ +export interface Publishing { + /** + * A list of API method settings, e.g. the behavior for methods that use the + * long-running operation pattern. + */ + methodSettings: MethodSettings[]; + /** + * Link to a *public* URI where users can report issues. Example: + * https://issuetracker.google.com/issues/new?component=190865&template=1161103 + */ + newIssueUri: string; + /** + * Link to product home page. 
Example: + * https://cloud.google.com/asset-inventory/docs/overview + */ + documentationUri: string; + /** + * Used as a tracking tag when collecting data about the APIs developer + * relations artifacts like docs, packages delivered to package managers, + * etc. Example: "speech". + */ + apiShortName: string; + /** GitHub label to apply to issues and pull requests opened for this API. */ + githubLabel: string; + /** + * GitHub teams to be added to CODEOWNERS in the directory in GitHub + * containing source code for the client libraries for this API. + */ + codeownerGithubTeams: string[]; + /** + * A prefix used in sample code when demarking regions to be included in + * documentation. + */ + docTagPrefix: string; + /** For whom the client library is being published. */ + organization: ClientLibraryOrganization; + /** + * Client library settings. If the same version string appears multiple + * times in this list, then the last one wins. Settings from earlier + * settings with the same version string are discarded. + */ + librarySettings: ClientLibrarySettings[]; + /** + * Optional link to proto reference documentation. Example: + * https://cloud.google.com/pubsub/lite/docs/reference/rpc + */ + protoReferenceDocumentationUri: string; + /** + * Optional link to REST reference documentation. Example: + * https://cloud.google.com/pubsub/lite/docs/reference/rest + */ + restReferenceDocumentationUri: string; +} + +/** Settings for Java client libraries. */ +export interface JavaSettings { + /** + * The package name to use in Java. Clobbers the java_package option + * set in the protobuf. This should be used **only** by APIs + * who have already set the language_settings.java.package_name" field + * in gapic.yaml. API teams should use the protobuf java_package option + * where possible. 
+ * + * Example of a YAML configuration:: + * + * publishing: + * java_settings: + * library_package: com.google.cloud.pubsub.v1 + */ + libraryPackage: string; + /** + * Configure the Java class name to use instead of the service's for its + * corresponding generated GAPIC client. Keys are fully-qualified + * service names as they appear in the protobuf (including the full + * the language_settings.java.interface_names" field in gapic.yaml. API + * teams should otherwise use the service name as it appears in the + * protobuf. + * + * Example of a YAML configuration:: + * + * publishing: + * java_settings: + * service_class_names: + * - google.pubsub.v1.Publisher: TopicAdmin + * - google.pubsub.v1.Subscriber: SubscriptionAdmin + */ + serviceClassNames: { [key: string]: string }; + /** Some settings. */ + common: CommonLanguageSettings | undefined; +} + +export interface JavaSettings_ServiceClassNamesEntry { + key: string; + value: string; +} + +/** Settings for C++ client libraries. */ +export interface CppSettings { + /** Some settings. */ + common: CommonLanguageSettings | undefined; +} + +/** Settings for Php client libraries. */ +export interface PhpSettings { + /** Some settings. */ + common: CommonLanguageSettings | undefined; +} + +/** Settings for Python client libraries. */ +export interface PythonSettings { + /** Some settings. */ + common: + | CommonLanguageSettings + | undefined; + /** Experimental features to be included during client library generation. */ + experimentalFeatures: PythonSettings_ExperimentalFeatures | undefined; +} + +/** + * Experimental features to be included during client library generation. + * These fields will be deprecated once the feature graduates and is enabled + * by default. + */ +export interface PythonSettings_ExperimentalFeatures { + /** + * Enables generation of asynchronous REST clients if `rest` transport is + * enabled. By default, asynchronous REST clients will not be generated. 
+ * This feature will be enabled by default 1 month after launching the + * feature in preview packages. + */ + restAsyncIoEnabled: boolean; + /** + * Enables generation of protobuf code using new types that are more + * Pythonic which are included in `protobuf>=5.29.x`. This feature will be + * enabled by default 1 month after launching the feature in preview + * packages. + */ + protobufPythonicTypesEnabled: boolean; + /** + * Disables generation of an unversioned Python package for this client + * library. This means that the module names will need to be versioned in + * import statements. For example `import google.cloud.library_v2` instead + * of `import google.cloud.library`. + */ + unversionedPackageDisabled: boolean; +} + +/** Settings for Node client libraries. */ +export interface NodeSettings { + /** Some settings. */ + common: CommonLanguageSettings | undefined; +} + +/** Settings for Dotnet client libraries. */ +export interface DotnetSettings { + /** Some settings. */ + common: + | CommonLanguageSettings + | undefined; + /** + * Map from original service names to renamed versions. + * This is used when the default generated types + * would cause a naming conflict. (Neither name is + * fully-qualified.) + * Example: Subscriber to SubscriberServiceApi. + */ + renamedServices: { [key: string]: string }; + /** + * Map from full resource types to the effective short name + * for the resource. This is used when otherwise resource + * named from different services would cause naming collisions. + * Example entry: + * "datalabeling.googleapis.com/Dataset": "DataLabelingDataset" + */ + renamedResources: { [key: string]: string }; + /** + * List of full resource types to ignore during generation. + * This is typically used for API-specific Location resources, + * which should be handled by the generator as if they were actually + * the common Location resources. 
+ * Example entry: "documentai.googleapis.com/Location" + */ + ignoredResources: string[]; + /** + * Namespaces which must be aliased in snippets due to + * a known (but non-generator-predictable) naming collision + */ + forcedNamespaceAliases: string[]; + /** + * Method signatures (in the form "service.method(signature)") + * which are provided separately, so shouldn't be generated. + * Snippets *calling* these methods are still generated, however. + */ + handwrittenSignatures: string[]; +} + +export interface DotnetSettings_RenamedServicesEntry { + key: string; + value: string; +} + +export interface DotnetSettings_RenamedResourcesEntry { + key: string; + value: string; +} + +/** Settings for Ruby client libraries. */ +export interface RubySettings { + /** Some settings. */ + common: CommonLanguageSettings | undefined; +} + +/** Settings for Go client libraries. */ +export interface GoSettings { + /** Some settings. */ + common: + | CommonLanguageSettings + | undefined; + /** + * Map of service names to renamed services. Keys are the package relative + * service names and values are the name to be used for the service client + * and call options. + * + * publishing: + * go_settings: + * renamed_services: + * Publisher: TopicAdmin + */ + renamedServices: { [key: string]: string }; +} + +export interface GoSettings_RenamedServicesEntry { + key: string; + value: string; +} + +/** Describes the generator configuration for a method. */ +export interface MethodSettings { + /** + * The fully qualified name of the method, for which the options below apply. + * This is used to find the method to apply the options. + * + * Example: + * + * publishing: + * method_settings: + * - selector: google.storage.control.v2.StorageControl.CreateFolder + * # method settings for CreateFolder... + */ + selector: string; + /** + * Describes settings to use for long-running operations when generating + * API methods for RPCs. 
Complements RPCs that use the annotations in + * google/longrunning/operations.proto. + * + * Example of a YAML configuration:: + * + * publishing: + * method_settings: + * - selector: google.cloud.speech.v2.Speech.BatchRecognize + * long_running: + * initial_poll_delay: 60s # 1 minute + * poll_delay_multiplier: 1.5 + * max_poll_delay: 360s # 6 minutes + * total_poll_timeout: 54000s # 90 minutes + */ + longRunning: + | MethodSettings_LongRunning + | undefined; + /** + * List of top-level fields of the request message, that should be + * automatically populated by the client libraries based on their + * (google.api.field_info).format. Currently supported format: UUID4. + * + * Example of a YAML configuration: + * + * publishing: + * method_settings: + * - selector: google.example.v1.ExampleService.CreateExample + * auto_populated_fields: + * - request_id + */ + autoPopulatedFields: string[]; +} + +/** + * Describes settings to use when generating API methods that use the + * long-running operation pattern. + * All default values below are from those used in the client library + * generators (e.g. + * [Java](https://github.com/googleapis/gapic-generator-java/blob/04c2faa191a9b5a10b92392fe8482279c4404803/src/main/java/com/google/api/generator/gapic/composer/common/RetrySettingsComposer.java)). + */ +export interface MethodSettings_LongRunning { + /** + * Initial delay after which the first poll request will be made. + * Default value: 5 seconds. + */ + initialPollDelay: + | Duration + | undefined; + /** + * Multiplier to gradually increase delay between subsequent polls until it + * reaches max_poll_delay. + * Default value: 1.5. + */ + pollDelayMultiplier: number; + /** + * Maximum time between two subsequent poll requests. + * Default value: 45 seconds. + */ + maxPollDelay: + | Duration + | undefined; + /** + * Total polling timeout. + * Default value: 5 minutes. 
+ */ + totalPollTimeout: Duration | undefined; +} + +/** + * This message is used to configure the generation of a subset of the RPCs in + * a service for client libraries. + */ +export interface SelectiveGapicGeneration { + /** + * An allowlist of the fully qualified names of RPCs that should be included + * on public client surfaces. + */ + methods: string[]; + /** + * Setting this to true indicates to the client generators that methods + * that would be excluded from the generation should instead be generated + * in a way that indicates these methods should not be consumed by + * end users. How this is expressed is up to individual language + * implementations to decide. Some examples may be: added annotations, + * obfuscated identifiers, or other language idiomatic patterns. + */ + generateOmittedAsInternal: boolean; +} + +function createBaseCommonLanguageSettings(): CommonLanguageSettings { + return { referenceDocsUri: "", destinations: [], selectiveGapicGeneration: undefined }; +} + +export const CommonLanguageSettings: MessageFns = { + encode(message: CommonLanguageSettings, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.referenceDocsUri !== "") { + writer.uint32(10).string(message.referenceDocsUri); + } + writer.uint32(18).fork(); + for (const v of message.destinations) { + writer.int32(v); + } + writer.join(); + if (message.selectiveGapicGeneration !== undefined) { + SelectiveGapicGeneration.encode(message.selectiveGapicGeneration, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): CommonLanguageSettings { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCommonLanguageSettings(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.referenceDocsUri = reader.string(); + continue; + } + case 2: { + if (tag === 16) { + message.destinations.push(reader.int32() as any); + + continue; + } + + if (tag === 18) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.destinations.push(reader.int32() as any); + } + + continue; + } + + break; + } + case 3: { + if (tag !== 26) { + break; + } + + message.selectiveGapicGeneration = SelectiveGapicGeneration.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): CommonLanguageSettings { + return { + referenceDocsUri: isSet(object.referenceDocsUri) ? globalThis.String(object.referenceDocsUri) : "", + destinations: globalThis.Array.isArray(object?.destinations) + ? object.destinations.map((e: any) => clientLibraryDestinationFromJSON(e)) + : [], + selectiveGapicGeneration: isSet(object.selectiveGapicGeneration) + ? SelectiveGapicGeneration.fromJSON(object.selectiveGapicGeneration) + : undefined, + }; + }, + + toJSON(message: CommonLanguageSettings): unknown { + const obj: any = {}; + if (message.referenceDocsUri !== "") { + obj.referenceDocsUri = message.referenceDocsUri; + } + if (message.destinations?.length) { + obj.destinations = message.destinations.map((e) => clientLibraryDestinationToJSON(e)); + } + if (message.selectiveGapicGeneration !== undefined) { + obj.selectiveGapicGeneration = SelectiveGapicGeneration.toJSON(message.selectiveGapicGeneration); + } + return obj; + }, + + create, I>>(base?: I): CommonLanguageSettings { + return CommonLanguageSettings.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): CommonLanguageSettings { + const message = createBaseCommonLanguageSettings(); + message.referenceDocsUri = object.referenceDocsUri ?? ""; + message.destinations = object.destinations?.map((e) => e) || []; + message.selectiveGapicGeneration = + (object.selectiveGapicGeneration !== undefined && object.selectiveGapicGeneration !== null) + ? SelectiveGapicGeneration.fromPartial(object.selectiveGapicGeneration) + : undefined; + return message; + }, +}; + +function createBaseClientLibrarySettings(): ClientLibrarySettings { + return { + version: "", + launchStage: 0, + restNumericEnums: false, + javaSettings: undefined, + cppSettings: undefined, + phpSettings: undefined, + pythonSettings: undefined, + nodeSettings: undefined, + dotnetSettings: undefined, + rubySettings: undefined, + goSettings: undefined, + }; +} + +export const ClientLibrarySettings: MessageFns = { + encode(message: ClientLibrarySettings, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.version !== "") { + writer.uint32(10).string(message.version); + } + if (message.launchStage !== 0) { + writer.uint32(16).int32(message.launchStage); + } + if (message.restNumericEnums !== false) { + writer.uint32(24).bool(message.restNumericEnums); + } + if (message.javaSettings !== undefined) { + JavaSettings.encode(message.javaSettings, writer.uint32(170).fork()).join(); + } + if (message.cppSettings !== undefined) { + CppSettings.encode(message.cppSettings, writer.uint32(178).fork()).join(); + } + if (message.phpSettings !== undefined) { + PhpSettings.encode(message.phpSettings, writer.uint32(186).fork()).join(); + } + if (message.pythonSettings !== undefined) { + PythonSettings.encode(message.pythonSettings, writer.uint32(194).fork()).join(); + } + if (message.nodeSettings !== undefined) { + NodeSettings.encode(message.nodeSettings, writer.uint32(202).fork()).join(); + } + if (message.dotnetSettings !== undefined) { + 
DotnetSettings.encode(message.dotnetSettings, writer.uint32(210).fork()).join(); + } + if (message.rubySettings !== undefined) { + RubySettings.encode(message.rubySettings, writer.uint32(218).fork()).join(); + } + if (message.goSettings !== undefined) { + GoSettings.encode(message.goSettings, writer.uint32(226).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ClientLibrarySettings { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseClientLibrarySettings(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.version = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.launchStage = reader.int32() as any; + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.restNumericEnums = reader.bool(); + continue; + } + case 21: { + if (tag !== 170) { + break; + } + + message.javaSettings = JavaSettings.decode(reader, reader.uint32()); + continue; + } + case 22: { + if (tag !== 178) { + break; + } + + message.cppSettings = CppSettings.decode(reader, reader.uint32()); + continue; + } + case 23: { + if (tag !== 186) { + break; + } + + message.phpSettings = PhpSettings.decode(reader, reader.uint32()); + continue; + } + case 24: { + if (tag !== 194) { + break; + } + + message.pythonSettings = PythonSettings.decode(reader, reader.uint32()); + continue; + } + case 25: { + if (tag !== 202) { + break; + } + + message.nodeSettings = NodeSettings.decode(reader, reader.uint32()); + continue; + } + case 26: { + if (tag !== 210) { + break; + } + + message.dotnetSettings = DotnetSettings.decode(reader, reader.uint32()); + continue; + } + case 27: { + if (tag !== 218) { + break; + } + + message.rubySettings = RubySettings.decode(reader, reader.uint32()); + continue; + 
} + case 28: { + if (tag !== 226) { + break; + } + + message.goSettings = GoSettings.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ClientLibrarySettings { + return { + version: isSet(object.version) ? globalThis.String(object.version) : "", + launchStage: isSet(object.launchStage) ? launchStageFromJSON(object.launchStage) : 0, + restNumericEnums: isSet(object.restNumericEnums) ? globalThis.Boolean(object.restNumericEnums) : false, + javaSettings: isSet(object.javaSettings) ? JavaSettings.fromJSON(object.javaSettings) : undefined, + cppSettings: isSet(object.cppSettings) ? CppSettings.fromJSON(object.cppSettings) : undefined, + phpSettings: isSet(object.phpSettings) ? PhpSettings.fromJSON(object.phpSettings) : undefined, + pythonSettings: isSet(object.pythonSettings) ? PythonSettings.fromJSON(object.pythonSettings) : undefined, + nodeSettings: isSet(object.nodeSettings) ? NodeSettings.fromJSON(object.nodeSettings) : undefined, + dotnetSettings: isSet(object.dotnetSettings) ? DotnetSettings.fromJSON(object.dotnetSettings) : undefined, + rubySettings: isSet(object.rubySettings) ? RubySettings.fromJSON(object.rubySettings) : undefined, + goSettings: isSet(object.goSettings) ? 
GoSettings.fromJSON(object.goSettings) : undefined, + }; + }, + + toJSON(message: ClientLibrarySettings): unknown { + const obj: any = {}; + if (message.version !== "") { + obj.version = message.version; + } + if (message.launchStage !== 0) { + obj.launchStage = launchStageToJSON(message.launchStage); + } + if (message.restNumericEnums !== false) { + obj.restNumericEnums = message.restNumericEnums; + } + if (message.javaSettings !== undefined) { + obj.javaSettings = JavaSettings.toJSON(message.javaSettings); + } + if (message.cppSettings !== undefined) { + obj.cppSettings = CppSettings.toJSON(message.cppSettings); + } + if (message.phpSettings !== undefined) { + obj.phpSettings = PhpSettings.toJSON(message.phpSettings); + } + if (message.pythonSettings !== undefined) { + obj.pythonSettings = PythonSettings.toJSON(message.pythonSettings); + } + if (message.nodeSettings !== undefined) { + obj.nodeSettings = NodeSettings.toJSON(message.nodeSettings); + } + if (message.dotnetSettings !== undefined) { + obj.dotnetSettings = DotnetSettings.toJSON(message.dotnetSettings); + } + if (message.rubySettings !== undefined) { + obj.rubySettings = RubySettings.toJSON(message.rubySettings); + } + if (message.goSettings !== undefined) { + obj.goSettings = GoSettings.toJSON(message.goSettings); + } + return obj; + }, + + create, I>>(base?: I): ClientLibrarySettings { + return ClientLibrarySettings.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ClientLibrarySettings { + const message = createBaseClientLibrarySettings(); + message.version = object.version ?? ""; + message.launchStage = object.launchStage ?? 0; + message.restNumericEnums = object.restNumericEnums ?? false; + message.javaSettings = (object.javaSettings !== undefined && object.javaSettings !== null) + ? JavaSettings.fromPartial(object.javaSettings) + : undefined; + message.cppSettings = (object.cppSettings !== undefined && object.cppSettings !== null) + ? 
CppSettings.fromPartial(object.cppSettings) + : undefined; + message.phpSettings = (object.phpSettings !== undefined && object.phpSettings !== null) + ? PhpSettings.fromPartial(object.phpSettings) + : undefined; + message.pythonSettings = (object.pythonSettings !== undefined && object.pythonSettings !== null) + ? PythonSettings.fromPartial(object.pythonSettings) + : undefined; + message.nodeSettings = (object.nodeSettings !== undefined && object.nodeSettings !== null) + ? NodeSettings.fromPartial(object.nodeSettings) + : undefined; + message.dotnetSettings = (object.dotnetSettings !== undefined && object.dotnetSettings !== null) + ? DotnetSettings.fromPartial(object.dotnetSettings) + : undefined; + message.rubySettings = (object.rubySettings !== undefined && object.rubySettings !== null) + ? RubySettings.fromPartial(object.rubySettings) + : undefined; + message.goSettings = (object.goSettings !== undefined && object.goSettings !== null) + ? GoSettings.fromPartial(object.goSettings) + : undefined; + return message; + }, +}; + +function createBasePublishing(): Publishing { + return { + methodSettings: [], + newIssueUri: "", + documentationUri: "", + apiShortName: "", + githubLabel: "", + codeownerGithubTeams: [], + docTagPrefix: "", + organization: 0, + librarySettings: [], + protoReferenceDocumentationUri: "", + restReferenceDocumentationUri: "", + }; +} + +export const Publishing: MessageFns = { + encode(message: Publishing, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.methodSettings) { + MethodSettings.encode(v!, writer.uint32(18).fork()).join(); + } + if (message.newIssueUri !== "") { + writer.uint32(810).string(message.newIssueUri); + } + if (message.documentationUri !== "") { + writer.uint32(818).string(message.documentationUri); + } + if (message.apiShortName !== "") { + writer.uint32(826).string(message.apiShortName); + } + if (message.githubLabel !== "") { + writer.uint32(834).string(message.githubLabel); + } + for 
(const v of message.codeownerGithubTeams) { + writer.uint32(842).string(v!); + } + if (message.docTagPrefix !== "") { + writer.uint32(850).string(message.docTagPrefix); + } + if (message.organization !== 0) { + writer.uint32(856).int32(message.organization); + } + for (const v of message.librarySettings) { + ClientLibrarySettings.encode(v!, writer.uint32(874).fork()).join(); + } + if (message.protoReferenceDocumentationUri !== "") { + writer.uint32(882).string(message.protoReferenceDocumentationUri); + } + if (message.restReferenceDocumentationUri !== "") { + writer.uint32(890).string(message.restReferenceDocumentationUri); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Publishing { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePublishing(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + if (tag !== 18) { + break; + } + + message.methodSettings.push(MethodSettings.decode(reader, reader.uint32())); + continue; + } + case 101: { + if (tag !== 810) { + break; + } + + message.newIssueUri = reader.string(); + continue; + } + case 102: { + if (tag !== 818) { + break; + } + + message.documentationUri = reader.string(); + continue; + } + case 103: { + if (tag !== 826) { + break; + } + + message.apiShortName = reader.string(); + continue; + } + case 104: { + if (tag !== 834) { + break; + } + + message.githubLabel = reader.string(); + continue; + } + case 105: { + if (tag !== 842) { + break; + } + + message.codeownerGithubTeams.push(reader.string()); + continue; + } + case 106: { + if (tag !== 850) { + break; + } + + message.docTagPrefix = reader.string(); + continue; + } + case 107: { + if (tag !== 856) { + break; + } + + message.organization = reader.int32() as any; + continue; + } + case 109: { + if (tag !== 874) { + break; + } + + 
message.librarySettings.push(ClientLibrarySettings.decode(reader, reader.uint32())); + continue; + } + case 110: { + if (tag !== 882) { + break; + } + + message.protoReferenceDocumentationUri = reader.string(); + continue; + } + case 111: { + if (tag !== 890) { + break; + } + + message.restReferenceDocumentationUri = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Publishing { + return { + methodSettings: globalThis.Array.isArray(object?.methodSettings) + ? object.methodSettings.map((e: any) => MethodSettings.fromJSON(e)) + : [], + newIssueUri: isSet(object.newIssueUri) ? globalThis.String(object.newIssueUri) : "", + documentationUri: isSet(object.documentationUri) ? globalThis.String(object.documentationUri) : "", + apiShortName: isSet(object.apiShortName) ? globalThis.String(object.apiShortName) : "", + githubLabel: isSet(object.githubLabel) ? globalThis.String(object.githubLabel) : "", + codeownerGithubTeams: globalThis.Array.isArray(object?.codeownerGithubTeams) + ? object.codeownerGithubTeams.map((e: any) => globalThis.String(e)) + : [], + docTagPrefix: isSet(object.docTagPrefix) ? globalThis.String(object.docTagPrefix) : "", + organization: isSet(object.organization) ? clientLibraryOrganizationFromJSON(object.organization) : 0, + librarySettings: globalThis.Array.isArray(object?.librarySettings) + ? object.librarySettings.map((e: any) => ClientLibrarySettings.fromJSON(e)) + : [], + protoReferenceDocumentationUri: isSet(object.protoReferenceDocumentationUri) + ? globalThis.String(object.protoReferenceDocumentationUri) + : "", + restReferenceDocumentationUri: isSet(object.restReferenceDocumentationUri) + ? 
globalThis.String(object.restReferenceDocumentationUri) + : "", + }; + }, + + toJSON(message: Publishing): unknown { + const obj: any = {}; + if (message.methodSettings?.length) { + obj.methodSettings = message.methodSettings.map((e) => MethodSettings.toJSON(e)); + } + if (message.newIssueUri !== "") { + obj.newIssueUri = message.newIssueUri; + } + if (message.documentationUri !== "") { + obj.documentationUri = message.documentationUri; + } + if (message.apiShortName !== "") { + obj.apiShortName = message.apiShortName; + } + if (message.githubLabel !== "") { + obj.githubLabel = message.githubLabel; + } + if (message.codeownerGithubTeams?.length) { + obj.codeownerGithubTeams = message.codeownerGithubTeams; + } + if (message.docTagPrefix !== "") { + obj.docTagPrefix = message.docTagPrefix; + } + if (message.organization !== 0) { + obj.organization = clientLibraryOrganizationToJSON(message.organization); + } + if (message.librarySettings?.length) { + obj.librarySettings = message.librarySettings.map((e) => ClientLibrarySettings.toJSON(e)); + } + if (message.protoReferenceDocumentationUri !== "") { + obj.protoReferenceDocumentationUri = message.protoReferenceDocumentationUri; + } + if (message.restReferenceDocumentationUri !== "") { + obj.restReferenceDocumentationUri = message.restReferenceDocumentationUri; + } + return obj; + }, + + create, I>>(base?: I): Publishing { + return Publishing.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Publishing { + const message = createBasePublishing(); + message.methodSettings = object.methodSettings?.map((e) => MethodSettings.fromPartial(e)) || []; + message.newIssueUri = object.newIssueUri ?? ""; + message.documentationUri = object.documentationUri ?? ""; + message.apiShortName = object.apiShortName ?? ""; + message.githubLabel = object.githubLabel ?? ""; + message.codeownerGithubTeams = object.codeownerGithubTeams?.map((e) => e) || []; + message.docTagPrefix = object.docTagPrefix ?? 
""; + message.organization = object.organization ?? 0; + message.librarySettings = object.librarySettings?.map((e) => ClientLibrarySettings.fromPartial(e)) || []; + message.protoReferenceDocumentationUri = object.protoReferenceDocumentationUri ?? ""; + message.restReferenceDocumentationUri = object.restReferenceDocumentationUri ?? ""; + return message; + }, +}; + +function createBaseJavaSettings(): JavaSettings { + return { libraryPackage: "", serviceClassNames: {}, common: undefined }; +} + +export const JavaSettings: MessageFns = { + encode(message: JavaSettings, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.libraryPackage !== "") { + writer.uint32(10).string(message.libraryPackage); + } + Object.entries(message.serviceClassNames).forEach(([key, value]) => { + JavaSettings_ServiceClassNamesEntry.encode({ key: key as any, value }, writer.uint32(18).fork()).join(); + }); + if (message.common !== undefined) { + CommonLanguageSettings.encode(message.common, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JavaSettings { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseJavaSettings(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.libraryPackage = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + const entry2 = JavaSettings_ServiceClassNamesEntry.decode(reader, reader.uint32()); + if (entry2.value !== undefined) { + message.serviceClassNames[entry2.key] = entry2.value; + } + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.common = CommonLanguageSettings.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JavaSettings { + return { + libraryPackage: isSet(object.libraryPackage) ? globalThis.String(object.libraryPackage) : "", + serviceClassNames: isObject(object.serviceClassNames) + ? Object.entries(object.serviceClassNames).reduce<{ [key: string]: string }>((acc, [key, value]) => { + acc[key] = String(value); + return acc; + }, {}) + : {}, + common: isSet(object.common) ? CommonLanguageSettings.fromJSON(object.common) : undefined, + }; + }, + + toJSON(message: JavaSettings): unknown { + const obj: any = {}; + if (message.libraryPackage !== "") { + obj.libraryPackage = message.libraryPackage; + } + if (message.serviceClassNames) { + const entries = Object.entries(message.serviceClassNames); + if (entries.length > 0) { + obj.serviceClassNames = {}; + entries.forEach(([k, v]) => { + obj.serviceClassNames[k] = v; + }); + } + } + if (message.common !== undefined) { + obj.common = CommonLanguageSettings.toJSON(message.common); + } + return obj; + }, + + create, I>>(base?: I): JavaSettings { + return JavaSettings.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): JavaSettings { + const message = createBaseJavaSettings(); + message.libraryPackage = object.libraryPackage ?? 
""; + message.serviceClassNames = Object.entries(object.serviceClassNames ?? {}).reduce<{ [key: string]: string }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[key] = globalThis.String(value); + } + return acc; + }, + {}, + ); + message.common = (object.common !== undefined && object.common !== null) + ? CommonLanguageSettings.fromPartial(object.common) + : undefined; + return message; + }, +}; + +function createBaseJavaSettings_ServiceClassNamesEntry(): JavaSettings_ServiceClassNamesEntry { + return { key: "", value: "" }; +} + +export const JavaSettings_ServiceClassNamesEntry: MessageFns = { + encode(message: JavaSettings_ServiceClassNamesEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JavaSettings_ServiceClassNamesEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJavaSettings_ServiceClassNamesEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JavaSettings_ServiceClassNamesEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? 
globalThis.String(object.value) : "", + }; + }, + + toJSON(message: JavaSettings_ServiceClassNamesEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create, I>>( + base?: I, + ): JavaSettings_ServiceClassNamesEntry { + return JavaSettings_ServiceClassNamesEntry.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): JavaSettings_ServiceClassNamesEntry { + const message = createBaseJavaSettings_ServiceClassNamesEntry(); + message.key = object.key ?? ""; + message.value = object.value ?? ""; + return message; + }, +}; + +function createBaseCppSettings(): CppSettings { + return { common: undefined }; +} + +export const CppSettings: MessageFns = { + encode(message: CppSettings, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.common !== undefined) { + CommonLanguageSettings.encode(message.common, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): CppSettings { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCppSettings(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.common = CommonLanguageSettings.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): CppSettings { + return { common: isSet(object.common) ? 
CommonLanguageSettings.fromJSON(object.common) : undefined }; + }, + + toJSON(message: CppSettings): unknown { + const obj: any = {}; + if (message.common !== undefined) { + obj.common = CommonLanguageSettings.toJSON(message.common); + } + return obj; + }, + + create, I>>(base?: I): CppSettings { + return CppSettings.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): CppSettings { + const message = createBaseCppSettings(); + message.common = (object.common !== undefined && object.common !== null) + ? CommonLanguageSettings.fromPartial(object.common) + : undefined; + return message; + }, +}; + +function createBasePhpSettings(): PhpSettings { + return { common: undefined }; +} + +export const PhpSettings: MessageFns = { + encode(message: PhpSettings, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.common !== undefined) { + CommonLanguageSettings.encode(message.common, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PhpSettings { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePhpSettings(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.common = CommonLanguageSettings.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PhpSettings { + return { common: isSet(object.common) ? 
CommonLanguageSettings.fromJSON(object.common) : undefined }; + }, + + toJSON(message: PhpSettings): unknown { + const obj: any = {}; + if (message.common !== undefined) { + obj.common = CommonLanguageSettings.toJSON(message.common); + } + return obj; + }, + + create, I>>(base?: I): PhpSettings { + return PhpSettings.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): PhpSettings { + const message = createBasePhpSettings(); + message.common = (object.common !== undefined && object.common !== null) + ? CommonLanguageSettings.fromPartial(object.common) + : undefined; + return message; + }, +}; + +function createBasePythonSettings(): PythonSettings { + return { common: undefined, experimentalFeatures: undefined }; +} + +export const PythonSettings: MessageFns = { + encode(message: PythonSettings, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.common !== undefined) { + CommonLanguageSettings.encode(message.common, writer.uint32(10).fork()).join(); + } + if (message.experimentalFeatures !== undefined) { + PythonSettings_ExperimentalFeatures.encode(message.experimentalFeatures, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PythonSettings { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePythonSettings(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.common = CommonLanguageSettings.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.experimentalFeatures = PythonSettings_ExperimentalFeatures.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PythonSettings { + return { + common: isSet(object.common) ? CommonLanguageSettings.fromJSON(object.common) : undefined, + experimentalFeatures: isSet(object.experimentalFeatures) + ? PythonSettings_ExperimentalFeatures.fromJSON(object.experimentalFeatures) + : undefined, + }; + }, + + toJSON(message: PythonSettings): unknown { + const obj: any = {}; + if (message.common !== undefined) { + obj.common = CommonLanguageSettings.toJSON(message.common); + } + if (message.experimentalFeatures !== undefined) { + obj.experimentalFeatures = PythonSettings_ExperimentalFeatures.toJSON(message.experimentalFeatures); + } + return obj; + }, + + create, I>>(base?: I): PythonSettings { + return PythonSettings.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): PythonSettings { + const message = createBasePythonSettings(); + message.common = (object.common !== undefined && object.common !== null) + ? CommonLanguageSettings.fromPartial(object.common) + : undefined; + message.experimentalFeatures = (object.experimentalFeatures !== undefined && object.experimentalFeatures !== null) + ? 
PythonSettings_ExperimentalFeatures.fromPartial(object.experimentalFeatures) + : undefined; + return message; + }, +}; + +function createBasePythonSettings_ExperimentalFeatures(): PythonSettings_ExperimentalFeatures { + return { restAsyncIoEnabled: false, protobufPythonicTypesEnabled: false, unversionedPackageDisabled: false }; +} + +export const PythonSettings_ExperimentalFeatures: MessageFns = { + encode(message: PythonSettings_ExperimentalFeatures, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.restAsyncIoEnabled !== false) { + writer.uint32(8).bool(message.restAsyncIoEnabled); + } + if (message.protobufPythonicTypesEnabled !== false) { + writer.uint32(16).bool(message.protobufPythonicTypesEnabled); + } + if (message.unversionedPackageDisabled !== false) { + writer.uint32(24).bool(message.unversionedPackageDisabled); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PythonSettings_ExperimentalFeatures { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePythonSettings_ExperimentalFeatures(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.restAsyncIoEnabled = reader.bool(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.protobufPythonicTypesEnabled = reader.bool(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.unversionedPackageDisabled = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PythonSettings_ExperimentalFeatures { + return { + restAsyncIoEnabled: isSet(object.restAsyncIoEnabled) ? 
globalThis.Boolean(object.restAsyncIoEnabled) : false, + protobufPythonicTypesEnabled: isSet(object.protobufPythonicTypesEnabled) + ? globalThis.Boolean(object.protobufPythonicTypesEnabled) + : false, + unversionedPackageDisabled: isSet(object.unversionedPackageDisabled) + ? globalThis.Boolean(object.unversionedPackageDisabled) + : false, + }; + }, + + toJSON(message: PythonSettings_ExperimentalFeatures): unknown { + const obj: any = {}; + if (message.restAsyncIoEnabled !== false) { + obj.restAsyncIoEnabled = message.restAsyncIoEnabled; + } + if (message.protobufPythonicTypesEnabled !== false) { + obj.protobufPythonicTypesEnabled = message.protobufPythonicTypesEnabled; + } + if (message.unversionedPackageDisabled !== false) { + obj.unversionedPackageDisabled = message.unversionedPackageDisabled; + } + return obj; + }, + + create, I>>( + base?: I, + ): PythonSettings_ExperimentalFeatures { + return PythonSettings_ExperimentalFeatures.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): PythonSettings_ExperimentalFeatures { + const message = createBasePythonSettings_ExperimentalFeatures(); + message.restAsyncIoEnabled = object.restAsyncIoEnabled ?? false; + message.protobufPythonicTypesEnabled = object.protobufPythonicTypesEnabled ?? false; + message.unversionedPackageDisabled = object.unversionedPackageDisabled ?? false; + return message; + }, +}; + +function createBaseNodeSettings(): NodeSettings { + return { common: undefined }; +} + +export const NodeSettings: MessageFns = { + encode(message: NodeSettings, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.common !== undefined) { + CommonLanguageSettings.encode(message.common, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): NodeSettings { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseNodeSettings(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.common = CommonLanguageSettings.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): NodeSettings { + return { common: isSet(object.common) ? CommonLanguageSettings.fromJSON(object.common) : undefined }; + }, + + toJSON(message: NodeSettings): unknown { + const obj: any = {}; + if (message.common !== undefined) { + obj.common = CommonLanguageSettings.toJSON(message.common); + } + return obj; + }, + + create, I>>(base?: I): NodeSettings { + return NodeSettings.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): NodeSettings { + const message = createBaseNodeSettings(); + message.common = (object.common !== undefined && object.common !== null) + ? 
CommonLanguageSettings.fromPartial(object.common) + : undefined; + return message; + }, +}; + +function createBaseDotnetSettings(): DotnetSettings { + return { + common: undefined, + renamedServices: {}, + renamedResources: {}, + ignoredResources: [], + forcedNamespaceAliases: [], + handwrittenSignatures: [], + }; +} + +export const DotnetSettings: MessageFns = { + encode(message: DotnetSettings, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.common !== undefined) { + CommonLanguageSettings.encode(message.common, writer.uint32(10).fork()).join(); + } + Object.entries(message.renamedServices).forEach(([key, value]) => { + DotnetSettings_RenamedServicesEntry.encode({ key: key as any, value }, writer.uint32(18).fork()).join(); + }); + Object.entries(message.renamedResources).forEach(([key, value]) => { + DotnetSettings_RenamedResourcesEntry.encode({ key: key as any, value }, writer.uint32(26).fork()).join(); + }); + for (const v of message.ignoredResources) { + writer.uint32(34).string(v!); + } + for (const v of message.forcedNamespaceAliases) { + writer.uint32(42).string(v!); + } + for (const v of message.handwrittenSignatures) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DotnetSettings { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDotnetSettings(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.common = CommonLanguageSettings.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + const entry2 = DotnetSettings_RenamedServicesEntry.decode(reader, reader.uint32()); + if (entry2.value !== undefined) { + message.renamedServices[entry2.key] = entry2.value; + } + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + const entry3 = DotnetSettings_RenamedResourcesEntry.decode(reader, reader.uint32()); + if (entry3.value !== undefined) { + message.renamedResources[entry3.key] = entry3.value; + } + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.ignoredResources.push(reader.string()); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.forcedNamespaceAliases.push(reader.string()); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.handwrittenSignatures.push(reader.string()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DotnetSettings { + return { + common: isSet(object.common) ? CommonLanguageSettings.fromJSON(object.common) : undefined, + renamedServices: isObject(object.renamedServices) + ? Object.entries(object.renamedServices).reduce<{ [key: string]: string }>((acc, [key, value]) => { + acc[key] = String(value); + return acc; + }, {}) + : {}, + renamedResources: isObject(object.renamedResources) + ? Object.entries(object.renamedResources).reduce<{ [key: string]: string }>((acc, [key, value]) => { + acc[key] = String(value); + return acc; + }, {}) + : {}, + ignoredResources: globalThis.Array.isArray(object?.ignoredResources) + ? 
object.ignoredResources.map((e: any) => globalThis.String(e)) + : [], + forcedNamespaceAliases: globalThis.Array.isArray(object?.forcedNamespaceAliases) + ? object.forcedNamespaceAliases.map((e: any) => globalThis.String(e)) + : [], + handwrittenSignatures: globalThis.Array.isArray(object?.handwrittenSignatures) + ? object.handwrittenSignatures.map((e: any) => globalThis.String(e)) + : [], + }; + }, + + toJSON(message: DotnetSettings): unknown { + const obj: any = {}; + if (message.common !== undefined) { + obj.common = CommonLanguageSettings.toJSON(message.common); + } + if (message.renamedServices) { + const entries = Object.entries(message.renamedServices); + if (entries.length > 0) { + obj.renamedServices = {}; + entries.forEach(([k, v]) => { + obj.renamedServices[k] = v; + }); + } + } + if (message.renamedResources) { + const entries = Object.entries(message.renamedResources); + if (entries.length > 0) { + obj.renamedResources = {}; + entries.forEach(([k, v]) => { + obj.renamedResources[k] = v; + }); + } + } + if (message.ignoredResources?.length) { + obj.ignoredResources = message.ignoredResources; + } + if (message.forcedNamespaceAliases?.length) { + obj.forcedNamespaceAliases = message.forcedNamespaceAliases; + } + if (message.handwrittenSignatures?.length) { + obj.handwrittenSignatures = message.handwrittenSignatures; + } + return obj; + }, + + create, I>>(base?: I): DotnetSettings { + return DotnetSettings.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): DotnetSettings { + const message = createBaseDotnetSettings(); + message.common = (object.common !== undefined && object.common !== null) + ? CommonLanguageSettings.fromPartial(object.common) + : undefined; + message.renamedServices = Object.entries(object.renamedServices ?? 
{}).reduce<{ [key: string]: string }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[key] = globalThis.String(value); + } + return acc; + }, + {}, + ); + message.renamedResources = Object.entries(object.renamedResources ?? {}).reduce<{ [key: string]: string }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[key] = globalThis.String(value); + } + return acc; + }, + {}, + ); + message.ignoredResources = object.ignoredResources?.map((e) => e) || []; + message.forcedNamespaceAliases = object.forcedNamespaceAliases?.map((e) => e) || []; + message.handwrittenSignatures = object.handwrittenSignatures?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDotnetSettings_RenamedServicesEntry(): DotnetSettings_RenamedServicesEntry { + return { key: "", value: "" }; +} + +export const DotnetSettings_RenamedServicesEntry: MessageFns = { + encode(message: DotnetSettings_RenamedServicesEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DotnetSettings_RenamedServicesEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDotnetSettings_RenamedServicesEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DotnetSettings_RenamedServicesEntry { + return { + key: isSet(object.key) ? 
globalThis.String(object.key) : "", + value: isSet(object.value) ? globalThis.String(object.value) : "", + }; + }, + + toJSON(message: DotnetSettings_RenamedServicesEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create, I>>( + base?: I, + ): DotnetSettings_RenamedServicesEntry { + return DotnetSettings_RenamedServicesEntry.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): DotnetSettings_RenamedServicesEntry { + const message = createBaseDotnetSettings_RenamedServicesEntry(); + message.key = object.key ?? ""; + message.value = object.value ?? ""; + return message; + }, +}; + +function createBaseDotnetSettings_RenamedResourcesEntry(): DotnetSettings_RenamedResourcesEntry { + return { key: "", value: "" }; +} + +export const DotnetSettings_RenamedResourcesEntry: MessageFns = { + encode(message: DotnetSettings_RenamedResourcesEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DotnetSettings_RenamedResourcesEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDotnetSettings_RenamedResourcesEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DotnetSettings_RenamedResourcesEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? globalThis.String(object.value) : "", + }; + }, + + toJSON(message: DotnetSettings_RenamedResourcesEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create, I>>( + base?: I, + ): DotnetSettings_RenamedResourcesEntry { + return DotnetSettings_RenamedResourcesEntry.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): DotnetSettings_RenamedResourcesEntry { + const message = createBaseDotnetSettings_RenamedResourcesEntry(); + message.key = object.key ?? ""; + message.value = object.value ?? ""; + return message; + }, +}; + +function createBaseRubySettings(): RubySettings { + return { common: undefined }; +} + +export const RubySettings: MessageFns = { + encode(message: RubySettings, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.common !== undefined) { + CommonLanguageSettings.encode(message.common, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): RubySettings { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRubySettings(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.common = CommonLanguageSettings.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): RubySettings { + return { common: isSet(object.common) ? CommonLanguageSettings.fromJSON(object.common) : undefined }; + }, + + toJSON(message: RubySettings): unknown { + const obj: any = {}; + if (message.common !== undefined) { + obj.common = CommonLanguageSettings.toJSON(message.common); + } + return obj; + }, + + create, I>>(base?: I): RubySettings { + return RubySettings.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): RubySettings { + const message = createBaseRubySettings(); + message.common = (object.common !== undefined && object.common !== null) + ? CommonLanguageSettings.fromPartial(object.common) + : undefined; + return message; + }, +}; + +function createBaseGoSettings(): GoSettings { + return { common: undefined, renamedServices: {} }; +} + +export const GoSettings: MessageFns = { + encode(message: GoSettings, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.common !== undefined) { + CommonLanguageSettings.encode(message.common, writer.uint32(10).fork()).join(); + } + Object.entries(message.renamedServices).forEach(([key, value]) => { + GoSettings_RenamedServicesEntry.encode({ key: key as any, value }, writer.uint32(18).fork()).join(); + }); + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GoSettings { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGoSettings(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.common = CommonLanguageSettings.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + const entry2 = GoSettings_RenamedServicesEntry.decode(reader, reader.uint32()); + if (entry2.value !== undefined) { + message.renamedServices[entry2.key] = entry2.value; + } + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GoSettings { + return { + common: isSet(object.common) ? CommonLanguageSettings.fromJSON(object.common) : undefined, + renamedServices: isObject(object.renamedServices) + ? Object.entries(object.renamedServices).reduce<{ [key: string]: string }>((acc, [key, value]) => { + acc[key] = String(value); + return acc; + }, {}) + : {}, + }; + }, + + toJSON(message: GoSettings): unknown { + const obj: any = {}; + if (message.common !== undefined) { + obj.common = CommonLanguageSettings.toJSON(message.common); + } + if (message.renamedServices) { + const entries = Object.entries(message.renamedServices); + if (entries.length > 0) { + obj.renamedServices = {}; + entries.forEach(([k, v]) => { + obj.renamedServices[k] = v; + }); + } + } + return obj; + }, + + create, I>>(base?: I): GoSettings { + return GoSettings.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): GoSettings { + const message = createBaseGoSettings(); + message.common = (object.common !== undefined && object.common !== null) + ? CommonLanguageSettings.fromPartial(object.common) + : undefined; + message.renamedServices = Object.entries(object.renamedServices ?? 
{}).reduce<{ [key: string]: string }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[key] = globalThis.String(value); + } + return acc; + }, + {}, + ); + return message; + }, +}; + +function createBaseGoSettings_RenamedServicesEntry(): GoSettings_RenamedServicesEntry { + return { key: "", value: "" }; +} + +export const GoSettings_RenamedServicesEntry: MessageFns = { + encode(message: GoSettings_RenamedServicesEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GoSettings_RenamedServicesEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGoSettings_RenamedServicesEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GoSettings_RenamedServicesEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? globalThis.String(object.value) : "", + }; + }, + + toJSON(message: GoSettings_RenamedServicesEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create, I>>(base?: I): GoSettings_RenamedServicesEntry { + return GoSettings_RenamedServicesEntry.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>( + object: I, + ): GoSettings_RenamedServicesEntry { + const message = createBaseGoSettings_RenamedServicesEntry(); + message.key = object.key ?? ""; + message.value = object.value ?? ""; + return message; + }, +}; + +function createBaseMethodSettings(): MethodSettings { + return { selector: "", longRunning: undefined, autoPopulatedFields: [] }; +} + +export const MethodSettings: MessageFns = { + encode(message: MethodSettings, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.longRunning !== undefined) { + MethodSettings_LongRunning.encode(message.longRunning, writer.uint32(18).fork()).join(); + } + for (const v of message.autoPopulatedFields) { + writer.uint32(26).string(v!); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MethodSettings { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodSettings(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.selector = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.longRunning = MethodSettings_LongRunning.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.autoPopulatedFields.push(reader.string()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MethodSettings { + return { + selector: isSet(object.selector) ? globalThis.String(object.selector) : "", + longRunning: isSet(object.longRunning) ? 
MethodSettings_LongRunning.fromJSON(object.longRunning) : undefined, + autoPopulatedFields: globalThis.Array.isArray(object?.autoPopulatedFields) + ? object.autoPopulatedFields.map((e: any) => globalThis.String(e)) + : [], + }; + }, + + toJSON(message: MethodSettings): unknown { + const obj: any = {}; + if (message.selector !== "") { + obj.selector = message.selector; + } + if (message.longRunning !== undefined) { + obj.longRunning = MethodSettings_LongRunning.toJSON(message.longRunning); + } + if (message.autoPopulatedFields?.length) { + obj.autoPopulatedFields = message.autoPopulatedFields; + } + return obj; + }, + + create, I>>(base?: I): MethodSettings { + return MethodSettings.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MethodSettings { + const message = createBaseMethodSettings(); + message.selector = object.selector ?? ""; + message.longRunning = (object.longRunning !== undefined && object.longRunning !== null) + ? MethodSettings_LongRunning.fromPartial(object.longRunning) + : undefined; + message.autoPopulatedFields = object.autoPopulatedFields?.map((e) => e) || []; + return message; + }, +}; + +function createBaseMethodSettings_LongRunning(): MethodSettings_LongRunning { + return { initialPollDelay: undefined, pollDelayMultiplier: 0, maxPollDelay: undefined, totalPollTimeout: undefined }; +} + +export const MethodSettings_LongRunning: MessageFns = { + encode(message: MethodSettings_LongRunning, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.initialPollDelay !== undefined) { + Duration.encode(message.initialPollDelay, writer.uint32(10).fork()).join(); + } + if (message.pollDelayMultiplier !== 0) { + writer.uint32(21).float(message.pollDelayMultiplier); + } + if (message.maxPollDelay !== undefined) { + Duration.encode(message.maxPollDelay, writer.uint32(26).fork()).join(); + } + if (message.totalPollTimeout !== undefined) { + Duration.encode(message.totalPollTimeout, writer.uint32(34).fork()).join(); + } + 
return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MethodSettings_LongRunning { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodSettings_LongRunning(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.initialPollDelay = Duration.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 21) { + break; + } + + message.pollDelayMultiplier = reader.float(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.maxPollDelay = Duration.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.totalPollTimeout = Duration.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MethodSettings_LongRunning { + return { + initialPollDelay: isSet(object.initialPollDelay) ? Duration.fromJSON(object.initialPollDelay) : undefined, + pollDelayMultiplier: isSet(object.pollDelayMultiplier) ? globalThis.Number(object.pollDelayMultiplier) : 0, + maxPollDelay: isSet(object.maxPollDelay) ? Duration.fromJSON(object.maxPollDelay) : undefined, + totalPollTimeout: isSet(object.totalPollTimeout) ? 
Duration.fromJSON(object.totalPollTimeout) : undefined, + }; + }, + + toJSON(message: MethodSettings_LongRunning): unknown { + const obj: any = {}; + if (message.initialPollDelay !== undefined) { + obj.initialPollDelay = Duration.toJSON(message.initialPollDelay); + } + if (message.pollDelayMultiplier !== 0) { + obj.pollDelayMultiplier = message.pollDelayMultiplier; + } + if (message.maxPollDelay !== undefined) { + obj.maxPollDelay = Duration.toJSON(message.maxPollDelay); + } + if (message.totalPollTimeout !== undefined) { + obj.totalPollTimeout = Duration.toJSON(message.totalPollTimeout); + } + return obj; + }, + + create, I>>(base?: I): MethodSettings_LongRunning { + return MethodSettings_LongRunning.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MethodSettings_LongRunning { + const message = createBaseMethodSettings_LongRunning(); + message.initialPollDelay = (object.initialPollDelay !== undefined && object.initialPollDelay !== null) + ? Duration.fromPartial(object.initialPollDelay) + : undefined; + message.pollDelayMultiplier = object.pollDelayMultiplier ?? 0; + message.maxPollDelay = (object.maxPollDelay !== undefined && object.maxPollDelay !== null) + ? Duration.fromPartial(object.maxPollDelay) + : undefined; + message.totalPollTimeout = (object.totalPollTimeout !== undefined && object.totalPollTimeout !== null) + ? 
Duration.fromPartial(object.totalPollTimeout) + : undefined; + return message; + }, +}; + +function createBaseSelectiveGapicGeneration(): SelectiveGapicGeneration { + return { methods: [], generateOmittedAsInternal: false }; +} + +export const SelectiveGapicGeneration: MessageFns = { + encode(message: SelectiveGapicGeneration, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.methods) { + writer.uint32(10).string(v!); + } + if (message.generateOmittedAsInternal !== false) { + writer.uint32(16).bool(message.generateOmittedAsInternal); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SelectiveGapicGeneration { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSelectiveGapicGeneration(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.methods.push(reader.string()); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.generateOmittedAsInternal = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): SelectiveGapicGeneration { + return { + methods: globalThis.Array.isArray(object?.methods) ? object.methods.map((e: any) => globalThis.String(e)) : [], + generateOmittedAsInternal: isSet(object.generateOmittedAsInternal) + ? 
globalThis.Boolean(object.generateOmittedAsInternal) + : false, + }; + }, + + toJSON(message: SelectiveGapicGeneration): unknown { + const obj: any = {}; + if (message.methods?.length) { + obj.methods = message.methods; + } + if (message.generateOmittedAsInternal !== false) { + obj.generateOmittedAsInternal = message.generateOmittedAsInternal; + } + return obj; + }, + + create, I>>(base?: I): SelectiveGapicGeneration { + return SelectiveGapicGeneration.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): SelectiveGapicGeneration { + const message = createBaseSelectiveGapicGeneration(); + message.methods = object.methods?.map((e) => e) || []; + message.generateOmittedAsInternal = object.generateOmittedAsInternal ?? false; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends { $case: string; value: unknown } ? { $case: T["$case"]; value?: DeepPartial } + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isObject(value: any): boolean { + return typeof value === "object" && value !== null; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create, I>>(base?: I): T; + fromPartial, I>>(object: I): T; +} diff --git a/src/grpc/google/api/field_behavior.ts b/src/grpc/google/api/field_behavior.ts new file mode 100644 index 00000000..46bc7f4a --- /dev/null +++ b/src/grpc/google/api/field_behavior.ts @@ -0,0 +1,145 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc unknown +// source: google/api/field_behavior.proto + +/* eslint-disable */ + +export const protobufPackage = "google.api"; + +/** + * An indicator of the behavior of a given field (for example, that a field + * is required in requests, or given as output but ignored as input). + * This **does not** change the behavior in protocol buffers itself; it only + * denotes the behavior and may affect how API tooling handles the field. + * + * Note: This enum **may** receive new values in the future. + */ +export enum FieldBehavior { + /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */ + FIELD_BEHAVIOR_UNSPECIFIED = 0, + /** + * OPTIONAL - Specifically denotes a field as optional. + * While all fields in protocol buffers are optional, this may be specified + * for emphasis if appropriate. + */ + OPTIONAL = 1, + /** + * REQUIRED - Denotes a field as required. + * This indicates that the field **must** be provided as part of the request, + * and failure to do so will cause an error (usually `INVALID_ARGUMENT`). + */ + REQUIRED = 2, + /** + * OUTPUT_ONLY - Denotes a field as output only. 
+ * This indicates that the field is provided in responses, but including the + * field in a request does nothing (the server *must* ignore it and + * *must not* throw an error as a result of the field's presence). + */ + OUTPUT_ONLY = 3, + /** + * INPUT_ONLY - Denotes a field as input only. + * This indicates that the field is provided in requests, and the + * corresponding field is not included in output. + */ + INPUT_ONLY = 4, + /** + * IMMUTABLE - Denotes a field as immutable. + * This indicates that the field may be set once in a request to create a + * resource, but may not be changed thereafter. + */ + IMMUTABLE = 5, + /** + * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list. + * This indicates that the service may provide the elements of the list + * in any arbitrary order, rather than the order the user originally + * provided. Additionally, the list's order may or may not be stable. + */ + UNORDERED_LIST = 6, + /** + * NON_EMPTY_DEFAULT - Denotes that this field returns a non-empty default value if not set. + * This indicates that if the user provides the empty value in a request, + * a non-empty value will be returned. The user will not be aware of what + * non-empty value to expect. + */ + NON_EMPTY_DEFAULT = 7, + /** + * IDENTIFIER - Denotes that the field in a resource (a message annotated with + * google.api.resource) is used in the resource name to uniquely identify the + * resource. For AIP-compliant APIs, this should only be applied to the + * `name` field on the resource. + * + * This behavior should not be applied to references to other resources within + * the message. + * + * The identifier field of resources often have different field behavior + * depending on the request it is embedded in (e.g. for Create methods name + * is optional and unused, while for Update methods it is required). Instead + * of method-specific annotations, only `IDENTIFIER` is required. 
+ */ + IDENTIFIER = 8, + UNRECOGNIZED = -1, +} + +export function fieldBehaviorFromJSON(object: any): FieldBehavior { + switch (object) { + case 0: + case "FIELD_BEHAVIOR_UNSPECIFIED": + return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED; + case 1: + case "OPTIONAL": + return FieldBehavior.OPTIONAL; + case 2: + case "REQUIRED": + return FieldBehavior.REQUIRED; + case 3: + case "OUTPUT_ONLY": + return FieldBehavior.OUTPUT_ONLY; + case 4: + case "INPUT_ONLY": + return FieldBehavior.INPUT_ONLY; + case 5: + case "IMMUTABLE": + return FieldBehavior.IMMUTABLE; + case 6: + case "UNORDERED_LIST": + return FieldBehavior.UNORDERED_LIST; + case 7: + case "NON_EMPTY_DEFAULT": + return FieldBehavior.NON_EMPTY_DEFAULT; + case 8: + case "IDENTIFIER": + return FieldBehavior.IDENTIFIER; + case -1: + case "UNRECOGNIZED": + default: + return FieldBehavior.UNRECOGNIZED; + } +} + +export function fieldBehaviorToJSON(object: FieldBehavior): string { + switch (object) { + case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED: + return "FIELD_BEHAVIOR_UNSPECIFIED"; + case FieldBehavior.OPTIONAL: + return "OPTIONAL"; + case FieldBehavior.REQUIRED: + return "REQUIRED"; + case FieldBehavior.OUTPUT_ONLY: + return "OUTPUT_ONLY"; + case FieldBehavior.INPUT_ONLY: + return "INPUT_ONLY"; + case FieldBehavior.IMMUTABLE: + return "IMMUTABLE"; + case FieldBehavior.UNORDERED_LIST: + return "UNORDERED_LIST"; + case FieldBehavior.NON_EMPTY_DEFAULT: + return "NON_EMPTY_DEFAULT"; + case FieldBehavior.IDENTIFIER: + return "IDENTIFIER"; + case FieldBehavior.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} diff --git a/src/grpc/google/api/http.ts b/src/grpc/google/api/http.ts new file mode 100644 index 00000000..cd967746 --- /dev/null +++ b/src/grpc/google/api/http.ts @@ -0,0 +1,795 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc unknown +// source: google/api/http.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parameters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * gRPC Transcoding + * + * gRPC Transcoding is a feature for mapping between a gRPC method and one or + * more HTTP REST endpoints. It allows developers to build a single API service + * that supports both gRPC APIs and REST APIs. Many systems, including [Google + * APIs](https://github.com/googleapis/googleapis), + * [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC + * Gateway](https://github.com/grpc-ecosystem/grpc-gateway), + * and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature + * and use it for large scale production services. + * + * `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies + * how different portions of the gRPC request message are mapped to the URL + * path, URL query parameters, and HTTP request body. It also controls how the + * gRPC response message is mapped to the HTTP response body. `HttpRule` is + * typically specified as an `google.api.http` annotation on the gRPC method. 
+ * + * Each mapping specifies a URL path template and an HTTP method. The path + * template may refer to one or more fields in the gRPC request message, as long + * as each field is a non-repeated field with a primitive (non-message) type. + * The path template controls how fields of the request message are mapped to + * the URL path. + * + * Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/{name=messages/*}" + * }; + * } + * } + * message GetMessageRequest { + * string name = 1; // Mapped to URL path. + * } + * message Message { + * string text = 1; // The resource content. + * } + * + * This enables an HTTP REST to gRPC mapping as below: + * + * - HTTP: `GET /v1/messages/123456` + * - gRPC: `GetMessage(name: "messages/123456")` + * + * Any fields in the request message which are not bound by the path template + * automatically become HTTP query parameters if there is no HTTP request body. + * For example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get:"/v1/messages/{message_id}" + * }; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // Mapped to URL path. + * int64 revision = 2; // Mapped to URL query parameter `revision`. + * SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * - HTTP: `GET /v1/messages/123456?revision=2&sub.subfield=foo` + * - gRPC: `GetMessage(message_id: "123456" revision: 2 sub: + * SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to URL query parameters must have a + * primitive type or a repeated primitive type or a non-repeated message type. + * In the case of a repeated type, the parameter can be repeated in the URL + * as `...?param=A¶m=B`. 
In the case of a message type, each field of the + * message is mapped to a separate parameter, such as + * `...?foo.a=A&foo.b=B&foo.c=C`. + * + * For HTTP methods that allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * - HTTP: `PATCH /v1/messages/123456 { "text": "Hi!" }` + * - gRPC: `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * - HTTP: `PATCH /v1/messages/123456 { "text": "Hi!" }` + * - gRPC: `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice when + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. 
+ * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC mappings: + * + * - HTTP: `GET /v1/messages/123456` + * - gRPC: `GetMessage(message_id: "123456")` + * + * - HTTP: `GET /v1/users/me/messages/123456` + * - gRPC: `GetMessage(user_id: "me" message_id: "123456")` + * + * Rules for HTTP mapping + * + * 1. Leaf request fields (recursive expansion nested messages in the request + * message) are classified into three categories: + * - Fields referred by the path template. They are passed via the URL path. + * - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They + * are passed via the HTTP + * request body. + * - All other fields are passed via the URL query parameters, and the + * parameter name is the field path in the request message. A repeated + * field can be represented as multiple query parameters under the same + * name. + * 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL + * query parameter, all fields + * are passed via URL path and HTTP request body. + * 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP + * request body, all + * fields are passed via URL path and URL query parameters. + * + * Path template syntax + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." 
IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single URL path segment. The syntax `**` matches + * zero or more URL path segments, which must be the last part of the URL path + * except the `Verb`. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` + * contains any reserved character, such characters should be percent-encoded + * before the matching. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path on the client + * side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The + * server side does the reverse decoding. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{var}`. + * + * If a variable contains multiple path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path on the + * client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. + * The server side does the reverse decoding, except "%2F" and "%2f" are left + * unchanged. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{+var}`. + * + * Using gRPC API Service Configuration + * + * gRPC API Service Configuration (service config) is a configuration language + * for configuring a gRPC service to become a user-facing product. The + * service config is simply the YAML representation of the `google.api.Service` + * proto message. + * + * As an alternative to annotating your proto file, you can configure gRPC + * transcoding in your service config YAML files. 
You do this by specifying a + * `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same + * effect as the proto annotation. This can be particularly useful if you + * have a proto that is reused in multiple services. Note that any transcoding + * specified in the service config will override any matching transcoding + * configuration in the proto. + * + * The following example selects a gRPC method and applies an `HttpRule` to it: + * + * http: + * rules: + * - selector: example.v1.Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * Special notes + * + * When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the + * proto to JSON conversion must follow the [proto3 + * specification](https://developers.google.com/protocol-buffers/docs/proto3#json). + * + * While the single segment variable follows the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String + * Expansion, the multi segment variable **does not** follow RFC 6570 Section + * 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. As the result, gRPC Transcoding uses a custom encoding + * for multi segment variables. + * + * The path variables **must not** refer to any repeated or mapped field, + * because client libraries are not capable of handling such variable expansion. + * + * The path variables **must not** capture the leading "/" character. The reason + * is that the most common use case "{var}" does not capture the leading "/" + * character. For consistency, all path variables must share the same behavior. + * + * Repeated message fields must not be mapped to URL query parameters, because + * no client library can support such complicated mapping. + * + * If an API needs to use a JSON array for request or response body, it can map + * the request or response body to a repeated field. 
However, some gRPC + * Transcoding implementations may not support this feature. + */ +export interface HttpRule { + /** + * Selects a method to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax + * details. + */ + selector: string; + /** + * Determines the URL pattern is matched by this rules. This pattern can be + * used with any of the {get|put|post|delete|patch} methods. A custom method + * can be defined using the 'custom' field. + */ + pattern?: + | // + /** + * Maps to HTTP GET. Used for listing and getting information about + * resources. + */ + { $case: "get"; value: string } + | // + /** Maps to HTTP PUT. Used for replacing a resource. */ + { $case: "put"; value: string } + | // + /** Maps to HTTP POST. Used for creating a resource or performing an action. */ + { $case: "post"; value: string } + | // + /** Maps to HTTP DELETE. Used for deleting a resource. */ + { $case: "delete"; value: string } + | // + /** Maps to HTTP PATCH. Used for updating a resource. */ + { $case: "patch"; value: string } + | // + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + { $case: "custom"; value: CustomHttpPattern } + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP request + * body, or `*` for mapping all request fields not captured by the path + * pattern to the HTTP body, or omitted for not having any HTTP request body. + * + * NOTE: the referred field must be present at the top-level of the request + * message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * response body. When omitted, the entire response message will be used + * as the HTTP response body. 
+ * + * NOTE: The referred field must be present at the top-level of the response + * message type. + */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http: MessageFns = { + encode(message: Http, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).join(); + } + if (message.fullyDecodeReservedExpansion !== false) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Http { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.rules.push(HttpRule.decode(reader, reader.uint32())); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.fullyDecodeReservedExpansion = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: globalThis.Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? 
globalThis.Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules?.length) { + obj.rules = message.rules.map((e) => HttpRule.toJSON(e)); + } + if (message.fullyDecodeReservedExpansion !== false) { + obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion; + } + return obj; + }, + + create, I>>(base?: I): Http { + return Http.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { selector: "", pattern: undefined, body: "", responseBody: "", additionalBindings: [] }; +} + +export const HttpRule: MessageFns = { + encode(message: HttpRule, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + switch (message.pattern?.$case) { + case "get": + writer.uint32(18).string(message.pattern.value); + break; + case "put": + writer.uint32(26).string(message.pattern.value); + break; + case "post": + writer.uint32(34).string(message.pattern.value); + break; + case "delete": + writer.uint32(42).string(message.pattern.value); + break; + case "patch": + writer.uint32(50).string(message.pattern.value); + break; + case "custom": + CustomHttpPattern.encode(message.pattern.value, writer.uint32(66).fork()).join(); + break; + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): HttpRule { + const 
reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.selector = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.pattern = { $case: "get", value: reader.string() }; + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.pattern = { $case: "put", value: reader.string() }; + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.pattern = { $case: "post", value: reader.string() }; + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.pattern = { $case: "delete", value: reader.string() }; + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.pattern = { $case: "patch", value: reader.string() }; + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.pattern = { $case: "custom", value: CustomHttpPattern.decode(reader, reader.uint32()) }; + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.body = reader.string(); + continue; + } + case 12: { + if (tag !== 98) { + break; + } + + message.responseBody = reader.string(); + continue; + } + case 11: { + if (tag !== 90) { + break; + } + + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? globalThis.String(object.selector) : "", + pattern: isSet(object.get) + ? { $case: "get", value: globalThis.String(object.get) } + : isSet(object.put) + ? { $case: "put", value: globalThis.String(object.put) } + : isSet(object.post) + ? 
{ $case: "post", value: globalThis.String(object.post) } + : isSet(object.delete) + ? { $case: "delete", value: globalThis.String(object.delete) } + : isSet(object.patch) + ? { $case: "patch", value: globalThis.String(object.patch) } + : isSet(object.custom) + ? { $case: "custom", value: CustomHttpPattern.fromJSON(object.custom) } + : undefined, + body: isSet(object.body) ? globalThis.String(object.body) : "", + responseBody: isSet(object.responseBody) ? globalThis.String(object.responseBody) : "", + additionalBindings: globalThis.Array.isArray(object?.additionalBindings) + ? object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + if (message.selector !== "") { + obj.selector = message.selector; + } + if (message.pattern?.$case === "get") { + obj.get = message.pattern.value; + } else if (message.pattern?.$case === "put") { + obj.put = message.pattern.value; + } else if (message.pattern?.$case === "post") { + obj.post = message.pattern.value; + } else if (message.pattern?.$case === "delete") { + obj.delete = message.pattern.value; + } else if (message.pattern?.$case === "patch") { + obj.patch = message.pattern.value; + } else if (message.pattern?.$case === "custom") { + obj.custom = CustomHttpPattern.toJSON(message.pattern.value); + } + if (message.body !== "") { + obj.body = message.body; + } + if (message.responseBody !== "") { + obj.responseBody = message.responseBody; + } + if (message.additionalBindings?.length) { + obj.additionalBindings = message.additionalBindings.map((e) => HttpRule.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): HttpRule { + return HttpRule.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? 
""; + switch (object.pattern?.$case) { + case "get": { + if (object.pattern?.value !== undefined && object.pattern?.value !== null) { + message.pattern = { $case: "get", value: object.pattern.value }; + } + break; + } + case "put": { + if (object.pattern?.value !== undefined && object.pattern?.value !== null) { + message.pattern = { $case: "put", value: object.pattern.value }; + } + break; + } + case "post": { + if (object.pattern?.value !== undefined && object.pattern?.value !== null) { + message.pattern = { $case: "post", value: object.pattern.value }; + } + break; + } + case "delete": { + if (object.pattern?.value !== undefined && object.pattern?.value !== null) { + message.pattern = { $case: "delete", value: object.pattern.value }; + } + break; + } + case "patch": { + if (object.pattern?.value !== undefined && object.pattern?.value !== null) { + message.pattern = { $case: "patch", value: object.pattern.value }; + } + break; + } + case "custom": { + if (object.pattern?.value !== undefined && object.pattern?.value !== null) { + message.pattern = { $case: "custom", value: CustomHttpPattern.fromPartial(object.pattern.value) }; + } + break; + } + } + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? ""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern: MessageFns = { + encode(message: CustomHttpPattern, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.kind = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.path = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { + kind: isSet(object.kind) ? globalThis.String(object.kind) : "", + path: isSet(object.path) ? globalThis.String(object.path) : "", + }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + if (message.kind !== "") { + obj.kind = message.kind; + } + if (message.path !== "") { + obj.path = message.path; + } + return obj; + }, + + create, I>>(base?: I): CustomHttpPattern { + return CustomHttpPattern.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends { $case: string; value: unknown } ? { $case: T["$case"]; value?: DeepPartial } + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create, I>>(base?: I): T; + fromPartial, I>>(object: I): T; +} diff --git a/src/grpc/google/api/launch_stage.ts b/src/grpc/google/api/launch_stage.ts new file mode 100644 index 00000000..fd362264 --- /dev/null +++ b/src/grpc/google/api/launch_stage.ts @@ -0,0 +1,121 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc unknown +// source: google/api/launch_stage.proto + +/* eslint-disable */ + +export const protobufPackage = "google.api"; + +/** + * The launch stage as defined by [Google Cloud Platform + * Launch Stages](https://cloud.google.com/terms/launch-stages). + */ +export enum LaunchStage { + /** LAUNCH_STAGE_UNSPECIFIED - Do not use this default value. */ + LAUNCH_STAGE_UNSPECIFIED = 0, + /** UNIMPLEMENTED - The feature is not yet implemented. Users can not use it. */ + UNIMPLEMENTED = 6, + /** PRELAUNCH - Prelaunch features are hidden from users and are only visible internally. */ + PRELAUNCH = 7, + /** + * EARLY_ACCESS - Early Access features are limited to a closed group of testers. To use + * these features, you must sign up in advance and sign a Trusted Tester + * agreement (which includes confidentiality provisions). These features may + * be unstable, changed in backward-incompatible ways, and are not + * guaranteed to be released. + */ + EARLY_ACCESS = 1, + /** + * ALPHA - Alpha is a limited availability test for releases before they are cleared + * for widespread use. By Alpha, all significant design issues are resolved + * and we are in the process of verifying functionality. 
Alpha customers + * need to apply for access, agree to applicable terms, and have their + * projects allowlisted. Alpha releases don't have to be feature complete, + * no SLAs are provided, and there are no technical support obligations, but + * they will be far enough along that customers can actually use them in + * test environments or for limited-use tests -- just like they would in + * normal production cases. + */ + ALPHA = 2, + /** + * BETA - Beta is the point at which we are ready to open a release for any + * customer to use. There are no SLA or technical support obligations in a + * Beta release. Products will be complete from a feature perspective, but + * may have some open outstanding issues. Beta releases are suitable for + * limited production use cases. + */ + BETA = 3, + /** + * GA - GA features are open to all developers and are considered stable and + * fully qualified for production use. + */ + GA = 4, + /** + * DEPRECATED - Deprecated features are scheduled to be shut down and removed. For more + * information, see the "Deprecation Policy" section of our [Terms of + * Service](https://cloud.google.com/terms/) + * and the [Google Cloud Platform Subject to the Deprecation + * Policy](https://cloud.google.com/terms/deprecation) documentation. 
+ */ + DEPRECATED = 5, + UNRECOGNIZED = -1, +} + +export function launchStageFromJSON(object: any): LaunchStage { + switch (object) { + case 0: + case "LAUNCH_STAGE_UNSPECIFIED": + return LaunchStage.LAUNCH_STAGE_UNSPECIFIED; + case 6: + case "UNIMPLEMENTED": + return LaunchStage.UNIMPLEMENTED; + case 7: + case "PRELAUNCH": + return LaunchStage.PRELAUNCH; + case 1: + case "EARLY_ACCESS": + return LaunchStage.EARLY_ACCESS; + case 2: + case "ALPHA": + return LaunchStage.ALPHA; + case 3: + case "BETA": + return LaunchStage.BETA; + case 4: + case "GA": + return LaunchStage.GA; + case 5: + case "DEPRECATED": + return LaunchStage.DEPRECATED; + case -1: + case "UNRECOGNIZED": + default: + return LaunchStage.UNRECOGNIZED; + } +} + +export function launchStageToJSON(object: LaunchStage): string { + switch (object) { + case LaunchStage.LAUNCH_STAGE_UNSPECIFIED: + return "LAUNCH_STAGE_UNSPECIFIED"; + case LaunchStage.UNIMPLEMENTED: + return "UNIMPLEMENTED"; + case LaunchStage.PRELAUNCH: + return "PRELAUNCH"; + case LaunchStage.EARLY_ACCESS: + return "EARLY_ACCESS"; + case LaunchStage.ALPHA: + return "ALPHA"; + case LaunchStage.BETA: + return "BETA"; + case LaunchStage.GA: + return "GA"; + case LaunchStage.DEPRECATED: + return "DEPRECATED"; + case LaunchStage.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} diff --git a/src/grpc/google/protobuf/descriptor.ts b/src/grpc/google/protobuf/descriptor.ts new file mode 100644 index 00000000..41fa849f --- /dev/null +++ b/src/grpc/google/protobuf/descriptor.ts @@ -0,0 +1,6971 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc unknown +// source: google/protobuf/descriptor.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "google.protobuf"; + +/** The full set of known editions. */ +export enum Edition { + /** EDITION_UNKNOWN - A placeholder for an unknown edition value. 
*/ + EDITION_UNKNOWN = 0, + /** + * EDITION_LEGACY - A placeholder edition for specifying default behaviors *before* a feature + * was first introduced. This is effectively an "infinite past". + */ + EDITION_LEGACY = 900, + /** + * EDITION_PROTO2 - Legacy syntax "editions". These pre-date editions, but behave much like + * distinct editions. These can't be used to specify the edition of proto + * files, but feature definitions must supply proto2/proto3 defaults for + * backwards compatibility. + */ + EDITION_PROTO2 = 998, + EDITION_PROTO3 = 999, + /** + * EDITION_2023 - Editions that have been released. The specific values are arbitrary and + * should not be depended on, but they will always be time-ordered for easy + * comparison. + */ + EDITION_2023 = 1000, + EDITION_2024 = 1001, + /** + * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution. These should not be + * used or relied on outside of tests. + */ + EDITION_1_TEST_ONLY = 1, + EDITION_2_TEST_ONLY = 2, + EDITION_99997_TEST_ONLY = 99997, + EDITION_99998_TEST_ONLY = 99998, + EDITION_99999_TEST_ONLY = 99999, + /** + * EDITION_MAX - Placeholder for specifying unbounded edition support. This should only + * ever be used by plugins that can expect to never require any changes to + * support a new edition. 
+ */ + EDITION_MAX = 2147483647, + UNRECOGNIZED = -1, +} + +export function editionFromJSON(object: any): Edition { + switch (object) { + case 0: + case "EDITION_UNKNOWN": + return Edition.EDITION_UNKNOWN; + case 900: + case "EDITION_LEGACY": + return Edition.EDITION_LEGACY; + case 998: + case "EDITION_PROTO2": + return Edition.EDITION_PROTO2; + case 999: + case "EDITION_PROTO3": + return Edition.EDITION_PROTO3; + case 1000: + case "EDITION_2023": + return Edition.EDITION_2023; + case 1001: + case "EDITION_2024": + return Edition.EDITION_2024; + case 1: + case "EDITION_1_TEST_ONLY": + return Edition.EDITION_1_TEST_ONLY; + case 2: + case "EDITION_2_TEST_ONLY": + return Edition.EDITION_2_TEST_ONLY; + case 99997: + case "EDITION_99997_TEST_ONLY": + return Edition.EDITION_99997_TEST_ONLY; + case 99998: + case "EDITION_99998_TEST_ONLY": + return Edition.EDITION_99998_TEST_ONLY; + case 99999: + case "EDITION_99999_TEST_ONLY": + return Edition.EDITION_99999_TEST_ONLY; + case 2147483647: + case "EDITION_MAX": + return Edition.EDITION_MAX; + case -1: + case "UNRECOGNIZED": + default: + return Edition.UNRECOGNIZED; + } +} + +export function editionToJSON(object: Edition): string { + switch (object) { + case Edition.EDITION_UNKNOWN: + return "EDITION_UNKNOWN"; + case Edition.EDITION_LEGACY: + return "EDITION_LEGACY"; + case Edition.EDITION_PROTO2: + return "EDITION_PROTO2"; + case Edition.EDITION_PROTO3: + return "EDITION_PROTO3"; + case Edition.EDITION_2023: + return "EDITION_2023"; + case Edition.EDITION_2024: + return "EDITION_2024"; + case Edition.EDITION_1_TEST_ONLY: + return "EDITION_1_TEST_ONLY"; + case Edition.EDITION_2_TEST_ONLY: + return "EDITION_2_TEST_ONLY"; + case Edition.EDITION_99997_TEST_ONLY: + return "EDITION_99997_TEST_ONLY"; + case Edition.EDITION_99998_TEST_ONLY: + return "EDITION_99998_TEST_ONLY"; + case Edition.EDITION_99999_TEST_ONLY: + return "EDITION_99999_TEST_ONLY"; + case Edition.EDITION_MAX: + return "EDITION_MAX"; + case Edition.UNRECOGNIZED: + 
default: + return "UNRECOGNIZED"; + } +} + +/** + * Describes the 'visibility' of a symbol with respect to the proto import + * system. Symbols can only be imported when the visibility rules do not prevent + * it (ex: local symbols cannot be imported). Visibility modifiers can only set + * on `message` and `enum` as they are the only types available to be referenced + * from other files. + */ +export enum SymbolVisibility { + VISIBILITY_UNSET = 0, + VISIBILITY_LOCAL = 1, + VISIBILITY_EXPORT = 2, + UNRECOGNIZED = -1, +} + +export function symbolVisibilityFromJSON(object: any): SymbolVisibility { + switch (object) { + case 0: + case "VISIBILITY_UNSET": + return SymbolVisibility.VISIBILITY_UNSET; + case 1: + case "VISIBILITY_LOCAL": + return SymbolVisibility.VISIBILITY_LOCAL; + case 2: + case "VISIBILITY_EXPORT": + return SymbolVisibility.VISIBILITY_EXPORT; + case -1: + case "UNRECOGNIZED": + default: + return SymbolVisibility.UNRECOGNIZED; + } +} + +export function symbolVisibilityToJSON(object: SymbolVisibility): string { + switch (object) { + case SymbolVisibility.VISIBILITY_UNSET: + return "VISIBILITY_UNSET"; + case SymbolVisibility.VISIBILITY_LOCAL: + return "VISIBILITY_LOCAL"; + case SymbolVisibility.VISIBILITY_EXPORT: + return "VISIBILITY_EXPORT"; + case SymbolVisibility.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name?: + | string + | undefined; + /** e.g. "foo", "foo.bar", etc. */ + package?: + | string + | undefined; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. 
*/ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** + * Names of files imported by this file purely for the purpose of providing + * option extensions. These are excluded from the dependency list above. + */ + optionDependency: string[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options?: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo?: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2", "proto3", and "editions". + * + * If `edition` is present, this value must be "editions". + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + syntax?: + | string + | undefined; + /** + * The edition of the proto file. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + edition?: Edition | undefined; +} + +/** Describes a message type. 
*/ +export interface DescriptorProto { + name?: string | undefined; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options?: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; + /** Support for `export` and `local` keywords on enums. */ + visibility?: SymbolVisibility | undefined; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start?: + | number + | undefined; + /** Exclusive. */ + end?: number | undefined; + options?: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start?: + | number + | undefined; + /** Exclusive. */ + end?: number | undefined; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; + /** + * For external users: DO NOT USE. We are in the process of open sourcing + * extension declaration and executing internal cleanups before it can be + * used externally. + */ + declaration: ExtensionRangeOptions_Declaration[]; + /** Any features defined in the specific edition. */ + features?: + | FeatureSet + | undefined; + /** + * The verification state of the range. + * TODO: flip the default to DECLARATION once all empty ranges + * are marked as UNVERIFIED. + */ + verification?: ExtensionRangeOptions_VerificationState | undefined; +} + +/** The verification state of the extension range. 
*/ +export enum ExtensionRangeOptions_VerificationState { + /** DECLARATION - All the extensions of the range must be declared. */ + DECLARATION = 0, + UNVERIFIED = 1, + UNRECOGNIZED = -1, +} + +export function extensionRangeOptions_VerificationStateFromJSON(object: any): ExtensionRangeOptions_VerificationState { + switch (object) { + case 0: + case "DECLARATION": + return ExtensionRangeOptions_VerificationState.DECLARATION; + case 1: + case "UNVERIFIED": + return ExtensionRangeOptions_VerificationState.UNVERIFIED; + case -1: + case "UNRECOGNIZED": + default: + return ExtensionRangeOptions_VerificationState.UNRECOGNIZED; + } +} + +export function extensionRangeOptions_VerificationStateToJSON(object: ExtensionRangeOptions_VerificationState): string { + switch (object) { + case ExtensionRangeOptions_VerificationState.DECLARATION: + return "DECLARATION"; + case ExtensionRangeOptions_VerificationState.UNVERIFIED: + return "UNVERIFIED"; + case ExtensionRangeOptions_VerificationState.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface ExtensionRangeOptions_Declaration { + /** The extension number declared within the extension range. */ + number?: + | number + | undefined; + /** + * The fully-qualified name of the extension field. There must be a leading + * dot in front of the full name. + */ + fullName?: + | string + | undefined; + /** + * The fully-qualified type name of the extension field. Unlike + * Metadata.type, Declaration.type must have a leading dot for messages + * and enums. + */ + type?: + | string + | undefined; + /** + * If true, indicates that the number is reserved in the extension range, + * and any extension field with the number will fail to compile. Set this + * when a declared extension field is deleted. + */ + reserved?: + | boolean + | undefined; + /** + * If true, indicates that the extension must be defined as repeated. + * Otherwise the extension must be defined as optional. 
+ */ + repeated?: boolean | undefined; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name?: string | undefined; + number?: number | undefined; + label?: + | FieldDescriptorProto_Label + | undefined; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type?: + | FieldDescriptorProto_Type + | undefined; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName?: + | string + | undefined; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee?: + | string + | undefined; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + */ + defaultValue?: + | string + | undefined; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex?: + | number + | undefined; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName?: string | undefined; + options?: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. 
+ * + * When proto3_optional is true, this field must belong to a oneof to signal + * to old proto3 clients that presence is tracked for this field. This oneof + * is known as a "synthetic" oneof, and this field must be its sole member + * (each proto3 optional field gets its own synthetic oneof). Synthetic oneofs + * exist in the descriptor only, and do not generate any API. Synthetic oneofs + * must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional?: boolean | undefined; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported after google.protobuf. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. 
In Editions, the group wire format + * can be enabled via the `message_encoding` feature. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. */ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + 
return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REPEATED = 3, + /** + * LABEL_REQUIRED - The required label is only allowed in google.protobuf. In proto3 and Editions + * it's explicitly prohibited. In Editions, the `field_presence` feature + * can be used to get this behavior. 
+ */ + LABEL_REQUIRED = 2, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name?: string | undefined; + options?: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name?: string | undefined; + value: EnumValueDescriptorProto[]; + options?: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; + /** Support for `export` and `local` keywords on enums. */ + visibility?: SymbolVisibility | undefined; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. 
+ * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start?: + | number + | undefined; + /** Inclusive. */ + end?: number | undefined; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name?: string | undefined; + number?: number | undefined; + options?: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name?: string | undefined; + method: MethodDescriptorProto[]; + options?: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name?: + | string + | undefined; + /** + * Input and output type names. These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType?: string | undefined; + outputType?: string | undefined; + options?: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming?: + | boolean + | undefined; + /** Identifies if server streams multiple server messages */ + serverStreaming?: boolean | undefined; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage?: + | string + | undefined; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. 
+ * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname?: + | string + | undefined; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles?: + | boolean + | undefined; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash?: + | boolean + | undefined; + /** + * A proto2 file can set this to true to opt in to UTF-8 checking for Java, + * which will throw an exception if invalid UTF-8 is parsed from the wire or + * assigned to a string field. + * + * TODO: clarify exactly what kinds of field types this option + * applies to, and update these docs accordingly. + * + * Proto3 files already perform these checks. Setting the option explicitly to + * false has no effect: it cannot be used to opt proto3 files out of UTF-8 + * checks. + */ + javaStringCheckUtf8?: boolean | undefined; + optimizeFor?: + | FileOptions_OptimizeMode + | undefined; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage?: + | string + | undefined; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. 
They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices?: boolean | undefined; + javaGenericServices?: boolean | undefined; + pyGenericServices?: + | boolean + | undefined; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated?: + | boolean + | undefined; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas?: + | boolean + | undefined; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix?: + | string + | undefined; + /** Namespace for generated classes; defaults to the package. */ + csharpNamespace?: + | string + | undefined; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix?: + | string + | undefined; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix?: + | string + | undefined; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. 
When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace?: + | string + | undefined; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace?: + | string + | undefined; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage?: + | string + | undefined; + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat?: + | boolean + | undefined; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor?: + | boolean + | undefined; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated?: + | boolean + | undefined; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry?: + | boolean + | undefined; + /** + * Enable the legacy handling of JSON field name conflicts. This lowercases + * and strips underscored from the fields before comparison in proto3 only. + * The new behavior takes `json_name` into account and applies to proto2 as + * well. + * + * This should only be used as a temporary measure against broken builds due + * to the change in behavior for JSON field name conflicts. + * + * TODO This is legacy behavior we plan to remove once downstream + * teams have had time to migrate. + * + * @deprecated + */ + deprecatedLegacyJsonFieldConflicts?: + | boolean + | undefined; + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. 
Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * NOTE: ctype is deprecated. Use `features.(pb.cpp).string_type` instead. + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is only implemented to support use of + * [ctype=CORD] and [ctype=STRING] (the default) on non-repeated fields of + * type "bytes" in the open source release. + * TODO: make ctype actually deprecated. + */ + ctype?: + | FieldOptions_CType + | undefined; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. This option is prohibited in + * Editions, but the `repeated_field_encoding` feature can be used to control + * the behavior. + */ + packed?: + | boolean + | undefined; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. 
+ * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype?: + | FieldOptions_JSType + | undefined; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that lazy message fields are still eagerly verified to check + * ill-formed wireformat or missing required fields. Calling IsInitialized() + * on the outer message would fail if the inner message has missing required + * fields. Failed verification would result in parsing failure (except when + * uninitialized messages are acceptable). + */ + lazy?: + | boolean + | undefined; + /** + * unverified_lazy does no correctness checks on the byte stream. This should + * only be used where lazy with verification is prohibitive for performance + * reasons. + */ + unverifiedLazy?: + | boolean + | undefined; + /** + * Is this field deprecated? 
+ * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated?: + | boolean + | undefined; + /** For Google-internal migration only. Do not use. */ + weak?: + | boolean + | undefined; + /** + * Indicate that the field value should not be printed out when using debug + * formats, e.g. when the field contains sensitive credentials. + */ + debugRedact?: boolean | undefined; + retention?: FieldOptions_OptionRetention | undefined; + targets: FieldOptions_OptionTargetType[]; + editionDefaults: FieldOptions_EditionDefault[]; + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: FeatureSet | undefined; + featureSupport?: + | FieldOptions_FeatureSupport + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + /** + * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type + * "bytes". It indicates that in C++, the data should be stored in a Cord + * instead of a string. For very large strings, this may reduce memory + * fragmentation. It may also allow better performance when parsing from a + * Cord, or when parsing with aliasing enabled, as the parsed Cord may then + * alias the original buffer. 
+ */ + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. */ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** If set to RETENTION_SOURCE, the option will be omitted from the binary. 
*/ +export enum FieldOptions_OptionRetention { + RETENTION_UNKNOWN = 0, + RETENTION_RUNTIME = 1, + RETENTION_SOURCE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_OptionRetentionFromJSON(object: any): FieldOptions_OptionRetention { + switch (object) { + case 0: + case "RETENTION_UNKNOWN": + return FieldOptions_OptionRetention.RETENTION_UNKNOWN; + case 1: + case "RETENTION_RUNTIME": + return FieldOptions_OptionRetention.RETENTION_RUNTIME; + case 2: + case "RETENTION_SOURCE": + return FieldOptions_OptionRetention.RETENTION_SOURCE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_OptionRetention.UNRECOGNIZED; + } +} + +export function fieldOptions_OptionRetentionToJSON(object: FieldOptions_OptionRetention): string { + switch (object) { + case FieldOptions_OptionRetention.RETENTION_UNKNOWN: + return "RETENTION_UNKNOWN"; + case FieldOptions_OptionRetention.RETENTION_RUNTIME: + return "RETENTION_RUNTIME"; + case FieldOptions_OptionRetention.RETENTION_SOURCE: + return "RETENTION_SOURCE"; + case FieldOptions_OptionRetention.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * This indicates the types of entities that the field may apply to when used + * as an option. If it is unset, then the field may be freely used as an + * option on any kind of entity. 
+ */ +export enum FieldOptions_OptionTargetType { + TARGET_TYPE_UNKNOWN = 0, + TARGET_TYPE_FILE = 1, + TARGET_TYPE_EXTENSION_RANGE = 2, + TARGET_TYPE_MESSAGE = 3, + TARGET_TYPE_FIELD = 4, + TARGET_TYPE_ONEOF = 5, + TARGET_TYPE_ENUM = 6, + TARGET_TYPE_ENUM_ENTRY = 7, + TARGET_TYPE_SERVICE = 8, + TARGET_TYPE_METHOD = 9, + UNRECOGNIZED = -1, +} + +export function fieldOptions_OptionTargetTypeFromJSON(object: any): FieldOptions_OptionTargetType { + switch (object) { + case 0: + case "TARGET_TYPE_UNKNOWN": + return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN; + case 1: + case "TARGET_TYPE_FILE": + return FieldOptions_OptionTargetType.TARGET_TYPE_FILE; + case 2: + case "TARGET_TYPE_EXTENSION_RANGE": + return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE; + case 3: + case "TARGET_TYPE_MESSAGE": + return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE; + case 4: + case "TARGET_TYPE_FIELD": + return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD; + case 5: + case "TARGET_TYPE_ONEOF": + return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF; + case 6: + case "TARGET_TYPE_ENUM": + return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM; + case 7: + case "TARGET_TYPE_ENUM_ENTRY": + return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY; + case 8: + case "TARGET_TYPE_SERVICE": + return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE; + case 9: + case "TARGET_TYPE_METHOD": + return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_OptionTargetType.UNRECOGNIZED; + } +} + +export function fieldOptions_OptionTargetTypeToJSON(object: FieldOptions_OptionTargetType): string { + switch (object) { + case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN: + return "TARGET_TYPE_UNKNOWN"; + case FieldOptions_OptionTargetType.TARGET_TYPE_FILE: + return "TARGET_TYPE_FILE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE: + return "TARGET_TYPE_EXTENSION_RANGE"; + case 
FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE: + return "TARGET_TYPE_MESSAGE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD: + return "TARGET_TYPE_FIELD"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF: + return "TARGET_TYPE_ONEOF"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM: + return "TARGET_TYPE_ENUM"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY: + return "TARGET_TYPE_ENUM_ENTRY"; + case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE: + return "TARGET_TYPE_SERVICE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD: + return "TARGET_TYPE_METHOD"; + case FieldOptions_OptionTargetType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface FieldOptions_EditionDefault { + edition?: + | Edition + | undefined; + /** Textproto value. */ + value?: string | undefined; +} + +/** Information about the support window of a feature. */ +export interface FieldOptions_FeatureSupport { + /** + * The edition that this feature was first available in. In editions + * earlier than this one, the default assigned to EDITION_LEGACY will be + * used, and proto files will not be able to override it. + */ + editionIntroduced?: + | Edition + | undefined; + /** + * The edition this feature becomes deprecated in. Using this after this + * edition may trigger warnings. + */ + editionDeprecated?: + | Edition + | undefined; + /** + * The deprecation warning text if this feature is used after the edition it + * was marked deprecated in. + */ + deprecationWarning?: + | string + | undefined; + /** + * The edition this feature is no longer available in. In editions after + * this one, the last default assigned will be used, and proto files will + * not be able to override it. + */ + editionRemoved?: Edition | undefined; +} + +export interface OneofOptions { + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. 
Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias?: + | boolean + | undefined; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated?: + | boolean + | undefined; + /** + * Enable the legacy handling of JSON field name conflicts. This lowercases + * and strips underscored from the fields before comparison in proto3 only. + * The new behavior takes `json_name` into account and applies to proto2 as + * well. + * TODO Remove this legacy behavior once downstream teams have + * had time to migrate. + * + * @deprecated + */ + deprecatedLegacyJsonFieldConflicts?: + | boolean + | undefined; + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated?: + | boolean + | undefined; + /** + * Any features defined in the specific edition. 
+ * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** + * Indicate that fields annotated with this enum value should not be printed + * out when using debug formats, e.g. when the field contains sensitive + * credentials. + */ + debugRedact?: + | boolean + | undefined; + /** Information about the support window of a feature value. */ + featureSupport?: + | FieldOptions_FeatureSupport + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Any features defined in the specific edition. + * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated?: + | boolean + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated?: boolean | undefined; + idempotencyLevel?: + | MethodOptions_IdempotencyLevel + | undefined; + /** + * Any features defined in the specific edition. 
+ * WARNING: This field should only be used by protobuf plugins or special + * cases like the proto compiler. Other uses are discouraged and + * developers should rely on the protoreflect APIs for their client language. + */ + features?: + | FeatureSet + | undefined; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. + */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. 
This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. + */ + identifierValue?: string | undefined; + positiveIntValue?: number | undefined; + negativeIntValue?: number | undefined; + doubleValue?: number | undefined; + stringValue?: Buffer | undefined; + aggregateValue?: string | undefined; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents + * "foo.(bar.baz).moo". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * TODO Enums in C++ gencode (and potentially other languages) are + * not well scoped. This means that each of the feature enums below can clash + * with each other. The short names we've chosen maximize call-site + * readability, but leave us very open to this scenario. A future feature will + * be designed and implemented to handle this, hopefully before we ever hit a + * conflict here. 
+ */ +export interface FeatureSet { + fieldPresence?: FeatureSet_FieldPresence | undefined; + enumType?: FeatureSet_EnumType | undefined; + repeatedFieldEncoding?: FeatureSet_RepeatedFieldEncoding | undefined; + utf8Validation?: FeatureSet_Utf8Validation | undefined; + messageEncoding?: FeatureSet_MessageEncoding | undefined; + jsonFormat?: FeatureSet_JsonFormat | undefined; + enforceNamingStyle?: FeatureSet_EnforceNamingStyle | undefined; + defaultSymbolVisibility?: FeatureSet_VisibilityFeature_DefaultSymbolVisibility | undefined; +} + +export enum FeatureSet_FieldPresence { + FIELD_PRESENCE_UNKNOWN = 0, + EXPLICIT = 1, + IMPLICIT = 2, + LEGACY_REQUIRED = 3, + UNRECOGNIZED = -1, +} + +export function featureSet_FieldPresenceFromJSON(object: any): FeatureSet_FieldPresence { + switch (object) { + case 0: + case "FIELD_PRESENCE_UNKNOWN": + return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN; + case 1: + case "EXPLICIT": + return FeatureSet_FieldPresence.EXPLICIT; + case 2: + case "IMPLICIT": + return FeatureSet_FieldPresence.IMPLICIT; + case 3: + case "LEGACY_REQUIRED": + return FeatureSet_FieldPresence.LEGACY_REQUIRED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_FieldPresence.UNRECOGNIZED; + } +} + +export function featureSet_FieldPresenceToJSON(object: FeatureSet_FieldPresence): string { + switch (object) { + case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN: + return "FIELD_PRESENCE_UNKNOWN"; + case FeatureSet_FieldPresence.EXPLICIT: + return "EXPLICIT"; + case FeatureSet_FieldPresence.IMPLICIT: + return "IMPLICIT"; + case FeatureSet_FieldPresence.LEGACY_REQUIRED: + return "LEGACY_REQUIRED"; + case FeatureSet_FieldPresence.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_EnumType { + ENUM_TYPE_UNKNOWN = 0, + OPEN = 1, + CLOSED = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_EnumTypeFromJSON(object: any): FeatureSet_EnumType { + switch (object) { + case 0: + case "ENUM_TYPE_UNKNOWN": + return 
FeatureSet_EnumType.ENUM_TYPE_UNKNOWN; + case 1: + case "OPEN": + return FeatureSet_EnumType.OPEN; + case 2: + case "CLOSED": + return FeatureSet_EnumType.CLOSED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_EnumType.UNRECOGNIZED; + } +} + +export function featureSet_EnumTypeToJSON(object: FeatureSet_EnumType): string { + switch (object) { + case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN: + return "ENUM_TYPE_UNKNOWN"; + case FeatureSet_EnumType.OPEN: + return "OPEN"; + case FeatureSet_EnumType.CLOSED: + return "CLOSED"; + case FeatureSet_EnumType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_RepeatedFieldEncoding { + REPEATED_FIELD_ENCODING_UNKNOWN = 0, + PACKED = 1, + EXPANDED = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_RepeatedFieldEncodingFromJSON(object: any): FeatureSet_RepeatedFieldEncoding { + switch (object) { + case 0: + case "REPEATED_FIELD_ENCODING_UNKNOWN": + return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN; + case 1: + case "PACKED": + return FeatureSet_RepeatedFieldEncoding.PACKED; + case 2: + case "EXPANDED": + return FeatureSet_RepeatedFieldEncoding.EXPANDED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_RepeatedFieldEncoding.UNRECOGNIZED; + } +} + +export function featureSet_RepeatedFieldEncodingToJSON(object: FeatureSet_RepeatedFieldEncoding): string { + switch (object) { + case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN: + return "REPEATED_FIELD_ENCODING_UNKNOWN"; + case FeatureSet_RepeatedFieldEncoding.PACKED: + return "PACKED"; + case FeatureSet_RepeatedFieldEncoding.EXPANDED: + return "EXPANDED"; + case FeatureSet_RepeatedFieldEncoding.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_Utf8Validation { + UTF8_VALIDATION_UNKNOWN = 0, + VERIFY = 2, + NONE = 3, + UNRECOGNIZED = -1, +} + +export function featureSet_Utf8ValidationFromJSON(object: any): FeatureSet_Utf8Validation { + 
switch (object) { + case 0: + case "UTF8_VALIDATION_UNKNOWN": + return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN; + case 2: + case "VERIFY": + return FeatureSet_Utf8Validation.VERIFY; + case 3: + case "NONE": + return FeatureSet_Utf8Validation.NONE; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_Utf8Validation.UNRECOGNIZED; + } +} + +export function featureSet_Utf8ValidationToJSON(object: FeatureSet_Utf8Validation): string { + switch (object) { + case FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN: + return "UTF8_VALIDATION_UNKNOWN"; + case FeatureSet_Utf8Validation.VERIFY: + return "VERIFY"; + case FeatureSet_Utf8Validation.NONE: + return "NONE"; + case FeatureSet_Utf8Validation.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_MessageEncoding { + MESSAGE_ENCODING_UNKNOWN = 0, + LENGTH_PREFIXED = 1, + DELIMITED = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_MessageEncodingFromJSON(object: any): FeatureSet_MessageEncoding { + switch (object) { + case 0: + case "MESSAGE_ENCODING_UNKNOWN": + return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN; + case 1: + case "LENGTH_PREFIXED": + return FeatureSet_MessageEncoding.LENGTH_PREFIXED; + case 2: + case "DELIMITED": + return FeatureSet_MessageEncoding.DELIMITED; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_MessageEncoding.UNRECOGNIZED; + } +} + +export function featureSet_MessageEncodingToJSON(object: FeatureSet_MessageEncoding): string { + switch (object) { + case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN: + return "MESSAGE_ENCODING_UNKNOWN"; + case FeatureSet_MessageEncoding.LENGTH_PREFIXED: + return "LENGTH_PREFIXED"; + case FeatureSet_MessageEncoding.DELIMITED: + return "DELIMITED"; + case FeatureSet_MessageEncoding.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_JsonFormat { + JSON_FORMAT_UNKNOWN = 0, + ALLOW = 1, + LEGACY_BEST_EFFORT = 2, + UNRECOGNIZED = -1, +} + +export 
function featureSet_JsonFormatFromJSON(object: any): FeatureSet_JsonFormat { + switch (object) { + case 0: + case "JSON_FORMAT_UNKNOWN": + return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN; + case 1: + case "ALLOW": + return FeatureSet_JsonFormat.ALLOW; + case 2: + case "LEGACY_BEST_EFFORT": + return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_JsonFormat.UNRECOGNIZED; + } +} + +export function featureSet_JsonFormatToJSON(object: FeatureSet_JsonFormat): string { + switch (object) { + case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN: + return "JSON_FORMAT_UNKNOWN"; + case FeatureSet_JsonFormat.ALLOW: + return "ALLOW"; + case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT: + return "LEGACY_BEST_EFFORT"; + case FeatureSet_JsonFormat.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FeatureSet_EnforceNamingStyle { + ENFORCE_NAMING_STYLE_UNKNOWN = 0, + STYLE2024 = 1, + STYLE_LEGACY = 2, + UNRECOGNIZED = -1, +} + +export function featureSet_EnforceNamingStyleFromJSON(object: any): FeatureSet_EnforceNamingStyle { + switch (object) { + case 0: + case "ENFORCE_NAMING_STYLE_UNKNOWN": + return FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN; + case 1: + case "STYLE2024": + return FeatureSet_EnforceNamingStyle.STYLE2024; + case 2: + case "STYLE_LEGACY": + return FeatureSet_EnforceNamingStyle.STYLE_LEGACY; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_EnforceNamingStyle.UNRECOGNIZED; + } +} + +export function featureSet_EnforceNamingStyleToJSON(object: FeatureSet_EnforceNamingStyle): string { + switch (object) { + case FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN: + return "ENFORCE_NAMING_STYLE_UNKNOWN"; + case FeatureSet_EnforceNamingStyle.STYLE2024: + return "STYLE2024"; + case FeatureSet_EnforceNamingStyle.STYLE_LEGACY: + return "STYLE_LEGACY"; + case FeatureSet_EnforceNamingStyle.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface 
FeatureSet_VisibilityFeature { +} + +export enum FeatureSet_VisibilityFeature_DefaultSymbolVisibility { + DEFAULT_SYMBOL_VISIBILITY_UNKNOWN = 0, + /** EXPORT_ALL - Default pre-EDITION_2024, all UNSET visibility are export. */ + EXPORT_ALL = 1, + /** EXPORT_TOP_LEVEL - All top-level symbols default to export, nested default to local. */ + EXPORT_TOP_LEVEL = 2, + /** LOCAL_ALL - All symbols default to local. */ + LOCAL_ALL = 3, + /** + * STRICT - All symbols local by default. Nested types cannot be exported. + * With special case caveat for message { enum {} reserved 1 to max; } + * This is the recommended setting for new protos. + */ + STRICT = 4, + UNRECOGNIZED = -1, +} + +export function featureSet_VisibilityFeature_DefaultSymbolVisibilityFromJSON( + object: any, +): FeatureSet_VisibilityFeature_DefaultSymbolVisibility { + switch (object) { + case 0: + case "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.DEFAULT_SYMBOL_VISIBILITY_UNKNOWN; + case 1: + case "EXPORT_ALL": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.EXPORT_ALL; + case 2: + case "EXPORT_TOP_LEVEL": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.EXPORT_TOP_LEVEL; + case 3: + case "LOCAL_ALL": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.LOCAL_ALL; + case 4: + case "STRICT": + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.STRICT; + case -1: + case "UNRECOGNIZED": + default: + return FeatureSet_VisibilityFeature_DefaultSymbolVisibility.UNRECOGNIZED; + } +} + +export function featureSet_VisibilityFeature_DefaultSymbolVisibilityToJSON( + object: FeatureSet_VisibilityFeature_DefaultSymbolVisibility, +): string { + switch (object) { + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.DEFAULT_SYMBOL_VISIBILITY_UNKNOWN: + return "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN"; + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.EXPORT_ALL: + return "EXPORT_ALL"; + case 
FeatureSet_VisibilityFeature_DefaultSymbolVisibility.EXPORT_TOP_LEVEL: + return "EXPORT_TOP_LEVEL"; + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.LOCAL_ALL: + return "LOCAL_ALL"; + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.STRICT: + return "STRICT"; + case FeatureSet_VisibilityFeature_DefaultSymbolVisibility.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A compiled specification for the defaults of a set of features. These + * messages are generated from FeatureSet extensions and can be used to seed + * feature resolution. The resolution with this object becomes a simple search + * for the closest matching edition, followed by proto merges. + */ +export interface FeatureSetDefaults { + defaults: FeatureSetDefaults_FeatureSetEditionDefault[]; + /** + * The minimum supported edition (inclusive) when this was constructed. + * Editions before this will not have defaults. + */ + minimumEdition?: + | Edition + | undefined; + /** + * The maximum known edition (inclusive) when this was constructed. Editions + * after this will not have reliable defaults. + */ + maximumEdition?: Edition | undefined; +} + +/** + * A map from every known edition with a unique set of defaults to its + * defaults. Not all editions may be contained here. For a given edition, + * the defaults at the closest matching edition ordered at or before it should + * be used. This field must be in strict ascending order by edition. + */ +export interface FeatureSetDefaults_FeatureSetEditionDefault { + edition?: + | Edition + | undefined; + /** Defaults of features that can be overridden in this edition. */ + overridableFeatures?: + | FeatureSet + | undefined; + /** Defaults of features that can't be overridden in this edition. */ + fixedFeatures?: FeatureSet | undefined; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. 
+ */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. 
Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition appears. + * For example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). + */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. 
+ * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to moo. + * // + * // Another line attached to moo. + * optional double moo = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to moo or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. * / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments?: string | undefined; + trailingComments?: string | undefined; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. 
+ */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile?: + | string + | undefined; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin?: + | number + | undefined; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified object. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end?: number | undefined; + semantic?: GeneratedCodeInfo_Annotation_Semantic | undefined; +} + +/** + * Represents the identified object's effect on the element in the original + * .proto file. + */ +export enum GeneratedCodeInfo_Annotation_Semantic { + /** NONE - There is no effect or the effect is indescribable. */ + NONE = 0, + /** SET - The element is set or otherwise mutated. */ + SET = 1, + /** ALIAS - An alias to the element is returned. 
*/ + ALIAS = 2, + UNRECOGNIZED = -1, +} + +export function generatedCodeInfo_Annotation_SemanticFromJSON(object: any): GeneratedCodeInfo_Annotation_Semantic { + switch (object) { + case 0: + case "NONE": + return GeneratedCodeInfo_Annotation_Semantic.NONE; + case 1: + case "SET": + return GeneratedCodeInfo_Annotation_Semantic.SET; + case 2: + case "ALIAS": + return GeneratedCodeInfo_Annotation_Semantic.ALIAS; + case -1: + case "UNRECOGNIZED": + default: + return GeneratedCodeInfo_Annotation_Semantic.UNRECOGNIZED; + } +} + +export function generatedCodeInfo_Annotation_SemanticToJSON(object: GeneratedCodeInfo_Annotation_Semantic): string { + switch (object) { + case GeneratedCodeInfo_Annotation_Semantic.NONE: + return "NONE"; + case GeneratedCodeInfo_Annotation_Semantic.SET: + return "SET"; + case GeneratedCodeInfo_Annotation_Semantic.ALIAS: + return "ALIAS"; + case GeneratedCodeInfo_Annotation_Semantic.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet: MessageFns = { + encode(message: FileDescriptorSet, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { + file: globalThis.Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [], + }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file?.length) { + obj.file = message.file.map((e) => FileDescriptorProto.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): FileDescriptorSet { + return FileDescriptorSet.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + optionDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + edition: 0, + }; +} + +export const FileDescriptorProto: MessageFns = { + encode(message: FileDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== undefined && message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + 
} + writer.join(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.join(); + for (const v of message.optionDependency) { + writer.uint32(122).string(v!); + } + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).join(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).join(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).join(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).join(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).join(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).join(); + } + if (message.syntax !== undefined && message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + if (message.edition !== undefined && message.edition !== 0) { + writer.uint32(112).int32(message.edition); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.package = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.dependency.push(reader.string()); + continue; + } + case 10: { + if (tag === 80) { + message.publicDependency.push(reader.int32()); + + continue; + } + + if (tag === 82) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + + continue; + } + + break; + } + case 11: { + if (tag === 88) { + message.weakDependency.push(reader.int32()); + + continue; + } + + if (tag === 90) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + + continue; + } + + break; + } + case 15: { + if (tag !== 122) { + break; + } + + message.optionDependency.push(reader.string()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.options = FileOptions.decode(reader, reader.uint32()); + continue; + } + case 9: { + if (tag !== 74) { + break; + } + + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + continue; + } + case 12: { + if 
(tag !== 98) { + break; + } + + message.syntax = reader.string(); + continue; + } + case 14: { + if (tag !== 112) { + break; + } + + message.edition = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + package: isSet(object.package) ? globalThis.String(object.package) : "", + dependency: globalThis.Array.isArray(object?.dependency) + ? object.dependency.map((e: any) => globalThis.String(e)) + : [], + publicDependency: globalThis.Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => globalThis.Number(e)) + : [], + weakDependency: globalThis.Array.isArray(object?.weakDependency) + ? object.weakDependency.map((e: any) => globalThis.Number(e)) + : [], + optionDependency: globalThis.Array.isArray(object?.optionDependency) + ? object.optionDependency.map((e: any) => globalThis.String(e)) + : [], + messageType: globalThis.Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: globalThis.Array.isArray(object?.enumType) + ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) + : [], + service: globalThis.Array.isArray(object?.service) + ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) + : [], + extension: globalThis.Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? globalThis.String(object.syntax) : "", + edition: isSet(object.edition) ? 
editionFromJSON(object.edition) : 0, + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.package !== undefined && message.package !== "") { + obj.package = message.package; + } + if (message.dependency?.length) { + obj.dependency = message.dependency; + } + if (message.publicDependency?.length) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } + if (message.weakDependency?.length) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } + if (message.optionDependency?.length) { + obj.optionDependency = message.optionDependency; + } + if (message.messageType?.length) { + obj.messageType = message.messageType.map((e) => DescriptorProto.toJSON(e)); + } + if (message.enumType?.length) { + obj.enumType = message.enumType.map((e) => EnumDescriptorProto.toJSON(e)); + } + if (message.service?.length) { + obj.service = message.service.map((e) => ServiceDescriptorProto.toJSON(e)); + } + if (message.extension?.length) { + obj.extension = message.extension.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = FileOptions.toJSON(message.options); + } + if (message.sourceCodeInfo !== undefined) { + obj.sourceCodeInfo = SourceCodeInfo.toJSON(message.sourceCodeInfo); + } + if (message.syntax !== undefined && message.syntax !== "") { + obj.syntax = message.syntax; + } + if (message.edition !== undefined && message.edition !== 0) { + obj.edition = editionToJSON(message.edition); + } + return obj; + }, + + create, I>>(base?: I): FileDescriptorProto { + return FileDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.optionDependency = object.optionDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + message.edition = object.edition ?? 
0; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + visibility: 0, + }; +} + +export const DescriptorProto: MessageFns = { + encode(message: DescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).join(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).join(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).join(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).join(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).join(); + } + for (const v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).join(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).join(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).join(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + if (message.visibility !== undefined && message.visibility !== 0) { + writer.uint32(88).int32(message.visibility); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.options = MessageOptions.decode(reader, reader.uint32()); + continue; + } + case 9: { + if (tag !== 74) { + break; + } + + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + continue; + } + case 10: { + if (tag !== 82) { + break; + } + + message.reservedName.push(reader.string()); + continue; + } + case 11: { + if (tag !== 88) { + break; + } + + message.visibility = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + field: globalThis.Array.isArray(object?.field) + ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + extension: globalThis.Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: globalThis.Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: globalThis.Array.isArray(object?.enumType) + ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) + : [], + extensionRange: globalThis.Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: globalThis.Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: globalThis.Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: globalThis.Array.isArray(object?.reservedName) + ? object.reservedName.map((e: any) => globalThis.String(e)) + : [], + visibility: isSet(object.visibility) ? 
symbolVisibilityFromJSON(object.visibility) : 0, + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.field?.length) { + obj.field = message.field.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.extension?.length) { + obj.extension = message.extension.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.nestedType?.length) { + obj.nestedType = message.nestedType.map((e) => DescriptorProto.toJSON(e)); + } + if (message.enumType?.length) { + obj.enumType = message.enumType.map((e) => EnumDescriptorProto.toJSON(e)); + } + if (message.extensionRange?.length) { + obj.extensionRange = message.extensionRange.map((e) => DescriptorProto_ExtensionRange.toJSON(e)); + } + if (message.oneofDecl?.length) { + obj.oneofDecl = message.oneofDecl.map((e) => OneofDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = MessageOptions.toJSON(message.options); + } + if (message.reservedRange?.length) { + obj.reservedRange = message.reservedRange.map((e) => DescriptorProto_ReservedRange.toJSON(e)); + } + if (message.reservedName?.length) { + obj.reservedName = message.reservedName; + } + if (message.visibility !== undefined && message.visibility !== 0) { + obj.visibility = symbolVisibilityToJSON(message.visibility); + } + return obj; + }, + + create, I>>(base?: I): DescriptorProto { + return DescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? 
""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + message.visibility = object.visibility ?? 0; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange: MessageFns = { + encode(message: DescriptorProto_ExtensionRange, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.start !== undefined && message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== undefined && message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? globalThis.Number(object.start) : 0, + end: isSet(object.end) ? globalThis.Number(object.end) : 0, + options: isSet(object.options) ? ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + if (message.start !== undefined && message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + if (message.options !== undefined) { + obj.options = ExtensionRangeOptions.toJSON(message.options); + } + return obj; + }, + + create, I>>(base?: I): DescriptorProto_ExtensionRange { + return DescriptorProto_ExtensionRange.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? 
ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange: MessageFns = { + encode(message: DescriptorProto_ReservedRange, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.start !== undefined && message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== undefined && message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { + start: isSet(object.start) ? globalThis.Number(object.start) : 0, + end: isSet(object.end) ? globalThis.Number(object.end) : 0, + }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + if (message.start !== undefined && message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + return obj; + }, + + create, I>>(base?: I): DescriptorProto_ReservedRange { + return DescriptorProto_ReservedRange.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [], declaration: [], features: undefined, verification: 1 }; +} + +export const ExtensionRangeOptions: MessageFns = { + encode(message: ExtensionRangeOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + for (const v of message.declaration) { + ExtensionRangeOptions_Declaration.encode(v!, writer.uint32(18).fork()).join(); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(402).fork()).join(); + } + if (message.verification !== undefined && message.verification !== 1) { + writer.uint32(24).int32(message.verification); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.declaration.push(ExtensionRangeOptions_Declaration.decode(reader, reader.uint32())); + continue; + } + case 50: { + if (tag !== 402) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.verification = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + declaration: globalThis.Array.isArray(object?.declaration) + ? object.declaration.map((e: any) => ExtensionRangeOptions_Declaration.fromJSON(e)) + : [], + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + verification: isSet(object.verification) + ? 
extensionRangeOptions_VerificationStateFromJSON(object.verification) + : 1, + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + if (message.declaration?.length) { + obj.declaration = message.declaration.map((e) => ExtensionRangeOptions_Declaration.toJSON(e)); + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.verification !== undefined && message.verification !== 1) { + obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification); + } + return obj; + }, + + create, I>>(base?: I): ExtensionRangeOptions { + return ExtensionRangeOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + message.declaration = object.declaration?.map((e) => ExtensionRangeOptions_Declaration.fromPartial(e)) || []; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.verification = object.verification ?? 
1; + return message; + }, +}; + +function createBaseExtensionRangeOptions_Declaration(): ExtensionRangeOptions_Declaration { + return { number: 0, fullName: "", type: "", reserved: false, repeated: false }; +} + +export const ExtensionRangeOptions_Declaration: MessageFns = { + encode(message: ExtensionRangeOptions_Declaration, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.number !== undefined && message.number !== 0) { + writer.uint32(8).int32(message.number); + } + if (message.fullName !== undefined && message.fullName !== "") { + writer.uint32(18).string(message.fullName); + } + if (message.type !== undefined && message.type !== "") { + writer.uint32(26).string(message.type); + } + if (message.reserved !== undefined && message.reserved !== false) { + writer.uint32(40).bool(message.reserved); + } + if (message.repeated !== undefined && message.repeated !== false) { + writer.uint32(48).bool(message.repeated); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ExtensionRangeOptions_Declaration { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions_Declaration(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.number = reader.int32(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.fullName = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.type = reader.string(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.reserved = reader.bool(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.repeated = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions_Declaration { + return { + number: isSet(object.number) ? globalThis.Number(object.number) : 0, + fullName: isSet(object.fullName) ? globalThis.String(object.fullName) : "", + type: isSet(object.type) ? globalThis.String(object.type) : "", + reserved: isSet(object.reserved) ? globalThis.Boolean(object.reserved) : false, + repeated: isSet(object.repeated) ? 
globalThis.Boolean(object.repeated) : false, + }; + }, + + toJSON(message: ExtensionRangeOptions_Declaration): unknown { + const obj: any = {}; + if (message.number !== undefined && message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.fullName !== undefined && message.fullName !== "") { + obj.fullName = message.fullName; + } + if (message.type !== undefined && message.type !== "") { + obj.type = message.type; + } + if (message.reserved !== undefined && message.reserved !== false) { + obj.reserved = message.reserved; + } + if (message.repeated !== undefined && message.repeated !== false) { + obj.repeated = message.repeated; + } + return obj; + }, + + create, I>>( + base?: I, + ): ExtensionRangeOptions_Declaration { + return ExtensionRangeOptions_Declaration.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): ExtensionRangeOptions_Declaration { + const message = createBaseExtensionRangeOptions_Declaration(); + message.number = object.number ?? 0; + message.fullName = object.fullName ?? ""; + message.type = object.type ?? ""; + message.reserved = object.reserved ?? false; + message.repeated = object.repeated ?? 
false; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto: MessageFns = { + encode(message: FieldDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== undefined && message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== undefined && message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== undefined && message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== undefined && message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== undefined && message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== undefined && message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== undefined && message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== undefined && message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).join(); + } + if (message.proto3Optional !== undefined && message.proto3Optional !== false) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.number = reader.int32(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.label = reader.int32() as any; + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.type = reader.int32() as any; + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.typeName = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.extendee = reader.string(); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.defaultValue = reader.string(); + continue; + } + case 9: { + if (tag !== 72) { + break; + } + + message.oneofIndex = reader.int32(); + continue; + } + case 10: { + if (tag !== 82) { + break; + } + + message.jsonName = reader.string(); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.options = FieldOptions.decode(reader, reader.uint32()); + continue; + } + case 17: { + if (tag !== 136) { + break; + } + + message.proto3Optional = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + number: isSet(object.number) ? globalThis.Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? globalThis.String(object.typeName) : "", + extendee: isSet(object.extendee) ? globalThis.String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? 
globalThis.String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? globalThis.Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? globalThis.String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? globalThis.Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.number !== undefined && message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.label !== undefined && message.label !== 1) { + obj.label = fieldDescriptorProto_LabelToJSON(message.label); + } + if (message.type !== undefined && message.type !== 1) { + obj.type = fieldDescriptorProto_TypeToJSON(message.type); + } + if (message.typeName !== undefined && message.typeName !== "") { + obj.typeName = message.typeName; + } + if (message.extendee !== undefined && message.extendee !== "") { + obj.extendee = message.extendee; + } + if (message.defaultValue !== undefined && message.defaultValue !== "") { + obj.defaultValue = message.defaultValue; + } + if (message.oneofIndex !== undefined && message.oneofIndex !== 0) { + obj.oneofIndex = Math.round(message.oneofIndex); + } + if (message.jsonName !== undefined && message.jsonName !== "") { + obj.jsonName = message.jsonName; + } + if (message.options !== undefined) { + obj.options = FieldOptions.toJSON(message.options); + } + if (message.proto3Optional !== undefined && message.proto3Optional !== false) { + obj.proto3Optional = message.proto3Optional; + } + return obj; + }, + + create, I>>(base?: I): FieldDescriptorProto { + return FieldDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? 
""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto: MessageFns = { + encode(message: OneofDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.options = OneofOptions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + options: isSet(object.options) ? 
OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.options !== undefined) { + obj.options = OneofOptions.toJSON(message.options); + } + return obj; + }, + + create, I>>(base?: I): OneofDescriptorProto { + return OneofDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [], visibility: 0 }; +} + +export const EnumDescriptorProto: MessageFns = { + encode(message: EnumDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).join(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).join(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).join(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + if (message.visibility !== undefined && message.visibility !== 0) { + writer.uint32(48).int32(message.visibility); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.options = EnumOptions.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.reservedName.push(reader.string()); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.visibility = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + value: globalThis.Array.isArray(object?.value) + ? object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: globalThis.Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: globalThis.Array.isArray(object?.reservedName) + ? object.reservedName.map((e: any) => globalThis.String(e)) + : [], + visibility: isSet(object.visibility) ? 
symbolVisibilityFromJSON(object.visibility) : 0, + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.value?.length) { + obj.value = message.value.map((e) => EnumValueDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = EnumOptions.toJSON(message.options); + } + if (message.reservedRange?.length) { + obj.reservedRange = message.reservedRange.map((e) => EnumDescriptorProto_EnumReservedRange.toJSON(e)); + } + if (message.reservedName?.length) { + obj.reservedName = message.reservedName; + } + if (message.visibility !== undefined && message.visibility !== 0) { + obj.visibility = symbolVisibilityToJSON(message.visibility); + } + return obj; + }, + + create, I>>(base?: I): EnumDescriptorProto { + return EnumDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) || + []; + message.reservedName = object.reservedName?.map((e) => e) || []; + message.visibility = object.visibility ?? 
0; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange: MessageFns = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.start !== undefined && message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== undefined && message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { + start: isSet(object.start) ? globalThis.Number(object.start) : 0, + end: isSet(object.end) ? globalThis.Number(object.end) : 0, + }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + if (message.start !== undefined && message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + return obj; + }, + + create, I>>( + base?: I, + ): EnumDescriptorProto_EnumReservedRange { + return EnumDescriptorProto_EnumReservedRange.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto: MessageFns = { + encode(message: EnumValueDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== undefined && message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.number = reader.int32(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.options = EnumValueOptions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + number: isSet(object.number) ? globalThis.Number(object.number) : 0, + options: isSet(object.options) ? 
EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.number !== undefined && message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.options !== undefined) { + obj.options = EnumValueOptions.toJSON(message.options); + } + return obj; + }, + + create, I>>(base?: I): EnumValueDescriptorProto { + return EnumValueDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto: MessageFns = { + encode(message: ServiceDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).join(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.options = ServiceOptions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + method: globalThis.Array.isArray(object?.method) + ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.method?.length) { + obj.method = message.method.map((e) => MethodDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = ServiceOptions.toJSON(message.options); + } + return obj; + }, + + create, I>>(base?: I): ServiceDescriptorProto { + return ServiceDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto: MessageFns = { + encode(message: MethodDescriptorProto, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined && message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== undefined && message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== undefined && message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).join(); + } + if (message.clientStreaming !== undefined && message.clientStreaming !== false) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming !== undefined && message.serverStreaming !== false) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.inputType = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.outputType = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.options = MethodOptions.decode(reader, reader.uint32()); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.clientStreaming = reader.bool(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.serverStreaming = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + inputType: isSet(object.inputType) ? globalThis.String(object.inputType) : "", + outputType: isSet(object.outputType) ? globalThis.String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? globalThis.Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? 
globalThis.Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.inputType !== undefined && message.inputType !== "") { + obj.inputType = message.inputType; + } + if (message.outputType !== undefined && message.outputType !== "") { + obj.outputType = message.outputType; + } + if (message.options !== undefined) { + obj.options = MethodOptions.toJSON(message.options); + } + if (message.clientStreaming !== undefined && message.clientStreaming !== false) { + obj.clientStreaming = message.clientStreaming; + } + if (message.serverStreaming !== undefined && message.serverStreaming !== false) { + obj.serverStreaming = message.serverStreaming; + } + return obj; + }, + + create, I>>(base?: I): MethodDescriptorProto { + return MethodDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? ""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? 
false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + deprecated: false, + ccEnableArenas: true, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + features: undefined, + uninterpretedOption: [], + }; +} + +export const FileOptions: MessageFns = { + encode(message: FileOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.javaPackage !== undefined && message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== undefined && message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== undefined && message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) { + 
writer.uint32(136).bool(message.javaGenericServices); + } + if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== undefined && message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== undefined && message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(402).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.javaPackage = reader.string(); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.javaOuterClassname = reader.string(); + continue; + } + case 10: { + if (tag !== 80) { + break; + } + + message.javaMultipleFiles = reader.bool(); + continue; + } + case 20: { + if (tag !== 160) { + break; + } + + message.javaGenerateEqualsAndHash = reader.bool(); + continue; + } + case 27: { + if (tag !== 216) { + break; + } + + message.javaStringCheckUtf8 = reader.bool(); + continue; + } + case 9: { + if (tag !== 72) { + break; + } + + message.optimizeFor = reader.int32() as any; + continue; + } + case 11: { + if (tag !== 90) { + break; + } + + message.goPackage = reader.string(); + continue; + } + case 16: { + if (tag !== 128) { + break; + } + + message.ccGenericServices = reader.bool(); + continue; + } + case 17: { + if (tag !== 136) { + break; + } + + message.javaGenericServices = reader.bool(); + continue; + } + case 18: { + if (tag !== 144) { + break; + } + + message.pyGenericServices = reader.bool(); + continue; + } + case 23: { + if (tag !== 184) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 31: { + if (tag !== 248) { + break; + } + + message.ccEnableArenas = reader.bool(); + continue; + } + case 36: { + if (tag !== 290) { + break; + } + + message.objcClassPrefix = reader.string(); + continue; + } + case 37: { + if (tag !== 298) { + break; + } + + message.csharpNamespace = reader.string(); + continue; + } + case 39: { + if (tag !== 314) { + break; + } + + message.swiftPrefix = reader.string(); + continue; + } + case 40: { + if (tag !== 322) { + break; + } + + message.phpClassPrefix = reader.string(); + continue; + } + case 41: { + if (tag !== 330) { + break; + } + + message.phpNamespace = reader.string(); + 
continue; + } + case 44: { + if (tag !== 354) { + break; + } + + message.phpMetadataNamespace = reader.string(); + continue; + } + case 45: { + if (tag !== 362) { + break; + } + + message.rubyPackage = reader.string(); + continue; + } + case 50: { + if (tag !== 402) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? globalThis.String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? globalThis.String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? globalThis.Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? globalThis.Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? globalThis.Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? globalThis.String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? globalThis.Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? globalThis.Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? globalThis.Boolean(object.pyGenericServices) : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? globalThis.Boolean(object.ccEnableArenas) : true, + objcClassPrefix: isSet(object.objcClassPrefix) ? 
globalThis.String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? globalThis.String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? globalThis.String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? globalThis.String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? globalThis.String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? globalThis.String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? globalThis.String(object.rubyPackage) : "", + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + if (message.javaPackage !== undefined && message.javaPackage !== "") { + obj.javaPackage = message.javaPackage; + } + if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") { + obj.javaOuterClassname = message.javaOuterClassname; + } + if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) { + obj.javaMultipleFiles = message.javaMultipleFiles; + } + if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) { + obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash; + } + if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) { + obj.javaStringCheckUtf8 = message.javaStringCheckUtf8; + } + if (message.optimizeFor !== undefined && message.optimizeFor !== 1) { + obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor); + } + if (message.goPackage !== undefined && message.goPackage !== "") { + obj.goPackage = message.goPackage; + } + if (message.ccGenericServices !== undefined && 
message.ccGenericServices !== false) { + obj.ccGenericServices = message.ccGenericServices; + } + if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) { + obj.javaGenericServices = message.javaGenericServices; + } + if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) { + obj.pyGenericServices = message.pyGenericServices; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) { + obj.ccEnableArenas = message.ccEnableArenas; + } + if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") { + obj.objcClassPrefix = message.objcClassPrefix; + } + if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") { + obj.csharpNamespace = message.csharpNamespace; + } + if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") { + obj.swiftPrefix = message.swiftPrefix; + } + if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") { + obj.phpClassPrefix = message.phpClassPrefix; + } + if (message.phpNamespace !== undefined && message.phpNamespace !== "") { + obj.phpNamespace = message.phpNamespace; + } + if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") { + obj.phpMetadataNamespace = message.phpMetadataNamespace; + } + if (message.rubyPackage !== undefined && message.rubyPackage !== "") { + obj.rubyPackage = message.rubyPackage; + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): FileOptions { + return FileOptions.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? true; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? ""; + message.features = (object.features !== undefined && object.features !== null) + ? 
FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + deprecatedLegacyJsonFieldConflicts: false, + features: undefined, + uninterpretedOption: [], + }; +} + +export const MessageOptions: MessageFns = { + encode(message: MessageOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry !== undefined && message.mapEntry !== false) { + writer.uint32(56).bool(message.mapEntry); + } + if ( + message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false + ) { + writer.uint32(88).bool(message.deprecatedLegacyJsonFieldConflicts); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(98).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.messageSetWireFormat = reader.bool(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.noStandardDescriptorAccessor = reader.bool(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 7: { + if (tag !== 56) { + break; + } + + message.mapEntry = reader.bool(); + continue; + } + case 11: { + if (tag !== 88) { + break; + } + + message.deprecatedLegacyJsonFieldConflicts = reader.bool(); + continue; + } + case 12: { + if (tag !== 98) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) + ? globalThis.Boolean(object.messageSetWireFormat) + : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? globalThis.Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? globalThis.Boolean(object.mapEntry) : false, + deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts) + ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts) + : false, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) { + obj.messageSetWireFormat = message.messageSetWireFormat; + } + if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) { + obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.mapEntry !== undefined && message.mapEntry !== false) { + obj.mapEntry = message.mapEntry; + } + if ( + message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false + ) { + obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): MessageOptions { + return MessageOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? false; + message.deprecatedLegacyJsonFieldConflicts = object.deprecatedLegacyJsonFieldConflicts ?? false; + message.features = (object.features !== undefined && object.features !== null) + ? 
FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { + ctype: 0, + packed: false, + jstype: 0, + lazy: false, + unverifiedLazy: false, + deprecated: false, + weak: false, + debugRedact: false, + retention: 0, + targets: [], + editionDefaults: [], + features: undefined, + featureSupport: undefined, + uninterpretedOption: [], + }; +} + +export const FieldOptions: MessageFns = { + encode(message: FieldOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.ctype !== undefined && message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed !== undefined && message.packed !== false) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== undefined && message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy !== undefined && message.lazy !== false) { + writer.uint32(40).bool(message.lazy); + } + if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) { + writer.uint32(120).bool(message.unverifiedLazy); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak !== undefined && message.weak !== false) { + writer.uint32(80).bool(message.weak); + } + if (message.debugRedact !== undefined && message.debugRedact !== false) { + writer.uint32(128).bool(message.debugRedact); + } + if (message.retention !== undefined && message.retention !== 0) { + writer.uint32(136).int32(message.retention); + } + writer.uint32(154).fork(); + for (const v of message.targets) { + writer.int32(v); + } + writer.join(); + for (const v of message.editionDefaults) { + FieldOptions_EditionDefault.encode(v!, writer.uint32(162).fork()).join(); + } + if (message.features !== undefined) { + 
FeatureSet.encode(message.features, writer.uint32(170).fork()).join(); + } + if (message.featureSupport !== undefined) { + FieldOptions_FeatureSupport.encode(message.featureSupport, writer.uint32(178).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.ctype = reader.int32() as any; + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.packed = reader.bool(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.jstype = reader.int32() as any; + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.lazy = reader.bool(); + continue; + } + case 15: { + if (tag !== 120) { + break; + } + + message.unverifiedLazy = reader.bool(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 10: { + if (tag !== 80) { + break; + } + + message.weak = reader.bool(); + continue; + } + case 16: { + if (tag !== 128) { + break; + } + + message.debugRedact = reader.bool(); + continue; + } + case 17: { + if (tag !== 136) { + break; + } + + message.retention = reader.int32() as any; + continue; + } + case 19: { + if (tag === 152) { + message.targets.push(reader.int32() as any); + + continue; + } + + if (tag === 154) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.targets.push(reader.int32() as any); + } + + continue; + } + + break; + } + case 20: { + if (tag !== 162) { + break; + } + + 
message.editionDefaults.push(FieldOptions_EditionDefault.decode(reader, reader.uint32())); + continue; + } + case 21: { + if (tag !== 170) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 22: { + if (tag !== 178) { + break; + } + + message.featureSupport = FieldOptions_FeatureSupport.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? globalThis.Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? globalThis.Boolean(object.lazy) : false, + unverifiedLazy: isSet(object.unverifiedLazy) ? globalThis.Boolean(object.unverifiedLazy) : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? globalThis.Boolean(object.weak) : false, + debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false, + retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0, + targets: globalThis.Array.isArray(object?.targets) + ? object.targets.map((e: any) => fieldOptions_OptionTargetTypeFromJSON(e)) + : [], + editionDefaults: globalThis.Array.isArray(object?.editionDefaults) + ? object.editionDefaults.map((e: any) => FieldOptions_EditionDefault.fromJSON(e)) + : [], + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + featureSupport: isSet(object.featureSupport) + ? 
FieldOptions_FeatureSupport.fromJSON(object.featureSupport) + : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + if (message.ctype !== undefined && message.ctype !== 0) { + obj.ctype = fieldOptions_CTypeToJSON(message.ctype); + } + if (message.packed !== undefined && message.packed !== false) { + obj.packed = message.packed; + } + if (message.jstype !== undefined && message.jstype !== 0) { + obj.jstype = fieldOptions_JSTypeToJSON(message.jstype); + } + if (message.lazy !== undefined && message.lazy !== false) { + obj.lazy = message.lazy; + } + if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) { + obj.unverifiedLazy = message.unverifiedLazy; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.weak !== undefined && message.weak !== false) { + obj.weak = message.weak; + } + if (message.debugRedact !== undefined && message.debugRedact !== false) { + obj.debugRedact = message.debugRedact; + } + if (message.retention !== undefined && message.retention !== 0) { + obj.retention = fieldOptions_OptionRetentionToJSON(message.retention); + } + if (message.targets?.length) { + obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e)); + } + if (message.editionDefaults?.length) { + obj.editionDefaults = message.editionDefaults.map((e) => FieldOptions_EditionDefault.toJSON(e)); + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.featureSupport !== undefined) { + obj.featureSupport = FieldOptions_FeatureSupport.toJSON(message.featureSupport); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => 
UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): FieldOptions { + return FieldOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.unverifiedLazy = object.unverifiedLazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.debugRedact = object.debugRedact ?? false; + message.retention = object.retention ?? 0; + message.targets = object.targets?.map((e) => e) || []; + message.editionDefaults = object.editionDefaults?.map((e) => FieldOptions_EditionDefault.fromPartial(e)) || []; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.featureSupport = (object.featureSupport !== undefined && object.featureSupport !== null) + ? FieldOptions_FeatureSupport.fromPartial(object.featureSupport) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions_EditionDefault(): FieldOptions_EditionDefault { + return { edition: 0, value: "" }; +} + +export const FieldOptions_EditionDefault: MessageFns = { + encode(message: FieldOptions_EditionDefault, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.edition !== undefined && message.edition !== 0) { + writer.uint32(24).int32(message.edition); + } + if (message.value !== undefined && message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FieldOptions_EditionDefault { + const reader = input instanceof BinaryReader ? 
input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFieldOptions_EditionDefault(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 3: { + if (tag !== 24) { + break; + } + + message.edition = reader.int32() as any; + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldOptions_EditionDefault { + return { + edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0, + value: isSet(object.value) ? globalThis.String(object.value) : "", + }; + }, + + toJSON(message: FieldOptions_EditionDefault): unknown { + const obj: any = {}; + if (message.edition !== undefined && message.edition !== 0) { + obj.edition = editionToJSON(message.edition); + } + if (message.value !== undefined && message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create, I>>(base?: I): FieldOptions_EditionDefault { + return FieldOptions_EditionDefault.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FieldOptions_EditionDefault { + const message = createBaseFieldOptions_EditionDefault(); + message.edition = object.edition ?? 0; + message.value = object.value ?? 
""; + return message; + }, +}; + +function createBaseFieldOptions_FeatureSupport(): FieldOptions_FeatureSupport { + return { editionIntroduced: 0, editionDeprecated: 0, deprecationWarning: "", editionRemoved: 0 }; +} + +export const FieldOptions_FeatureSupport: MessageFns = { + encode(message: FieldOptions_FeatureSupport, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) { + writer.uint32(8).int32(message.editionIntroduced); + } + if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) { + writer.uint32(16).int32(message.editionDeprecated); + } + if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") { + writer.uint32(26).string(message.deprecationWarning); + } + if (message.editionRemoved !== undefined && message.editionRemoved !== 0) { + writer.uint32(32).int32(message.editionRemoved); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FieldOptions_FeatureSupport { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions_FeatureSupport(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.editionIntroduced = reader.int32() as any; + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.editionDeprecated = reader.int32() as any; + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.deprecationWarning = reader.string(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.editionRemoved = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldOptions_FeatureSupport { + return { + editionIntroduced: isSet(object.editionIntroduced) ? editionFromJSON(object.editionIntroduced) : 0, + editionDeprecated: isSet(object.editionDeprecated) ? editionFromJSON(object.editionDeprecated) : 0, + deprecationWarning: isSet(object.deprecationWarning) ? globalThis.String(object.deprecationWarning) : "", + editionRemoved: isSet(object.editionRemoved) ? 
editionFromJSON(object.editionRemoved) : 0, + }; + }, + + toJSON(message: FieldOptions_FeatureSupport): unknown { + const obj: any = {}; + if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) { + obj.editionIntroduced = editionToJSON(message.editionIntroduced); + } + if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) { + obj.editionDeprecated = editionToJSON(message.editionDeprecated); + } + if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") { + obj.deprecationWarning = message.deprecationWarning; + } + if (message.editionRemoved !== undefined && message.editionRemoved !== 0) { + obj.editionRemoved = editionToJSON(message.editionRemoved); + } + return obj; + }, + + create, I>>(base?: I): FieldOptions_FeatureSupport { + return FieldOptions_FeatureSupport.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FieldOptions_FeatureSupport { + const message = createBaseFieldOptions_FeatureSupport(); + message.editionIntroduced = object.editionIntroduced ?? 0; + message.editionDeprecated = object.editionDeprecated ?? 0; + message.deprecationWarning = object.deprecationWarning ?? ""; + message.editionRemoved = object.editionRemoved ?? 0; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { features: undefined, uninterpretedOption: [] }; +} + +export const OneofOptions: MessageFns = { + encode(message: OneofOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(10).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): OneofOptions { + return OneofOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.features = (object.features !== undefined && object.features !== null) + ? 
FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { + allowAlias: false, + deprecated: false, + deprecatedLegacyJsonFieldConflicts: false, + features: undefined, + uninterpretedOption: [], + }; +} + +export const EnumOptions: MessageFns = { + encode(message: EnumOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.allowAlias !== undefined && message.allowAlias !== false) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(24).bool(message.deprecated); + } + if ( + message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false + ) { + writer.uint32(48).bool(message.deprecatedLegacyJsonFieldConflicts); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(58).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + if (tag !== 16) { + break; + } + + message.allowAlias = reader.bool(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.deprecatedLegacyJsonFieldConflicts = reader.bool(); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? globalThis.Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts) + ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts) + : false, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + if (message.allowAlias !== undefined && message.allowAlias !== false) { + obj.allowAlias = message.allowAlias; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if ( + message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false + ) { + obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): EnumOptions { + return EnumOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.deprecatedLegacyJsonFieldConflicts = object.deprecatedLegacyJsonFieldConflicts ?? false; + message.features = (object.features !== undefined && object.features !== null) + ? 
FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { + deprecated: false, + features: undefined, + debugRedact: false, + featureSupport: undefined, + uninterpretedOption: [], + }; +} + +export const EnumValueOptions: MessageFns = { + encode(message: EnumValueOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(8).bool(message.deprecated); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(18).fork()).join(); + } + if (message.debugRedact !== undefined && message.debugRedact !== false) { + writer.uint32(24).bool(message.debugRedact); + } + if (message.featureSupport !== undefined) { + FieldOptions_FeatureSupport.encode(message.featureSupport, writer.uint32(34).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.debugRedact = reader.bool(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.featureSupport = FieldOptions_FeatureSupport.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false, + featureSupport: isSet(object.featureSupport) + ? FieldOptions_FeatureSupport.fromJSON(object.featureSupport) + : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.debugRedact !== undefined && message.debugRedact !== false) { + obj.debugRedact = message.debugRedact; + } + if (message.featureSupport !== undefined) { + obj.featureSupport = FieldOptions_FeatureSupport.toJSON(message.featureSupport); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): EnumValueOptions { + return EnumValueOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.debugRedact = object.debugRedact ?? false; + message.featureSupport = (object.featureSupport !== undefined && object.featureSupport !== null) + ? 
FieldOptions_FeatureSupport.fromPartial(object.featureSupport) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { features: undefined, deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions: MessageFns = { + encode(message: ServiceOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(274).fork()).join(); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 34: { + if (tag !== 274) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 33: { + if (tag !== 264) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + deprecated: isSet(object.deprecated) ? 
globalThis.Boolean(object.deprecated) : false, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): ServiceOptions { + return ServiceOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.features = (object.features !== undefined && object.features !== null) + ? FeatureSet.fromPartial(object.features) + : undefined; + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, features: undefined, uninterpretedOption: [] }; +} + +export const MethodOptions: MessageFns = { + encode(message: MethodOptions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.deprecated !== undefined && message.deprecated !== false) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + if (message.features !== undefined) { + FeatureSet.encode(message.features, writer.uint32(282).fork()).join(); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: { + if (tag !== 264) { + break; + } + + message.deprecated = reader.bool(); + continue; + } + case 34: { + if (tag !== 272) { + break; + } + + message.idempotencyLevel = reader.int32() as any; + continue; + } + case 35: { + if (tag !== 282) { + break; + } + + message.features = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 999: { + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) { + obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel); + } + if (message.features !== undefined) { + obj.features = FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): MethodOptions { + return MethodOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.features = (object.features !== undefined && object.features !== null) + ? 
FeatureSet.fromPartial(object.features) + : undefined; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: Buffer.alloc(0), + aggregateValue: "", + }; +} + +export const UninterpretedOption: MessageFns = { + encode(message: UninterpretedOption, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).join(); + } + if (message.identifierValue !== undefined && message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== undefined && message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== undefined && message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== undefined && message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue !== undefined && message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== undefined && message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + if (tag !== 18) { + break; + } + + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.identifierValue = reader.string(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.positiveIntValue = longToNumber(reader.uint64()); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.negativeIntValue = longToNumber(reader.int64()); + continue; + } + case 6: { + if (tag !== 49) { + break; + } + + message.doubleValue = reader.double(); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.stringValue = Buffer.from(reader.bytes()); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.aggregateValue = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: globalThis.Array.isArray(object?.name) + ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) + : [], + identifierValue: isSet(object.identifierValue) ? globalThis.String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? globalThis.Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? globalThis.Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? globalThis.Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0), + aggregateValue: isSet(object.aggregateValue) ? 
globalThis.String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name?.length) { + obj.name = message.name.map((e) => UninterpretedOption_NamePart.toJSON(e)); + } + if (message.identifierValue !== undefined && message.identifierValue !== "") { + obj.identifierValue = message.identifierValue; + } + if (message.positiveIntValue !== undefined && message.positiveIntValue !== 0) { + obj.positiveIntValue = Math.round(message.positiveIntValue); + } + if (message.negativeIntValue !== undefined && message.negativeIntValue !== 0) { + obj.negativeIntValue = Math.round(message.negativeIntValue); + } + if (message.doubleValue !== undefined && message.doubleValue !== 0) { + obj.doubleValue = message.doubleValue; + } + if (message.stringValue !== undefined && message.stringValue.length !== 0) { + obj.stringValue = base64FromBytes(message.stringValue); + } + if (message.aggregateValue !== undefined && message.aggregateValue !== "") { + obj.aggregateValue = message.aggregateValue; + } + return obj; + }, + + create, I>>(base?: I): UninterpretedOption { + return UninterpretedOption.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? Buffer.alloc(0); + message.aggregateValue = object.aggregateValue ?? 
""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart: MessageFns = { + encode(message: UninterpretedOption_NamePart, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension !== false) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.namePart = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.isExtension = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? globalThis.String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? globalThis.Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + if (message.namePart !== "") { + obj.namePart = message.namePart; + } + if (message.isExtension !== false) { + obj.isExtension = message.isExtension; + } + return obj; + }, + + create, I>>(base?: I): UninterpretedOption_NamePart { + return UninterpretedOption_NamePart.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseFeatureSet(): FeatureSet { + return { + fieldPresence: 0, + enumType: 0, + repeatedFieldEncoding: 0, + utf8Validation: 0, + messageEncoding: 0, + jsonFormat: 0, + enforceNamingStyle: 0, + defaultSymbolVisibility: 0, + }; +} + +export const FeatureSet: MessageFns = { + encode(message: FeatureSet, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.fieldPresence !== undefined && message.fieldPresence !== 0) { + writer.uint32(8).int32(message.fieldPresence); + } + if (message.enumType !== undefined && message.enumType !== 0) { + writer.uint32(16).int32(message.enumType); + } + if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) { + writer.uint32(24).int32(message.repeatedFieldEncoding); + } + if (message.utf8Validation !== undefined && message.utf8Validation !== 0) { + writer.uint32(32).int32(message.utf8Validation); + } + if (message.messageEncoding !== undefined && message.messageEncoding !== 0) { + writer.uint32(40).int32(message.messageEncoding); + } + if (message.jsonFormat !== undefined && message.jsonFormat !== 0) { + writer.uint32(48).int32(message.jsonFormat); + } + if (message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) { + writer.uint32(56).int32(message.enforceNamingStyle); + } + if (message.defaultSymbolVisibility !== undefined && message.defaultSymbolVisibility !== 0) { + writer.uint32(64).int32(message.defaultSymbolVisibility); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FeatureSet { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFeatureSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.fieldPresence = reader.int32() as any; + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.enumType = reader.int32() as any; + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.repeatedFieldEncoding = reader.int32() as any; + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.utf8Validation = reader.int32() as any; + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.messageEncoding = reader.int32() as any; + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.jsonFormat = reader.int32() as any; + continue; + } + case 7: { + if (tag !== 56) { + break; + } + + message.enforceNamingStyle = reader.int32() as any; + continue; + } + case 8: { + if (tag !== 64) { + break; + } + + message.defaultSymbolVisibility = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FeatureSet { + return { + fieldPresence: isSet(object.fieldPresence) ? featureSet_FieldPresenceFromJSON(object.fieldPresence) : 0, + enumType: isSet(object.enumType) ? featureSet_EnumTypeFromJSON(object.enumType) : 0, + repeatedFieldEncoding: isSet(object.repeatedFieldEncoding) + ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding) + : 0, + utf8Validation: isSet(object.utf8Validation) ? featureSet_Utf8ValidationFromJSON(object.utf8Validation) : 0, + messageEncoding: isSet(object.messageEncoding) ? featureSet_MessageEncodingFromJSON(object.messageEncoding) : 0, + jsonFormat: isSet(object.jsonFormat) ? featureSet_JsonFormatFromJSON(object.jsonFormat) : 0, + enforceNamingStyle: isSet(object.enforceNamingStyle) + ? 
featureSet_EnforceNamingStyleFromJSON(object.enforceNamingStyle) + : 0, + defaultSymbolVisibility: isSet(object.defaultSymbolVisibility) + ? featureSet_VisibilityFeature_DefaultSymbolVisibilityFromJSON(object.defaultSymbolVisibility) + : 0, + }; + }, + + toJSON(message: FeatureSet): unknown { + const obj: any = {}; + if (message.fieldPresence !== undefined && message.fieldPresence !== 0) { + obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence); + } + if (message.enumType !== undefined && message.enumType !== 0) { + obj.enumType = featureSet_EnumTypeToJSON(message.enumType); + } + if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) { + obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding); + } + if (message.utf8Validation !== undefined && message.utf8Validation !== 0) { + obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation); + } + if (message.messageEncoding !== undefined && message.messageEncoding !== 0) { + obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding); + } + if (message.jsonFormat !== undefined && message.jsonFormat !== 0) { + obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat); + } + if (message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) { + obj.enforceNamingStyle = featureSet_EnforceNamingStyleToJSON(message.enforceNamingStyle); + } + if (message.defaultSymbolVisibility !== undefined && message.defaultSymbolVisibility !== 0) { + obj.defaultSymbolVisibility = featureSet_VisibilityFeature_DefaultSymbolVisibilityToJSON( + message.defaultSymbolVisibility, + ); + } + return obj; + }, + + create, I>>(base?: I): FeatureSet { + return FeatureSet.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FeatureSet { + const message = createBaseFeatureSet(); + message.fieldPresence = object.fieldPresence ?? 0; + message.enumType = object.enumType ?? 
0; + message.repeatedFieldEncoding = object.repeatedFieldEncoding ?? 0; + message.utf8Validation = object.utf8Validation ?? 0; + message.messageEncoding = object.messageEncoding ?? 0; + message.jsonFormat = object.jsonFormat ?? 0; + message.enforceNamingStyle = object.enforceNamingStyle ?? 0; + message.defaultSymbolVisibility = object.defaultSymbolVisibility ?? 0; + return message; + }, +}; + +function createBaseFeatureSet_VisibilityFeature(): FeatureSet_VisibilityFeature { + return {}; +} + +export const FeatureSet_VisibilityFeature: MessageFns = { + encode(_: FeatureSet_VisibilityFeature, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FeatureSet_VisibilityFeature { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeatureSet_VisibilityFeature(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(_: any): FeatureSet_VisibilityFeature { + return {}; + }, + + toJSON(_: FeatureSet_VisibilityFeature): unknown { + const obj: any = {}; + return obj; + }, + + create, I>>(base?: I): FeatureSet_VisibilityFeature { + return FeatureSet_VisibilityFeature.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(_: I): FeatureSet_VisibilityFeature { + const message = createBaseFeatureSet_VisibilityFeature(); + return message; + }, +}; + +function createBaseFeatureSetDefaults(): FeatureSetDefaults { + return { defaults: [], minimumEdition: 0, maximumEdition: 0 }; +} + +export const FeatureSetDefaults: MessageFns = { + encode(message: FeatureSetDefaults, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.defaults) { + FeatureSetDefaults_FeatureSetEditionDefault.encode(v!, writer.uint32(10).fork()).join(); + } + if (message.minimumEdition !== undefined && message.minimumEdition !== 0) { + writer.uint32(32).int32(message.minimumEdition); + } + if (message.maximumEdition !== undefined && message.maximumEdition !== 0) { + writer.uint32(40).int32(message.maximumEdition); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FeatureSetDefaults { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeatureSetDefaults(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.defaults.push(FeatureSetDefaults_FeatureSetEditionDefault.decode(reader, reader.uint32())); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.minimumEdition = reader.int32() as any; + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.maximumEdition = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FeatureSetDefaults { + return { + defaults: globalThis.Array.isArray(object?.defaults) + ? object.defaults.map((e: any) => FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e)) + : [], + minimumEdition: isSet(object.minimumEdition) ? 
editionFromJSON(object.minimumEdition) : 0, + maximumEdition: isSet(object.maximumEdition) ? editionFromJSON(object.maximumEdition) : 0, + }; + }, + + toJSON(message: FeatureSetDefaults): unknown { + const obj: any = {}; + if (message.defaults?.length) { + obj.defaults = message.defaults.map((e) => FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e)); + } + if (message.minimumEdition !== undefined && message.minimumEdition !== 0) { + obj.minimumEdition = editionToJSON(message.minimumEdition); + } + if (message.maximumEdition !== undefined && message.maximumEdition !== 0) { + obj.maximumEdition = editionToJSON(message.maximumEdition); + } + return obj; + }, + + create, I>>(base?: I): FeatureSetDefaults { + return FeatureSetDefaults.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FeatureSetDefaults { + const message = createBaseFeatureSetDefaults(); + message.defaults = object.defaults?.map((e) => FeatureSetDefaults_FeatureSetEditionDefault.fromPartial(e)) || []; + message.minimumEdition = object.minimumEdition ?? 0; + message.maximumEdition = object.maximumEdition ?? 
0; + return message; + }, +}; + +function createBaseFeatureSetDefaults_FeatureSetEditionDefault(): FeatureSetDefaults_FeatureSetEditionDefault { + return { edition: 0, overridableFeatures: undefined, fixedFeatures: undefined }; +} + +export const FeatureSetDefaults_FeatureSetEditionDefault: MessageFns = { + encode( + message: FeatureSetDefaults_FeatureSetEditionDefault, + writer: BinaryWriter = new BinaryWriter(), + ): BinaryWriter { + if (message.edition !== undefined && message.edition !== 0) { + writer.uint32(24).int32(message.edition); + } + if (message.overridableFeatures !== undefined) { + FeatureSet.encode(message.overridableFeatures, writer.uint32(34).fork()).join(); + } + if (message.fixedFeatures !== undefined) { + FeatureSet.encode(message.fixedFeatures, writer.uint32(42).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FeatureSetDefaults_FeatureSetEditionDefault { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeatureSetDefaults_FeatureSetEditionDefault(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 3: { + if (tag !== 24) { + break; + } + + message.edition = reader.int32() as any; + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.overridableFeatures = FeatureSet.decode(reader, reader.uint32()); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.fixedFeatures = FeatureSet.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FeatureSetDefaults_FeatureSetEditionDefault { + return { + edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0, + overridableFeatures: isSet(object.overridableFeatures) + ? 
FeatureSet.fromJSON(object.overridableFeatures) + : undefined, + fixedFeatures: isSet(object.fixedFeatures) ? FeatureSet.fromJSON(object.fixedFeatures) : undefined, + }; + }, + + toJSON(message: FeatureSetDefaults_FeatureSetEditionDefault): unknown { + const obj: any = {}; + if (message.edition !== undefined && message.edition !== 0) { + obj.edition = editionToJSON(message.edition); + } + if (message.overridableFeatures !== undefined) { + obj.overridableFeatures = FeatureSet.toJSON(message.overridableFeatures); + } + if (message.fixedFeatures !== undefined) { + obj.fixedFeatures = FeatureSet.toJSON(message.fixedFeatures); + } + return obj; + }, + + create, I>>( + base?: I, + ): FeatureSetDefaults_FeatureSetEditionDefault { + return FeatureSetDefaults_FeatureSetEditionDefault.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): FeatureSetDefaults_FeatureSetEditionDefault { + const message = createBaseFeatureSetDefaults_FeatureSetEditionDefault(); + message.edition = object.edition ?? 0; + message.overridableFeatures = (object.overridableFeatures !== undefined && object.overridableFeatures !== null) + ? FeatureSet.fromPartial(object.overridableFeatures) + : undefined; + message.fixedFeatures = (object.fixedFeatures !== undefined && object.fixedFeatures !== null) + ? FeatureSet.fromPartial(object.fixedFeatures) + : undefined; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo: MessageFns = { + encode(message: SourceCodeInfo, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: globalThis.Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location?.length) { + obj.location = message.location.map((e) => SourceCodeInfo_Location.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): SourceCodeInfo { + return SourceCodeInfo.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location: MessageFns = { + encode(message: SourceCodeInfo_Location, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.join(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.join(); + if (message.leadingComments !== undefined && message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== undefined && message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of 
message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag === 8) { + message.path.push(reader.int32()); + + continue; + } + + if (tag === 10) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + + continue; + } + + break; + } + case 2: { + if (tag === 16) { + message.span.push(reader.int32()); + + continue; + } + + if (tag === 18) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + + continue; + } + + break; + } + case 3: { + if (tag !== 26) { + break; + } + + message.leadingComments = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.trailingComments = reader.string(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.leadingDetachedComments.push(reader.string()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: globalThis.Array.isArray(object?.path) ? object.path.map((e: any) => globalThis.Number(e)) : [], + span: globalThis.Array.isArray(object?.span) ? object.span.map((e: any) => globalThis.Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? globalThis.String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? globalThis.String(object.trailingComments) : "", + leadingDetachedComments: globalThis.Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => globalThis.String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path?.length) { + obj.path = message.path.map((e) => Math.round(e)); + } + if (message.span?.length) { + obj.span = message.span.map((e) => Math.round(e)); + } + if (message.leadingComments !== undefined && message.leadingComments !== "") { + obj.leadingComments = message.leadingComments; + } + if (message.trailingComments !== undefined && message.trailingComments !== "") { + obj.trailingComments = message.trailingComments; + } + if (message.leadingDetachedComments?.length) { + obj.leadingDetachedComments = message.leadingDetachedComments; + } + return obj; + }, + + create, I>>(base?: I): SourceCodeInfo_Location { + return SourceCodeInfo_Location.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo: MessageFns = { + encode(message: GeneratedCodeInfo, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: globalThis.Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation?.length) { + obj.annotation = message.annotation.map((e) => GeneratedCodeInfo_Annotation.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): GeneratedCodeInfo { + return GeneratedCodeInfo.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0, semantic: 0 }; +} + +export const GeneratedCodeInfo_Annotation: MessageFns = { + encode(message: GeneratedCodeInfo_Annotation, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.join(); + if (message.sourceFile !== undefined && message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== undefined && message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== undefined && message.end !== 0) { + writer.uint32(32).int32(message.end); + } + if (message.semantic !== undefined 
&& message.semantic !== 0) { + writer.uint32(40).int32(message.semantic); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag === 8) { + message.path.push(reader.int32()); + + continue; + } + + if (tag === 10) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + + continue; + } + + break; + } + case 2: { + if (tag !== 18) { + break; + } + + message.sourceFile = reader.string(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.begin = reader.int32(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.end = reader.int32(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.semantic = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: globalThis.Array.isArray(object?.path) ? object.path.map((e: any) => globalThis.Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? globalThis.String(object.sourceFile) : "", + begin: isSet(object.begin) ? globalThis.Number(object.begin) : 0, + end: isSet(object.end) ? globalThis.Number(object.end) : 0, + semantic: isSet(object.semantic) ? 
generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path?.length) { + obj.path = message.path.map((e) => Math.round(e)); + } + if (message.sourceFile !== undefined && message.sourceFile !== "") { + obj.sourceFile = message.sourceFile; + } + if (message.begin !== undefined && message.begin !== 0) { + obj.begin = Math.round(message.begin); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + if (message.semantic !== undefined && message.semantic !== 0) { + obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic); + } + return obj; + }, + + create, I>>(base?: I): GeneratedCodeInfo_Annotation { + return GeneratedCodeInfo_Annotation.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 0; + message.semantic = object.semantic ?? 0; + return message; + }, +}; + +function bytesFromBase64(b64: string): Uint8Array { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); +} + +function base64FromBytes(arr: Uint8Array): string { + return globalThis.Buffer.from(arr).toString("base64"); +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends { $case: string; value: unknown } ? { $case: T["$case"]; value?: DeepPartial } + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(int64: { toString(): string }): number { + const num = globalThis.Number(int64.toString()); + if (num > globalThis.Number.MAX_SAFE_INTEGER) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + if (num < globalThis.Number.MIN_SAFE_INTEGER) { + throw new globalThis.Error("Value is smaller than Number.MIN_SAFE_INTEGER"); + } + return num; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create, I>>(base?: I): T; + fromPartial, I>>(object: I): T; +} diff --git a/src/grpc/google/protobuf/duration.ts b/src/grpc/google/protobuf/duration.ts new file mode 100644 index 00000000..cca5192f --- /dev/null +++ b/src/grpc/google/protobuf/duration.ts @@ -0,0 +1,201 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc unknown +// source: google/protobuf/duration.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "google.protobuf"; + +/** + * A Duration represents a signed, fixed-length span of time represented + * as a count of seconds and fractions of seconds at nanosecond + * resolution. It is independent of any calendar and concepts like "day" + * or "month". It is related to Timestamp in that the difference between + * two Timestamp values is a Duration and it can be added or subtracted + * from a Timestamp. Range is approximately +-10,000 years. + * + * # Examples + * + * Example 1: Compute Duration from two Timestamps in pseudo code. 
+ * + * Timestamp start = ...; + * Timestamp end = ...; + * Duration duration = ...; + * + * duration.seconds = end.seconds - start.seconds; + * duration.nanos = end.nanos - start.nanos; + * + * if (duration.seconds < 0 && duration.nanos > 0) { + * duration.seconds += 1; + * duration.nanos -= 1000000000; + * } else if (duration.seconds > 0 && duration.nanos < 0) { + * duration.seconds -= 1; + * duration.nanos += 1000000000; + * } + * + * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + * + * Timestamp start = ...; + * Duration duration = ...; + * Timestamp end = ...; + * + * end.seconds = start.seconds + duration.seconds; + * end.nanos = start.nanos + duration.nanos; + * + * if (end.nanos < 0) { + * end.seconds -= 1; + * end.nanos += 1000000000; + * } else if (end.nanos >= 1000000000) { + * end.seconds += 1; + * end.nanos -= 1000000000; + * } + * + * Example 3: Compute Duration from datetime.timedelta in Python. + * + * td = datetime.timedelta(days=3, minutes=10) + * duration = Duration() + * duration.FromTimedelta(td) + * + * # JSON Mapping + * + * In JSON format, the Duration type is encoded as a string rather than an + * object, where the string ends in the suffix "s" (indicating seconds) and + * is preceded by the number of seconds, with nanoseconds expressed as + * fractional seconds. For example, 3 seconds with 0 nanoseconds should be + * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + * be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + * microsecond should be expressed in JSON format as "3.000001s". + */ +export interface Duration { + /** + * Signed seconds of the span of time. Must be from -315,576,000,000 + * to +315,576,000,000 inclusive. Note: these bounds are computed from: + * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + */ + seconds: number; + /** + * Signed fractions of a second at nanosecond resolution of the span + * of time. 
Durations less than one second are represented with a 0 + * `seconds` field and a positive or negative `nanos` field. For durations + * of one second or more, a non-zero value for the `nanos` field must be + * of the same sign as the `seconds` field. Must be from -999,999,999 + * to +999,999,999 inclusive. + */ + nanos: number; +} + +function createBaseDuration(): Duration { + return { seconds: 0, nanos: 0 }; +} + +export const Duration: MessageFns = { + encode(message: Duration, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.seconds !== 0) { + writer.uint32(8).int64(message.seconds); + } + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Duration { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDuration(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.seconds = longToNumber(reader.int64()); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.nanos = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Duration { + return { + seconds: isSet(object.seconds) ? globalThis.Number(object.seconds) : 0, + nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0, + }; + }, + + toJSON(message: Duration): unknown { + const obj: any = {}; + if (message.seconds !== 0) { + obj.seconds = Math.round(message.seconds); + } + if (message.nanos !== 0) { + obj.nanos = Math.round(message.nanos); + } + return obj; + }, + + create, I>>(base?: I): Duration { + return Duration.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): Duration { + const message = createBaseDuration(); + message.seconds = object.seconds ?? 0; + message.nanos = object.nanos ?? 0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends { $case: string; value: unknown } ? { $case: T["$case"]; value?: DeepPartial } + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(int64: { toString(): string }): number { + const num = globalThis.Number(int64.toString()); + if (num > globalThis.Number.MAX_SAFE_INTEGER) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + if (num < globalThis.Number.MIN_SAFE_INTEGER) { + throw new globalThis.Error("Value is smaller than Number.MIN_SAFE_INTEGER"); + } + return num; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create, I>>(base?: I): T; + fromPartial, I>>(object: I): T; +} diff --git a/src/grpc/google/protobuf/empty.ts b/src/grpc/google/protobuf/empty.ts new file mode 100644 index 00000000..d538ed26 --- /dev/null +++ b/src/grpc/google/protobuf/empty.ts @@ -0,0 +1,87 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc unknown +// source: google/protobuf/empty.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "google.protobuf"; + +/** + * A generic empty message that you can re-use to avoid defining duplicated + * empty messages in your APIs. A typical example is to use it as the request + * or the response type of an API method. For instance: + * + * service Foo { + * rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + * } + */ +export interface Empty { +} + +function createBaseEmpty(): Empty { + return {}; +} + +export const Empty: MessageFns = { + encode(_: Empty, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Empty { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEmpty(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(_: any): Empty { + return {}; + }, + + toJSON(_: Empty): unknown { + const obj: any = {}; + return obj; + }, + + create, I>>(base?: I): Empty { + return Empty.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(_: I): Empty { + const message = createBaseEmpty(); + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends { $case: string; value: unknown } ? { $case: T["$case"]; value?: DeepPartial } + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create, I>>(base?: I): T; + fromPartial, I>>(object: I): T; +} diff --git a/src/grpc/google/protobuf/struct.ts b/src/grpc/google/protobuf/struct.ts new file mode 100644 index 00000000..c379c4f8 --- /dev/null +++ b/src/grpc/google/protobuf/struct.ts @@ -0,0 +1,604 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc unknown +// source: google/protobuf/struct.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "google.protobuf"; + +/** + * `NullValue` is a singleton enumeration to represent the null value for the + * `Value` type union. + * + * The JSON representation for `NullValue` is JSON `null`. + */ +export enum NullValue { + /** NULL_VALUE - Null value. */ + NULL_VALUE = 0, + UNRECOGNIZED = -1, +} + +export function nullValueFromJSON(object: any): NullValue { + switch (object) { + case 0: + case "NULL_VALUE": + return NullValue.NULL_VALUE; + case -1: + case "UNRECOGNIZED": + default: + return NullValue.UNRECOGNIZED; + } +} + +export function nullValueToJSON(object: NullValue): string { + switch (object) { + case NullValue.NULL_VALUE: + return "NULL_VALUE"; + case NullValue.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * `Struct` represents a structured data value, consisting of fields + * which map to dynamically typed values. In some languages, `Struct` + * might be supported by a native representation. For example, in + * scripting languages like JS a struct is represented as an + * object. 
The details of that representation are described together + * with the proto support for the language. + * + * The JSON representation for `Struct` is JSON object. + */ +export interface Struct { + /** Unordered map of dynamically typed values. */ + fields: { [key: string]: any | undefined }; +} + +export interface Struct_FieldsEntry { + key: string; + value: any | undefined; +} + +/** + * `Value` represents a dynamically typed value which can be either + * null, a number, a string, a boolean, a recursive struct value, or a + * list of values. A producer of value is expected to set one of these + * variants. Absence of any variant indicates an error. + * + * The JSON representation for `Value` is JSON value. + */ +export interface Value { + /** The kind of value. */ + kind?: + | // + /** Represents a null value. */ + { $case: "nullValue"; value: NullValue } + | // + /** Represents a double value. */ + { $case: "numberValue"; value: number } + | // + /** Represents a string value. */ + { $case: "stringValue"; value: string } + | // + /** Represents a boolean value. */ + { $case: "boolValue"; value: boolean } + | // + /** Represents a structured value. */ + { $case: "structValue"; value: { [key: string]: any } | undefined } + | // + /** Represents a repeated `Value`. */ + { $case: "listValue"; value: Array | undefined } + | undefined; +} + +/** + * `ListValue` is a wrapper around a repeated field of values. + * + * The JSON representation for `ListValue` is JSON array. + */ +export interface ListValue { + /** Repeated field of dynamically typed values. 
*/ + values: any[]; +} + +function createBaseStruct(): Struct { + return { fields: {} }; +} + +export const Struct: MessageFns & StructWrapperFns = { + encode(message: Struct, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + Object.entries(message.fields).forEach(([key, value]) => { + if (value !== undefined) { + Struct_FieldsEntry.encode({ key: key as any, value }, writer.uint32(10).fork()).join(); + } + }); + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Struct { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStruct(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + const entry1 = Struct_FieldsEntry.decode(reader, reader.uint32()); + if (entry1.value !== undefined) { + message.fields[entry1.key] = entry1.value; + } + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Struct { + return { + fields: isObject(object.fields) + ? Object.entries(object.fields).reduce<{ [key: string]: any | undefined }>((acc, [key, value]) => { + acc[key] = value as any | undefined; + return acc; + }, {}) + : {}, + }; + }, + + toJSON(message: Struct): unknown { + const obj: any = {}; + if (message.fields) { + const entries = Object.entries(message.fields); + if (entries.length > 0) { + obj.fields = {}; + entries.forEach(([k, v]) => { + obj.fields[k] = v; + }); + } + } + return obj; + }, + + create, I>>(base?: I): Struct { + return Struct.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Struct { + const message = createBaseStruct(); + message.fields = Object.entries(object.fields ?? 
{}).reduce<{ [key: string]: any | undefined }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[key] = value; + } + return acc; + }, + {}, + ); + return message; + }, + + wrap(object: { [key: string]: any } | undefined): Struct { + const struct = createBaseStruct(); + + if (object !== undefined) { + for (const key of Object.keys(object)) { + struct.fields[key] = object[key]; + } + } + return struct; + }, + + unwrap(message: Struct): { [key: string]: any } { + const object: { [key: string]: any } = {}; + if (message.fields) { + for (const key of Object.keys(message.fields)) { + object[key] = message.fields[key]; + } + } + return object; + }, +}; + +function createBaseStruct_FieldsEntry(): Struct_FieldsEntry { + return { key: "", value: undefined }; +} + +export const Struct_FieldsEntry: MessageFns = { + encode(message: Struct_FieldsEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + Value.encode(Value.wrap(message.value), writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Struct_FieldsEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStruct_FieldsEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = Value.unwrap(Value.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Struct_FieldsEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object?.value) ? 
object.value : undefined, + }; + }, + + toJSON(message: Struct_FieldsEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = message.value; + } + return obj; + }, + + create, I>>(base?: I): Struct_FieldsEntry { + return Struct_FieldsEntry.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Struct_FieldsEntry { + const message = createBaseStruct_FieldsEntry(); + message.key = object.key ?? ""; + message.value = object.value ?? undefined; + return message; + }, +}; + +function createBaseValue(): Value { + return { kind: undefined }; +} + +export const Value: MessageFns & AnyValueWrapperFns = { + encode(message: Value, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + switch (message.kind?.$case) { + case "nullValue": + writer.uint32(8).int32(message.kind.value); + break; + case "numberValue": + writer.uint32(17).double(message.kind.value); + break; + case "stringValue": + writer.uint32(26).string(message.kind.value); + break; + case "boolValue": + writer.uint32(32).bool(message.kind.value); + break; + case "structValue": + Struct.encode(Struct.wrap(message.kind.value), writer.uint32(42).fork()).join(); + break; + case "listValue": + ListValue.encode(ListValue.wrap(message.kind.value), writer.uint32(50).fork()).join(); + break; + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Value { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseValue(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.kind = { $case: "nullValue", value: reader.int32() as any }; + continue; + } + case 2: { + if (tag !== 17) { + break; + } + + message.kind = { $case: "numberValue", value: reader.double() }; + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.kind = { $case: "stringValue", value: reader.string() }; + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.kind = { $case: "boolValue", value: reader.bool() }; + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.kind = { $case: "structValue", value: Struct.unwrap(Struct.decode(reader, reader.uint32())) }; + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.kind = { $case: "listValue", value: ListValue.unwrap(ListValue.decode(reader, reader.uint32())) }; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Value { + return { + kind: isSet(object.nullValue) + ? { $case: "nullValue", value: nullValueFromJSON(object.nullValue) } + : isSet(object.numberValue) + ? { $case: "numberValue", value: globalThis.Number(object.numberValue) } + : isSet(object.stringValue) + ? { $case: "stringValue", value: globalThis.String(object.stringValue) } + : isSet(object.boolValue) + ? { $case: "boolValue", value: globalThis.Boolean(object.boolValue) } + : isSet(object.structValue) + ? { $case: "structValue", value: object.structValue } + : isSet(object.listValue) + ? 
{ $case: "listValue", value: [...object.listValue] } + : undefined, + }; + }, + + toJSON(message: Value): unknown { + const obj: any = {}; + if (message.kind?.$case === "nullValue") { + obj.nullValue = nullValueToJSON(message.kind.value); + } else if (message.kind?.$case === "numberValue") { + obj.numberValue = message.kind.value; + } else if (message.kind?.$case === "stringValue") { + obj.stringValue = message.kind.value; + } else if (message.kind?.$case === "boolValue") { + obj.boolValue = message.kind.value; + } else if (message.kind?.$case === "structValue") { + obj.structValue = message.kind.value; + } else if (message.kind?.$case === "listValue") { + obj.listValue = message.kind.value; + } + return obj; + }, + + create, I>>(base?: I): Value { + return Value.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Value { + const message = createBaseValue(); + switch (object.kind?.$case) { + case "nullValue": { + if (object.kind?.value !== undefined && object.kind?.value !== null) { + message.kind = { $case: "nullValue", value: object.kind.value }; + } + break; + } + case "numberValue": { + if (object.kind?.value !== undefined && object.kind?.value !== null) { + message.kind = { $case: "numberValue", value: object.kind.value }; + } + break; + } + case "stringValue": { + if (object.kind?.value !== undefined && object.kind?.value !== null) { + message.kind = { $case: "stringValue", value: object.kind.value }; + } + break; + } + case "boolValue": { + if (object.kind?.value !== undefined && object.kind?.value !== null) { + message.kind = { $case: "boolValue", value: object.kind.value }; + } + break; + } + case "structValue": { + if (object.kind?.value !== undefined && object.kind?.value !== null) { + message.kind = { $case: "structValue", value: object.kind.value }; + } + break; + } + case "listValue": { + if (object.kind?.value !== undefined && object.kind?.value !== null) { + message.kind = { $case: "listValue", value: object.kind.value }; + } + 
break; + } + } + return message; + }, + + wrap(value: any): Value { + const result = createBaseValue(); + if (value === null) { + result.kind = { $case: "nullValue", value }; + } else if (typeof value === "boolean") { + result.kind = { $case: "boolValue", value }; + } else if (typeof value === "number") { + result.kind = { $case: "numberValue", value }; + } else if (typeof value === "string") { + result.kind = { $case: "stringValue", value }; + } else if (globalThis.Array.isArray(value)) { + result.kind = { $case: "listValue", value }; + } else if (typeof value === "object") { + result.kind = { $case: "structValue", value }; + } else if (typeof value !== "undefined") { + throw new globalThis.Error("Unsupported any value type: " + typeof value); + } + return result; + }, + + unwrap(message: Value): string | number | boolean | Object | null | Array | undefined { + return message.kind?.value; + }, +}; + +function createBaseListValue(): ListValue { + return { values: [] }; +} + +export const ListValue: MessageFns & ListValueWrapperFns = { + encode(message: ListValue, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.values) { + Value.encode(Value.wrap(v!), writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ListValue { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseListValue(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.values.push(Value.unwrap(Value.decode(reader, reader.uint32()))); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ListValue { + return { values: globalThis.Array.isArray(object?.values) ? 
[...object.values] : [] }; + }, + + toJSON(message: ListValue): unknown { + const obj: any = {}; + if (message.values?.length) { + obj.values = message.values; + } + return obj; + }, + + create, I>>(base?: I): ListValue { + return ListValue.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ListValue { + const message = createBaseListValue(); + message.values = object.values?.map((e) => e) || []; + return message; + }, + + wrap(array: Array | undefined): ListValue { + const result = createBaseListValue(); + result.values = array ?? []; + return result; + }, + + unwrap(message: ListValue): Array { + if (message?.hasOwnProperty("values") && globalThis.Array.isArray(message.values)) { + return message.values; + } else { + return message as any; + } + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends { $case: string; value: unknown } ? { $case: T["$case"]; value?: DeepPartial } + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isObject(value: any): boolean { + return typeof value === "object" && value !== null; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create, I>>(base?: I): T; + fromPartial, I>>(object: I): T; +} + +export interface StructWrapperFns { + wrap(object: { [key: string]: any } | undefined): Struct; + unwrap(message: Struct): { [key: string]: any }; +} + +export interface AnyValueWrapperFns { + wrap(value: any): Value; + unwrap(message: any): string | number | boolean | Object | null | Array | undefined; +} + +export interface ListValueWrapperFns { + wrap(array: Array | undefined): ListValue; + unwrap(message: ListValue): Array; +} diff --git a/src/grpc/google/protobuf/timestamp.ts b/src/grpc/google/protobuf/timestamp.ts new file mode 100644 index 00000000..faf68ee0 --- /dev/null +++ b/src/grpc/google/protobuf/timestamp.ts @@ -0,0 +1,230 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc unknown +// source: google/protobuf/timestamp.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "google.protobuf"; + +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. 
Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. + * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * Example 5: Compute Timestamp from Java `Instant.now()`. + * + * Instant now = Instant.now(); + * + * Timestamp timestamp = + * Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + * .setNanos(now.getNano()).build(); + * + * Example 6: Compute Timestamp from current time in Python. 
+ * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime() + * ) to obtain a formatter capable of generating timestamps in this format. + */ +export interface Timestamp { + /** + * Represents seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + */ + seconds: number; + /** + * Non-negative fractions of a second at nanosecond resolution. 
Negative + * second values with fractions must still have non-negative nanos values + * that count forward in time. Must be from 0 to 999,999,999 + * inclusive. + */ + nanos: number; +} + +function createBaseTimestamp(): Timestamp { + return { seconds: 0, nanos: 0 }; +} + +export const Timestamp: MessageFns = { + encode(message: Timestamp, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.seconds !== 0) { + writer.uint32(8).int64(message.seconds); + } + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Timestamp { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTimestamp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.seconds = longToNumber(reader.int64()); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.nanos = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Timestamp { + return { + seconds: isSet(object.seconds) ? globalThis.Number(object.seconds) : 0, + nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0, + }; + }, + + toJSON(message: Timestamp): unknown { + const obj: any = {}; + if (message.seconds !== 0) { + obj.seconds = Math.round(message.seconds); + } + if (message.nanos !== 0) { + obj.nanos = Math.round(message.nanos); + } + return obj; + }, + + create, I>>(base?: I): Timestamp { + return Timestamp.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Timestamp { + const message = createBaseTimestamp(); + message.seconds = object.seconds ?? 0; + message.nanos = object.nanos ?? 
0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends { $case: string; value: unknown } ? { $case: T["$case"]; value?: DeepPartial } + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(int64: { toString(): string }): number { + const num = globalThis.Number(int64.toString()); + if (num > globalThis.Number.MAX_SAFE_INTEGER) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + if (num < globalThis.Number.MIN_SAFE_INTEGER) { + throw new globalThis.Error("Value is smaller than Number.MIN_SAFE_INTEGER"); + } + return num; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create, I>>(base?: I): T; + fromPartial, I>>(object: I): T; +} diff --git a/src/server/grpc/grpc_handler.ts b/src/server/grpc/grpc_handler.ts new file mode 100644 index 00000000..d22d77c0 --- /dev/null +++ b/src/server/grpc/grpc_handler.ts @@ -0,0 +1,275 @@ +import * as grpc from '@grpc/grpc-js'; +import { + A2AServiceServer, + AgentCard, + CancelTaskRequest, + CreateTaskPushNotificationConfigRequest, + DeleteTaskPushNotificationConfigRequest, + GetAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigRequest, + ListTaskPushNotificationConfigResponse, + SendMessageRequest, + SendMessageResponse, + StreamResponse, + Task, + 
TaskPushNotificationConfig, + TaskSubscriptionRequest, +} from '../../grpc/a2a.js'; +import { MessageSendParams, TaskIdParams } from '../../types.js'; +import { Empty } from '../../grpc/google/protobuf/empty.js'; +import { A2ARequestHandler } from '../request_handler/a2a_request_handler.js'; +import { FromProto } from '../../grpc/utils/from_proto.js'; +import { ToProto } from '../../grpc/utils/to_proto.js'; +import { GrpcTransportHandler } from '../transports/grpc/grpc_transport_handler.js'; +import { ServerCallContext } from '../context.js'; +import { Extensions } from '../../extensions.js'; +import { UserBuilder } from './common.js'; +import { HTTP_EXTENSION_HEADER } from '../../constants.js'; +import { A2AError } from '../error.js'; + +/** + * Options for configuring the gRPC handler. + */ +export interface GrpcHandlerOptions { + requestHandler: A2ARequestHandler; + userBuilder: UserBuilder; +} + +/** + * Creates a gRPC transport handler. + * This handler implements the A2A gRPC service definition and acts as an + * adapter between the gRPC transport layer and the core A2A request handler. + * + * @param options - Options bundling the core A2A request handler for business logic and the user builder. + * @returns An object that implements the A2AServiceServer interface. 
+ *
+ * @example
+ * ```ts
+ * const server = new grpc.Server();
+ * const requestHandler = new DefaultRequestHandler(...);
+ * server.addService(A2AService, grpcHandler({ requestHandler, userBuilder: UserBuilder.noAuthentication }));
+ * ```
+ */
+export function grpcHandler(options: GrpcHandlerOptions): A2AServiceServer {
+ const grpcTransportHandler = new GrpcTransportHandler(options.requestHandler);
+
+ /**
+ * Helper to wrap Unary calls with common logic (context, metadata, error handling)
+ */
+ const wrapUnary = async (
+ call: grpc.ServerUnaryCall,
+ callback: grpc.sendUnaryData,
+ parser: (req: TReq) => TParams,
+ handler: (params: TParams, ctx: ServerCallContext) => Promise,
+ converter: (res: TResult) => TRes
+ ) => {
+ try {
+ const context = await buildContext(call, options.userBuilder);
+ const params = parser(call.request);
+ const result = await handler(params, context);
// Initial metadata (e.g. activated extensions) is sent before the response
// callback; on the error path below no metadata is sent at all.
+ call.sendMetadata(buildMetadata(context));
+ callback(null, converter(result));
+ } catch (error) {
// Any failure (context building, parsing, or handler) is translated to a
// gRPC status object; the response payload is null on failure.
+ callback(mapToError(error), null);
+ }
+ };
+
+ return {
+ async sendMessage(
+ call: grpc.ServerUnaryCall,
+ callback: grpc.sendUnaryData
+ ): Promise {
+ return wrapUnary(
+ call,
+ callback,
+ FromProto.messageSendParams,
+ grpcTransportHandler.sendMessage.bind(grpcTransportHandler),
+ ToProto.messageSendResult
+ );
+ },
+
+ async sendStreamingMessage(
+ call: grpc.ServerWritableStream
+ ): Promise {
+ try {
+ const context = await buildContext(call, options.userBuilder);
+ const params: MessageSendParams = FromProto.messageSendParams(call.request);
+ const stream = await grpcTransportHandler.sendMessageStream(params, context);
+ const metadata = buildMetadata(context);
+ call.sendMetadata(metadata);
// Forward every event from the core handler's async generator to the
// gRPC response stream, converting each to its proto representation.
+ for await (const responsePart of stream) {
+ const response = ToProto.messageStreamResult(responsePart);
+ call.write(response);
+ }
+ } catch (error) {
+ call.emit('error', mapToError(error));
+ } finally {
// NOTE(review): end() also runs after the 'error' emit above — confirm
// grpc-js tolerates end() on a stream that already errored.
+ call.end();
+ }
+ },
+
+ async taskSubscription(
+ call:
+ grpc.ServerWritableStream
+ ): Promise {
+ try {
+ const context = await buildContext(call, options.userBuilder);
+ const params: TaskIdParams = FromProto.taskIdParams(call.request);
+ const stream = await grpcTransportHandler.resubscribe(params, context);
+ const metadata = buildMetadata(context);
+ call.sendMetadata(metadata);
+ for await (const responsePart of stream) {
+ const response = ToProto.messageStreamResult(responsePart);
+ call.write(response);
+ }
+ } catch (error) {
+ call.emit('error', mapToError(error));
+ } finally {
+ call.end();
+ }
+ },
+
+ async deleteTaskPushNotificationConfig(
+ call: grpc.ServerUnaryCall,
+ callback: grpc.sendUnaryData
+ ): Promise {
+ return wrapUnary(
+ call,
+ callback,
+ FromProto.deleteTaskPushNotificationConfigParams,
+ grpcTransportHandler.deleteTaskPushNotificationConfig.bind(grpcTransportHandler),
// Delete responds with google.protobuf.Empty, hence the empty-object converter.
+ () => ({})
+ );
+ },
+ async listTaskPushNotificationConfig(
+ call: grpc.ServerUnaryCall<
+ ListTaskPushNotificationConfigRequest,
+ ListTaskPushNotificationConfigResponse
+ >,
+ callback: grpc.sendUnaryData
+ ): Promise {
+ return wrapUnary(
+ call,
+ callback,
+ FromProto.listTaskPushNotificationConfigParams,
+ grpcTransportHandler.listTaskPushNotificationConfigs.bind(grpcTransportHandler),
+ ToProto.listTaskPushNotificationConfigs
+ );
+ },
+ async createTaskPushNotificationConfig(
+ call: grpc.ServerUnaryCall<
+ CreateTaskPushNotificationConfigRequest,
+ TaskPushNotificationConfig
+ >,
+ callback: grpc.sendUnaryData
+ ): Promise {
+ return wrapUnary(
+ call,
+ callback,
+ FromProto.setTaskPushNotificationConfigParams,
+ grpcTransportHandler.setTaskPushNotificationConfig.bind(grpcTransportHandler),
+ ToProto.taskPushNotificationConfig
+ );
+ },
+ async getTaskPushNotificationConfig(
+ call: grpc.ServerUnaryCall,
+ callback: grpc.sendUnaryData
+ ): Promise {
+ return wrapUnary(
+ call,
+ callback,
+ FromProto.getTaskPushNotificationConfigParams,
+
+ grpcTransportHandler.getTaskPushNotificationConfig.bind(grpcTransportHandler),
+ ToProto.taskPushNotificationConfig
+ );
+ },
+ async getTask(
+ call: grpc.ServerUnaryCall,
+ callback: grpc.sendUnaryData
+ ): Promise {
+ return wrapUnary(
+ call,
+ callback,
+ FromProto.taskQueryParams,
+ grpcTransportHandler.getTask.bind(grpcTransportHandler),
+ ToProto.task
+ );
+ },
+ async cancelTask(
+ call: grpc.ServerUnaryCall,
+ callback: grpc.sendUnaryData
+ ): Promise {
+ return wrapUnary(
+ call,
+ callback,
+ FromProto.taskIdParams,
+ grpcTransportHandler.cancelTask.bind(grpcTransportHandler),
+ ToProto.task
+ );
+ },
+ async getAgentCard(
+ call: grpc.ServerUnaryCall,
+ callback: grpc.sendUnaryData
+ ): Promise {
+ return await wrapUnary(
+ call,
+ callback,
// GetAgentCardRequest carries no usable fields; the parser yields an
// empty params object and the handler ignores both params and context.
+ () => ({}),
+ (_params, _context) => grpcTransportHandler.getAgentCard(),
+ ToProto.agentCard
+ );
+ },
+ };
+}
+
+// --- Internal Helpers ---
+
+/**
+ * Maps A2AError or standard Error to gRPC Status codes
+ */
+const mapping: Record = {
// A2A-specific codes (-32001..-32007). The -32001/-32002/-32003 pairings
// (TaskNotFound / TaskNotCancelable / PushNotificationNotSupported) are
// exercised by the client transport tests; the remaining names are assumed
// from the error classes this SDK exports — confirm against errors.ts.
+ [-32001]: grpc.status.NOT_FOUND, // TaskNotFoundError
+ [-32002]: grpc.status.FAILED_PRECONDITION, // TaskNotCancelableError
+ [-32003]: grpc.status.UNIMPLEMENTED, // PushNotificationNotSupportedError
+ [-32004]: grpc.status.UNIMPLEMENTED, // UnsupportedOperationError (presumed)
+ [-32005]: grpc.status.INVALID_ARGUMENT, // ContentTypeNotSupportedError (presumed)
+ [-32006]: grpc.status.INTERNAL, // InvalidAgentResponseError (presumed)
+ [-32007]: grpc.status.FAILED_PRECONDITION, // AuthenticatedExtendedCardNotConfiguredError (presumed)
// Standard JSON-RPC 2.0 codes.
+ [-32600]: grpc.status.INVALID_ARGUMENT, // Invalid Request
+ [-32601]: grpc.status.UNIMPLEMENTED, // Method not found
+ [-32602]: grpc.status.INVALID_ARGUMENT, // Invalid params
+ [-32603]: grpc.status.INTERNAL, // Internal error
+ [-32700]: grpc.status.INTERNAL, // Parse error
+};
+
// Normalizes any thrown value to an A2AError, then to a partial gRPC
// ServiceError ({ code, details }); unknown codes fall back to INTERNAL.
+const mapToError = (error: unknown): Partial => {
+ const a2aError =
+ error instanceof A2AError
+ ? error
+ : A2AError.internalError(error instanceof Error ? error.message : 'Unknown Error');
+
+ return {
+ code: mapping[a2aError.code] ??
grpc.status.INTERNAL,
+ details: a2aError.message,
+ };
+};
+
// Builds the per-call ServerCallContext: resolves the user via the configured
// UserBuilder and parses the requested extensions from the call's metadata.
+const buildContext = async (
+ call: grpc.ServerUnaryCall | grpc.ServerWritableStream,
+ userBuilder: UserBuilder
+): Promise => {
+ const user = await userBuilder(call);
+ const extensionHeaders = call.metadata.get(HTTP_EXTENSION_HEADER);
// A metadata key may carry multiple values; fold them into one
// comma-separated list before parsing.
+ const extensionString = extensionHeaders.map((v) => v.toString()).join(',');
+
+ return new ServerCallContext(Extensions.parseServiceParameter(extensionString), user);
+};
+
// Echoes the extensions the handler actually activated back to the client as
// response metadata; the header is omitted entirely when none were activated.
+const buildMetadata = (context: ServerCallContext): grpc.Metadata => {
+ const metadata = new grpc.Metadata();
+ if (context.activatedExtensions?.length) {
+ metadata.set(HTTP_EXTENSION_HEADER, context.activatedExtensions.join(','));
+ }
+ return metadata;
+};
diff --git a/src/server/transports/grpc/grpc_transport_handler.ts b/src/server/transports/grpc/grpc_transport_handler.ts
new file mode 100644
index 00000000..c61d3531
--- /dev/null
+++ b/src/server/transports/grpc/grpc_transport_handler.ts
@@ -0,0 +1,159 @@
+import { A2AError } from '../../error.js';
+import { A2ARequestHandler } from '../../request_handler/a2a_request_handler.js';
+import { ServerCallContext } from '../../context.js';
+import {
+ Message,
+ Task,
+ TaskStatusUpdateEvent,
+ TaskArtifactUpdateEvent,
+ MessageSendParams,
+ TaskPushNotificationConfig,
+ TaskQueryParams,
+ TaskIdParams,
+ AgentCard,
+ DeleteTaskPushNotificationConfigParams,
+ ListTaskPushNotificationConfigParams,
+ GetTaskPushNotificationConfigParams,
+} from '../../../types.js';
+
// Transport-agnostic adapter between the gRPC handler layer and the core
// A2ARequestHandler; adds parameter validation and capability gating on top
// of straight delegation.
+export class GrpcTransportHandler {
+ private requestHandler: A2ARequestHandler;
+
+ constructor(requestHandler: A2ARequestHandler) {
+ this.requestHandler = requestHandler;
+ }
+
+ /**
+ * Gets the agent card (for capability checks).
+ */
+ async getAgentCard(): Promise {
+ return this.requestHandler.getAgentCard();
+ }
+
+ /**
+ * Gets the authenticated extended agent card.
+ */ + async getAuthenticatedExtendedAgentCard(context: ServerCallContext): Promise { + return this.requestHandler.getAuthenticatedExtendedAgentCard(context); + } + + /** + * Sends a message to the agent. + */ + async sendMessage( + params: MessageSendParams, + context: ServerCallContext + ): Promise { + return this.requestHandler.sendMessage(params, context); + } + + /** + * Sends a message with streaming response. + * @throws {A2AError} UnsupportedOperation if streaming not supported + */ + async sendMessageStream( + params: MessageSendParams, + context: ServerCallContext + ): Promise< + AsyncGenerator< + Message | Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent, + void, + undefined + > + > { + this.requireCapability('streaming'); + return this.requestHandler.sendMessageStream(params, context); + } + + /** + * Gets a task by ID. + * Validates historyLength parameter if provided. + */ + async getTask(params: TaskQueryParams, context: ServerCallContext): Promise { + if (params.historyLength !== undefined && params.historyLength < 0) { + throw A2AError.invalidParams('historyLength must be non-negative'); + } + return this.requestHandler.getTask(params, context); + } + + /** + * Cancels a task. + */ + async cancelTask(params: TaskIdParams, context: ServerCallContext): Promise { + return this.requestHandler.cancelTask(params, context); + } + + /** + * Resubscribes to task updates. + * @throws {A2AError} UnsupportedOperation if streaming not supported + */ + async resubscribe( + params: TaskIdParams, + context: ServerCallContext + ): Promise< + AsyncGenerator + > { + return this.requestHandler.resubscribe(params, context); + } + + /** + * Sets a push notification configuration. 
+ * @throws {A2AError} PushNotificationNotSupported if push notifications not supported + */ + async setTaskPushNotificationConfig( + config: TaskPushNotificationConfig, + context: ServerCallContext + ): Promise { + this.requireCapability('pushNotifications'); + return this.requestHandler.setTaskPushNotificationConfig(config, context); + } + + /** + * Lists all push notification configurations for a task. + */ + async listTaskPushNotificationConfigs( + params: ListTaskPushNotificationConfigParams, + context: ServerCallContext + ): Promise { + return this.requestHandler.listTaskPushNotificationConfigs(params, context); + } + + /** + * Gets a specific push notification configuration. + */ + async getTaskPushNotificationConfig( + params: GetTaskPushNotificationConfigParams, + context: ServerCallContext + ): Promise { + return this.requestHandler.getTaskPushNotificationConfig(params, context); + } + + /** + * Deletes a push notification configuration. + */ + async deleteTaskPushNotificationConfig( + params: DeleteTaskPushNotificationConfigParams, + context: ServerCallContext + ): Promise { + await this.requestHandler.deleteTaskPushNotificationConfig(params, context); + } + + /** + * Static map of capability to error for missing capabilities. + */ + private readonly CAPABILITY_ERRORS: Record<'streaming' | 'pushNotifications', () => A2AError> = { + streaming: () => A2AError.unsupportedOperation('Agent does not support streaming'), + pushNotifications: () => A2AError.pushNotificationNotSupported(), + }; + + /** + * Validates that the agent supports a required capability. 
+ * @throws {A2AError} UnsupportedOperation for streaming, PushNotificationNotSupported for push notifications
+ */
// NOTE(review): this method is async (it fetches the agent card on every
// check); call sites must `await` it, otherwise the thrown capability error
// is lost as an unhandled promise rejection and the check is bypassed.
+ private async requireCapability(capability: 'streaming' | 'pushNotifications'): Promise {
+ const agentCard = await this.getAgentCard();
+ if (!agentCard.capabilities?.[capability]) {
+ throw this.CAPABILITY_ERRORS[capability]();
+ }
+ }
+}
diff --git a/test/client/transports/grpc_transport.spec.ts b/test/client/transports/grpc_transport.spec.ts
new file mode 100644
index 00000000..2ad51ca9
--- /dev/null
+++ b/test/client/transports/grpc_transport.spec.ts
@@ -0,0 +1,341 @@
+import { describe, it, beforeEach, afterEach, expect, vi, type Mock } from 'vitest';
+import { credentials, Metadata, ServiceError } from '@grpc/grpc-js';
+import {
+ GrpcTransport,
+ GrpcTransportFactory,
+} from '../../../src/client/transports/grpc_transport.js';
+import { A2AServiceClient } from '../../../src/grpc/a2a.js';
+import { ToProto } from '../../../src/grpc/utils/to_proto.js';
+import { FromProto } from '../../../src/grpc/utils/from_proto.js';
+import {
+ TaskNotFoundError,
+ TaskNotCancelableError,
+ PushNotificationNotSupportedError,
+} from '../../../src/errors.js';
+import {
+ createMessageParams,
+ createMockAgentCard,
+ createMockMessage,
+ createMockTask,
+} from '../util.js';
+
+// --- Mocks ---
+
+// Mock the gRPC client class
+vi.mock('../../../src/grpc/a2a.js', () => {
+ const A2AServiceClient = vi.fn();
+ A2AServiceClient.prototype.getAgentCard = vi.fn();
+ A2AServiceClient.prototype.sendMessage = vi.fn();
+ A2AServiceClient.prototype.sendStreamingMessage = vi.fn();
+ A2AServiceClient.prototype.createTaskPushNotificationConfig = vi.fn();
+ A2AServiceClient.prototype.getTaskPushNotificationConfig = vi.fn();
+ A2AServiceClient.prototype.listTaskPushNotificationConfig = vi.fn();
+ A2AServiceClient.prototype.deleteTaskPushNotificationConfig = vi.fn();
+ A2AServiceClient.prototype.getTask = vi.fn();
+ A2AServiceClient.prototype.cancelTask = vi.fn();
+
A2AServiceClient.prototype.taskSubscription = vi.fn();
+ return { A2AServiceClient };
+});
+
+// Mock ToProto and FromProto to act as pass-throughs or return simple objects for testing flow
+vi.mock('../../../src/grpc/utils/to_proto.js', () => ({
+ ToProto: {
+ agentCard: vi.fn((x) => x),
+ messageSendParams: vi.fn((x) => x),
+ taskPushNotificationConfig: vi.fn((x) => x),
+ getTaskPushNotificationConfigRequest: vi.fn((x) => x),
+ listTaskPushNotificationConfigRequest: vi.fn((x) => x),
+ deleteTaskPushNotificationConfigRequest: vi.fn((x) => x),
+ getTaskRequest: vi.fn((x) => x),
+ cancelTaskRequest: vi.fn((x) => x),
+ taskIdParams: vi.fn((x) => x),
+ taskPushNotificationConfigCreate: vi.fn((x) => x),
+ },
+}));
+
+vi.mock('../../../src/grpc/utils/from_proto.js', () => ({
+ FromProto: {
+ agentCard: vi.fn((x) => x),
+ sendMessageResult: vi.fn((x) => x),
+ message: vi.fn((x) => x),
+ setTaskPushNotificationConfigParams: vi.fn((x) => x),
// NOTE(review): 'getTaskPushNoticationConfig' is misspelled (missing "fi");
// if this mirrors a real FromProto key, fix both together, otherwise it is
// a dead mock entry.
+ getTaskPushNoticationConfig: vi.fn((x) => x),
+ listTaskPushNotificationConfig: vi.fn((x) => x),
+ task: vi.fn((x) => x),
+ taskStatusUpdate: vi.fn((x) => x),
+ taskArtifactUpdate: vi.fn((x) => x),
+ },
+}));
+
+describe('GrpcTransport', () => {
+ let transport: GrpcTransport;
+ let mockGrpcClient: A2AServiceClient;
+ const endpoint = 'localhost:50051';
+
+ // Helper to simulate a successful gRPC unary callback
+ const mockUnarySuccess = (method: Mock, response: any) => {
+ method.mockImplementation((_req: any, _meta: any, _opts: any, callback: any) => {
+ callback(null, response);
+ return {};
+ });
+ };
+
+ // Helper to simulate a gRPC error
+ const mockUnaryError = (method: Mock, code: number, message: string, a2aCode?: number) => {
+ method.mockImplementation((_req: any, _meta: any, _opts: any, callback: any) => {
+ const error: Partial = {
+ code,
+ message,
+ details: message,
+ metadata: new Metadata(),
+ };
// The 'a2a-error' trailer carries the A2A error code the transport uses to
// map a gRPC failure back to a typed client-side error.
+ if (a2aCode !== undefined) {
+ error.metadata!.set('a2a-error', String(a2aCode));
+ }
+ callback(error, null);
+
+ return {};
+ });
+ };
+
+ beforeEach(() => {
+ mockGrpcClient = new A2AServiceClient(endpoint, credentials.createInsecure());
+ transport = new GrpcTransport({ endpoint, grpcClient: mockGrpcClient });
+ });
+
+ afterEach(() => {
+ vi.clearAllMocks();
+ });
+
+ describe('getExtendedAgentCard', () => {
+ it('should get agent card successfully', async () => {
+ const mockCard = createMockAgentCard();
+ mockUnarySuccess(mockGrpcClient.getAgentCard as Mock, mockCard);
+
+ const result = await transport.getExtendedAgentCard();
+
+ expect(result).toEqual(mockCard);
+ expect(mockGrpcClient.getAgentCard).toHaveBeenCalled();
+ expect(FromProto.agentCard).toHaveBeenCalledWith(mockCard);
+ });
+ });
+
+ describe('sendMessage', () => {
+ it('should send message successfully', async () => {
+ const params = createMessageParams();
+ const mockResult = createMockMessage();
+ mockUnarySuccess(mockGrpcClient.sendMessage as Mock, mockResult);
+
+ const result = await transport.sendMessage(params);
+
+ expect(result).toEqual(mockResult);
+ expect(ToProto.messageSendParams).toHaveBeenCalledWith(params);
+ expect(mockGrpcClient.sendMessage).toHaveBeenCalled();
+ });
+
+ it('should pass service parameters as metadata', async () => {
+ const params = createMessageParams();
+ const options = { serviceParameters: { 'x-test-header': 'test-value' } };
+ mockUnarySuccess(mockGrpcClient.sendMessage as Mock, {});
+
+ await transport.sendMessage(params, options);
+
// The second positional argument of the mocked unary call is the Metadata
// the transport constructed from serviceParameters.
+ const calledMetadata = (mockGrpcClient.sendMessage as Mock).mock.calls[0][1] as Metadata;
+ expect(calledMetadata.get('x-test-header')).toEqual(['test-value']);
+ });
+
+ it('should throw TaskNotFoundError when mapped from A2A error code', async () => {
+ const params = createMessageParams();
+ mockUnaryError(mockGrpcClient.sendMessage as Mock, 5, 'Task Missing', -32001);
+
+ await expect(transport.sendMessage(params)).rejects.toThrow(TaskNotFoundError);
+ });
+
+ it('should throw generic Error for unmapped gRPC errors', async () =>
{
+ const params = createMessageParams();
+ mockUnaryError(mockGrpcClient.sendMessage as Mock, 13, 'Internal Error');
+
+ await expect(transport.sendMessage(params)).rejects.toThrow('GRPC error for sendMessage');
+ });
+ });
+
+ describe('sendMessageStream', () => {
+ it('should yield messages from stream', async () => {
+ const params = createMessageParams();
+ const mockMsg = createMockMessage();
+
+ const mockStream = {
+ [Symbol.asyncIterator]: async function* () {
+ yield { payload: { $case: 'msg', value: mockMsg } };
+ },
+ cancel: vi.fn(),
+ };
+ (mockGrpcClient.sendStreamingMessage as Mock).mockReturnValue(mockStream);
+
+ const iterator = transport.sendMessageStream(params);
+ const result = await iterator.next();
+
+ expect(result.value).toEqual(mockMsg);
+ expect(FromProto.message).toHaveBeenCalledWith(mockMsg);
+ expect(mockGrpcClient.sendStreamingMessage).toHaveBeenCalled();
+ });
+
+ it('should handle stream errors', async () => {
+ const params = createMessageParams();
+ const mockStream = {
+ [Symbol.asyncIterator]: async function* () {
+ throw { code: 13, message: 'Stream failed' };
+ yield {};
+ },
+ cancel: vi.fn(),
+ };
+ (mockGrpcClient.sendStreamingMessage as Mock).mockReturnValue(mockStream);
+
+ const iterator = transport.sendMessageStream(params);
// NOTE(review): trailing '!' differs from the unary assertion above
// ('GRPC error for sendMessage'); confirm it matches the transport's
// actual streaming error text.
+ await expect(iterator.next()).rejects.toThrow('GRPC error for sendStreamingMessage!');
+ });
+ });
+
+ describe('getTask', () => {
+ it('should get task successfully', async () => {
+ const taskId = 'task-123';
+ const mockTask = createMockTask(taskId);
+ mockUnarySuccess(mockGrpcClient.getTask as Mock, mockTask);
+
+ const result = await transport.getTask({ id: taskId });
+
+ expect(result).toEqual(mockTask);
+ expect(ToProto.getTaskRequest).toHaveBeenCalled();
+ expect(mockGrpcClient.getTask).toHaveBeenCalled();
+ });
+
+ it('should throw TaskNotFoundError', async () => {
+ mockUnaryError(mockGrpcClient.getTask as Mock, 5, 'Not Found', -32001);
+ await expect(transport.getTask({ id: 'bad-id'
})).rejects.toThrow(TaskNotFoundError);
+ });
+ });
+
+ describe('cancelTask', () => {
+ it('should cancel task successfully', async () => {
+ const taskId = 'task-123';
+ const mockTask = createMockTask(taskId, 'canceled');
+ mockUnarySuccess(mockGrpcClient.cancelTask as Mock, mockTask);
+
+ const result = await transport.cancelTask({ id: taskId });
+
+ expect(result).toEqual(mockTask);
+ expect(mockGrpcClient.cancelTask).toHaveBeenCalled();
+ });
+
+ it('should throw TaskNotCancelableError', async () => {
+ mockUnaryError(mockGrpcClient.cancelTask as Mock, 9, 'Cannot cancel', -32002);
+ await expect(transport.cancelTask({ id: 'task-123' })).rejects.toThrow(
+ TaskNotCancelableError
+ );
+ });
+ });
+
+ describe('Push Notification Config', () => {
+ const taskId = 'task-123';
+ const configId = 'config-456';
+ const mockConfig = {
+ taskId,
+ pushNotificationConfig: { id: configId, url: 'http://test' },
+ };
+
+ describe('setTaskPushNotificationConfig', () => {
+ it('should set config successfully', async () => {
+ mockUnarySuccess(mockGrpcClient.createTaskPushNotificationConfig as Mock, mockConfig);
+
+ const result = await transport.setTaskPushNotificationConfig(mockConfig);
+
+ expect(result).toEqual(mockConfig);
+ expect(mockGrpcClient.createTaskPushNotificationConfig).toHaveBeenCalled();
+ });
+
+ it('should throw PushNotificationNotSupportedError', async () => {
+ mockUnaryError(
+ mockGrpcClient.createTaskPushNotificationConfig as Mock,
+ 3,
+ 'Not supported',
+ -32003
+ );
+ await expect(transport.setTaskPushNotificationConfig(mockConfig)).rejects.toThrow(
+ PushNotificationNotSupportedError
+ );
+ });
+ });
+
+ describe('getTaskPushNotificationConfig', () => {
+ it('should get config successfully', async () => {
+ mockUnarySuccess(mockGrpcClient.getTaskPushNotificationConfig as Mock, mockConfig);
+
+ const result = await transport.getTaskPushNotificationConfig({
+ id: taskId,
+ pushNotificationConfigId: configId,
+ });
+
+ expect(result).toEqual(mockConfig);
+ });
+ });
+
+ describe('listTaskPushNotificationConfig', () => {
+ it('should list configs successfully', async () => {
+ const mockList = [mockConfig];
+ mockUnarySuccess(mockGrpcClient.listTaskPushNotificationConfig as Mock, mockList);
+
+ const result = await transport.listTaskPushNotificationConfig({ id: taskId });
+
+ expect(result).toEqual(mockList);
+ });
+ });
+
+ describe('deleteTaskPushNotificationConfig', () => {
+ it('should delete config successfully', async () => {
+ mockUnarySuccess(mockGrpcClient.deleteTaskPushNotificationConfig as Mock, {});
+
+ await transport.deleteTaskPushNotificationConfig({
+ id: taskId,
+ pushNotificationConfigId: configId,
+ });
+
+ expect(mockGrpcClient.deleteTaskPushNotificationConfig).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('resubscribeTask', () => {
+ it('should yield task updates from stream', async () => {
+ const params = { id: 'task-123' };
+ const mockUpdate = createMockTask('task-123');
+
+ const mockStream = {
+ [Symbol.asyncIterator]: async function* () {
+ yield { payload: { $case: 'task', value: mockUpdate } };
+ },
+ cancel: vi.fn(),
+ };
+ (mockGrpcClient.taskSubscription as Mock).mockReturnValue(mockStream);
+
+ const iterator = transport.resubscribeTask(params);
+ const result = await iterator.next();
+
+ expect(result.value).toEqual(mockUpdate);
+ expect(FromProto.task).toHaveBeenCalledWith(mockUpdate);
+ });
+ });
+});
+
+describe('GrpcTransportFactory', () => {
+ it('should have correct protocol name', () => {
+ const factory = new GrpcTransportFactory();
+ expect(factory.protocolName).toBe('GRPC');
+ });
+
+ it('should create transport with correct endpoint', async () => {
+ const factory = new GrpcTransportFactory();
+ const agentCard = createMockAgentCard({ url: 'localhost:50051' });
+ const transport = await factory.create(agentCard.url, agentCard);
+
+ expect(transport).toBeInstanceOf(GrpcTransport);
+ });
+});