import { BaseBedrockInput, CredentialType } from "../../utils/bedrock/index.js";
import { SerializedFields } from "../../load/map_keys.js";
import { GenerationChunk } from "@langchain/core/outputs";
import { EventStreamCodec } from "@smithy/eventstream-codec";
import { BaseLLMParams, LLM } from "@langchain/core/language_models/llms";
import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";

//#region src/llms/bedrock/web.d.ts
/**
 * A type of Large Language Model (LLM) that interacts with the Bedrock
 * service. It extends the base `LLM` class and implements the
 * `BaseBedrockInput` interface. The class authenticates against Amazon
 * Bedrock (part of Amazon Web Services) using AWS credentials and can be
 * configured with parameters such as the model to use, the AWS region,
 * and the maximum number of tokens to generate.
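 *
 * @example
 * // A minimal usage sketch: the model ID, region, and credential source
 * // below are illustrative placeholders, not required values.
 * const model = new Bedrock({
 *   model: "anthropic.claude-v2",
 *   region: "us-east-1",
 *   credentials: {
 *     accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
 *     secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
 *   },
 * });
 * const response = await model.invoke("Tell me a joke.");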
 */
declare class Bedrock extends LLM implements BaseBedrockInput {
  model: string;
  modelProvider: string;
  region: string;
  credentials: CredentialType;
  temperature?: number | undefined;
  maxTokens?: number | undefined;
  fetchFn: typeof fetch;
  endpointHost?: string;
  modelKwargs?: Record<string, unknown>;
  codec: EventStreamCodec;
  streaming: boolean;
  lc_serializable: boolean;
  get lc_aliases(): Record<string, string>;
  get lc_secrets(): {
    [key: string]: string;
  } | undefined;
  get lc_attributes(): SerializedFields | undefined;
  _llmType(): string;
  static lc_name(): string;
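  /** @param fields Bedrock-specific configuration merged with base LLM parameters. */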
  constructor(fields?: Partial<BaseBedrockInput> & BaseLLMParams);
  /**
   * Calls out to the Bedrock service model.
   * @param prompt The prompt to pass into the model.
   * @param options Parsed call options for this invocation.
   * @param runManager Optional callback manager for the run.
   * @returns The string generated by the model.
   * @example
   * const response = await model.invoke("Tell me a joke.");
   */
  _call(prompt: string, options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<string>;
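  /**
   * Internal helper that sends a signed HTTP request to the Bedrock
   * endpoint for the given provider and invocation method.
   */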
  _signedFetch(prompt: string, options: this["ParsedCallOptions"], fields: {
    bedrockMethod: "invoke" | "invoke-with-response-stream";
    endpointHost: string;
    provider: string;
  }): Promise<Response>;
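  /** Returns the parameters this instance will send with a model invocation. */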
  invocationParams(options?: this["ParsedCallOptions"]): {
    model: string;
    region: string;
    temperature: number | undefined;
    maxTokens: number | undefined;
    stop: string[] | undefined;
    modelKwargs: Record<string, unknown> | undefined;
  };
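  /** Streams the model response as an async generator of `GenerationChunk`s. */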
  _streamResponseChunks(prompt: string, options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk>;
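  /**
   * Wraps a `ReadableStream` reader so its binary chunks can be consumed
   * with `for await`.
   */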
  _readChunks(reader: any): {
    [Symbol.asyncIterator](): AsyncGenerator<Uint8Array<ArrayBuffer>, void, unknown>;
  };
}
//#endregion
export { Bedrock };
//# sourceMappingURL=web.d.ts.map