langchain.js
    Preparing search index...

    Interface BaseLanguageModelInterface<RunOutput, CallOptions>

    Base interface implemented by all runnables. Used for cross-compatibility between different versions of LangChain core.

    Should not change on patch releases.

    /**
     * Base interface implemented by all runnables. Used for cross-compatibility
     * between different versions of LangChain core. Should not change on patch
     * releases.
     *
     * @typeParam RunOutput - Type produced by `invoke`/`stream`/`batch`.
     * @typeParam CallOptions - Per-call options; must extend
     *   `BaseLanguageModelCallOptions`.
     */
    interface BaseLanguageModelInterface<
        RunOutput = any,
        CallOptions extends
            BaseLanguageModelCallOptions = BaseLanguageModelCallOptions,
    > {
        /** LangChain serialization flag (presumably marks this class as serializable — confirm against core docs). */
        lc_serializable: boolean;
        /** Keys of the call options this model recognizes (inferred from name — confirm). */
        get callKeys(): string[];
        /** Namespaced identifier for this class (LangChain `lc_id` convention — confirm). */
        get lc_id(): string[];
        /** Get the identifying parameters of the LLM. */
        _identifyingParams(): Record<string, any>;
        /** String tag identifying the LLM implementation. */
        _llmType(): string;
        /** String tag identifying the model type. */
        _modelType(): string;
        /**
         * Run the model on a batch of inputs.
         * Overload: when `returnExceptions` is `false` (or omitted), the
         * promise resolves to plain `RunOutput` values only.
         */
        batch(
            inputs: BaseLanguageModelInput[],
            options?: Partial<CallOptions> | Partial<CallOptions>[],
            batchOptions?: RunnableBatchOptions & { returnExceptions?: false },
        ): Promise<RunOutput[]>;
        /**
         * Overload: when `returnExceptions` is `true`, each element of the
         * result is either a `RunOutput` or an `Error`.
         */
        batch(
            inputs: BaseLanguageModelInput[],
            options?: Partial<CallOptions> | Partial<CallOptions>[],
            batchOptions?: RunnableBatchOptions & { returnExceptions: true },
        ): Promise<(Error | RunOutput)[]>;
        /** Catch-all overload for batch options not known at compile time. */
        batch(
            inputs: BaseLanguageModelInput[],
            options?: Partial<CallOptions> | Partial<CallOptions>[],
            batchOptions?: RunnableBatchOptions,
        ): Promise<(Error | RunOutput)[]>;
        /** Generate an `LLMResult` from an array of prompt values. */
        generatePrompt(
            promptValues: BasePromptValueInterface[],
            options?: string[] | CallOptions,
            callbacks?: Callbacks,
        ): Promise<LLMResult>;
        /** Name of this runnable, with the optional suffix appended (presumably — confirm). */
        getName(suffix?: string): string;
        /** Count the number of tokens in the given message content. */
        getNumTokens(content: MessageContent): Promise<number>;
        /** Run the model on a single input and resolve with its output. */
        invoke(
            input: BaseLanguageModelInput,
            options?: Partial<CallOptions>,
        ): Promise<RunOutput>;
        /**
         * Predict a completion for a plain-text prompt.
         * @deprecated Use .invoke() instead. Will be removed in 0.2.0.
         */
        predict(
            text: string,
            options?: string[] | CallOptions,
            callbacks?: Callbacks,
        ): Promise<string>;
        /** Run the model on a list of messages and resolve with a single response message. */
        predictMessages(
            messages: BaseMessage<MessageStructure, MessageType>[],
            options?: string[] | CallOptions,
            callbacks?: Callbacks,
        ): Promise<BaseMessage<MessageStructure, MessageType>>;
        /** Serialize this model to a `SerializedLLM` representation. */
        serialize(): SerializedLLM;
        /** Run the model on a single input and resolve with a readable stream of output chunks. */
        stream(
            input: BaseLanguageModelInput,
            options?: Partial<CallOptions>,
        ): Promise<IterableReadableStreamInterface<RunOutput>>;
        /** Transform an async generator of inputs into an async generator of outputs. */
        transform(
            generator: AsyncGenerator<BaseLanguageModelInput>,
            options: Partial<CallOptions>,
        ): AsyncGenerator<RunOutput>;
    }

    Type Parameters

    Hierarchy (View Summary)

    Implemented by

    Index

    Properties

    lc_serializable: boolean

    Accessors

    • get callKeys(): string[]

      Returns string[]

    • get lc_id(): string[]

      Returns string[]

    Methods

    • Get the identifying parameters of the LLM.

      Returns Record<string, any>

    • Returns string

    • Returns string

    • Parameters

      • Optional suffix: string

      Returns string

    • Parameters

      • content: MessageContent

      Returns Promise<number>

    • Parameters

      Returns Promise<string>

      Use .invoke() instead. Will be removed in 0.2.0.

    • Returns SerializedLLM