langchain.js
    Preparing search index...

    Interface OllamaInput

    /**
     * Input options accepted when configuring an Ollama-backed model.
     * All fields are optional; unset fields fall back to the defaults
     * noted below (where documented) or to the Ollama server's defaults.
     */
    interface OllamaInput {
        /**
         * Override the base URL requests are sent to. Only set this if your
         * Ollama instance is served from a non-standard location.
         * Default: "http://localhost:11434".
         */
        baseUrl?: string;
        embeddingOnly?: boolean;
        f16Kv?: boolean;
        /** The fetch function to use when making HTTP requests. */
        fetch?: {
            (input: RequestInfo | URL, init?: RequestInit): Promise<Response>;
            (input: string | Request | URL, init?: RequestInit): Promise<Response>;
        };
        format?: string;
        frequencyPenalty?: number;
        /** Optional HTTP headers to include in the request. */
        headers?: Headers
        | Record<string, string>;
        /** How long the model stays loaded after a request. Default: "5m". */
        keepAlive?: string | number;
        logitsAll?: boolean;
        lowVram?: boolean;
        mainGpu?: number;
        mirostat?: number;
        mirostatEta?: number;
        mirostatTau?: number;
        /** The model to use when making requests. Default: "llama3". */
        model?: string;
        // NOTE(review): the fields below appear to mirror Ollama model/runtime
        // options passed through to the server; their semantics are not
        // documented in this file — confirm against the Ollama API reference.
        numa?: boolean;
        numBatch?: number;
        numCtx?: number;
        numGpu?: number;
        numKeep?: number;
        numPredict?: number;
        numThread?: number;
        penalizeNewline?: boolean;
        presencePenalty?: number;
        repeatLastN?: number;
        repeatPenalty?: number;
        seed?: number;
        stop?: string[];
        temperature?: number;
        tfsZ?: number;
        topK?: number;
        topP?: number;
        typicalP?: number;
        useMlock?: boolean;
        useMmap?: boolean;
        vocabOnly?: boolean;
    }

    Hierarchy (View Summary)

    Implemented by

    Index

    Properties

    baseUrl?: string

    Optionally override the base URL to make requests to. This should only be set if your Ollama instance is being served from a non-standard location.

    "http://localhost:11434"
    
    embeddingOnly?: boolean
    f16Kv?: boolean
    fetch?: {
        (input: RequestInfo | URL, init?: RequestInit): Promise<Response>;
        (input: string | Request | URL, init?: RequestInit): Promise<Response>;
    }

    The fetch function to use.

    Type Declaration

      • (input: RequestInfo | URL, init?: RequestInit): Promise<Response>
      • Parameters

        • input: RequestInfo | URL
        • Optional init: RequestInit

        Returns Promise<Response>

      • (input: string | Request | URL, init?: RequestInit): Promise<Response>
      • Parameters

        • input: string | Request | URL
        • Optional init: RequestInit

        Returns Promise<Response>

    fetch
    
    format?: string
    frequencyPenalty?: number
    headers?: Headers | Record<string, string>

    Optional HTTP Headers to include in the request.

    keepAlive?: string | number
    "5m"
    
    logitsAll?: boolean
    lowVram?: boolean
    mainGpu?: number
    mirostat?: number
    mirostatEta?: number
    mirostatTau?: number
    model?: string

    The model to use when making requests.

    "llama3"
    
    numa?: boolean
    numBatch?: number
    numCtx?: number
    numGpu?: number
    numKeep?: number
    numPredict?: number
    numThread?: number
    penalizeNewline?: boolean
    presencePenalty?: number
    repeatLastN?: number
    repeatPenalty?: number
    seed?: number
    stop?: string[]
    temperature?: number
    tfsZ?: number
    topK?: number
    topP?: number
    typicalP?: number
    useMlock?: boolean
    useMmap?: boolean
    vocabOnly?: boolean