langchain.js

Class Ollama

    Deprecated: the Ollama LLM has moved to the @langchain/ollama package. Install it with npm install @langchain/ollama and import the class from there.
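
    For reference, the replacement install and import described above look roughly like this (a minimal sketch; the named export is assumed to match this class):

    // npm install @langchain/ollama
    import { Ollama } from "@langchain/ollama";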

    Class that represents the Ollama language model. It extends the base LLM class and implements the OllamaInput interface.

    Example

    // Legacy import path for this class; see the deprecation notice above.
    import { Ollama } from "@langchain/community/llms/ollama";

    const ollama = new Ollama({
      baseUrl: "http://api.example.com",
      model: "llama2",
    });

    // Streaming translation from English to German
    const stream = await ollama.stream(
      `Translate "I love programming" into German.`
    );

    const chunks = [];
    for await (const chunk of stream) {
      chunks.push(chunk);
    }

    console.log(chunks.join(""));
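
    Because Ollama extends the base LLM class, the standard Runnable methods are also available. A minimal non-streaming sketch, reusing the ollama instance from the example above:

    // Single-shot generation; resolves to the model's text output.
    const translation = await ollama.invoke(
      `Translate "I love programming" into German.`
    );
    console.log(translation);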

    Hierarchy

    • LLM
      • Ollama

    Implements

    • OllamaInput


    Constructors

    • new Ollama(fields): Ollama

      Parameters

      • fields: any

      Returns Ollama

    Properties

    baseUrl: string = "http://localhost:11434"
    embeddingOnly?: boolean
    f16KV?: boolean
    format?: any
    frequencyPenalty?: number
    headers?: Record<string, string>
    keepAlive: string = "5m"
    lc_serializable: boolean = true
    logitsAll?: boolean
    lowVram?: boolean
    mainGpu?: number
    mirostat?: number
    mirostatEta?: number
    mirostatTau?: number
    model: string = "llama2"
    numBatch?: number
    numCtx?: number
    numGpu?: number
    numGqa?: number
    numKeep?: number
    numPredict?: number
    numThread?: number
    penalizeNewline?: boolean
    presencePenalty?: number
    repeatLastN?: number
    repeatPenalty?: number
    ropeFrequencyBase?: number
    ropeFrequencyScale?: number
    stop?: string[]
    temperature?: number
    tfsZ?: number
    topK?: number
    topP?: number
    typicalP?: number
    useMLock?: boolean
    useMMap?: boolean
    vocabOnly?: boolean
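
    Most of these properties correspond to Ollama model and sampling options and can be passed to the constructor. A short configuration sketch; the values below are purely illustrative:

    const tunedOllama = new Ollama({
      baseUrl: "http://localhost:11434", // the default shown above
      model: "llama2",
      temperature: 0.7,
      numCtx: 4096,
      keepAlive: "5m",
      stop: ["\n\n"],
    });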

    Methods

    • _llmType(): string

      Returns string

    • _streamResponseChunks(prompt, options, runManager): AsyncGenerator<GenerationChunk>

      Parameters

      • prompt: string
      • options: unknown
      • Optional runManager: any

      Returns AsyncGenerator<GenerationChunk>

    • invocationParams(options?)

      Parameters

      • Optional options: unknown

      Returns {
          format: any;
          images: any;
          keep_alive: string;
          model: string;
          options: {
              embedding_only: undefined | boolean;
              f16_kv: undefined | boolean;
              frequency_penalty: undefined | number;
              logits_all: undefined | boolean;
              low_vram: undefined | boolean;
              main_gpu: undefined | number;
              mirostat: undefined | number;
              mirostat_eta: undefined | number;
              mirostat_tau: undefined | number;
              num_batch: undefined | number;
              num_ctx: undefined | number;
              num_gpu: undefined | number;
              num_gqa: undefined | number;
              num_keep: undefined | number;
              num_predict: undefined | number;
              num_thread: undefined | number;
              penalize_newline: undefined | boolean;
              presence_penalty: undefined | number;
              repeat_last_n: undefined | number;
              repeat_penalty: undefined | number;
              rope_frequency_base: undefined | number;
              rope_frequency_scale: undefined | number;
              stop: any;
              temperature: undefined | number;
              tfs_z: undefined | number;
              top_k: undefined | number;
              top_p: undefined | number;
              typical_p: undefined | number;
              use_mlock: undefined | boolean;
              use_mmap: undefined | boolean;
              vocab_only: undefined | boolean;
          };
      }
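
    The returned object mirrors the camelCase properties above as snake_case keys under options, which is the shape the Ollama HTTP API expects. A small illustrative sketch, assuming the ollama instance from the class example:

    const params = ollama.invocationParams();
    console.log(params.model);               // "llama2"
    console.log(params.options.num_ctx);     // value of numCtx, or undefined if unset
    console.log(params.options.temperature); // value of temperature, or undefined if unset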

    • static lc_name(): string

      Returns string