@langchain/community / experimental/llms/chrome_ai
Interface AILanguageModel
interface AILanguageModel {
  oncontextoverflow: (event: Event) => void;
  get maxTokens(): number;
  get temperature(): number;
  get tokensLeft(): number;
  get tokensSoFar(): number;
  get topK(): number;
  clone(options?: AILanguageModelCloneOptions): Promise<AILanguageModel>;
  countPromptTokens(
    input: AILanguageModelPromptInput,
    options?: AILanguageModelPromptOptions,
  ): Promise<number>;
  destroy(): void;
  prompt(
    input: AILanguageModelPromptInput,
    options?: AILanguageModelPromptOptions,
  ): Promise<string>;
  promptStreaming(
    input: AILanguageModelPromptInput,
    options?: AILanguageModelPromptOptions,
  ): ReadableStream;
}
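This interface describes the session object used by the experimental ChromeAI integration, i.e. a session of Chrome's built-in Prompt API. A minimal sketch of obtaining such a session; the ai.languageModel global and its create() signature are assumptions here, since the exact entry point has varied across Chrome releases:

// Sketch only: `ai.languageModel` and `create()` are assumed, not documented above.
async function createSession(): Promise<AILanguageModel> {
  const factory = (globalThis as any).ai?.languageModel;
  if (!factory) {
    throw new Error("Chrome built-in AI (Prompt API) is not available in this browser.");
  }
  return (await factory.create()) as AILanguageModel;
}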
Hierarchy
EventTarget → AILanguageModel
Index
Properties: oncontextoverflow
Accessors: maxTokens, temperature, tokensLeft, tokensSoFar, topK
Methods: clone, countPromptTokens, destroy, prompt, promptStreaming
Properties
oncontextoverflow
oncontextoverflow: (event: Event) => void
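A sketch of wiring this handler, assuming the session was created elsewhere:

function watchOverflow(session: AILanguageModel): void {
  // Fired when the session's context window overflows (per the property name).
  session.oncontextoverflow = (event: Event): void => {
    console.warn("AILanguageModel context overflowed", event);
  };
}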
Accessors
maxTokens
get maxTokens(): number
Returns number

temperature
get temperature(): number
Returns number

tokensLeft
get tokensLeft(): number
Returns number

tokensSoFar
get tokensSoFar(): number
Returns number

topK
get topK(): number
Returns number
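Together these accessors expose the session's token accounting and sampling parameters; a rough sketch of reading them:

function logSessionState(session: AILanguageModel): void {
  // Token accounting exposed by the session.
  console.log(`tokens used: ${session.tokensSoFar} of ${session.maxTokens} (left: ${session.tokensLeft})`);
  // Sampling parameters the session is using.
  console.log(`temperature=${session.temperature}, topK=${session.topK}`);
}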
Methods
clone
clone(options?: AILanguageModelCloneOptions): Promise<AILanguageModel>
Parameters
Optional options: AILanguageModelCloneOptions
Returns Promise<AILanguageModel>
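A sketch of forking a session; AILanguageModelCloneOptions is optional, and what state the clone shares with the original is left to the underlying implementation:

async function forkSession(session: AILanguageModel): Promise<AILanguageModel> {
  // Passing no AILanguageModelCloneOptions keeps the implementation defaults.
  return session.clone();
}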
countPromptTokens
countPromptTokens(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): Promise<number>
Parameters
input: AILanguageModelPromptInput
Optional options: AILanguageModelPromptOptions
Returns Promise<number>
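Combined with the tokensLeft accessor, this allows a pre-flight check of whether an input fits in the remaining context; a sketch:

async function fitsInContext(
  session: AILanguageModel,
  input: AILanguageModelPromptInput,
): Promise<boolean> {
  // Count how many tokens the input would consume before actually prompting.
  const needed = await session.countPromptTokens(input);
  return needed <= session.tokensLeft;
}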
destroy
destroy(): void
Returns void
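A sketch of releasing a session once it is no longer needed:

function disposeSession(session: AILanguageModel): void {
  // Frees the underlying model resources; reusing the session afterwards is
  // assumed to be an error.
  session.destroy();
}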
prompt
prompt(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): Promise<string>
Parameters
input: AILanguageModelPromptInput
Optional options: AILanguageModelPromptOptions
Returns Promise<string>
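A sketch of one-shot prompting; the optional AILanguageModelPromptOptions argument is passed through unchanged, and no assumptions are made here about its shape:

async function ask(
  session: AILanguageModel,
  input: AILanguageModelPromptInput,
  options?: AILanguageModelPromptOptions,
): Promise<string> {
  // Resolves with the model's complete text response.
  return session.prompt(input, options);
}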
promptStreaming
promptStreaming(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): ReadableStream
Parameters
input: AILanguageModelPromptInput
Optional options: AILanguageModelPromptOptions
Returns ReadableStream
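Because promptStreaming returns an untyped ReadableStream, a consumer reads it chunk by chunk; whether chunks are deltas or cumulative snapshots is not specified here, so this sketch simply logs them:

async function streamReply(
  session: AILanguageModel,
  input: AILanguageModelPromptInput,
): Promise<void> {
  const stream = session.promptStreaming(input);
  const reader = stream.getReader();
  try {
    // Drain the stream until the model signals completion.
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      console.log(value);
    }
  } finally {
    reader.releaseLock();
  }
}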