langchain.js
Preparing search index...
@langchain/community
llms/togetherai
TogetherAI
Class TogetherAI
Hierarchy (
View Summary
)
BaseDynamicToolInput<TogetherAICallOptions>
TogetherAI
Index
Constructors
constructor
Properties
lc_serializable
logprobs?
maxTokens?
model
modelName
repetitionPenalty
safetyModel?
stop?
streaming
temperature
topK
topP
inputs
Methods
_llmType
_streamResponseChunks
completionWithRetry
lc_name
Constructors
constructor
new TogetherAI(inputs: TogetherAIInputs): TogetherAI
Parameters
inputs: TogetherAIInputs
Returns TogetherAI
Properties
lc_serializable
lc_serializable: boolean = true
Optional logprobs
logprobs?: number
Optional maxTokens
maxTokens?: number
model
model: string
modelName
modelName: string
repetitionPenalty
repetitionPenalty: number = 1
Optional safetyModel
safetyModel?: string
Optional stop
stop?: string[]
streaming
streaming: boolean = false
temperature
temperature: number = 0.7
topK
topK: number = 50
topP
topP: number = 0.7
Static inputs
inputs: TogetherAIInputs
Methods
_llmType
_llmType(): string
Returns string
_streamResponseChunks
_streamResponseChunks(prompt: string, options: unknown, runManager?: any): AsyncGenerator<GenerationChunk>
Parameters
prompt: string
options: unknown
Optional runManager: any
Returns AsyncGenerator<GenerationChunk>
completionWithRetry
completionWithRetry(prompt: string, options?: unknown): Promise<any>
Parameters
prompt: string
Optional options: unknown
Returns Promise<any>
Static lc_name
lc_name(): string
Returns string
Settings
Member Visibility
Protected
Inherited
External
Theme
OS
Light
Dark
On This Page
Constructors
constructor
Properties
lc_serializable
logprobs
maxTokens
model
modelName
repetitionPenalty
safetyModel
stop
streaming
temperature
topK
topP
inputs
Methods
_llmType
_streamResponseChunks
completionWithRetry
lc_name
langchain.js
Loading...