- useOptions(options?): {
      gpt: {
          best_of?: number;
          echo?: boolean;
          frequency_penalty?: number;
          logprobs?: number;
          max_tokens?: number;
          model: string;
          n?: number;
          presence_penalty?: number;
          stop?: string | string[];
          stream?: boolean;
          temperature?: number;
          top_p?: number;
          user?: string;
      };
      instance: {
          api_key: string;
          markdown?: boolean;
      };
  }

  Returns an object of the same shape as options: a gpt block in which only model is required, and an instance block in which only api_key is required.
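
  A minimal usage sketch, assuming useOptions is exported from this package (the import path, model value, and API key placeholder below are illustrative, not part of the documented API). Only gpt.model and instance.api_key are required:

  ```ts
  // Hypothetical import path — substitute this package's actual entry point.
  import { useOptions } from "<this-package>";

  // Build the options object described above. If useOptions is a React hook,
  // call it inside a component; it is shown at top level here only for brevity.
  const options = useOptions({
    gpt: {
      model: "text-davinci-003", // example model name
      max_tokens: 256,
      temperature: 0.7,
      stop: ["\n\n"],
    },
    instance: {
      api_key: "YOUR_OPENAI_API_KEY",
      markdown: true,
    },
  });

  // The return value has the same shape as the argument.
  console.log(options.gpt.model, options.instance.markdown);
  ```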
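
  For orientation, the gpt fields use the same names as the parameters of the OpenAI completions API, and instance.api_key is an OpenAI API key. Below is a hedged sketch of how such an options object could be mapped onto a raw completions request; it illustrates where the field names come from and is not necessarily how this package performs the call. The OptionsShape interface and complete function are local illustrations, not exports of this package:

  ```ts
  // Local re-declaration of the documented options shape, for a self-contained sketch.
  interface OptionsShape {
    gpt: { model: string; max_tokens?: number; temperature?: number; [key: string]: unknown };
    instance: { api_key: string; markdown?: boolean };
  }

  // Sketch only: the endpoint and headers are standard OpenAI API usage,
  // not taken from this package's internals.
  async function complete(prompt: string, options: OptionsShape): Promise<unknown> {
    const res = await fetch("https://api.openai.com/v1/completions", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${options.instance.api_key}`,
      },
      // The gpt fields (model, max_tokens, temperature, …) form the request body.
      body: JSON.stringify({ ...options.gpt, prompt }),
    });
    return res.json();
  }
  ```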