update docs

commit 10b940559a
parent ea915ad7d7

@@ -16,6 +16,7 @@ pub(crate) fn parameters_table(
     typical_p: Option<f32>,
     repetition_penalty: Option<f32>,
     frequency_penalty: Option<f32>,
+    no_repeat_ngram_size: Option<u32>,
     watermark: bool,
     do_sample: bool,
 ) -> Table {

@@ -35,6 +36,7 @@ pub(crate) fn parameters_table(
     builder.push_record(["Typical P", &format!("{typical_p:?}")]);
     builder.push_record(["Repetition Penalty", &format!("{repetition_penalty:?}")]);
     builder.push_record(["Frequency Penalty", &format!("{frequency_penalty:?}")]);
+    builder.push_record(["No Repeat Ngram Size", &format!("{no_repeat_ngram_size:?}")]);
    builder.push_record(["Watermark", &watermark.to_string()]);
     builder.push_record(["Do Sample", &do_sample.to_string()]);
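
The new row, like its neighbours, uses `Debug` formatting, so an unset value prints as `None` instead of silently vanishing from the table. A standalone sketch of that behaviour (not code from this repo):

// How format!("{value:?}") renders the Option<u32> parameter in the table.
fn main() {
    let set: Option<u32> = Some(3);
    let unset: Option<u32> = None;
    assert_eq!(format!("{set:?}"), "Some(3)");
    assert_eq!(format!("{unset:?}"), "None");
}
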
@@ -135,6 +135,10 @@ class Parameters:
     # Penalize new tokens based on their existing frequency in the text so far,
     # decreasing the model's likelihood to repeat the same line verbatim.
     frequency_penalty: Optional[float]
+    # n-grams are groups of "n" consecutive words, characters, or tokens taken from a sequence of text. Given the
+    # sentence: "She runs fast", the bi-grams (n=2) would be ("she", "runs") and ("runs", "fast"). Set this to avoid
+    # generating the same n-grams in the completion.
+    no_repeat_ngram_size: Optional[int]
     # Whether to prepend the prompt to the generated text
     return_full_text: bool
     # Stop generating tokens if a member of `stop_sequences` is generated
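
For intuition about what the new field does at decode time: the standard no-repeat-ngram rule collects every n-gram already present in the generated sequence and bans any token whose emission would repeat one. A minimal Rust sketch of that rule (illustrative only; the function name is ours, and the real logic lives in the model server's logits processing, which this commit does not show):

use std::collections::HashMap;

// Returns the tokens that may not be emitted next: appending any of them
// would reproduce an n-gram already present in `tokens`.
// `n` plays the role of `no_repeat_ngram_size`.
fn banned_tokens(tokens: &[u32], n: usize) -> Vec<u32> {
    if n == 0 || tokens.len() < n {
        return Vec::new();
    }
    // Map each (n-1)-token prefix to the tokens that have followed it.
    let mut followers: HashMap<&[u32], Vec<u32>> = HashMap::new();
    for window in tokens.windows(n) {
        followers.entry(&window[..n - 1]).or_default().push(window[n - 1]);
    }
    // The current (n-1)-token suffix determines which completions are banned.
    let suffix = &tokens[tokens.len() - (n - 1)..];
    followers.get(suffix).cloned().unwrap_or_default()
}

fn main() {
    // With n = 2 after [5, 6, 5], emitting 6 would repeat the bi-gram (5, 6).
    assert_eq!(banned_tokens(&[5, 6, 5], 2), vec![6]);
}
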
@@ -1379,6 +1379,15 @@
           "nullable": true,
           "exclusiveMinimum": -2
         },
+        "no_repeat_ngram_size": {
+          "type": "integer",
+          "format": "int32",
+          "description": "If set to int > 0, all ngrams of that size can only occur once.",
+          "default": "null",
+          "example": 12,
+          "nullable": true,
+          "minimum": 0
+        },
         "grammar": {
           "allOf": [
             {
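
A request exercising the new schema field might look like the sketch below. The `inputs`/`parameters` envelope follows the usual TGI `/generate` request shape; only `no_repeat_ngram_size` comes from this commit:

use serde_json::json;

fn main() {
    // Sketch of a /generate request body that sets the new field:
    // no trigram may occur twice in the completion.
    let body = json!({
        "inputs": "She runs fast",
        "parameters": {
            "max_new_tokens": 64,
            "no_repeat_ngram_size": 3
        }
    });
    println!("{body}");
}
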
@@ -95,6 +95,8 @@ message NextTokenChooserParameters {
     float repetition_penalty = 7;
     /// frequency penalty
     float frequency_penalty = 9;
+    /// no_repeat_ngram_size
+    uint32 no_repeat_ngram_size = 12;
     /// token watermarking using "A Watermark for Large Language Models"
     bool watermark = 8;
     /// grammar (applied if not empty)
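
The new field takes a fresh tag (12) rather than renumbering its neighbours, which keeps the change wire-compatible with older clients. Assuming prost-generated Rust bindings for this message, setting it would look like the sketch below (the stand-in struct mimics the generated type so the example compiles on its own):

// Stand-in for the prost-generated message type (the real code is generated
// from the proto file; this subset is just for illustration).
#[derive(Default, Debug)]
struct NextTokenChooserParameters {
    repetition_penalty: f32,
    frequency_penalty: f32,
    no_repeat_ngram_size: u32,
    watermark: bool,
}

fn main() {
    // proto3 scalars default to 0, which reads as "disabled": per the
    // OpenAPI description the constraint only applies when the value is > 0.
    let params = NextTokenChooserParameters {
        no_repeat_ngram_size: 3,
        ..Default::default()
    };
    println!("{params:?}");
}
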
@@ -687,6 +687,8 @@ pub struct ValidParameters {
     pub repetition_penalty: f32,
     /// / frequency penalty
     pub frequency_penalty: f32,
+    /// / no_repeat_ngram_size
+    pub no_repeat_ngram_size: u32,
     /// / token watermarking using "A Watermark for Large Language Models"
     pub watermark: bool,
     /// / grammar (applied if not empty)
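
The request side exposes the knob as `Option<u32>` while `ValidParameters` stores a plain `u32`, so validation presumably collapses an unset value to 0, the "disabled" state. A hypothetical sketch of that mapping (the actual validation code is not part of this diff):

// Hypothetical helper: map the optional request field to the validated
// struct's u32, where 0 means "no n-gram constraint".
fn validate_no_repeat_ngram_size(requested: Option<u32>) -> u32 {
    requested.unwrap_or(0)
}

fn main() {
    assert_eq!(validate_no_repeat_ngram_size(Some(3)), 3);
    assert_eq!(validate_no_repeat_ngram_size(None), 0);
}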