Add support for some extended parameters of llama.cpp (top_k, top_p, min_p, and repeat_penalty) (#311)

* Add support for some extended parameters of llama.cpp (top_k, top_p, min_p, and repeat_penalty)

Added 'top_k', 'top_p', 'min_p', and 'repeat_penalty' fields to the llama.cpp request configuration. The default values for these fields match the defaults of llama.cpp, so if left untouched they do not affect the model's response to the request.

* Bump llm-client

---------

Co-authored-by: Carl-Robert Linnupuu <carlrobertoh@gmail.com>
This commit is contained in:
Aliet Expósito García 2023-12-18 04:53:23 -05:00 committed by GitHub
parent 52c1b5d68c
commit 9d83107dd5
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 142 additions and 1 deletions

View file

@ -22,6 +22,10 @@ public class ConfigurationState implements PersistentStateComponent<Configuratio
private String commitMessagePrompt = COMPLETION_COMMIT_MESSAGE_PROMPT;
private int maxTokens = 1000;
private double temperature = 0.1;
// llama.cpp sampling parameters. The defaults below mirror llama.cpp's own
// defaults, so leaving them untouched does not change the model's responses.
private int topK = 40;
private double topP = 0.9;
private double minP = 0.05;
private double repeatPenalty = 1.1;
private boolean checkForPluginUpdates = true;
private boolean createNewChatOnEachAction;
private boolean ignoreGitCommitTokenLimit;
@ -76,6 +80,38 @@ public class ConfigurationState implements PersistentStateComponent<Configuratio
this.temperature = temperature;
}
/**
 * Returns the configured {@code top_k} llama.cpp request parameter.
 *
 * @return the top-k value (llama.cpp default: 40)
 */
public int getTopK() {
return topK;
}
/**
 * Sets the {@code top_k} llama.cpp request parameter.
 *
 * @param topK the top-k value to use (llama.cpp default: 40)
 */
public void setTopK(int topK) {
this.topK = topK;
}
/**
 * Returns the configured {@code top_p} llama.cpp request parameter.
 *
 * @return the top-p value (llama.cpp default: 0.9)
 */
public double getTopP() {
return topP;
}
/**
 * Sets the {@code top_p} llama.cpp request parameter.
 *
 * @param topP the top-p value to use (llama.cpp default: 0.9)
 */
public void setTopP(double topP) {
this.topP = topP;
}
/**
 * Returns the configured {@code min_p} llama.cpp request parameter.
 *
 * @return the min-p value (llama.cpp default: 0.05)
 */
public double getMinP() {
return minP;
}
/**
 * Sets the {@code min_p} llama.cpp request parameter.
 *
 * @param minP the min-p value to use (llama.cpp default: 0.05)
 */
public void setMinP(double minP) {
this.minP = minP;
}
/**
 * Returns the configured {@code repeat_penalty} llama.cpp request parameter.
 *
 * @return the repeat-penalty value (llama.cpp default: 1.1)
 */
public double getRepeatPenalty() {
return repeatPenalty;
}
/**
 * Sets the {@code repeat_penalty} llama.cpp request parameter.
 *
 * @param repeatPenalty the repeat-penalty value to use (llama.cpp default: 1.1)
 */
public void setRepeatPenalty(double repeatPenalty) {
this.repeatPenalty = repeatPenalty;
}
/**
 * Returns whether a new chat should be created for each action.
 *
 * @return {@code true} if a new chat is created on each action
 */
public boolean isCreateNewChatOnEachAction() {
return createNewChatOnEachAction;
}