ProxyAI/src/main/java/ee/carlrobert/codegpt/completions/llama/PromptTemplate.java
Carl-Robert 45908e69df
#178 - Add support for running local LLMs via LLaMA C/C++ port (#249)
* Initial implementation of integrating llama.cpp to run LLaMA models locally

* Move submodule

* Copy llama submodule to bundle

* Support for downloading models from IDE

* Code cleanup

* Store port field

* Replace service selection radio group with dropdown

* Add quantization support + other fixes

* Add option to override host

* Fix override host handler

* Disable port field when override host enabled

* Design updates

* Fix llama settings configuration, design changes, clean up code

* Improve You.com coupon design

* Add new Phind model and help tooltip

* Fetch you.com subscription

* Add CodeBooga model, fix downloadable model selection

* Chat history support

* Code refactoring, minor bug fixes

* UI updates, several bug fixes, removed code llama python model

* Code cleanup, enable llama port only on macOS

* Change downloaded gguf models path

* Move some of the labels to codegpt bundle

* Minor fixes

* Remove ToRA model, add help texts

* Fix test

* Modify description
2023-11-03 12:00:24 +02:00

113 lines
3.2 KiB
Java

package ee.carlrobert.codegpt.completions.llama;
import ee.carlrobert.codegpt.conversations.message.Message;
import java.util.List;
/**
 * Prompt templates for chatting with local LLaMA-family models served via llama.cpp.
 * Each constant formats the optional system prompt, the prior conversation
 * {@code history}, and the current {@code userPrompt} into the markup the
 * corresponding model family was fine-tuned on.
 */
public enum PromptTemplate {

  /** OpenAI Chat Markup Language ({@code <|im_start|>}/{@code <|im_end|>} role blocks). */
  CHAT_ML("Chat Markup Language (ChatML)") {
    @Override
    public String buildPrompt(String systemPrompt, String userPrompt, List<Message> history) {
      StringBuilder prompt = new StringBuilder();
      if (systemPrompt != null && !systemPrompt.isEmpty()) {
        prompt.append("<|im_start|>system\n")
            .append(systemPrompt)
            .append("<|im_end|>\n");
      }
      for (Message message : history) {
        prompt.append("<|im_start|>user\n")
            .append(message.getPrompt())
            .append("<|im_end|>\n")
            .append("<|im_start|>assistant\n")
            .append(message.getResponse())
            .append("<|im_end|>\n");
      }
      // Final user turn is left open (no trailing newline after <|im_end|>) so the
      // model generates the assistant turn next.
      return prompt.append("<|im_start|>user\n")
          .append(userPrompt)
          .append("<|im_end|>")
          .toString();
    }
  },

  /** Llama-2 chat style ({@code [INST]} / {@code <<SYS>>} markup). */
  LLAMA("Llama") {
    @Override
    public String buildPrompt(String systemPrompt, String userPrompt, List<Message> history) {
      // NOTE(review): the official Llama-2 chat format nests <<SYS>>...<</SYS>>
      // inside the first [INST] block; here it is emitted before it. Left as-is
      // to preserve existing behavior — confirm against Meta's template before changing.
      StringBuilder prompt = new StringBuilder();
      if (systemPrompt != null && !systemPrompt.isEmpty()) {
        prompt.append("<<SYS>>")
            .append(systemPrompt)
            .append("<</SYS>>\n");
      }
      for (Message message : history) {
        prompt.append("[INST]")
            .append(message.getPrompt())
            .append("[/INST]\n")
            .append(message.getResponse()).append("\n");
      }
      return prompt.append("[INST]")
          .append(userPrompt)
          .append("[/INST]")
          .toString();
    }
  },

  /**
   * ToRA style ({@code <|user|>} / {@code <|assistant|>} turns). The ToRA format
   * has no system role, so {@code systemPrompt} is unused — presumably intentional.
   */
  TORA("ToRA") {
    @Override
    public String buildPrompt(String systemPrompt, String userPrompt, List<Message> history) {
      StringBuilder prompt = new StringBuilder();
      for (Message message : history) {
        prompt.append("<|user|>\n")
            .append(message.getPrompt())
            .append("\n<|assistant|>\n")
            .append(message.getResponse()).append("\n");
      }
      return prompt.append("<|user|>\n")
          .append(userPrompt)
          .append("\n<|assistant|>")
          .toString();
    }
  },

  /** Alpaca/Vicuna instruction style ({@code ### Instruction:} / {@code ### Response:}). */
  ALPACA("Alpaca/Vicuna") {
    @Override
    public String buildPrompt(String systemPrompt, String userPrompt, List<Message> history) {
      StringBuilder prompt = new StringBuilder();
      // Fix: honor a caller-supplied system prompt (it was previously silently
      // dropped). Callers passing null/empty still get the canonical Alpaca preamble.
      if (systemPrompt != null && !systemPrompt.isEmpty()) {
        prompt.append(systemPrompt).append("\n\n");
      } else {
        prompt.append(
            "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\n");
      }
      for (Message message : history) {
        // Fix: the standard Alpaca template uses "### Instruction:" with a trailing
        // colon, consistent with "### Response:" below (the colon was missing).
        prompt.append("### Instruction:\n")
            .append(message.getPrompt())
            .append("\n\n")
            .append("### Response:\n")
            .append(message.getResponse())
            .append("\n\n");
      }
      return prompt.append("### Instruction:\n")
          .append(userPrompt)
          .append("\n\n")
          .append("### Response:\n")
          .toString();
    }
  };

  // Human-readable name shown in the settings dropdown (see toString()).
  private final String label;

  PromptTemplate(String label) {
    this.label = label;
  }

  /**
   * Builds the full prompt string for this template.
   *
   * @param systemPrompt optional system instruction; may be {@code null} or empty
   * @param userPrompt   the current user message (always appended last)
   * @param history      prior user/assistant exchanges, oldest first
   * @return the formatted prompt ready to send to the model
   */
  public abstract String buildPrompt(String systemPrompt, String userPrompt, List<Message> history);

  @Override
  public String toString() {
    return label;
  }
}