ProxyAI/src/main/java/ee/carlrobert/codegpt/toolwindow/ModelIconLabel.java
Carl-Robert 45908e69df
#178 - Add support for running local LLMs via LLaMA C/C++ port (#249)
* Initial implementation of integrating llama.cpp to run LLaMA models locally

* Move submodule

* Copy llama submodule to bundle

* Support for downloading models from IDE

* Code cleanup

* Store port field

* Replace service selection radio group with dropdown

* Add quantization support + other fixes

* Add option to override host

* Fix override host handler

* Disable port field when override host enabled

* Design updates

* Fix llama settings configuration, design changes, clean up code

* Improve You.com coupon design

* Add new Phind model and help tooltip

* Fetch you.com subscription

* Add CodeBooga model, fix downloadable model selection

* Chat history support

* Code refactoring, minor bug fixes

* UI updates, several bug fixes, removed code llama python model

* Code cleanup, enable llama port only on macOS

* Change downloaded gguf models path

* Move some of the labels to codegpt bundle

* Minor fixes

* Remove ToRA model, add help texts

* Fix test

* Modify description
2023-11-03 12:00:24 +02:00

39 lines
1.1 KiB
Java

package ee.carlrobert.codegpt.toolwindow;
import com.intellij.ui.components.JBLabel;
import com.intellij.util.ui.JBFont;
import ee.carlrobert.codegpt.Icons;
import ee.carlrobert.llm.client.openai.completion.chat.OpenAIChatCompletionModel;
import java.util.NoSuchElementException;
import javax.swing.SwingConstants;
public class ModelIconLabel extends JBLabel {

  /**
   * Creates a small bold label showing the LLM provider's icon alongside a
   * human-readable model name.
   *
   * @param clientCode provider identifier, e.g. {@code "chat.completion"} (OpenAI),
   *                   {@code "azure.chat.completion"}, {@code "llama.chat.completion"},
   *                   or {@code "you.chat.completion"}; null-safe (no icon is set for
   *                   null or unrecognized codes)
   * @param modelCode  raw model identifier; rendered via its enum description when it
   *                   is a known OpenAI chat model, otherwise shown verbatim
   */
  public ModelIconLabel(String clientCode, String modelCode) {
    // You.com entries are rendered icon-only: no model text, font, or alignment.
    if ("you.chat.completion".equals(clientCode)) {
      setIcon(Icons.YouIcon);
      return;
    }

    // Client codes are mutually exclusive, so an else-if chain is equivalent to
    // the original independent ifs but avoids redundant comparisons.
    if ("chat.completion".equals(clientCode)) {
      setIcon(Icons.OpenAIIcon);
    } else if ("azure.chat.completion".equals(clientCode)) {
      setIcon(Icons.AzureIcon);
    } else if ("llama.chat.completion".equals(clientCode)) {
      setIcon(Icons.LlamaIcon);
    }
    // Unrecognized client codes keep the default (no) icon but still show text.

    setText(formatModelName(modelCode));
    setFont(JBFont.small().asBold());
    setHorizontalAlignment(SwingConstants.LEADING);
  }

  /**
   * Maps a model code to its display name.
   *
   * @param modelCode raw model identifier
   * @return the friendly description for a known OpenAI chat model; otherwise the
   *         code itself (e.g. for locally-run llama models)
   */
  private static String formatModelName(String modelCode) {
    try {
      return OpenAIChatCompletionModel.findByCode(modelCode).getDescription();
    } catch (NoSuchElementException e) {
      // findByCode throws for codes outside the OpenAI enum — fall back to raw code.
      return modelCode;
    }
  }
}