feat: Start/stop LLaMA Server from statusbar (#544)

This commit is contained in:
Rene Leonhardt 2024-05-13 18:02:22 +02:00 committed by GitHub
parent 91c7302008
commit 7c668ae143
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 248 additions and 84 deletions

View file

@ -8,7 +8,6 @@ import com.intellij.openapi.extensions.PluginId;
import com.intellij.openapi.project.Project;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.jetbrains.annotations.NotNull;
public final class CodeGPTPlugin {
@ -38,10 +37,6 @@ public final class CodeGPTPlugin {
return getPluginBasePath() + File.separator + "llama.cpp";
}
public static @NotNull String getLlamaModelsPath() {
return Paths.get(System.getProperty("user.home"), ".codegpt/models/gguf").toString();
}
public static @NotNull String getProjectIndexStorePath(@NotNull Project project) {
return getIndexStorePath() + File.separator + project.getName();
}

View file

@ -42,7 +42,7 @@ public final class LlamaServerAgent implements Disposable {
LlamaServerStartupParams params,
ServerProgressPanel serverProgressPanel,
Runnable onSuccess,
Consumer<ServerProgressPanel> onServerTerminated) {
Consumer<ServerProgressPanel> onServerStopped) {
this.activeServerProgressPanel = serverProgressPanel;
ApplicationManager.getApplication().invokeLater(() -> {
try {
@ -52,10 +52,10 @@ public final class LlamaServerAgent implements Disposable {
makeProcessHandler = new OSProcessHandler(
getMakeCommandLine(params.additionalBuildParameters()));
makeProcessHandler.addProcessListener(
getMakeProcessListener(params, onSuccess, onServerTerminated));
getMakeProcessListener(params, onSuccess, onServerStopped));
makeProcessHandler.startNotify();
} catch (ExecutionException e) {
showServerError(e.getMessage(), onServerTerminated);
showServerError(e.getMessage(), onServerStopped);
}
});
}
@ -82,7 +82,7 @@ public final class LlamaServerAgent implements Disposable {
private ProcessListener getMakeProcessListener(
LlamaServerStartupParams params,
Runnable onSuccess,
Consumer<ServerProgressPanel> onServerTerminated) {
Consumer<ServerProgressPanel> onServerStopped) {
LOG.info("Building llama project");
return new ProcessAdapter() {
@ -103,11 +103,11 @@ public final class LlamaServerAgent implements Disposable {
int exitCode = event.getExitCode();
LOG.info(format("Server build exited with code %d", exitCode));
if (stoppedByUser) {
onServerTerminated.accept(activeServerProgressPanel);
onServerStopped.accept(activeServerProgressPanel);
return;
}
if (exitCode != 0) {
showServerError(String.join(",", errorLines), onServerTerminated);
showServerError(String.join(",", errorLines), onServerStopped);
return;
}
@ -118,11 +118,10 @@ public final class LlamaServerAgent implements Disposable {
CodeGPTBundle.get("llamaServerAgent.serverBootup.description"));
startServerProcessHandler = new OSProcessHandler.Silent(getServerCommandLine(params));
startServerProcessHandler.addProcessListener(
getProcessListener(params.port(), onSuccess,
onServerTerminated));
getProcessListener(params.port(), onSuccess, onServerStopped));
startServerProcessHandler.startNotify();
} catch (ExecutionException ex) {
showServerError(ex.getMessage(), onServerTerminated);
showServerError(ex.getMessage(), onServerStopped);
}
}
};
@ -131,18 +130,18 @@ public final class LlamaServerAgent implements Disposable {
private ProcessListener getProcessListener(
int port,
Runnable onSuccess,
Consumer<ServerProgressPanel> onServerTerminated) {
Consumer<ServerProgressPanel> onServerStopped) {
return new ProcessAdapter() {
private final ObjectMapper objectMapper = new ObjectMapper();
private final List<String> errorLines = new CopyOnWriteArrayList<>();
@Override
public void processTerminated(@NotNull ProcessEvent event) {
LOG.info(format("Server terminated with code %d", event.getExitCode()));
LOG.info(format("Server stopped with code %d", event.getExitCode()));
if (stoppedByUser) {
onServerTerminated.accept(activeServerProgressPanel);
onServerStopped.accept(activeServerProgressPanel);
} else {
showServerError(String.join(",", errorLines), onServerTerminated);
showServerError(String.join(",", errorLines), onServerStopped);
}
}
@ -172,8 +171,8 @@ public final class LlamaServerAgent implements Disposable {
};
}
private void showServerError(String errorText, Consumer<ServerProgressPanel> onServerTerminated) {
onServerTerminated.accept(activeServerProgressPanel);
// Notifies the owner that the server is no longer running (so it can reset
// its UI via the supplied callback) and surfaces the error text to the user
// as a closable balloon anchored to the active progress panel.
private void showServerError(String errorText, Consumer<ServerProgressPanel> onServerStopped) {
  // Invoke the stopped-callback first so the form is re-enabled before the balloon appears.
  onServerStopped.accept(activeServerProgressPanel);
  LOG.info("Unable to start llama server:\n" + errorText);
  OverlayUtil.showClosableBalloon(errorText, MessageType.ERROR, activeServerProgressPanel);
}

View file

@ -1,6 +1,9 @@
package ee.carlrobert.codegpt.settings.service.llama;
import static ee.carlrobert.codegpt.credentials.CredentialsStore.CredentialKey.LLAMA_API_KEY;
import static ee.carlrobert.codegpt.settings.service.ServiceType.LLAMA_CPP;
import static org.apache.commons.lang3.SystemUtils.IS_OS_LINUX;
import static org.apache.commons.lang3.SystemUtils.IS_OS_MAC_OSX;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.PersistentStateComponent;
@ -10,7 +13,13 @@ import ee.carlrobert.codegpt.codecompletions.InfillPromptTemplate;
import ee.carlrobert.codegpt.completions.HuggingFaceModel;
import ee.carlrobert.codegpt.completions.llama.LlamaModel;
import ee.carlrobert.codegpt.credentials.CredentialsStore;
import ee.carlrobert.codegpt.settings.GeneralSettings;
import ee.carlrobert.codegpt.settings.service.llama.form.LlamaSettingsForm;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.jetbrains.annotations.NotNull;
@ -64,4 +73,37 @@ public class LlamaSettings implements PersistentStateComponent<LlamaSettingsStat
form.getLlamaServerPreferencesForm().getApiKey(),
CredentialsStore.getCredential(LLAMA_API_KEY));
}
/**
 * Returns {@code true} when the local llama.cpp server can be run on this
 * machine: only macOS and Linux are supported, and LLAMA_CPP must be the
 * currently selected service.
 */
public static boolean isRunnable() {
  return (IS_OS_MAC_OSX || IS_OS_LINUX)
      && GeneralSettings.getCurrentState().getSelectedService() == LLAMA_CPP;
}
/**
 * Returns {@code true} when the server is runnable on this platform/service
 * (see {@link #isRunnable()}) and the given model's file is already present
 * in the local models directory.
 */
public static boolean isRunnable(HuggingFaceModel model) {
  return isRunnable() && isModelExists(model);
}
/**
 * Checks whether the given model's file exists under the local models
 * directory returned by {@link #getLlamaModelsPath()}.
 */
public static boolean isModelExists(HuggingFaceModel model) {
  return getLlamaModelsPath().resolve(model.getFileName()).toFile().exists();
}
/**
 * Returns the directory where downloaded GGUF model files are stored:
 * {@code <user home>/.codegpt/models/gguf}.
 */
public static Path getLlamaModelsPath() {
  var userHome = System.getProperty("user.home");
  return Path.of(userHome, ".codegpt/models/gguf");
}
// Copied from LlamaModelPreferencesForm
/**
 * Resolves the model file path the server should load: either the
 * user-configured custom model path, or the selected Hugging Face model's
 * file inside the shared models directory.
 */
public String getActualModelPath() {
  return state.isUseCustomModel()
      ? state.getCustomLlamaModelPath()
      : getLlamaModelsPath() + File.separator
          + state.getHuggingFaceModel().getFileName();
}
/**
 * Splits a comma-separated parameter string into an unmodifiable list of
 * trimmed, non-blank entries.
 */
public static List<String> getAdditionalParametersList(String additionalParameters) {
  var tokens = additionalParameters.split(",");
  return Arrays.stream(tokens)
      .map(String::trim)
      .filter(token -> !token.isBlank())
      .toList();
}
}

View file

@ -1,5 +1,7 @@
package ee.carlrobert.codegpt.settings.service.llama.form;
import static ee.carlrobert.codegpt.settings.service.llama.LlamaSettings.getLlamaModelsPath;
import static ee.carlrobert.codegpt.settings.service.llama.LlamaSettings.isModelExists;
import static java.lang.String.format;
import static java.util.Collections.emptyMap;
@ -15,7 +17,6 @@ import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.ui.TextBrowseFolderListener;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.ui.panel.ComponentPanelBuilder;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.ui.EnumComboBoxModel;
import com.intellij.ui.components.AnActionLink;
import com.intellij.ui.components.JBLabel;
@ -23,7 +24,6 @@ import com.intellij.ui.components.JBRadioButton;
import com.intellij.util.ui.FormBuilder;
import com.intellij.util.ui.JBUI;
import ee.carlrobert.codegpt.CodeGPTBundle;
import ee.carlrobert.codegpt.CodeGPTPlugin;
import ee.carlrobert.codegpt.codecompletions.InfillPromptTemplate;
import ee.carlrobert.codegpt.completions.HuggingFaceModel;
import ee.carlrobert.codegpt.completions.llama.LlamaModel;
@ -37,7 +37,6 @@ import java.awt.CardLayout;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.io.File;
import java.util.Map;
import javax.swing.Box;
import javax.swing.BoxLayout;
@ -100,7 +99,7 @@ public class LlamaModelPreferencesForm {
.filter(model -> model.getParameterSize() == llm.getParameterSize())
.toList();
huggingFaceComboBoxModel.addAll(selectableModels);
huggingFaceComboBoxModel.setSelectedItem(selectableModels.get(0));
huggingFaceComboBoxModel.setSelectedItem(llm);
downloadModelActionLinkWrapper = new JPanel(new BorderLayout());
downloadModelActionLinkWrapper.setBorder(JBUI.Borders.emptyLeft(2));
downloadModelActionLinkWrapper.add(
@ -116,7 +115,10 @@ public class LlamaModelPreferencesForm {
var modelSizeComboBoxModel = new DefaultComboBoxModel<ModelSize>();
var initialModelSizes = llamaModel.getSortedUniqueModelSizes();
modelSizeComboBoxModel.addAll(initialModelSizes);
modelSizeComboBoxModel.setSelectedItem(initialModelSizes.get(0));
var selectedModelSize = initialModelSizes.stream()
.filter(ms -> ms.size() == llm.getParameterSize())
.findFirst().orElse(initialModelSizes.get(0));
modelSizeComboBoxModel.setSelectedItem(selectedModelSize);
var modelComboBoxModel = new EnumComboBoxModel<>(LlamaModel.class);
modelComboBox = createModelComboBox(modelComboBoxModel, llamaModel, modelSizeComboBoxModel);
modelComboBox.setEnabled(!llamaServerAgent.isServerRunning());
@ -194,7 +196,7 @@ public class LlamaModelPreferencesForm {
public String getActualModelPath() {
return isUseCustomLlamaModel()
? getCustomLlamaModelPath()
: CodeGPTPlugin.getLlamaModelsPath() + File.separator + getSelectedModel().getFileName();
: getLlamaModelsPath().resolve(getSelectedModel().getFileName()).toString();
}
private JPanel createFormPanelCards() {
@ -386,11 +388,6 @@ public class LlamaModelPreferencesForm {
return browseButton;
}
private boolean isModelExists(HuggingFaceModel model) {
return FileUtil.exists(
CodeGPTPlugin.getLlamaModelsPath() + File.separator + model.getFileName());
}
private AnActionLink createCancelDownloadLink(
JBLabel progressLabel,
JPanel actionLinkWrapper,

View file

@ -1,6 +1,7 @@
package ee.carlrobert.codegpt.settings.service.llama.form;
import static ee.carlrobert.codegpt.credentials.CredentialsStore.CredentialKey.LLAMA_API_KEY;
import static ee.carlrobert.codegpt.settings.service.llama.LlamaSettings.isModelExists;
import static ee.carlrobert.codegpt.ui.UIUtil.createComment;
import static ee.carlrobert.codegpt.ui.UIUtil.createForm;
import static ee.carlrobert.codegpt.ui.UIUtil.withEmptyLeftBorder;
@ -9,7 +10,6 @@ import com.intellij.icons.AllIcons.Actions;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.util.SystemInfoRt;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.ui.PortField;
import com.intellij.ui.TitledSeparator;
import com.intellij.ui.components.JBLabel;
@ -21,20 +21,17 @@ import com.intellij.util.ui.FormBuilder;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UI;
import ee.carlrobert.codegpt.CodeGPTBundle;
import ee.carlrobert.codegpt.CodeGPTPlugin;
import ee.carlrobert.codegpt.codecompletions.InfillPromptTemplate;
import ee.carlrobert.codegpt.completions.HuggingFaceModel;
import ee.carlrobert.codegpt.completions.llama.LlamaServerAgent;
import ee.carlrobert.codegpt.completions.llama.LlamaServerStartupParams;
import ee.carlrobert.codegpt.completions.llama.PromptTemplate;
import ee.carlrobert.codegpt.credentials.CredentialsStore;
import ee.carlrobert.codegpt.credentials.CredentialsStore.CredentialKey;
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings;
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettingsState;
import ee.carlrobert.codegpt.ui.OverlayUtil;
import ee.carlrobert.codegpt.ui.UIUtil;
import ee.carlrobert.codegpt.ui.UIUtil.RadioButtonWithLayout;
import java.io.File;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import javax.swing.JButton;
@ -254,7 +251,7 @@ public class LlamaServerPreferencesForm {
CodeGPTBundle.get("settingsConfigurable.service.llama.startServer.label"));
serverButton.setIcon(Actions.Execute);
activeServerProgressPanel.displayComponent(new JBLabel(
CodeGPTBundle.get("settingsConfigurable.service.llama.progress.serverTerminated"),
CodeGPTBundle.get("settingsConfigurable.service.llama.progress.serverStopped"),
Actions.Cancel,
SwingConstants.LEADING));
});
@ -293,11 +290,6 @@ public class LlamaServerPreferencesForm {
return true;
}
private boolean isModelExists(HuggingFaceModel model) {
return FileUtil.exists(
CodeGPTPlugin.getLlamaModelsPath() + File.separator + model.getFileName());
}
private void enableForm(JButton serverButton, ServerProgressPanel progressPanel) {
setFormEnabled(true);
serverButton.setText(
@ -358,10 +350,7 @@ public class LlamaServerPreferencesForm {
}
public List<String> getListOfAdditionalParameters() {
return Arrays.stream(additionalParametersField.getText().split(","))
.map(String::trim)
.filter(s -> !s.isBlank())
.toList();
return LlamaSettings.getAdditionalParametersList(additionalParametersField.getText());
}
public String getAdditionalBuildParameters() {
@ -369,10 +358,7 @@ public class LlamaServerPreferencesForm {
}
public List<String> getListOfAdditionalBuildParameters() {
return Arrays.stream(additionalBuildParametersField.getText().split(","))
.map(String::trim)
.filter(s -> !s.isBlank())
.toList();
return LlamaSettings.getAdditionalParametersList(additionalBuildParametersField.getText());
}
public PromptTemplate getPromptTemplate() {

View file

@ -31,15 +31,38 @@ import org.jetbrains.annotations.NotNull;
public class OverlayUtil {
public static final String NOTIFICATION_GROUP_ID = "CodeGPT Notification Group";
public static final String NOTIFICATION_GROUP_STICKY_ID = "CodeGPT Notification Group Sticky";
private OverlayUtil() {
}
public static Notification getDefaultNotification(String content, NotificationType type) {
return new Notification("CodeGPT Notification Group", "CodeGPT", content, type);
return new Notification(NOTIFICATION_GROUP_ID, "CodeGPT", content, type);
}
public static void showNotification(String content, NotificationType type) {
Notifications.Bus.notify(getDefaultNotification(content, type));
public static Notification getStickyNotification(String content, NotificationType type) {
return new Notification(NOTIFICATION_GROUP_STICKY_ID, "CodeGPT", content, type);
}
/**
 * Shows a balloon notification with INFORMATION severity and returns the
 * {@link Notification} so the caller can expire it later.
 */
public static Notification showNotification(String content) {
  return showNotification(content, NotificationType.INFORMATION);
}
/**
 * Shows a regular (non-sticky) balloon notification of the given type and
 * returns the {@link Notification} instance so the caller can expire it later.
 */
public static Notification showNotification(String content, NotificationType type) {
  var notification = getDefaultNotification(content, type);
  Notifications.Bus.notify(notification);
  return notification;
}
/**
 * Shows a sticky notification (posted to the sticky notification group) with
 * INFORMATION severity and returns it so the caller can expire it later.
 */
public static Notification stickyNotification(String content) {
  return stickyNotification(content, NotificationType.INFORMATION);
}
/**
 * Shows a sticky notification (posted to the sticky notification group) of
 * the given type and returns it so the caller can expire it programmatically.
 */
public static Notification stickyNotification(String content, NotificationType type) {
  var notification = getStickyNotification(content, type);
  Notifications.Bus.notify(notification);
  return notification;
}
public static int showDeleteConversationDialog() {

View file

@ -0,0 +1,94 @@
package ee.carlrobert.codegpt.actions
import com.intellij.notification.Notification
import com.intellij.openapi.actionSystem.ActionManager
import com.intellij.openapi.actionSystem.ActionUpdateThread
import com.intellij.openapi.actionSystem.AnActionEvent
import com.intellij.openapi.components.service
import com.intellij.openapi.project.DumbAwareAction
import ee.carlrobert.codegpt.CodeGPTBundle
import ee.carlrobert.codegpt.completions.llama.LlamaServerAgent
import ee.carlrobert.codegpt.completions.llama.LlamaServerStartupParams
import ee.carlrobert.codegpt.settings.GeneralSettings
import ee.carlrobert.codegpt.settings.service.ServiceType.LLAMA_CPP
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings.getAdditionalParametersList
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings.isRunnable
import ee.carlrobert.codegpt.settings.service.llama.form.ServerProgressPanel
import ee.carlrobert.codegpt.ui.OverlayUtil.showNotification
import ee.carlrobert.codegpt.ui.OverlayUtil.stickyNotification
import java.util.function.Consumer
/**
 * Start or stop server (if selected model exists) showing notifications.
 *
 * Abstract status-bar action; the [startServer] flag baked into each concrete
 * subclass ([StartServerAction] / [StopServerAction]) decides which direction
 * the action toggles the local LLaMA server.
 */
abstract class LlamaServerToggleActions(
    private val startServer: Boolean
) : DumbAwareAction() {

    companion object {
        // Expires the notification held by the *opposite* action so that at
        // most one start/stop notification is visible at any time.
        fun expireOtherNotification(start: Boolean) {
            (ActionManager.getInstance().getAction(
                if (start) "statusbar.stopServer" else "statusbar.startServer"
            ) as LlamaServerToggleActions).apply {
                this.notification?.expire()
                this.notification = null
            }
        }
    }

    // Last notification shown by this action; kept so it can be expired when
    // the server state changes again.
    var notification: Notification? = null

    override fun actionPerformed(e: AnActionEvent) {
        // Bail out unless LLaMA C++ is the currently selected service.
        (GeneralSettings.getCurrentState().selectedService == LLAMA_CPP).takeIf { it } ?: return
        notification?.expire()
        expireOtherNotification(startServer)
        val llamaServerAgent = service<LlamaServerAgent>()
        if (startServer) {
            // Sticky notification: stays visible while the (potentially slow)
            // build + startup sequence runs.
            notification = stickyNotification(CodeGPTBundle.get("settingsConfigurable.service.llama.progress.startingServer"))
            val serverProgressPanel = ServerProgressPanel()
            llamaServerAgent.setActiveServerProgressPanel(serverProgressPanel)
            val settings = LlamaSettings.getInstance().state
            llamaServerAgent.startAgent(
                LlamaServerStartupParams(
                    LlamaSettings.getInstance().actualModelPath,
                    settings.contextSize,
                    settings.threads,
                    settings.serverPort,
                    getAdditionalParametersList(settings.additionalParameters),
                    getAdditionalParametersList(settings.additionalBuildParameters)
                ),
                serverProgressPanel,
                // onSuccess: swap the sticky "starting" notification for a
                // regular "running" one.
                {
                    notification?.expire()
                    notification = showNotification(CodeGPTBundle.get("settingsConfigurable.service.llama.progress.serverRunning"))
                },
                // onServerStopped: pass the Consumer directly. BUG FIX: the
                // previous code wrapped this Consumer in an outer lambda, so
                // SAM conversion produced a callback whose body merely
                // *constructed* a Consumer and discarded it — the expire/notify
                // code never ran and the "server stopped" notification was
                // never shown.
                Consumer<ServerProgressPanel> { _: ServerProgressPanel ->
                    notification?.expire()
                    notification = showNotification(CodeGPTBundle.get("settingsConfigurable.service.llama.progress.serverStopped"))
                })
        } else {
            notification = showNotification(CodeGPTBundle.get("settingsConfigurable.service.llama.progress.stoppingServer"))
            llamaServerAgent.stopAgent()
            // NOTE(review): this assumes stopAgent() returns only after the
            // process is terminated — the "stopping" notification is replaced
            // immediately. Confirm stopAgent is synchronous.
            notification?.expire()
            notification = showNotification(CodeGPTBundle.get("settingsConfigurable.service.llama.progress.serverStopped"))
        }
    }

    override fun update(e: AnActionEvent) {
        // Show "Start" only while stopped and "Stop" only while running, and
        // only when the server is runnable at all (platform + model present).
        val llamaRunnable = isRunnable(LlamaSettings.getInstance().state.huggingFaceModel)
        val serverRunning = llamaRunnable && service<LlamaServerAgent>().isServerRunning
        val toggle = llamaRunnable && serverRunning != startServer
        e.presentation.isVisible = toggle
        e.presentation.isEnabled = toggle
    }

    // Visibility/enabled checks read settings and agent state, so run updates
    // on a background thread.
    override fun getActionUpdateThread(): ActionUpdateThread {
        return ActionUpdateThread.BGT
    }
}

// Concrete actions registered in plugin.xml as statusbar.startServer and
// statusbar.stopServer.
class StartServerAction : LlamaServerToggleActions(true)

class StopServerAction : LlamaServerToggleActions(false)

View file

@ -7,8 +7,9 @@ import com.intellij.openapi.diagnostic.Logger
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.fileEditor.FileDocumentManager
import com.intellij.openapi.progress.ProgressIndicator
import com.intellij.openapi.util.io.FileUtil.createDirectory
import com.intellij.openapi.vfs.VirtualFile
import ee.carlrobert.codegpt.CodeGPTPlugin
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings.getLlamaModelsPath
import java.io.File
import java.io.FileOutputStream
import java.io.IOException
@ -30,11 +31,19 @@ object FileUtil {
private val LOG = Logger.getInstance(FileUtil::class.java)
@JvmStatic
fun createFile(directoryPath: String, fileName: String?, fileContent: String?): File {
fun createFile(directoryPath: Any, fileName: String?, fileContent: String?): File {
requireNotNull(fileContent) { "fileContent null" }
require(!fileName.isNullOrBlank()) { "fileName null or blank" }
val path = when (directoryPath) {
is Path -> directoryPath
is File -> directoryPath.toPath()
is String -> Path.of(directoryPath)
else -> throw IllegalArgumentException("directoryPath must be Path, File or String: $directoryPath")
}
try {
tryCreateDirectory(directoryPath)
tryCreateDirectory(path)
return Files.writeString(
Path.of(directoryPath, fileName),
path.resolve(fileName),
fileContent,
StandardOpenOption.CREATE
).toFile()
@ -52,12 +61,10 @@ object FileUtil {
fileSize: Long,
indicator: ProgressIndicator
) {
tryCreateDirectory(CodeGPTPlugin.getLlamaModelsPath())
tryCreateDirectory(getLlamaModelsPath())
Channels.newChannel(url.openStream()).use { readableByteChannel ->
FileOutputStream(
CodeGPTPlugin.getLlamaModelsPath() + File.separator + fileName
).use { fileOutputStream ->
FileOutputStream(getLlamaModelsPath().resolve(fileName).toFile()).use { fileOutputStream ->
val buffer = ByteBuffer.allocateDirect(1024 * 10)
while (readableByteChannel.read(buffer) != -1) {
if (indicator.isCanceled) {
@ -78,22 +85,15 @@ object FileUtil {
return FileDocumentManager.getInstance().getFile(editor.document)
}
private fun tryCreateDirectory(directoryPath: String) {
private fun tryCreateDirectory(directoryPath: Path) {
Files.exists(directoryPath).takeUnless { it } ?: return
try {
if (!com.intellij.openapi.util.io.FileUtil.exists(directoryPath)) {
if (!com.intellij.openapi.util.io.FileUtil.createDirectory(
Path.of(directoryPath).toFile()
)
) {
throw IOException("Failed to create directory: $directoryPath")
}
}
createDirectory(directoryPath.toFile())
} catch (e: IOException) {
throw RuntimeException("Failed to create directory", e)
}
}.takeIf { it } ?: throw RuntimeException("Failed to create directory: $directoryPath")
}
@JvmStatic
fun getFileExtension(filename: String?): String {
val pattern = Pattern.compile("[^.]+$")

View file

@ -67,6 +67,7 @@
<toolWindow id="CodeGPT" icon="ee.carlrobert.codegpt.Icons.DefaultSmall" anchor="right"
factoryClass="ee.carlrobert.codegpt.toolwindow.ProjectToolWindowFactory"/>
<notificationGroup id="CodeGPT Notification Group" displayType="BALLOON" key="notification.group.name"/>
<notificationGroup id="CodeGPT Notification Group Sticky" displayType="STICKY_BALLOON" key="notification.group.sticky.name"/>
<statusBarWidgetFactory order="first" id="ee.carlrobert.codegpt.statusbar.widget"
implementation="ee.carlrobert.codegpt.statusbar.CodeGPTStatusBarWidgetFactory"/>
</extensions>
@ -114,29 +115,45 @@
<override-text place="popup" use-text-of-place="MainMenu"/>
</action>
<action
id="codegpt.enableCompletions"
text="Enable Completions"
id="statusbar.enableCompletions"
class="ee.carlrobert.codegpt.actions.EnableCompletionsAction">
<keyboard-shortcut first-keystroke="ctrl shift alt c" keymap="$default"/>
<override-text place="MainMenu" text="Enable Completions"/>
<override-text place="MainMenu"/>
<override-text place="popup" use-text-of-place="MainMenu"/>
</action>
<action
id="codegpt.disableCompletions"
text="Disable Completions"
id="statusbar.disableCompletions"
class="ee.carlrobert.codegpt.actions.DisableCompletionsAction">
<keyboard-shortcut first-keystroke="ctrl shift alt c" keymap="$default"/>
<override-text place="MainMenu" text="Disable Completions"/>
<override-text place="MainMenu"/>
<override-text place="popup" use-text-of-place="MainMenu"/>
</action>
<action
id="statusbar.startServer"
class="ee.carlrobert.codegpt.actions.StartServerAction">
<keyboard-shortcut first-keystroke="ctrl shift alt s" keymap="$default"/>
<override-text place="MainMenu"/>
<override-text place="popup" use-text-of-place="MainMenu"/>
</action>
<action
id="statusbar.stopServer"
class="ee.carlrobert.codegpt.actions.StopServerAction">
<keyboard-shortcut first-keystroke="ctrl shift alt s" keymap="$default"/>
<override-text place="MainMenu"/>
<override-text place="popup" use-text-of-place="MainMenu"/>
</action>
<group id="codegpt.statusBarPopup">
<reference id="codegpt.openSettings" />
<separator/>
<reference id="codegpt.disableCompletions" />
<reference id="codegpt.enableCompletions" />
<reference id="statusbar.stopServer" />
<reference id="statusbar.startServer" />
<reference id="statusbar.disableCompletions" />
<reference id="statusbar.enableCompletions" />
</group>
<group id="CodeGPT.GenerateGitCommitMessageGroup">

View file

@ -1,5 +1,6 @@
project.label=CodeGPT
notification.group.name=CodeGPT notification group
notification.group.sticky.name=CodeGPT notification group sticky
action.generateCommitMessage.title=Generate Message
action.generateCommitMessage.description=Generate commit message
action.generateCommitMessage.serviceWarning=Messages can only be generated with OpenAI, Custom OpenAI, or Azure service
@ -12,8 +13,18 @@ action.includeFilesInContext.dialog.repeatableContext.label=Repeatable context:
action.includeFilesInContext.dialog.restoreToDefaults.label=Restore to Defaults
action.openSettings.title=Open Settings
action.openSettings.description=Open CodeGPT settings
action.statusbar.enableCompletions=Enable Completions
action.statusbar.disableCompletions=Disable Completions
action.statusbar.startServer.text=Start Server
action.statusbar.startServer.description=Start LLaMA Server
action.statusbar.startServer.MainMenu.text=Start Server
action.statusbar.stopServer.text=Stop Server
action.statusbar.stopServer.description=Stop LLaMA Server
action.statusbar.stopServer.MainMenu.text=Stop Server
action.statusbar.enableCompletions.text=Enable Completions
action.statusbar.enableCompletions.description=Enable Code Completions
action.statusbar.enableCompletions.MainMenu.text=Enable Completions
action.statusbar.disableCompletions.text=Disable Completions
action.statusbar.disableCompletions.description=Disable Code Completions
action.statusbar.disableCompletions.MainMenu.text=Disable Completions
settings.displayName=CodeGPT: Settings
settings.openaiQuotaExceeded=OpenAI quota exceeded.
settingsConfigurable.displayName.label=Display name:
@ -75,7 +86,7 @@ settingsConfigurable.service.llama.baseHost.comment=URL to existing LLama server
settingsConfigurable.service.llama.startServer.label=Start server
settingsConfigurable.service.llama.stopServer.label=Stop server
settingsConfigurable.service.llama.progress.serverRunning=Server running
settingsConfigurable.service.llama.progress.serverTerminated=Server terminated
settingsConfigurable.service.llama.progress.serverStopped=Server stopped
settingsConfigurable.service.llama.progress.stoppingServer=Stopping a server...
settingsConfigurable.service.llama.progress.startingServer=Starting a server...
settingsConfigurable.service.llama.progress.downloadingModel.title=Downloading Model
@ -214,4 +225,4 @@ shared.chatCompletions=Chat Completions
shared.codeCompletions=Code Completions
codeCompletionsForm.enableFeatureText=Enable code completions
codeCompletionsForm.maxTokensLabel=Max tokens:
codeCompletionsForm.maxTokensComment=The maximum number of tokens that will be generated in the code completion.
codeCompletionsForm.maxTokensComment=The maximum number of tokens that will be generated in the code completion.