feat: custom -f response formatting text in config

This commit is contained in:
Toby Padilla 2023-06-23 15:15:31 -05:00
parent fe198ef271
commit 23b408d108
4 changed files with 19 additions and 12 deletions

View file

@ -186,10 +186,8 @@ file.
`-f`, `--format`, `MODS_FORMAT`
LLMs are very good at generating their response in Markdown format. They
can even organize their content naturally with headers, bullet lists... Use
this option to append the phrase "Format the response as Markdown." to the
prompt.
Ask the LLM to format the response as markdown. You can edit the text passed to
the LLM by running `mods -s` and changing the `format-text` value.
#### Max Tokens

View file

@ -51,7 +51,7 @@ func (apis *APIs) UnmarshalYAML(node *yaml.Node) error {
// Config holds the main configuration and is mapped to the YAML settings file.
type Config struct {
Model string `yaml:"default-model" env:"MODEL"`
Markdown bool `yaml:"format" env:"FORMAT"`
Format bool `yaml:"format" env:"FORMAT"`
Quiet bool `yaml:"quiet" env:"QUIET"`
MaxTokens int `yaml:"max-tokens" env:"MAX_TOKENS"`
MaxInputChars int `yaml:"max-input-chars" env:"MAX_INPUT_CHARS"`
@ -63,6 +63,7 @@ type Config struct {
MaxRetries int `yaml:"max-retries" env:"MAX_RETRIES"`
Fanciness uint `yaml:"fanciness" env:"FANCINESS"`
StatusText string `yaml:"status-text" env:"STATUS_TEXT"`
FormatText string `yaml:"format-text" env:"FORMAT_TEXT"`
APIs APIs `yaml:"apis"`
API string
Models map[string]Model
@ -82,7 +83,8 @@ func newConfig() (Config, error) {
"apis": "Aliases and endpoints for OpenAI compatible REST API.",
"model": "Default model (gpt-3.5-turbo, gpt-4, ggml-gpt4all-j...).",
"max-input-chars": "Default character limit on input to model.",
"format": "Format response as markdown.",
"format": "Ask for the response to be formatted as markdown (default).",
"format-text": "Text to append when using the -f flag.",
"prompt": "Include the prompt from the arguments and stdin, truncate stdin to specified number of lines.",
"prompt-args": "Include the prompt from the arguments in the response.",
"quiet": "Quiet mode (hide the spinner while loading).",
@ -166,7 +168,7 @@ func newConfig() (Config, error) {
flag.StringVarP(&c.Model, "model", "m", c.Model, help["model"])
flag.StringVarP(&c.API, "api", "a", c.API, help["api"])
flag.BoolVarP(&c.Markdown, "format", "f", c.Markdown, help["format"])
flag.BoolVarP(&c.Format, "format", "f", c.Format, help["format"])
flag.IntVarP(&c.IncludePrompt, "prompt", "P", c.IncludePrompt, help["prompt"])
flag.BoolVarP(&c.IncludePromptArgs, "prompt-args", "p", c.IncludePromptArgs, help["prompt-args"])
flag.BoolVarP(&c.Quiet, "quiet", "q", c.Quiet, help["quiet"])
@ -184,6 +186,9 @@ func newConfig() (Config, error) {
flag.Usage = usage
flag.CommandLine.SortFlags = false
flag.Parse()
if c.Format && c.FormatText == "" {
c.FormatText = "Format the response as markdown without enclosing backticks."
}
c.Prefix = strings.Join(flag.Args(), " ")
return c, nil

View file

@ -3,6 +3,8 @@ package main
const configTemplate = `
# {{ index .Help "model" }}
default-model: gpt-4
# {{ index .Help "format-text" }}
format-text: Format the response as markdown without enclosing backticks.
# {{ index .Help "max-input-chars" }}
max-input-chars: 12250
# {{ index .Help "format" }}

12
mods.go
View file

@ -18,8 +18,6 @@ import (
openai "github.com/sashabaranov/go-openai"
)
const markdownPrefix = "Format the response as Markdown."
type state int
const (
@ -174,7 +172,11 @@ func (m *Mods) FormattedOutput() string {
}
if m.Config.IncludePromptArgs || m.Config.IncludePrompt != 0 {
out = fmt.Sprintf(prefixFormat, m.Config.Prefix, out)
prefix := m.Config.Prefix
if m.Config.Format {
prefix = fmt.Sprintf("%s %s", prefix, m.Config.FormatText)
}
out = fmt.Sprintf(prefixFormat, prefix, out)
}
return out
@ -279,8 +281,8 @@ func (m *Mods) startCompletionCmd(content string) tea.Cmd {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
prefix := cfg.Prefix
if cfg.Markdown {
prefix = fmt.Sprintf("%s %s", prefix, markdownPrefix)
if cfg.Format {
prefix = fmt.Sprintf("%s %s", prefix, cfg.FormatText)
}
if prefix != "" {
content = strings.TrimSpace(prefix + "\n\n" + content)