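# Helix languages.toml: Rust with rust-analyzer plus lsp-ai, Jinja templates
# with jinja-lsp, and a dedicated "ai-chat" scratch language for chatting
# through lsp-ai. API keys are read from the CODESTRAL_API_KEY and
# ANTHROPIC_API_KEY environment variables.
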
[[language]]
name = "rust"
auto-format = true
roots = [
  "Cargo.toml",
  "Cargo.lock"
]

[language.auto-pairs]
'(' = ')'
'{' = '}'
'[' = ']'
'"' = '"'
'`' = '`'

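# Everything under [language-server.rust-analyzer.config] is passed through to
# rust-analyzer as its settings; the dotted keys below are just TOML shorthand
# for the nested rust-analyzer.inlayHints.* options.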
[language-server.rust-analyzer]
command = "rust-analyzer"

[language-server.rust-analyzer.config]
inlayHints.bindingModeHints.enable = false
inlayHints.closingBraceHints.minLines = 10
inlayHints.closureReturnTypeHints.enable = "with_block"
inlayHints.discriminantHints.enable = "fieldless"
inlayHints.lifetimeElisionHints.enable = "skip_trivial"
inlayHints.typeHints.hideClosureInitialization = false

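# jinja-lsp is pointed at the Jinja templates in ./templates and the Rust
# backend sources in ./src; `timeout` is the language-server request timeout
# in seconds.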
[language-server.jinja-lsp]
command = "jinja-lsp"
config = { templates = "./templates", backend = ["./src"], lang = "rust" }
timeout = 5

[[language]]
name = "jinja"
language-servers = ["jinja-lsp"]

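# "ai-chat" is a small custom language used purely as a chat scratch buffer:
# any file matching the glob below opens with lsp-ai attached, so the !C chat
# trigger configured further down works inside it.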
[[language]]
name = "ai-chat"
language-servers = ["lsp-ai"]
scope = "my-lsp-ai"
file-types = [ { glob = "my-ai-chat.md" } ]


##############################
## Configuration for lsp-ai ##
##############################

[language-server.lsp-ai]
command = "lsp-ai"

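# Memory backend: lsp-ai's file_store, left with its default settings.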
[language-server.lsp-ai.config.memory]
file_store = { }

# model1 is referenced by the chat action below. The original entry used
# type = "mistral_fim" pointed at Mistral's chat-completions URL; mistral_fim
# expects the FIM endpoint, so the chat-capable OpenAI-compatible type is
# assumed here instead (Mistral's chat API is OpenAI-compatible).
[language-server.lsp-ai.config.models.model1]
type = "open_ai"
# chat_endpoint = "https://api.mistral.ai/v1/chat/completions"
chat_endpoint = "https://codestral.mistral.ai/v1/chat/completions"
model = "codestral-latest"
auth_token_env_var_name = "CODESTRAL_API_KEY"

[language-server.lsp-ai.config.models.model2]
type = "mistral_fim"
# fim_endpoint = "https://api.mistral.ai/v1/fim/completions"
fim_endpoint = "https://codestral.mistral.ai/v1/fim/completions"
model = "codestral-latest"
auth_token_env_var_name = "CODESTRAL_API_KEY"
# auth_token_env_var_name = "MISTRAL_API_KEY"

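# model3 (Anthropic) is defined but not referenced below; point the chat
# block's `model` at "model3" to chat with Claude instead of Codestral.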
[language-server.lsp-ai.config.models.model3]
type = "anthropic"
chat_endpoint = "https://api.anthropic.com/v1/messages"
model = "claude-3-5-sonnet-20240620"
auth_token_env_var_name = "ANTHROPIC_API_KEY"

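# In-editor chat, roughly: type the trigger text ("!C") followed by a message
# in a buffer lsp-ai is attached to, then run the "Chat" code action; lsp-ai
# sends the conversation to the configured model and writes the reply back
# into the buffer.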
[[language-server.lsp-ai.config.chat]]
trigger = "!C"
action_display_name = "Chat"
model = "model1"

[language-server.lsp-ai.config.chat.parameters]
max_context = 4096
max_tokens = 1024
system = "You are a code assistant chatbot. The user will ask you for coding assistance and you will do your best to answer succinctly and accurately."

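# Inline completion uses model2 (Codestral FIM); max_tokens = 64 keeps
# suggestions short, and max_context caps how much surrounding text is sent
# with each request.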
[language-server.lsp-ai.config.completion]
model = "model2"

[language-server.lsp-ai.config.completion.parameters]
max_tokens = 64
max_context = 1024

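# Second [[language]] entry for rust: Helix merges user language entries by
# name, so in practice this just adds lsp-ai alongside rust-analyzer for the
# rust configuration defined at the top of this file.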
[[language]]
name = "rust"
language-servers = ["rust-analyzer", "lsp-ai"]