https://aider.chat/docs/llms/lm-studio.html
https://aider.chat/docs/llms/ollama.html
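Per the linked docs, aider finds the local servers through these environment variables (values below assume the default ports, adjust if yours differ):

export LM_STUDIO_API_BASE=http://localhost:1234/v1   # LM Studio's OpenAI-compatible endpoint
export LM_STUDIO_API_KEY=dummy-key                    # LM Studio does not check the key locally, a placeholder is enough
export OLLAMA_API_BASE=http://127.0.0.1:11434         # Ollama's default endpoint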
aider --model lm_studio/qwen3-30b-a3b-2507
aider --model lm_studio/deepseek-r1-distill-qwen-7b
aider --no-show-model-warnings --no-gitignore --model lm_studio/deepseek-r1-distill-qwen-14b
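Quick sanity check that the LM Studio server is actually up and which model ids it exposes (the part after lm_studio/ has to match one of them):

curl http://localhost:1234/v1/models   # LM Studio serves an OpenAI-compatible API, this lists the loaded models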
aider --no-show-model-warnings --no-gitignore --model ollama_chat/gpt-oss:20b
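The ollama_chat/ model assumes the Ollama server is running (see the serve block further down) and that the model has been pulled first:

ollama pull gpt-oss:20b   # one-time download
ollama list               # confirm it is installed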
MCP:
git MCP server (mcp-server-git):
pip install mcp-server-git
python -m mcp_server_git
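A sketch of pointing the git MCP server at one repository, assuming mcp-server-git accepts a --repository argument (verify with --help); uvx works as an alternative runner if uv is installed:

python -m mcp_server_git --repository /path/to/your/repo
uvx mcp-server-git --repository /path/to/your/repo   # no prior pip install needed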
Ollama env + serve:
export OLLAMA_DEBUG=1
export OLLAMA_GPU_LAYERS=-1
export OLLAMA_NUM_CTX=14096
export LANG=en_US.UTF-8
export LC_ALL=en_US.UTF-8
ollama serve
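Once serve is up, these confirm the server is reachable and show what is installed/loaded:

curl http://127.0.0.1:11434/api/tags   # installed models as JSON
ollama list                            # same info via the CLI
ollama ps                              # models currently loaded in memory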
aider
aider --no-show-model-warnings --no-gitignore --model ollama_chat/gpt-oss:20b
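To let the bare aider command above pick up the same settings, they can live in aider's config file; a minimal sketch, assuming the YAML keys mirror the CLI flag names (aider's usual convention), placed in the home directory or the repo root:

cat > ~/.aider.conf.yml <<'EOF'
model: ollama_chat/gpt-oss:20b
show-model-warnings: false   # same effect as --no-show-model-warnings
gitignore: false             # same effect as --no-gitignore
EOF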
Continue config: C:\Users\popov\.continue\config.json
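For comparison, a hypothetical minimal Continue entry pointing at the same local Ollama model; the schema is assumed from Continue's classic config.json format (newer versions use config.yaml), and the heredoc overwrites the file, so merge by hand if it already has settings:

cat > "$HOME/.continue/config.json" <<'EOF'
{
  "models": [
    { "title": "gpt-oss 20b (local Ollama)", "provider": "ollama", "model": "gpt-oss:20b" }
  ]
}
EOF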