reorganize
parent e6fa6e4547, commit 6d0a5c1064
@@ -16,9 +16,9 @@ interpreter --api_base http://192.168.0.137:1234/v1 --api_key "" --model openai/local
 192.168.0.137
 
 
-################################# GROQ
+# ################################ GROQ ########################## working
 export OPENAI_API_KEY=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
-interpreter -y --api_base https://api.groq.com/openai/v1 --model gemma-7b-it ## mixtral-8x7b-32768 # gemma-7b-it # llama2-70b-4096
+interpreter -y --api_base https://api.groq.com/openai/v1 --model llama2-70b-4096 ## mixtral-8x7b-32768 # gemma-7b-it # llama2-70b-4096
 ##
 # Load a model, start the server, and run this example in your terminal
 # Choose between streaming and non-streaming mode by setting the "stream" field
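Note (not part of the diff above): a minimal sketch for sanity-checking the Groq OpenAI-compatible endpoint before pointing interpreter at it. The key below is a placeholder, the request shape is the standard OpenAI chat-completions format that Groq exposes, and the model name mirrors the one selected in the new line.

# Sketch: confirm the Groq endpoint answers with the chosen model.
export OPENAI_API_KEY=gsk_your_groq_key_here   # placeholder, not a real key
curl https://api.groq.com/openai/v1/chat/completions \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer $OPENAI_API_KEY" \
  -d '{
    "model": "llama2-70b-4096",
    "messages": [{ "role": "user", "content": "Say hello." }]
  }'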
_doc/scripts/aider.sh (new file, 34 lines)
@@ -0,0 +1,34 @@
+#!/bin/bash
+# python -m pip install git+https://github.com/paul-gauthier/aider.git
+
+source ~/miniconda3/etc/profile.d/conda.sh # Adjust the path as per your Conda installation
+conda activate aider
+export OPENAI_API_KEY=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
+
+
+# aider --no-auto-commits
+
+OPENAI_API_BASE=https://api.deepseek.com/v1
+OPENAI_API_KEY=sk-99df7736351f4536bd72cd64a416318a
+AIDER_MODEL=deepseek-coder #deepseek-coder, deepseek-chat
+aider --openai-api-base https://api.deepseek.com/v1 --openai-api-key sk-99df7736351f4536bd72cd64a416318a --model deepseek-coder
+aider --openai-api-base 'https://api.groq.com/openai/v1' --openai-api-key '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' --model 'llama2-70b-4096'
+
+usage: aider [-h] [--openai-api-key OPENAI_API_KEY] [--model MODEL] [--skip-model-availability-check SKIP_MODEL_AVAILABILITY_CHECK] [--4] [--4turbo] [--35turbo] [--voice-language VOICE_LANGUAGE]
+             [--openai-api-base OPENAI_API_BASE] [--openai-api-type OPENAI_API_TYPE] [--openai-api-version OPENAI_API_VERSION] [--openai-api-deployment-id OPENAI_API_DEPLOYMENT_ID]
+             [--openai-organization-id OPENAI_ORGANIZATION_ID] [--openrouter] [--edit-format EDIT_FORMAT] [--map-tokens MAP_TOKENS] [--input-history-file INPUT_HISTORY_FILE]
+             [--chat-history-file CHAT_HISTORY_FILE] [--dark-mode] [--light-mode] [--pretty | --no-pretty] [--stream | --no-stream] [--user-input-color USER_INPUT_COLOR]
+             [--tool-output-color TOOL_OUTPUT_COLOR] [--tool-error-color TOOL_ERROR_COLOR] [--assistant-output-color ASSISTANT_OUTPUT_COLOR] [--code-theme CODE_THEME] [--show-diffs]
+             [--git | --no-git] [--gitignore | --no-gitignore] [--aiderignore AIDERIGNORE] [--auto-commits | --no-auto-commits] [--dirty-commits | --no-dirty-commits] [--dry-run | --no-dry-run]
+             [--commit] [--version] [--check-update] [--skip-check-update] [--apply FILE] [--yes] [-v] [--show-repo-map] [--message COMMAND] [--message-file MESSAGE_FILE] [--encoding ENCODING]
+             [-c CONFIG_FILE]
+
+export OPENAI_API_KEY=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
+interpreter -y --api_base https://api.groq.com/openai/v1 --model gemma-7b-it ## mixtral-8x7b-32768 # gemma-7b-it # llama2-70b-4096
+
+
+# Setup OpenRouter access
+export OPENAI_API_KEY=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
+export OPENAI_API_BASE=https://api.groq.com/openai/v1
+# For example, run aider with Claude 3 Opus using the diff editing format
+aider --model llama2-70b-4096 --edit-format diff
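Note (not part of the new file): the DeepSeek OPENAI_API_BASE, OPENAI_API_KEY, and AIDER_MODEL lines above are plain assignments, not exports, so aider only sees those values through the explicit flags on the following line. Below is a minimal sketch of the exported-variable form, assuming aider picks up OPENAI_API_KEY/OPENAI_API_BASE and the AIDER_MODEL override from the environment, as the assignments appear to intend; the key is a placeholder.

#!/bin/bash
# Sketch: environment-variable form of the DeepSeek setup.
export OPENAI_API_BASE=https://api.deepseek.com/v1
export OPENAI_API_KEY=sk-your-deepseek-key-here   # placeholder, not a real key
export AIDER_MODEL=deepseek-coder                 # or deepseek-chat
aider --no-auto-commits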
(deleted file, 47 lines)
@@ -1,47 +0,0 @@
-
-
-
-interpreter --api_base http://192.168.0.11:11434/v1/
-
-interpreter --model "gpt-3.5-turbo" # mistral
-interpreter --model "mistral" --api_base http://192.168.0.11:11434/v1/
-
-Mac/Linux: 'export OPENAI_API_KEY=your-key-here',
-Windows: 'setx OPENAI_API_KEY your-key-here' then restart terminal.
-interpreter --local
-
-interpreter --api_base http://192.168.0.11:11434/v1 --api_key "" --model openai/local
-
-
-
-interpreter --api_base http://192.168.0.137:1234/v1 --api_key "" --model openai/local
-192.168.0.137
-
-
-# Load a model, start the server, and run this example in your terminal
-# Choose between streaming and non-streaming mode by setting the "stream" field
-
-curl http://192.168.0.11:11434/v1/chat/completions \
-  -H "Content-Type: application/json" \
-  -d '{
-    "messages": [
-      { "role": "system", "content": "Always answer in rhymes." },
-      { "role": "user", "content": "Introduce yourself." }
-    ],
-    "temperature": 0.7,
-    "max_tokens": -1,
-    "stream": false
-  }'
-
-
-curl http://192.168.0.137:1234/v1/chat/completions \
-  -H "Content-Type: application/json" \
-  -d '{
-    "messages": [
-      { "role": "system", "content": "Always answer in rhymes." },
-      { "role": "user", "content": "Introduce yourself." }
-    ],
-    "temperature": 0.7,
-    "max_tokens": -1,
-    "stream": false
-  }'
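Note (not part of the deleted file): the "stream" comment above is only exercised in non-streaming form; for completeness, a sketch of the streaming variant of the same request against the LM Studio endpoint, identical except for "stream": true and assuming the server at 192.168.0.137:1234 is still running.

curl http://192.168.0.137:1234/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{
    "messages": [
      { "role": "user", "content": "Introduce yourself." }
    ],
    "temperature": 0.7,
    "max_tokens": -1,
    "stream": true
  }'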
(deleted file, 12 lines)
@@ -1,12 +0,0 @@
-#!/bin/bash
-source ~/miniconda3/etc/profile.d/conda.sh # Adjust the path as per your Conda installation
-conda activate aider
-export OPENAI_API_KEY=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
-
-
-# aider --no-auto-commits
-
-OPENAI_API_BASE=https://api.deepseek.com/v1
-OPENAI_API_KEY=sk-99df7736351f4536bd72cd64a416318a
-AIDER_MODEL=deepseek-coder #deepseek-coder, deepseek-chat
-aider --openai-api-base https://api.deepseek.com/v1 --openai-api-key sk-99df7736351f4536bd72cd64a416318a --model deepseek-coder