Compare commits

...

39 Commits

Author SHA1 Message Date
Dobromir Popov
d6a5389a07 wip 2025-09-05 14:41:43 +03:00
Dobromir Popov
34b095d6ff refactoring 2025-09-05 01:15:58 +03:00
Dobromir Popov
48060c360f pool uses proxy 2025-09-05 00:57:22 +03:00
Dobromir Popov
fec5f35cce latest fixes 2025-09-05 00:10:46 +03:00
Dobromir Popov
1bcf7109cf real mining 2025-09-04 21:06:37 +03:00
Dobromir Popov
02c23db39f notes 2025-09-02 19:29:37 +03:00
Dobromir Popov
9be9241daa fix hashrates dash 2025-09-02 13:50:08 +03:00
Dobromir Popov
89e13e91d6 dashboard 2025-09-02 13:21:43 +03:00
Dobromir Popov
1e9a56356e wehb dash! 2025-09-02 12:19:49 +03:00
Dobromir Popov
b3ec402a2e reward distributions 2025-09-02 12:16:56 +03:00
Dobromir Popov
3dd0e55fde address validations 2025-09-02 12:04:28 +03:00
Dobromir Popov
0c8e4989e4 pool worker individual addresses 2025-09-02 12:04:16 +03:00
Dobromir Popov
8bdf9bf9eb stratum working, implement mining pool, support "extranonce" 2025-09-02 11:54:15 +03:00
Dobromir Popov
e816c04c8b stratum protocol proxy 2025-09-02 11:24:21 +03:00
Dobromir Popov
8855429502 thread count solo mine 2025-09-02 11:21:34 +03:00
Dobromir Popov
55909111e1 solo address param 2025-09-02 10:55:45 +03:00
Dobromir Popov
18697e5651 rincoin node wip 2025-09-02 09:55:24 +03:00
Dobromir Popov
0b058ff7d5 rin node 2025-09-02 03:12:05 +03:00
Dobromir Popov
21cd39068a rin mining working 2025-09-02 02:47:05 +03:00
Dobromir Popov
b41889a670 move 2025-09-02 02:16:03 +03:00
Dobromir Popov
0690149510 more scripts 2025-09-02 02:15:10 +03:00
Dobromir Popov
d63591bde9 xmrocean 2025-09-01 14:49:15 +03:00
Dobromir Popov
b902966cb6 best 2025-09-01 14:46:26 +03:00
Dobromir Popov
41434ddc3a mining, cleanup 2025-09-01 14:34:36 +03:00
Dobromir Popov
d6ce6e0870 toolboxes 2025-09-01 14:33:58 +03:00
Dobromir Popov
2bb1c17453 working windows on dockurr 2025-09-01 13:25:05 +03:00
Dobromir Popov
61f52d2c65 windows.yml builds 2025-09-01 00:43:42 +03:00
Dobromir Popov
dae3c933e2 Add dockur-windows files 2025-08-31 19:31:29 +03:00
Dobromir Popov
6d8f9eb67e notes; new windows yml 2025-08-31 19:28:24 +03:00
Dobromir Popov
8746bed388 add linux resx for AMD AI 2025-08-28 22:09:21 +03:00
Dobromir Popov
6a736965be Merge branch 'master' of http://git.d-popov.com/popov/scripts 2025-08-27 22:26:16 +03:00
Dobromir Popov
5dcce058e0 added windows to desktop envs 2025-08-27 22:26:14 +03:00
f875dc4c83 Add portainer-compose-stacks/windows.md 2025-08-27 19:17:27 +00:00
5ee78c9386 add docker rootdir option 2025-08-27 19:16:07 +00:00
Dobromir Popov
4c8d887e78 mcp 2025-08-26 19:05:18 +03:00
Dobromir Popov
89bbe30286 Web_SearchApplications2 revert latest changes 2025-08-20 23:56:56 +03:00
Dobromir Popov
8ba178241c fix SQL compile 2025-08-20 22:27:33 +03:00
Dobromir Popov
61d0f986b1 GW Web_SearchApplications2 fix 2025-08-20 22:15:12 +03:00
Dobromir Popov
3993248a76 misc, GW SP 2 2025-08-20 22:09:38 +03:00
44 changed files with 5614 additions and 30 deletions

3
.gitignore vendored
View File

@@ -1 +1,4 @@
.aider*
AI/MCP/*
.fuse_hidde*
*.pyc

View File

@@ -1,31 +1,29 @@
{
  "mcpServers": {
    "github.com/modelcontextprotocol/servers/tree/main/src/git": {
      "command": "uvx",
      "args": ["mcp-server-git"],
      "disabled": false,
      "autoApprove": []
    },
    "github.com/modelcontextprotocol/servers/tree/main/src/filesystem": {
      "command": "npx",
      "args": [
        "-y",
        "@modelcontextprotocol/server-filesystem@0.5.1",
        "C:\\Users\\popov\\Documents",
        "D:\\DEV\\workspace"
      ],
      "disabled": false,
      "autoApprove": []
    },
    "github.com/zcaceres/fetch-mcp": {
      "command": "node",
      "args": ["C:\\Users\\popov\\Documents\\Cline\\MCP\\fetch-mcp\\dist\\index.js"],
      "disabled": false,
      "autoApprove": []
    }
  }
}

View File

@@ -31,5 +31,11 @@ aider
aider --no-show-model-warnings --no-gitignore --model ollama_chat/gpt-oss:20b
export GROQ_API_KEY=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
setx GROQ_API_KEY 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
aider --no-show-model-warnings --no-gitignore --list-models groq/
aider --no-show-model-warnings --no-gitignore --model groq/openai/gpt-oss-120b
continue config: C:\Users\popov\.continue\config.json

2
AI/cursor.md Normal file
View File

@@ -0,0 +1,2 @@
Rules not valid on Linux:
We are on a Windows 11 host with PowerShell as the default shell. We have full administrative control. Do not try to use Linux commands like 'tail' or other Unix-specific tools.

4
MINE/cmds Normal file
View File

@@ -0,0 +1,4 @@
root@db-NucBox-EVO-X2:/home/db/Downloads/SRBMiner-Multi-2-9-6# ./SRBMiner-MULTI --algorithm randomx --pool xmr-us-east1.nanopool.org:14444 --wallet bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j --disable-gpu --cpu-threads 28

245
MINE/gBench.sh Normal file
View File

@@ -0,0 +1,245 @@
#!/bin/bash
# GMiner algorithm test script - save as test_gminer.sh
# Default docker image (can be overridden with parameter)
DOCKER_IMAGE=${1:-"amdopencl"}
# amd-strix-halo-llama-rocm
#amd-strix-halo-llama-vulkan-radv
# amd-strix-halo-llama-vulkan-amdvlk
BTC_WALLET="bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j"
# Algorithm to coin mapping
declare -A ALGO_COINS=(
["ethash"]="ETH"
["etchash"]="ETC"
["autolykos2"]="ERG"
["ergo"]="ERG"
["equihash125_4"]="ZEL"
["equihash144_5"]="ZEL"
["equihash192_7"]="ZEL"
["equihash210_9"]="ZEL"
["beamhash"]="BEAM"
["cuckaroo29"]="GRIN"
["cuckatoo32"]="GRIN"
["flux"]="FLUX"
["octopus"]="CFX"
)
# Function to fetch current cryptocurrency prices
fetch_prices() {
echo "Fetching current cryptocurrency prices..."
# Use CoinGecko API to get current prices
local api_url="https://api.coingecko.com/api/v3/simple/price?ids=ethereum,ethereum-classic,ergo,zelcash,beam,grin,flux,conflux&vs_currencies=usd"
# Try to fetch prices with timeout and fallback
local prices_json=$(timeout 10s curl -s "$api_url" 2>/dev/null)
if [[ -z "$prices_json" ]]; then
echo "Warning: Could not fetch current prices, using fallback values"
PRICES_ETH=3500.00
PRICES_ETC=35.00
PRICES_ERG=2.50
PRICES_ZEL=0.15
PRICES_BEAM=0.05
PRICES_GRIN=0.05
PRICES_FLUX=0.80
PRICES_CFX=0.20
return
fi
# Parse JSON response and extract prices using jq if available, otherwise use grep
if command -v jq &> /dev/null; then
PRICES_ETH=$(echo "$prices_json" | jq -r '.ethereum.usd // "3500.00"')
PRICES_ETC=$(echo "$prices_json" | jq -r '.["ethereum-classic"].usd // "35.00"')
PRICES_ERG=$(echo "$prices_json" | jq -r '.ergo.usd // "2.50"')
PRICES_ZEL=$(echo "$prices_json" | jq -r '.zelcash.usd // "0.15"')
PRICES_BEAM=$(echo "$prices_json" | jq -r '.beam.usd // "0.05"')
PRICES_GRIN=$(echo "$prices_json" | jq -r '.grin.usd // "0.05"')
PRICES_FLUX=$(echo "$prices_json" | jq -r '.flux.usd // "0.80"')
PRICES_CFX=$(echo "$prices_json" | jq -r '.conflux.usd // "0.20"')
else
# Fallback to grep parsing
PRICES_ETH=$(echo "$prices_json" | grep -o '"ethereum":{"usd":[0-9]*\.[0-9]*' | grep -o '[0-9]*\.[0-9]*$' || echo "3500.00")
PRICES_ETC=$(echo "$prices_json" | grep -o '"ethereum-classic":{"usd":[0-9]*\.[0-9]*' | grep -o '[0-9]*\.[0-9]*$' || echo "35.00")
PRICES_ERG=$(echo "$prices_json" | grep -o '"ergo":{"usd":[0-9]*\.[0-9]*' | grep -o '[0-9]*\.[0-9]*$' || echo "2.50")
PRICES_ZEL=$(echo "$prices_json" | grep -o '"zelcash":{"usd":[0-9]*\.[0-9]*' | grep -o '[0-9]*\.[0-9]*$' || echo "0.15")
PRICES_BEAM=$(echo "$prices_json" | grep -o '"beam":{"usd":[0-9]*\.[0-9]*' | grep -o '[0-9]*\.[0-9]*$' || echo "0.05")
PRICES_GRIN=$(echo "$prices_json" | grep -o '"grin":{"usd":[0-9]*\.[0-9]*' | grep -o '[0-9]*\.[0-9]*$' || echo "0.05")
PRICES_FLUX=$(echo "$prices_json" | grep -o '"flux":{"usd":[0-9]*\.[0-9]*' | grep -o '[0-9]*\.[0-9]*$' || echo "0.80")
PRICES_CFX=$(echo "$prices_json" | grep -o '"conflux":{"usd":[0-9]*\.[0-9]*' | grep -o '[0-9]*\.[0-9]*$' || echo "0.20")
fi
echo "Current prices fetched successfully:"
echo " ETH: $PRICES_ETH"
echo " ETC: $PRICES_ETC"
echo " ERG: $PRICES_ERG"
echo " ZEL: $PRICES_ZEL"
echo " BEAM: $PRICES_BEAM"
echo " GRIN: $PRICES_GRIN"
echo " FLUX: $PRICES_FLUX"
echo " CFX: $PRICES_CFX"
echo ""
}
# GMiner supported algorithms
ALGOS=(
"ethash"
"etchash"
"autolykos2"
"equihash125_4"
"equihash144_5"
"equihash192_7"
"equihash210_9"
"beamhash"
"cuckaroo29"
"cuckatoo32"
"flux"
"octopus"
"ergo"
)
echo "=== GMiner Algorithm Tests ==="
echo "Using BTC wallet: $BTC_WALLET"
echo "Using Docker image: $DOCKER_IMAGE"
echo "Testing each algorithm for 30 seconds..."
echo "======================================="
# Fetch current prices at startup
fetch_prices
# Function to calculate USD value
calculate_usd_reward() {
local algo=$1
local hashrate=$2
local coin=${ALGO_COINS[$algo]}
# Get price based on coin
local price=0
case $coin in
"ETH") price=$PRICES_ETH ;;
"ETC") price=$PRICES_ETC ;;
"ERG") price=$PRICES_ERG ;;
"ZEL") price=$PRICES_ZEL ;;
"BEAM") price=$PRICES_BEAM ;;
"GRIN") price=$PRICES_GRIN ;;
"FLUX") price=$PRICES_FLUX ;;
"CFX") price=$PRICES_CFX ;;
*) echo "Unknown" && return ;;
esac
if [[ -z "$price" || "$price" == "0" ]]; then
echo "Unknown"
return
fi
# Rough calculation: hashrate * price * 0.000001 (simplified)
local usd_value=$(echo "$hashrate * $price * 0.000001" | bc -l 2>/dev/null)
printf "%.2f" $usd_value 2>/dev/null || echo "Unknown"
}
# Function to extract hashrate from miner output
extract_hashrate() {
local output="$1"
# Look for common hashrate patterns in GMiner output
local hashrate=$(echo "$output" | grep -oE "[0-9]+\.[0-9]+ [KMGT]?H/s" | tail -1 | grep -oE "[0-9]+\.[0-9]+" | tail -1)
# If no decimal found, look for integer hashrates
if [[ -z "$hashrate" ]]; then
hashrate=$(echo "$output" | grep -oE "[0-9]+ [KMGT]?H/s" | tail -1 | grep -oE "[0-9]+" | tail -1)
fi
# If still no hashrate found, look for any number followed by H/s
if [[ -z "$hashrate" ]]; then
hashrate=$(echo "$output" | grep -oE "[0-9]+[\.]?[0-9]* H/s" | tail -1 | grep -oE "[0-9]+[\.]?[0-9]*" | tail -1)
fi
# If still no hashrate, look for "Speed:" patterns
if [[ -z "$hashrate" ]]; then
hashrate=$(echo "$output" | grep -i "speed:" | tail -1 | grep -oE "[0-9]+[\.]?[0-9]*" | tail -1)
fi
# If still no hashrate, look for any number followed by H/s (case insensitive)
if [[ -z "$hashrate" ]]; then
hashrate=$(echo "$output" | grep -ioE "[0-9]+[\.]?[0-9]* h/s" | tail -1 | grep -oE "[0-9]+[\.]?[0-9]*" | tail -1)
fi
echo "$hashrate"
}
for algo in "${ALGOS[@]}"; do
echo ""
echo "Testing: $algo"
echo "------------------------"
case $algo in
"ethash")
output=$(sudo docker exec -it $DOCKER_IMAGE timeout 35s bash -c "/mnt/dl/gminer/miner --algo $algo --server eth.2miners.com:2020 --user '$BTC_WALLET' --pass x" 2>&1)
;;
"etchash")
output=$(sudo docker exec -it $DOCKER_IMAGE timeout 35s bash -c "/mnt/dl/gminer/miner --algo $algo --server etc.2miners.com:1010 --user '$BTC_WALLET' --pass x" 2>&1)
;;
"autolykos2"|"ergo")
output=$(sudo docker exec -it $DOCKER_IMAGE timeout 35s bash -c "/mnt/dl/gminer/miner --algo autolykos2 --server ergo.2miners.com:8888 --user '$BTC_WALLET' --pass x" 2>&1)
;;
"equihash125_4")
output=$(sudo docker exec -it $DOCKER_IMAGE timeout 35s bash -c "/mnt/dl/gminer/miner --algo $algo --server zel.2miners.com:9090 --user '$BTC_WALLET' --pass x" 2>&1)
;;
"equihash144_5")
output=$(sudo docker exec -it $DOCKER_IMAGE timeout 35s bash -c "/mnt/dl/gminer/miner --algo $algo --server zel.2miners.com:9090 --user '$BTC_WALLET' --pass x" 2>&1)
;;
"equihash192_7")
output=$(sudo docker exec -it $DOCKER_IMAGE timeout 35s bash -c "/mnt/dl/gminer/miner --algo $algo --server zel.2miners.com:9090 --user '$BTC_WALLET' --pass x" 2>&1)
;;
"equihash210_9")
output=$(sudo docker exec -it $DOCKER_IMAGE timeout 35s bash -c "/mnt/dl/gminer/miner --algo $algo --server zel.2miners.com:9090 --user '$BTC_WALLET' --pass x" 2>&1)
;;
"beamhash")
output=$(sudo docker exec -it $DOCKER_IMAGE timeout 35s bash -c "/mnt/dl/gminer/miner --algo $algo --server beam.2miners.com:5252 --user '$BTC_WALLET' --pass x" 2>&1)
;;
"cuckaroo29")
output=$(sudo docker exec -it $DOCKER_IMAGE timeout 35s bash -c "/mnt/dl/gminer/miner --algo $algo --server grin.2miners.com:3030 --user '$BTC_WALLET' --pass x" 2>&1)
;;
"cuckatoo32")
output=$(sudo docker exec -it $DOCKER_IMAGE timeout 35s bash -c "/mnt/dl/gminer/miner --algo $algo --server grin.2miners.com:3030 --user '$BTC_WALLET' --pass x" 2>&1)
;;
"flux")
output=$(sudo docker exec -it $DOCKER_IMAGE timeout 35s bash -c "/mnt/dl/gminer/miner --algo $algo --server flux.2miners.com:2020 --user '$BTC_WALLET' --pass x" 2>&1)
;;
"octopus")
output=$(sudo docker exec -it $DOCKER_IMAGE timeout 35s bash -c "/mnt/dl/gminer/miner --algo $algo --server cfx.2miners.com:3254 --user '$BTC_WALLET' --pass x" 2>&1)
;;
*)
echo "No specific pool configured for $algo - skipping"
continue
;;
esac
exit_code=$?
# Extract hashrate and calculate USD value
hashrate=$(extract_hashrate "$output")
coin=${ALGO_COINS[$algo]}
usd_value=$(calculate_usd_reward "$algo" "$hashrate")
if [ $exit_code -eq 0 ]; then
echo "SUCCESS: $algo - Hashrate: ${hashrate}H/s - Coin: $coin - Est. USD/day: $usd_value"
elif [ $exit_code -eq 124 ]; then
echo "TIMEOUT: $algo - Hashrate: ${hashrate}H/s - Coin: $coin - Est. USD/day: $usd_value (likely working)"
else
echo "FAILED: $algo - Error code: $exit_code"
# Debug: show first few lines of output for failed attempts
echo "Debug output (first 5 lines):"
echo "$output" | head -5
fi
sleep 3
done
echo ""
echo "=== GMiner Tests Complete ==="
echo "Usage: $0 [docker_image_name]"
echo "Default: amdopencl"
echo "Example for RockM: $0 rockm"

29
MINE/lolBench.sh Normal file
View File

@@ -0,0 +1,29 @@
#!/bin/bash
# lolMiner benchmark script - save as bench_lolminer.sh
ALGOS=("ETHASH" "ETCHASH" "AUTOLYKOS2" "BEAM-III" "EQUIHASH144_5" "EQUIHASH192_7" "EQUIHASH210_9" "FLUX" "NEXA" "PROGPOW" "PROGPOWZ" "PROGPOW_VERIBLOCK" "PROGPOW_VEIL" "TON")
echo "=== lolMiner Algorithm Benchmark ==="
echo "Testing each algorithm for 15 seconds..."
echo "====================================="
for algo in "${ALGOS[@]}"; do
echo ""
echo "Testing: $algo"
echo "------------------------"
sudo docker exec -it amdopencl timeout 20s bash -c "/mnt/dl/lol.1.97/lolMiner --algo $algo --benchmark --benchepochs 1 --benchwarmup 5" 2>/dev/null
rc=$?
if [ $rc -eq 0 ]; then
echo "$algo: WORKS"
elif [ $rc -eq 124 ]; then
echo "⏱️ $algo: TIMEOUT (likely working)"
else
echo "$algo: FAILED"
fi
sleep 2
done
echo ""
echo "=== Benchmark Complete ==="

144
MINE/notes.md Normal file
View File

@@ -0,0 +1,144 @@
bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j
<!-- MONERO -->
cd ~/Downloads
wget https://github.com/xmrig/xmrig/releases/download/v6.21.0/xmrig-6.21.0-linux-x64.tar.gz
tar -xzf xmrig-6.21.0-linux-x64.tar.gz
cd xmrig-6.21.0
~/Downloads/xmrig-6.21.0$ ./xmrig -o pool.supportxmr.com:443 -u bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j -p NUC --tls -t 28 --donate-level=0
/mnt/xmrig/xmrig -o pool.supportxmr.com:443 -u bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j -p NUC --tls -t 24 --opencl --opencl-devices=0 --donate-level=0
<!-- run inside DOCKER directly. amd-strix-halo-llama-rocm is best for now -->
sudo docker exec -it amd-strix-halo-llama-rocm bash -c "/mnt/dl/xmrig-6.21.0/xmrig -o pool.supportxmr.com:443 -u bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j -p NUC --tls -t 28 --donate-level=0"
# proper address XMR
sudo docker exec -it amdopencl bash -c "cd /tmp/xmrig-6.21.0 && ./xmrig -o pool.supportxmr.com:443 -u 47tJRLX5UgK59VmRsN1L7AgcQNZYNBhJ5Lv7Jt4KEViS8WEbCf4hPGcM78rRLcS9xmgbbJdwHzbvjA1mJiKixtX3Q8iiBgu -p NUC --tls -t 32 --donate-level=0"
|| ------------------------------ CURRENT BEST ------------------------------ ||
# MoneroOcean with XMR payout
nice /home/db/Downloads/moneroocean/xmrig
sudo docker exec -it amd-strix-halo-llama-rocm bash -c "/mnt/dl/xmrig-6.21.0/xmrig -o gulf.moneroocean.stream:10001 -u 47tJRLX5UgK59VmRsN1L7AgcQNZYNBhJ5Lv7Jt4KEViS8WEbCf4hPGcM78rRLcS9xmgbbJdwHzbvjA1mJiKixtX3Q8iiBgu -p x -t 28 --donate-level=0"
curl -s "http://api.moneroocean.stream/miner/47tJRLX5UgK59VmRsN1L7AgcQNZYNBhJ5Lv7Jt4KEViS8WEbCf4hPGcM78rRLcS9xmgbbJdwHzbvjA1mJiKixtX3Q8iiBgu/stats"
<!-- BTC PAYOUTS -->
# Check Unmineable balance
curl -s "https://api.unmineable.com/v4/address/bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j?coin=BTC"
# Mine RandomX but get paid in BTC
sudo docker exec -it amd-strix-halo-llama-rocm bash -c "/mnt/dl/xmrig-6.21.0/xmrig -o rx.unmineable.com:3333 -u BTC:bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j -p x -t 28 --donate-level=0"
<!-- SRBMiner -->
SRBMiner-MULTI --algorithm progpow_zano --pool zano.herominers.com:1112 --wallet bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j --worker StrixHalo
<!-- lol, all containers -->
sudo docker exec -it amd-strix-halo-llama-rocm bash -c "/mnt/dl/lol.1.97/lolMiner --algo FISHHASH --pool ironfish.unmineable.com:3333 --user BTC:bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j --worker StrixHalo"
sudo docker exec -it amd-strix-halo-llama-vulkan-radv bash -c "/mnt/dl/lol.1.97/lolMiner --algo FISHHASH --pool ironfish.unmineable.com:3333 --user BTC:bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j --worker StrixHalo"
sudo docker exec -it amd-strix-halo-llama-vulkan-amdvlk bash -c "/mnt/dl/lol.1.97/lolMiner --algo FISHHASH --pool ironfish.unmineable.com:3333 --user BTC:bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j --worker StrixHalo"
sudo docker exec -it amdopencl bash -c "/mnt/dl/lol.1.97/lolMiner --algo FISHHASH --pool ironfish.unmineable.com:3333 --user BTC:bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j --worker StrixHalo"
# Start with IronFish (most profitable supported)
./lolMiner --algo FISHHASH --pool ironfish.unmineable.com:3333 --user BTC:bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j --worker StrixHalo
# Or safer option with Ergo
./lolMiner --algo AUTOLYKOS2 --pool ergo.unmineable.com:3333 --user BTC:bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j --worker StrixHalo
# rinhash
sudo docker exec -it amd-strix-halo-llama-rocm bash -c "/mnt/dl/rinhash/cpuminer-opt-rin/cpuminer --algo rinhash --benchmark -t 4"
[pool: rinhash.mine.zergpool.com:7148 c=RIN]
<!--------------------------BEST CURRENT CPU -------------------------- Active exchange: Listed on Exbitron (RIN/USDT) -->
sudo docker exec -it amd-strix-halo-llama-rocm bash -c "/mnt/dl/rinhash/cpuminer-opt-rin/cpuminer -a rinhash -o stratum+tcp://rinhash.mine.zergpool.com:7148 -u bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j -p c=BTC,mc=RIN,ID=StrixHalo -t 32"
-----------------------------------------------------------------
SOLO:
/home/db/Downloads/rinhash/cpuminer-opt-rin/cpuminer -a rinhash -o stratum+tcp://192.168.0.188:3333 -u username.workername -p x -t 28
./cpuminer --algo rinhash --url [pool_url] --user [your_rin_wallet] --pass x --threads 32
# GPU CURRENT:
/mnt/shared/DEV/repos/d-popov.com/scripts$ sudo docker exec -it amdopencl bash -c "/mnt/dl/gminer/miner --algo equihash125_4 --server equihash125.mine.zergpool.com:2142 --user bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j"
https://zergpool.com/wallet/bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j
https://zergpool.com/api/wallet?address=bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j
https://zergpool.com/api/walletEx?address=<YOUR_WALLET_ADDRESS>
/mnt/shared/DEV/repos/d-popov.com/scripts$ sudo docker exec -it amdopencl bash -c "/mnt/dl/sgminer-gm/ --algo equihash125_4 --server equihash125.mine.zergpool.com:2142 --user bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j"
/home/db/Downloads/
sudo docker exec -it amdopencl timeout 35s bash -c "/mnt/dl/gminer/miner --algo equihash125_4 --server equihash.mine.zergpool.com:2142 --user 'bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j' --pass 'c=BTC'"
-------------------------
To run this as a service (keep mining in background):
Stop current mining (Ctrl+C)
Create a systemd service:
sudo nano /etc/systemd/system/xmrig.service
Add this content:
[Unit]
Description=XMRig Monero Miner
After=network.target
[Service]
Type=simple
User=db
WorkingDirectory=/home/db/Downloads/xmrig-6.21.0
ExecStart=/home/db/Downloads/xmrig-6.21.0/xmrig -o pool.supportxmr.com:443 -u bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j -p NUC --tls
Restart=always
RestartSec=10
[Install]
WantedBy=multi-user.target
Enable and start the service:
sudo systemctl enable xmrig
sudo systemctl start xmrig
sudo systemctl status xmrig
COPY --from=qemux/qemu:7.12 / /
# 7.12
# 7.11
# 7.10
# 7.09
# 7.08
# 7.07
# 7.06
# 7.05
# 7.04
# 7.03
# 7.02
# 7.01
# 7.00
# 6.22
# 6.21
# 6.20
# 6.19
# 6.18

25
MINE/rin/Dockerfile Normal file
View File

@@ -0,0 +1,25 @@
FROM ubuntu:22.04
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y \
build-essential libtool autotools-dev automake pkg-config bsdmainutils \
libevent-dev libboost-all-dev libssl-dev \
libdb5.3-dev libdb5.3++-dev libfmt-dev libsqlite3-dev \
git ca-certificates \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /opt
RUN git clone https://github.com/Rin-coin/rincoin.git && \
cd rincoin && \
./autogen.sh && \
./configure --with-incompatible-bdb && \
make -j$(nproc) && \
make install
# runtime
RUN useradd -m rin && mkdir -p /data && chown -R rin:rin /data
USER rin
VOLUME ["/data"]
EXPOSE 9555 9556
ENTRYPOINT ["/usr/local/bin/rincoind"]
CMD ["-datadir=/data", "-conf=/data/rincoin.conf", "-printtoconsole"]

View File

@@ -0,0 +1,67 @@
# RinCoin Mining Quick Reference
## 🚀 **Quick Commands:**
### **Solo Mining (All Rewards to You)**
```bash
# Start solo mining proxy
./MINE/rin/start_stratum_proxy.sh
# Connect miner
./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u user -p pass -t 28
```
### **Mining Pool (Distribute Rewards)**
```bash
# Start mining pool
./MINE/rin/start_mining_pool.sh
# Miners connect
./cpuminer -a rinhash -o stratum+tcp://YOUR_IP:3333 -u username.workername -p x
```
### **Cleanup**
```bash
# Kill proxy/pool processes
./MINE/rin/kill_stratum_proxy.sh
```
## 📊 **What Each Does:**
| Command | Purpose | Rewards | Miners |
|---------|---------|---------|--------|
| `start_stratum_proxy.sh` | Solo mining | 100% to you | Single |
| `start_mining_pool.sh` | Pool mining | Distributed | Multiple |
## 🌐 **Web Dashboard (Pool Only)**
- **URL**: `http://YOUR_IP:8080`
- **Features**: Stats, miners, blocks, hashrate
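The dashboard also serves the same statistics as JSON at `/api/stats`, so it can be polled from scripts. A minimal sketch (assuming the web interface is reachable at `YOUR_IP:8080` as documented above; replace the placeholder host):
```python
#!/usr/bin/env python3
# Minimal sketch: poll the pool dashboard's JSON endpoint for a quick status check.
import requests

POOL_URL = "http://YOUR_IP:8080/api/stats"  # replace YOUR_IP with the pool host

def show_pool_stats():
    stats = requests.get(POOL_URL, timeout=10).json()
    print(f"Active miners:      {stats.get('active_miners', 0)}")
    print(f"Pool hashrate (H/s): {stats.get('hashrate', 0):.2f}")
    print(f"Blocks found:       {stats.get('total_blocks', 0)}")
    print(f"Pool balance (RIN): {stats.get('pool_balance', 0):.8f}")

if __name__ == "__main__":
    show_pool_stats()
```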
## ⚡ **Quick Test**
```bash
# Test solo mining
./MINE/rin/start_stratum_proxy.sh &
sleep 5
./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u user -p pass -t 4
## Check wallets
# First, check available wallets and load one if needed
curl -u rinrpc:745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90 -d '{"jsonrpc":"1.0","id":"1","method":"listwalletdir","params":[]}' -H 'content-type: text/plain;' http://127.0.0.1:9556/
# Load wallet (replace "main" with your wallet name)
curl -u rinrpc:745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90 -d '{"jsonrpc":"1.0","id":"1","method":"loadwallet","params":["main"]}' -H 'content-type: text/plain;' http://127.0.0.1:9556/
# Total received by your address
curl -u rinrpc:745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90 -d '{"jsonrpc":"1.0","id":"1","method":"getreceivedbyaddress","params":["rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q",0]}' -H 'content-type: text/plain;' http://127.0.0.1:9556/
# Wallet balance
curl -u rinrpc:745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90 -d '{"jsonrpc":"1.0","id":"1","method":"getbalance","params":[]}' -H 'content-type: text/plain;' http://127.0.0.1:9556/
# Recent transactions
curl -u rinrpc:745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90 -d '{"jsonrpc":"1.0","id":"1","method":"listtransactions","params":[]}' -H 'content-type: text/plain;' http://127.0.0.1:9556/
```

322
MINE/rin/README.md Normal file
View File

@@ -0,0 +1,322 @@
# RinCoin Mining Setup Complete! 🎉
## 🎯 **Choose Your Mining Strategy:**
### **Option 1: Solo Mining (Single Miner, All Rewards to You)**
```bash
# Start solo mining proxy
cd /mnt/shared/DEV/repos/d-popov.com/scripts
./MINE/rin/start_stratum_proxy.sh
# Run your miner
./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u user -p pass -t 28
```
**Result**: 100% of block rewards go to your wallet
### **Option 2: Mining Pool (Multiple Miners, Distributed Rewards)**
```bash
# Start mining pool
./MINE/rin/start_mining_pool.sh
# Miners connect with their RinCoin addresses:
# Option 1: Address as username
./cpuminer -a rinhash -o stratum+tcp://YOUR_IP:3333 -u rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q -p x
# Option 2: Address.workername format
./cpuminer -a rinhash -o stratum+tcp://YOUR_IP:3333 -u rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q.worker1 -p x
# Option 3: Traditional username (rewards to pool address)
./cpuminer -a rinhash -o stratum+tcp://192.168.0.188:3333 -u username.workername -p x -t 24
```
**Result**: Block rewards distributed among all miners based on shares
### **Key Differences:**
| Feature | Solo Mining (`stratum_proxy`) | Mining Pool (`stratum_pool`) |
|---------|--------------------------------|------------------------------|
| **Rewards** | 100% to you | Distributed among miners |
| **Miners** | Single | Multiple |
| **Setup** | Simple | More complex |
| **Consistency** | Rare big payouts | Regular small payments |
| **Risk** | High variance | Lower variance |
| **Public** | No | Yes, can be published |
---
## ✅ **Successfully Built and Running:**
### **1. RinCoin Node Container**
- **Container**: `rincoin-node` (ID: 87b5f74a2472)
- **Status**: ✅ **RUNNING**
- **Ports**: 9555 (P2P), 9556 (RPC)
- **Version**: v1.0.1.0-5cf3d4a11
- **Sync Status**: ✅ **FULLY SYNCED** (blocks: 228,082, headers: 228,082)
### **2. Wallet Setup**
- **Wallet Name**: `main`
- **Default RinCoin Address**: `rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q`
- **RPC Credentials**:
- User: `rinrpc`
- Password: `745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90`
### **3. Configuration Files**
- **Config**: `/mnt/data/docker_vol/rincoin/rincoin-node/rincoin.conf`
- **Data Directory**: `/mnt/data/docker_vol/rincoin/rincoin-node/data`
- **Docker Compose**: `MINE/rin/container.yml`
## 🚀 **Ready for Mining:**
### **Pool Mining (Zergpool) - Recommended for Consistent Rewards**
```bash
# CPU Mining RinHash to BTC
sudo docker exec -it amd-strix-halo-llama-rocm bash -c "/mnt/dl/rinhash/cpuminer-opt-rin/cpuminer -a rinhash -o stratum+tcp://rinhash.mine.zergpool.com:7148 -u bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j -p c=BTC,mc=RIN,ID=StrixHalo -t 32"
```
### **Solo Mining (Local Node) - With Stratum Proxy ⭐ RECOMMENDED**
```bash
# Start mining with your RinCoin address (rewards go to this address!)
./MINE/rin/start_mining_with_address.sh rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q 28
# Or use the default address
./MINE/rin/start_mining_with_address.sh
```
### **Manual Solo Mining Setup (Stratum Proxy)**
```bash
# 1. Start Stratum proxy (solo mining)
./MINE/rin/start_stratum_proxy.sh
# 2. In another terminal, connect cpuminer-opt-rin
sudo docker exec -it amd-strix-halo-llama-rocm bash -c "/mnt/dl/rinhash/cpuminer-opt-rin/cpuminer -a rinhash -o stratum+tcp://172.17.0.1:3333 -u user -p pass -t 28"
```
### **Built-in Core Mining (Low Performance)**
```bash
# Solo mining with built-in RinCoin core (not recommended)
bash MINE/rin/solo_mining_core.sh -t 28
```
### **Why cpuminer-opt-rin Can't Mine Directly to Node**
```bash
# This command will fail:
sudo docker exec -it amd-strix-halo-llama-rocm bash -c "/mnt/dl/rinhash/cpuminer-opt-rin/cpuminer -a rinhash -o http://127.0.0.1:9556 -u rinrpc -p 745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90 -t 28 --coinbase-addr=bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j"
# Reason: Protocol mismatch
# - cpuminer-opt-rin uses Stratum protocol (for mining pools)
# - RinCoin node uses RPC protocol (for direct mining)
# - No built-in protocol conversion available
```
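To make the protocol mismatch concrete, here is an illustrative sketch (not the actual `stratum_proxy.py`) of the RPC half that the proxy bridges to: it requests work from the node via `getblocktemplate` over JSON-RPC using the credentials documented above. The `rules` argument is an assumption and may need adjusting for RinCoin.
```python
#!/usr/bin/env python3
# Illustrative only: fetch block-template work from the RinCoin node over JSON-RPC.
# The Stratum proxy re-packages this kind of work as Stratum jobs for cpuminer.
import requests
from requests.auth import HTTPBasicAuth

RPC_URL = "http://127.0.0.1:9556/"
AUTH = HTTPBasicAuth("rinrpc", "745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90")

def rpc(method, params=None):
    payload = {"jsonrpc": "1.0", "id": "bridge", "method": method, "params": params or []}
    resp = requests.post(RPC_URL, json=payload, auth=AUTH, timeout=10)
    resp.raise_for_status()
    data = resp.json()
    if data.get("error"):
        raise RuntimeError(data["error"])
    return data["result"]

if __name__ == "__main__":
    # "rules" value is an assumption; adjust to whatever RinCoin's getblocktemplate expects.
    template = rpc("getblocktemplate", [{"rules": ["segwit"]}])
    print("height:", template["height"])
    print("target:", template["target"])
    print("transactions:", len(template.get("transactions", [])))
```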
### **Direct CPU Mining Setup (Solo Mining - No Container Needed)**
```bash
# 1. Start stratum proxy (solo mining)
./MINE/rin/start_stratum_proxy.sh
# OR run in background with logging
nohup python3 MINE/rin/stratum_proxy.py > stratum_proxy.log 2>&1 &
# 2. Run cpuminer directly on host
/home/db/Downloads/rinhash/cpuminer-opt-rin/cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u user -p pass -t 28
# 3. Clean up when done
./MINE/rin/kill_stratum_proxy.sh
```
### **Mining Options Explained**
1. **Built-in Core Mining**: Uses RinCoin's `generatetoaddress` RPC command (low performance)
2. **Pool Mining**: Uses cpuminer-opt-rin with Stratum pools (Zergpool) - consistent rewards
3. **Direct RPC Mining**: Would require custom miner implementing `getblocktemplate`
4. **Solo Mining (Stratum Proxy)**: Uses Stratum proxy to bridge cpuminer-opt-rin to RinCoin node - all rewards to you
5. **Mining Pool (Stratum Pool)**: Distributes block rewards among multiple miners - share-based rewards
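Option 1 above boils down to a single `generatetoaddress` RPC call. As a rough Python equivalent of the curl loop in `solo_mining_core.sh` (same parameters: block count, target address, `maxtries`):
```python
#!/usr/bin/env python3
# Rough Python equivalent of solo_mining_core.sh's curl call:
# ask the node to attempt mining one block to the given address.
import requests
from requests.auth import HTTPBasicAuth

RPC_URL = "http://127.0.0.1:9556/"
AUTH = HTTPBasicAuth("rinrpc", "745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90")
ADDRESS = "rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q"

def generate_to_address(n_blocks=1, max_tries=1_000_000):
    payload = {
        "jsonrpc": "1.0",
        "id": "solo",
        "method": "generatetoaddress",
        "params": [n_blocks, ADDRESS, max_tries],
    }
    resp = requests.post(RPC_URL, json=payload, auth=AUTH, timeout=600)
    return resp.json().get("result", [])

if __name__ == "__main__":
    blocks = generate_to_address()
    print("Blocks found:", blocks if blocks else "none this attempt")
```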
## 🏊‍♂️ **Mining Pool Setup (Multiple Miners)**
Your Stratum proxy can be enhanced to work as a **full mining pool** that distributes block rewards among multiple miners!
### **Pool Features:**
- ✅ **Multiple Miner Support**: Unlimited miners can connect
- ✅ **Share-Based Rewards**: Rewards distributed based on share contributions
- ✅ **Pool Fee**: 1% fee for pool maintenance
- ✅ **Real-Time Statistics**: Web dashboard with live stats
- ✅ **Block Reward Distribution**: Automatic distribution when blocks are found
### **Quick Start Pool:**
```bash
# 1. Start the mining pool
./MINE/rin/start_mining_pool.sh
# 2. Miners connect with:
./cpuminer -a rinhash -o stratum+tcp://YOUR_IP:3333 -u username.workername -p x
```
### **Pool vs Solo Mining:**
| Feature | Solo Mining | Mining Pool |
|---------|-------------|-------------|
| **Block Rewards** | 100% to you | Distributed among miners |
| **Consistency** | Rare blocks | Regular small payments |
| **Setup** | Simple | More complex |
| **Miners** | Single | Multiple |
| **Risk** | High variance | Lower variance |
### **Publishing Your Pool:**
#### **1. Public IP Setup:**
```bash
# Get your public IP
curl ifconfig.me
# Configure firewall (if needed)
sudo ufw allow 3333/tcp
sudo ufw allow 8080/tcp # Web interface
```
#### **2. Pool Connection String:**
```
stratum+tcp://YOUR_PUBLIC_IP:3333
```
#### **3. Web Dashboard:**
- **URL**: `http://YOUR_PUBLIC_IP:8080`
- **Features**: Real-time stats, miner rankings, block history
#### **4. Pool Announcement:**
Share your pool details:
- **Algorithm**: RinHash
- **Port**: 3333
- **Fee**: 1%
- **Payout**: Automatic distribution
- **Web**: `http://YOUR_PUBLIC_IP:8080`
### **Pool Configuration:**
```python
# Edit MINE/rin/stratum_pool.py
pool_address = 'rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q' # Pool wallet
pool_fee_percent = 1.0 # Pool fee percentage
```
## build image
sudo bash -lc "cd /mnt/shared/DEV/repos/d-popov.com/scripts/MINE/rin && docker build -t rincoin-node:latest . | cat"
## start container
sudo docker run -d --name rincoin-node \
-p 9555:9555 -p 9556:9556 \
-v /mnt/data/docker_vol/rincoin/rincoin-node/data:/data \
-v /mnt/data/docker_vol/rincoin/rincoin-node/rincoin.conf:/data/rincoin.conf:ro \
rincoin-node:latest -datadir=/data -conf=/data/rincoin.conf -printtoconsole
## check if running
curl --user rinrpc:745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90 -H 'content-type: text/plain' --data '{"jsonrpc":"1.0","id":"curl","method":"getblockchaininfo","params":[]}' http://127.0.0.1:9556/
## get wallet
sudo docker exec rincoin-node rincoin-cli -datadir=/data -conf=/data/rincoin.conf createwallet "main"
sudo docker exec rincoin-node rincoin-cli -datadir=/data -conf=/data/rincoin.conf -rpcwallet=main getnewaddress
rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q
```bash
# Solo mining to your RinCoin wallet
./MINE/rin/solo_mining.sh
```
## 📊 **Performance Comparison:**
| Mining Type | Algorithm | Hashrate | Target | Status |
|-------------|-----------|----------|---------|---------|
| **Pool Mining** | RinHash | ~80 kH/s | Zergpool | ✅ Working |
| **Solo Mining** | RinHash | Built-in CPU | Local Node | ✅ Working |
| **GPU Mining** | Equihash 125,4 | 28.8 Sol/s | Zergpool | ✅ Working |
## 🔧 **Management Commands:**
### **Node Management**
```bash
# Start node
sudo docker start rincoin-node
# Stop node
sudo docker stop rincoin-node
# View logs
sudo docker logs -f rincoin-node
# Check sync status
sudo docker exec rincoin-node rincoin-cli -datadir=/data -conf=/data/rincoin.conf getblockchaininfo
```
### **Wallet Management**
```bash
# Get new address
sudo docker exec rincoin-node rincoin-cli -datadir=/data -conf=/data/rincoin.conf -rpcwallet=main getnewaddress
# Check balance
sudo docker exec rincoin-node rincoin-cli -datadir=/data -conf=/data/rincoin.conf -rpcwallet=main getbalance
```
### **RPC Access**
```bash
# Test RPC connection
curl --user rinrpc:745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90 \
-H 'content-type: text/plain' \
--data '{"jsonrpc":"1.0","id":"curl","method":"getblockchaininfo","params":[]}' \
http://127.0.0.1:9556/
# Get new address via RPC
curl --user rinrpc:745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90 \
-H 'content-type: text/plain' \
--data '{"jsonrpc":"1.0","id":"curl","method":"getnewaddress","params":[]}' \
http://127.0.0.1:9556/
```
## ⚠️ **Important Notes:**
1. **Node Sync**: ✅ **COMPLETE** - Node is fully synced and ready
2. **Solo Mining**: Very low chance of finding blocks solo. Consider pool mining for consistent rewards.
3. **RPC Access**: ✅ **WORKING** - RPC is accessible on port 9556
4. **Address Parameter**: Solo mining script accepts custom addresses or uses default
5. **Block Rewards**: When solo mining, ALL block rewards go to your specified RinCoin address
## 🛠️ **Troubleshooting:**
### **Port 3333 Already in Use**
```bash
# Check what's using the port
sudo netstat -tlnp | grep :3333
# Kill existing processes
./MINE/rin/kill_stratum_proxy.sh
# Or manually kill
sudo lsof -ti:3333 | xargs sudo kill -9
```
### **Container Can't Connect to Proxy**
```bash
# Use Docker gateway IP instead of localhost
sudo docker exec -it amd-strix-halo-llama-rocm bash -c "/mnt/dl/rinhash/cpuminer-opt-rin/cpuminer -a rinhash -o stratum+tcp://172.17.0.1:3333 -u user -p pass -t 28"
```
### **Check Proxy Logs**
```bash
# View real-time logs
tail -f stratum_proxy.log
# Check if proxy is running
ps aux | grep stratum_proxy
```
## 🎯 **Next Steps:**
1. ✅ **Node is synced** - Ready for all operations
2. **Choose mining strategy**: Pool mining for consistent income vs Solo mining for block rewards
3. **Monitor performance** and adjust thread count as needed
4. **Set up monitoring** for node health and mining performance

View File

@@ -0,0 +1,40 @@
# Reward Distribution Example
## Scenario: Block Reward = 50 RIN (1 RIN pool fee kept, 49 RIN distributed to miners)
### Miners Connected:
1. **Miner A**: `rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q.worker1` (30 difficulty)
2. **Miner B**: `user.worker2` (20 difficulty) - No address specified
3. **Miner C**: `rin1qxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx.worker3` (50 difficulty) - **INVALID ADDRESS**
### Total Difficulty: 100
### Reward Distribution:
#### **Step 1: Calculate Individual Shares**
- **Miner A**: (30/100) × 49 = **14.7 RIN** → `rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q`
- **Miner B**: (20/100) × 49 = **9.8 RIN** → **Pool address** (no valid address)
- **Miner C**: (50/100) × 49 = **24.5 RIN** → **Pool address** (invalid address)
#### **Step 2: Final Distribution**
- **Pool Address**: 1 RIN (fee) + 9.8 RIN (from Miner B) + 24.5 RIN (from Miner C) = **35.3 RIN**
- **Miner A**: **14.7 RIN**
### Pool Logs:
```
[127.0.0.1] ✅ Authorized: miner_rin1qah.worker1 -> rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q
[127.0.0.1] ⚠️ Authorized: user.worker2 (rewards will go to pool address)
[127.0.0.1] ❌ Invalid RinCoin address: rin1qxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
💰 Miner rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q: 14.70000000 RIN (30 difficulty)
⚠️ Miner without address: 20 difficulty -> 9.80000000 RIN to pool
⚠️ Miner without address: 50 difficulty -> 24.50000000 RIN to pool
💰 Pool keeps 34.30000000 RIN from miners without addresses
📊 Summary: 1 miners with addresses, 2 without (rewards to pool)
```
### Key Points:
- ✅ **Miners with valid addresses**: Get their full share
- ⚠️ **Miners without addresses**: Contribute to difficulty but rewards go to pool
- ❌ **Miners with invalid addresses**: Rejected at connection time
- 💰 **Pool benefits**: Gets additional rewards from careless miners
- 📊 **Transparent**: All distributions clearly logged
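The distribution above can be reproduced in a few lines of Python; a sketch assuming the proportional, difficulty-weighted payout described in this example (the Miner A address is a hypothetical placeholder):
```python
#!/usr/bin/env python3
# Sketch of the proportional, difficulty-weighted reward split described above.
# Miners without a valid RinCoin address have their share credited to the pool.

def distribute(block_reward, pool_fee, miners):
    """miners: list of (address_or_None, difficulty); returns payouts per recipient."""
    distributable = block_reward - pool_fee
    total_difficulty = sum(diff for _, diff in miners)
    payouts = {"pool": float(pool_fee)}
    for address, difficulty in miners:
        share = distributable * difficulty / total_difficulty
        recipient = address if address else "pool"
        payouts[recipient] = payouts.get(recipient, 0.0) + share
    return payouts

if __name__ == "__main__":
    # Difficulties from the scenario above; the Miner A address is hypothetical.
    miners = [
        ("rin1qminer_a_example_address", 30),  # Miner A (valid address)
        (None, 20),                            # Miner B: no address supplied
        (None, 50),                            # Miner C: invalid address, treated as missing
    ]
    for recipient, amount in distribute(block_reward=50, pool_fee=1, miners=miners).items():
        print(f"{recipient}: {amount:.1f} RIN")
```
Running it prints 14.7 RIN for Miner A and 35.3 RIN for the pool, matching the summary above.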

18
MINE/rin/container.yml Normal file
View File

@@ -0,0 +1,18 @@
version: "3.8"
services:
rincoin-node:
container_name: rincoin-node
image: rincoin-node:latest
restart: unless-stopped
ports:
- "9555:9555"
- "9556:9556"
volumes:
- /mnt/data/docker_vol/rincoin/rincoin-node/data:/data
- /mnt/data/docker_vol/rincoin/rincoin-node/rincoin.conf:/data/rincoin.conf:ro
command:
- rincoind
- -datadir=/data
- -conf=/data/rincoin.conf
- -printtoconsole

View File

@@ -0,0 +1,40 @@
#!/bin/bash
# Kill RinCoin Stratum Proxy processes
echo "=== Killing RinCoin Stratum Proxy ==="
echo ""
# Find and kill Python processes running stratum_proxy.py
PIDS=$(ps aux | grep "stratum_proxy.py" | grep -v grep | awk '{print $2}')
if [ -n "$PIDS" ]; then
echo "Found Stratum Proxy processes: $PIDS"
echo "Killing processes..."
for pid in $PIDS; do
kill -9 "$pid" 2>/dev/null && echo "Killed PID: $pid" || echo "Failed to kill PID: $pid"
done
else
echo "No Stratum Proxy processes found"
fi
# Also kill any process using port 3333
echo ""
echo "Checking port 3333..."
PORT_PIDS=$(sudo lsof -ti:3333 2>/dev/null)
if [ -n "$PORT_PIDS" ]; then
echo "Found processes using port 3333: $PORT_PIDS"
echo "Killing processes..."
for pid in $PORT_PIDS; do
sudo kill -9 "$pid" 2>/dev/null && echo "Killed PID: $pid" || echo "Failed to kill PID: $pid"
done
else
echo "No processes using port 3333"
fi
echo ""
echo "✅ Cleanup complete!"
echo ""
echo "Port 3333 status:"
netstat -tln | grep ":3333 " || echo "Port 3333 is free"

View File

@@ -0,0 +1,529 @@
#!/usr/bin/env python3
"""
RinCoin Mining Pool Web Interface
Provides web dashboard for pool statistics and miner management
"""
import json
import sqlite3
import requests
from datetime import datetime, timedelta
from http.server import HTTPServer, BaseHTTPRequestHandler
import threading
import time
from requests.auth import HTTPBasicAuth
class PoolWebInterface:
def __init__(self, pool_db, host='0.0.0.0', port=8080, rpc_host='127.0.0.1', rpc_port=9556,
rpc_user='rinrpc', rpc_password='745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90'):
self.pool_db = pool_db
self.host = host
self.port = port
self.rpc_host = rpc_host
self.rpc_port = rpc_port
self.rpc_user = rpc_user
self.rpc_password = rpc_password
self.chart_time_window = 3600 # 1 hour default, adjustable
def set_chart_time_window(self, seconds):
"""Set the chart time window"""
self.chart_time_window = seconds
def format_hashrate(self, hashrate):
"""Format hashrate in human readable format"""
if hashrate >= 1e12:
return f"{hashrate/1e12:.2f} TH/s"
elif hashrate >= 1e9:
return f"{hashrate/1e9:.2f} GH/s"
elif hashrate >= 1e6:
return f"{hashrate/1e6:.2f} MH/s"
elif hashrate >= 1e3:
return f"{hashrate/1e3:.2f} KH/s"
elif hashrate >= 0.01:
return f"{hashrate:.2f} H/s"
elif hashrate > 0:
return f"{hashrate*1000:.2f} mH/s"
else:
return "0.00 H/s"
def get_pool_balance(self):
"""Get pool wallet balance via RPC"""
try:
url = f"http://{self.rpc_host}:{self.rpc_port}/"
headers = {'content-type': 'text/plain'}
auth = HTTPBasicAuth(self.rpc_user, self.rpc_password)
payload = {
"jsonrpc": "1.0",
"id": "pool_balance",
"method": "getbalance",
"params": []
}
response = requests.post(url, json=payload, headers=headers, auth=auth, timeout=10)
if response.status_code == 200:
result = response.json()
if 'error' in result and result['error'] is not None:
print(f"RPC Error getting balance: {result['error']}")
return 0.0
balance = result.get('result', 0)
return float(balance)  # getbalance already returns RIN (coin units), not satoshis
else:
print(f"HTTP Error getting balance: {response.status_code}")
return 0.0
except Exception as e:
print(f"Error getting pool balance: {e}")
return 0.0
def get_pool_stats(self):
"""Get current pool statistics"""
try:
cursor = self.pool_db.cursor()
# Total miners (ever registered)
cursor.execute('SELECT COUNT(DISTINCT id) FROM miners')
total_miners = cursor.fetchone()[0]
# Active miners (last 5 minutes)
cursor.execute('''
SELECT COUNT(DISTINCT m.id) FROM miners m
JOIN shares s ON m.id = s.miner_id
WHERE s.submitted > datetime('now', '-5 minutes')
''')
active_miners = cursor.fetchone()[0]
# Total shares (last 24 hours)
cursor.execute('''
SELECT COUNT(*) FROM shares
WHERE submitted > datetime('now', '-24 hours')
''')
total_shares_24h = cursor.fetchone()[0]
# Pool hashrate: sum of miners.last_hashrate (instantaneous)
cursor.execute('SELECT COALESCE(SUM(last_hashrate), 0) FROM miners')
hashrate = cursor.fetchone()[0] or 0.0
# Debug stats
cursor.execute('''
SELECT SUM(difficulty), COUNT(*) FROM shares
WHERE submitted > datetime('now', '-5 minutes')
''')
rd = cursor.fetchone()
recent_difficulty = rd[0] if rd and rd[0] else 0
recent_share_count = rd[1] if rd and rd[1] else 0
# Get historical hashrate data for chart
cursor.execute('''
SELECT
strftime('%H:%M', submitted) as time,
COUNT(*) as shares,
SUM(difficulty) as total_difficulty
FROM shares
WHERE submitted > datetime('now', '-{} seconds')
GROUP BY strftime('%Y-%m-%d %H:%M', submitted)
ORDER BY submitted DESC
LIMIT 60
'''.format(self.chart_time_window))
historical_data = cursor.fetchall()
# Calculate individual miner hashrates
cursor.execute('''
SELECT
m.user, m.worker,
COUNT(s.id) as shares,
SUM(s.difficulty) as total_difficulty,
m.last_share
FROM miners m
LEFT JOIN shares s ON m.id = s.miner_id
AND s.submitted > datetime('now', '-5 minutes')
GROUP BY m.id, m.user, m.worker
ORDER BY shares DESC
''')
miner_stats = cursor.fetchall()
# Calculate individual hashrates (use miners.last_hashrate)
miner_hashrates = []
for user, worker, shares, difficulty, last_share in miner_stats:
cursor.execute('SELECT last_hashrate FROM miners WHERE user = ? AND worker = ? LIMIT 1', (user, worker))
row = cursor.fetchone()
miner_hashrate = row[0] if row and row[0] else 0.0
miner_hashrates.append((user, worker, shares, miner_hashrate, last_share))
# Total blocks found
cursor.execute('SELECT COUNT(*) FROM blocks')
total_blocks = cursor.fetchone()[0]
# Recent blocks
cursor.execute('''
SELECT block_hash, height, reward, found_at
FROM blocks
ORDER BY found_at DESC
LIMIT 10
''')
recent_blocks = cursor.fetchall()
# Top miners (last 24 hours) - show all miners, even without shares
cursor.execute('''
SELECT m.user, m.worker,
COALESCE(COUNT(s.id), 0) as shares,
m.last_share,
m.created
FROM miners m
LEFT JOIN shares s ON m.id = s.miner_id
AND s.submitted > datetime('now', '-24 hours')
GROUP BY m.id, m.user, m.worker
ORDER BY shares DESC, m.created DESC
LIMIT 20
''')
top_miners = cursor.fetchall()
# All active miners (for better visibility)
cursor.execute('''
SELECT user, worker, created, last_share
FROM miners
ORDER BY created DESC
LIMIT 10
''')
all_miners = cursor.fetchall()
# Get pool balance
pool_balance = self.get_pool_balance()
return {
'total_miners': total_miners,
'active_miners': active_miners,
'total_shares_24h': total_shares_24h,
'hashrate': hashrate,
'total_blocks': total_blocks,
'recent_blocks': recent_blocks,
'top_miners': top_miners,
'all_miners': all_miners,
'miner_hashrates': miner_hashrates,
'historical_data': historical_data,
'pool_balance': pool_balance,
'debug': {
'recent_difficulty': recent_difficulty,
'recent_share_count': recent_share_count,
'total_shares_24h': total_shares_24h
}
}
except Exception as e:
print(f"Error getting pool stats: {e}")
return {}
def generate_html(self, stats):
"""Generate HTML dashboard"""
html = f"""
<!DOCTYPE html>
<html>
<head>
<title>RinCoin Mining Pool</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<style>
body {{ font-family: Arial, sans-serif; margin: 0; padding: 20px; background: #f5f5f5; }}
.container {{ max-width: 1200px; margin: 0 auto; }}
.header {{ background: #2c3e50; color: white; padding: 20px; border-radius: 8px; margin-bottom: 20px; }}
.stats-grid {{ display: grid; grid-template-columns: repeat(auto-fit, minmax(250px, 1fr)); gap: 20px; margin-bottom: 30px; }}
.stat-card {{ background: white; padding: 20px; border-radius: 8px; box-shadow: 0 2px 4px rgba(0,0,0,0.1); }}
.stat-value {{ font-size: 2em; font-weight: bold; color: #3498db; }}
.stat-label {{ color: #7f8c8d; margin-top: 5px; }}
.section {{ background: white; padding: 20px; border-radius: 8px; box-shadow: 0 2px 4px rgba(0,0,0,0.1); margin-bottom: 20px; }}
.section h2 {{ margin-top: 0; color: #2c3e50; }}
table {{ width: 100%; border-collapse: collapse; }}
th, td {{ padding: 12px; text-align: left; border-bottom: 1px solid #ddd; }}
th {{ background: #f8f9fa; font-weight: bold; }}
.block-hash {{ font-family: monospace; font-size: 0.9em; }}
.refresh-btn {{ background: #3498db; color: white; border: none; padding: 10px 20px; border-radius: 4px; cursor: pointer; }}
.refresh-btn:hover {{ background: #2980b9; }}
</style>
</head>
<body>
<div class="container">
<div class="header">
<h1>🏊‍♂️ RinCoin Mining Pool</h1>
<p>Distribute block rewards among multiple miners</p>
</div>
<button class="refresh-btn" onclick="location.reload()">🔄 Refresh</button>
<div class="stats-grid">
<div class="stat-card">
<div class="stat-value">{stats.get('total_miners', 0)}</div>
<div class="stat-label">Total Miners</div>
</div>
<div class="stat-card">
<div class="stat-value">{stats.get('active_miners', 0)}</div>
<div class="stat-label">Active Miners</div>
</div>
<div class="stat-card">
<div class="stat-value">{self.format_hashrate(stats.get('hashrate', 0))}</div>
<div class="stat-label">Hashrate</div>
</div>
<div class="stat-card">
<div class="stat-value">{stats.get('total_blocks', 0)}</div>
<div class="stat-label">Blocks Found</div>
</div>
<div class="stat-card">
<div class="stat-value">{stats.get('pool_balance', 0):.2f}</div>
<div class="stat-label">Pool Balance (RIN)</div>
</div>
</div>
<div class="section">
<h2>📊 Pool Statistics</h2>
<p><strong>24h Shares:</strong> {stats.get('total_shares_24h', 0):,}</p>
<p><strong>Pool Fee:</strong> 1%</p>
<p><strong>Pool Balance:</strong> {stats.get('pool_balance', 0):.8f} RIN</p>
<p><strong>Connection String:</strong> <code>stratum+tcp://YOUR_IP:3333</code></p>
<!-- Debug info -->
<details style="margin-top: 20px; padding: 10px; background: #f8f9fa; border-radius: 4px;">
<summary style="cursor: pointer; font-weight: bold;">🔍 Debug Info</summary>
<p><strong>Recent Difficulty (5min):</strong> {stats.get('debug', {}).get('recent_difficulty', 0):.6f}</p>
<p><strong>Recent Share Count (5min):</strong> {stats.get('debug', {}).get('recent_share_count', 0)}</p>
<p><strong>Total Shares (24h):</strong> {stats.get('debug', {}).get('total_shares_24h', 0):,}</p>
<p><strong>Active Miners:</strong> {stats.get('active_miners', 0)} (last 5 minutes)</p>
<p><strong>Total Miners:</strong> {stats.get('total_miners', 0)} (ever registered)</p>
</details>
</div>
<div class="section">
<h2>📈 Hashrate Chart</h2>
<div class="chart-controls">
<label>Time Window: </label>
<select onchange="changeTimeWindow(this.value)">
<option value="3600">1 Hour</option>
<option value="7200">2 Hours</option>
<option value="14400">4 Hours</option>
<option value="86400">24 Hours</option>
</select>
</div>
<div class="chart-container">
<canvas id="hashrateChart"></canvas>
</div>
</div>
<div class="section">
<h2>👥 Connected Miners</h2>
<table>
<tr>
<th>User</th>
<th>Worker</th>
<th>Connected</th>
<th>Last Share</th>
</tr>
"""
for miner in stats.get('all_miners', []):
user, worker, created, last_share = miner
html += f"""
<tr>
<td>{user}</td>
<td>{worker}</td>
<td>{created}</td>
<td>{last_share or 'Never'}</td>
</tr>
"""
if not stats.get('all_miners', []):
html += """
<tr>
<td colspan="4" style="text-align: center; color: #7f8c8d;">No miners connected</td>
</tr>
"""
html += """
</table>
</div>
<div class="section">
<h2>🏆 Top Miners (24h Shares)</h2>
<table>
<tr>
<th>User</th>
<th>Worker</th>
<th>Shares</th>
<th>Hashrate</th>
<th>Last Share</th>
</tr>
"""
for miner in stats.get('miner_hashrates', []):
user, worker, shares, hashrate, last_share = miner
html += f"""
<tr>
<td>{user}</td>
<td>{worker}</td>
<td>{shares:,}</td>
<td>{self.format_hashrate(hashrate)}</td>
<td>{last_share or 'Never'}</td>
</tr>
"""
if not stats.get('miner_hashrates', []):
html += """
<tr>
<td colspan="5" style="text-align: center; color: #7f8c8d;">No shares submitted yet</td>
</tr>
"""
html += """
</table>
</div>
<div class="section">
<h2>🏆 Recent Blocks</h2>
<table>
<tr>
<th>Height</th>
<th>Hash</th>
<th>Reward</th>
<th>Found At</th>
</tr>
"""
for block in stats.get('recent_blocks', []):
block_hash, height, reward, found_at = block
html += f"""
<tr>
<td>{height}</td>
<td class="block-hash">{block_hash[:16]}...</td>
<td>{reward:.8f} RIN</td>
<td>{found_at}</td>
</tr>
"""
html += f"""
</table>
</div>
<div class="section">
<h2>🔗 Connect to Pool</h2>
<p>Use any RinHash-compatible miner:</p>
<pre><code>./cpuminer -a rinhash -o stratum+tcp://YOUR_IP:3333 -u username.workername -p x</code></pre>
<p><strong>Replace YOUR_IP with your server's public IP address</strong></p>
</div>
</div>
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
<script>
// Historical data for chart
const historicalData = {json.dumps([{
'time': row[0],
'shares': row[1],
'difficulty': row[2] or 0
} for row in stats.get('historical_data', [])])};
// Create hashrate chart
const ctx = document.getElementById('hashrateChart').getContext('2d');
const chart = new Chart(ctx, {{
type: 'line',
data: {{
labels: historicalData.map(d => d.time).reverse(),
datasets: [{{
label: 'Hashrate (H/s)',
data: historicalData.map(d => (d.shares / 60.0) * 0.001).reverse(),
borderColor: '#3498db',
backgroundColor: 'rgba(52, 152, 219, 0.1)',
tension: 0.4
}}]
}},
options: {{
responsive: true,
maintainAspectRatio: false,
scales: {{
y: {{
beginAtZero: true,
title: {{
display: true,
text: 'Hashrate (H/s)'
}}
}},
x: {{
title: {{
display: true,
text: 'Time'
}}
}}
}}
}}
}});
function changeTimeWindow(seconds) {{
// Reload page with new time window
const url = new URL(window.location);
url.searchParams.set('window', seconds);
window.location.href = url.toString();
}}
</script>
<script>
// Auto-refresh every 30 seconds
setTimeout(() => location.reload(), 30000);
</script>
</body>
</html>
"""
return html
class PoolWebHandler(BaseHTTPRequestHandler):
def __init__(self, *args, pool_interface=None, **kwargs):
self.pool_interface = pool_interface
super().__init__(*args, **kwargs)
def do_GET(self):
if self.path == '/':
stats = self.pool_interface.get_pool_stats()
html = self.pool_interface.generate_html(stats)
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(html.encode('utf-8'))
elif self.path == '/api/stats':
stats = self.pool_interface.get_pool_stats()
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
self.wfile.write(json.dumps(stats).encode('utf-8'))
else:
self.send_response(404)
self.end_headers()
self.wfile.write(b'Not Found')
def log_message(self, format, *args):
# Suppress access logs
pass
def start_web_interface(pool_db, host='0.0.0.0', port=8083, rpc_host='127.0.0.1', rpc_port=9556,
rpc_user='rinrpc', rpc_password='745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90'):
"""Start the web interface server"""
interface = PoolWebInterface(pool_db, host, port, rpc_host, rpc_port, rpc_user, rpc_password)
class Handler(PoolWebHandler):
def __init__(self, *args, **kwargs):
super().__init__(*args, pool_interface=interface, **kwargs)
try:
server = HTTPServer((host, port), Handler)
print(f"🌐 Web interface running on http://{host}:{port}")
print("Press Ctrl+C to stop")
server.serve_forever()
except OSError as e:
if "Address already in use" in str(e):
print(f"⚠️ Port {port} is already in use, web interface not started")
print(f"💡 Try a different port or kill the process using port {port}")
else:
print(f"❌ Failed to start web interface: {e}")
except KeyboardInterrupt:
print("\n🛑 Shutting down web interface...")
server.shutdown()
if __name__ == "__main__":
# This would be called from the main pool server
print("Web interface module loaded")

13
MINE/rin/rincoin.conf Normal file
View File

@@ -0,0 +1,13 @@
server=1
daemon=0
listen=1
txindex=1
rpcuser=rinrpc
rpcpassword=745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90
rpcallowip=0.0.0.0/0
rpcport=9556
# performance
maxconnections=64
dbcache=2048

47
MINE/rin/solo_mining.sh Normal file
View File

@@ -0,0 +1,47 @@
#!/bin/bash
# Solo Mining Script for RinCoin
# Uses local RinCoin node for solo mining
echo "=== RinCoin Solo Mining Setup ==="
echo ""
# Check if rincoin-node container is running
if ! sudo docker ps | grep -q "rincoin-node"; then
echo "Error: rincoin-node container is not running!"
echo "Please start it first:"
echo "sudo docker start rincoin-node"
exit 1
fi
# Get wallet address
RIN_ADDRESS=$(sudo docker exec rincoin-node rincoin-cli -datadir=/data -conf=/data/rincoin.conf -rpcwallet=main getnewaddress 2>/dev/null)
if [ -z "$RIN_ADDRESS" ]; then
echo "Error: Could not get RinCoin address!"
echo "Make sure the wallet is created and the node is synced."
exit 1
fi
echo "RinCoin Address: $RIN_ADDRESS"
echo ""
# Check node sync status
SYNC_STATUS=$(sudo docker exec rincoin-node rincoin-cli -datadir=/data -conf=/data/rincoin.conf getblockchaininfo | grep -o '"initialblockdownload": [^,]*' | cut -d' ' -f2)
if [ "$SYNC_STATUS" = "true" ]; then
echo "⚠️ WARNING: Node is still syncing (initialblockdownload: true)"
echo "Solo mining may not work properly until sync is complete."
echo ""
fi
echo "Starting solo mining with cpuminer-opt-rin..."
echo "Algorithm: rinhash"
echo "Target: Local RinCoin node (127.0.0.1:9555)"
echo "Wallet: $RIN_ADDRESS"
echo ""
echo "Press Ctrl+C to stop mining"
echo ""
# Start solo mining
sudo docker exec -it amd-strix-halo-llama-rocm bash -c "/mnt/dl/rinhash/cpuminer-opt-rin/cpuminer -a rinhash -o stratum+tcp://127.0.0.1:9555 -u $RIN_ADDRESS -p x -t 32"

View File

@@ -0,0 +1,171 @@
#!/bin/bash
# RinCoin Solo Mining using Built-in Core Mining
# Uses RinCoin Core's generatetoaddress command
# Default address (can be overridden with command line parameter)
DEFAULT_ADDRESS="rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q"
# Get total CPU cores for default thread count
TOTAL_CORES=$(nproc)
DEFAULT_THREADS=$TOTAL_CORES
# Parse command line arguments
RIN_ADDRESS=""
THREAD_COUNT=""
# Parse arguments
while [[ $# -gt 0 ]]; do
case $1 in
-a|--address)
RIN_ADDRESS="$2"
shift 2
;;
-t|--threads)
THREAD_COUNT="$2"
shift 2
;;
-h|--help)
echo "Usage: $0 [OPTIONS]"
echo ""
echo "Options:"
echo " -a, --address ADDRESS RinCoin address to mine to (default: $DEFAULT_ADDRESS)"
echo " -t, --threads COUNT Number of threads to use (default: $DEFAULT_THREADS)"
echo " -h, --help Show this help message"
echo ""
echo "Examples:"
echo " $0 # Use defaults (all cores, default address)"
echo " $0 -a rin1q... -t 16 # Custom address and 16 threads"
echo " $0 --address rin1q... --threads 8 # Custom address and 8 threads"
exit 0
;;
*)
echo "Unknown option: $1"
echo "Use -h or --help for usage information"
exit 1
;;
esac
done
# Set defaults if not provided
if [ -z "$RIN_ADDRESS" ]; then
RIN_ADDRESS="$DEFAULT_ADDRESS"
echo "No address provided, using default: $RIN_ADDRESS"
fi
if [ -z "$THREAD_COUNT" ]; then
THREAD_COUNT="$DEFAULT_THREADS"
echo "No thread count provided, using all cores: $THREAD_COUNT"
fi
# Validate thread count
if ! [[ "$THREAD_COUNT" =~ ^[0-9]+$ ]] || [ "$THREAD_COUNT" -lt 1 ] || [ "$THREAD_COUNT" -gt "$TOTAL_CORES" ]; then
echo "❌ Error: Invalid thread count: $THREAD_COUNT"
echo "Thread count must be between 1 and $TOTAL_CORES"
exit 1
fi
echo "=== RinCoin Solo Mining (Built-in Core Mining) ==="
echo "CPU Cores Available: $TOTAL_CORES"
echo "Threads to Use: $THREAD_COUNT"
echo "Target Address: $RIN_ADDRESS"
echo ""
echo ""
# Configuration
RPC_HOST="127.0.0.1"
RPC_PORT="9556"
RPC_USER="rinrpc"
RPC_PASS="745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90"
# Function to call RPC
call_rpc() {
local method="$1"
local params="$2"
curl -s --user "$RPC_USER:$RPC_PASS" \
-H 'content-type: text/plain' \
--data "{\"jsonrpc\":\"1.0\",\"id\":\"curl\",\"method\":\"$method\",\"params\":$params}" \
"http://$RPC_HOST:$RPC_PORT/"
}
# Wait for node to be ready
echo "Waiting for RinCoin node to be ready..."
while true; do
response=$(call_rpc "getblockchaininfo" "[]")
if [[ $response != *"Loading block index"* ]]; then
break
fi
echo "Node still loading... waiting 10 seconds"
sleep 10
done
echo "✅ Node is ready!"
echo ""
# Load wallet if not already loaded
echo "Loading wallet..."
wallet_response=$(call_rpc "loadwallet" "[\"main\"]")
if [[ $wallet_response == *"already loaded"* ]]; then
echo "✅ Wallet already loaded"
elif [[ $wallet_response == *'"message"'* ]]; then
echo "⚠️ Wallet load error: $wallet_response"
else
echo "✅ Wallet loaded successfully"
fi
echo ""
# Validate the provided address (basic check)
if [[ ! "$RIN_ADDRESS" =~ ^rin1[a-zA-Z0-9]{25,}$ ]]; then
echo "❌ Error: Invalid RinCoin address format: $RIN_ADDRESS"
echo "RinCoin addresses should start with 'rin1' and be ~30 characters long"
exit 1
fi
echo "✅ Using RinCoin Address: $RIN_ADDRESS"
echo ""
# Get blockchain info
echo "Blockchain Status:"
blockchain_info=$(call_rpc "getblockchaininfo" "[]")
blocks=$(echo "$blockchain_info" | grep -o '"blocks":[^,]*' | cut -d':' -f2)
headers=$(echo "$blockchain_info" | grep -o '"headers":[^,]*' | cut -d':' -f2)
difficulty=$(echo "$blockchain_info" | grep -o '"difficulty":[^,]*' | cut -d':' -f2)
echo "Blocks: $blocks"
echo "Headers: $headers"
echo "Difficulty: $difficulty"
echo ""
echo "⚠️ IMPORTANT: Built-in Core Mining Limitations:"
echo "1. Uses CPU only (not GPU)"
echo "2. Very low hashpower compared to specialized miners"
echo "3. Extremely low chance of finding blocks solo"
echo "4. Best for testing, not profitable mining"
echo "5. Thread count affects mining attempts per cycle"
echo ""
echo "🚀 Starting Built-in Solo Mining..."
echo "Target Address: $RIN_ADDRESS"
echo "Threads: $THREAD_COUNT"
echo "Press Ctrl+C to stop mining"
echo ""
# Start built-in mining with specified thread count
# Note: RinCoin Core's generatetoaddress doesn't directly support thread count
# but we can run multiple parallel instances (see the commented sketch after the loop) or adjust the maxtries parameter
while true; do
echo "Attempting to mine 1 block with $THREAD_COUNT threads..."
# Adjust maxtries based on thread count for better distribution
adjusted_tries=$((1000000 * THREAD_COUNT / TOTAL_CORES))
mining_result=$(call_rpc "generatetoaddress" "[1, \"$RIN_ADDRESS\", $adjusted_tries]")
if [[ $mining_result == *"result"* ]] && [[ $mining_result != *"[]"* ]]; then
echo "🎉 BLOCK FOUND!"
echo "Result: $mining_result"
break
else
echo "No block found in this attempt (tries: $adjusted_tries). Retrying..."
sleep 5
fi
done
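# Alternative sketch (commented out, illustrative only): instead of scaling maxtries,
# launch one generatetoaddress attempt per thread in parallel, reusing the call_rpc
# helper defined above. Not wired into this script.
#
#   for i in $(seq 1 "$THREAD_COUNT"); do
#       call_rpc "generatetoaddress" "[1, \"$RIN_ADDRESS\", 100000]" &
#   done
#   wait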

View File

@@ -0,0 +1,81 @@
#!/bin/bash
# Remote Solo Mining Script for RinCoin
# Connects to RinCoin node over network/RPC
# Configuration
RPC_HOST="127.0.0.1" # Change to your server IP for remote access
RPC_PORT="9556"
RPC_USER="rinrpc"
RPC_PASS="745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90"
echo "=== Remote RinCoin Solo Mining Setup ==="
echo "RPC Host: $RPC_HOST:$RPC_PORT"
echo ""
# Test RPC connection
echo "Testing RPC connection..."
RPC_RESPONSE=$(curl -s --user "$RPC_USER:$RPC_PASS" \
-H 'content-type: text/plain' \
--data '{"jsonrpc":"1.0","id":"curl","method":"getblockchaininfo","params":[]}' \
"http://$RPC_HOST:$RPC_PORT/")
if [[ $RPC_RESPONSE == *"error"* ]]; then
echo "❌ Error: Could not connect to RinCoin RPC!"
echo "Response: $RPC_RESPONSE"
echo ""
echo "Check:"
echo "1. RinCoin node is running"
echo "2. RPC port $RPC_PORT is accessible"
echo "3. Firewall allows connections to port $RPC_PORT"
exit 1
fi
echo "✅ RPC connection successful!"
echo ""
# Get wallet address via RPC
echo "Getting wallet address..."
WALLET_RESPONSE=$(curl -s --user "$RPC_USER:$RPC_PASS" \
-H 'content-type: text/plain' \
--data '{"jsonrpc":"1.0","id":"curl","method":"getnewaddress","params":[]}' \
"http://$RPC_HOST:$RPC_PORT/")
RIN_ADDRESS=$(echo "$WALLET_RESPONSE" | grep -o '"result":"[^"]*"' | cut -d'"' -f4)
if [ -z "$RIN_ADDRESS" ]; then
echo "❌ Error: Could not get RinCoin address!"
echo "Response: $WALLET_RESPONSE"
echo ""
echo "Make sure wallet 'main' exists:"
echo "curl --user $RPC_USER:$RPC_PASS -H 'content-type: text/plain' --data '{\"jsonrpc\":\"1.0\",\"id\":\"curl\",\"method\":\"createwallet\",\"params\":[\"main\"]}' http://$RPC_HOST:$RPC_PORT/"
exit 1
fi
echo "✅ RinCoin Address: $RIN_ADDRESS"
echo ""
# Check node sync status
SYNC_RESPONSE=$(curl -s --user "$RPC_USER:$RPC_PASS" \
-H 'content-type: text/plain' \
--data '{"jsonrpc":"1.0","id":"curl","method":"getblockchaininfo","params":[]}' \
"http://$RPC_HOST:$RPC_PORT/")
SYNC_STATUS=$(echo "$SYNC_RESPONSE" | grep -o '"initialblockdownload":[^,]*' | cut -d':' -f2 | tr -d ' ')
if [ "$SYNC_STATUS" = "true" ]; then
echo "⚠️ WARNING: Node is still syncing (initialblockdownload: true)"
echo "Solo mining may not work properly until sync is complete."
echo ""
fi
echo "Starting remote solo mining with cpuminer-opt-rin..."
echo "Algorithm: rinhash"
echo "Target: RinCoin node at $RPC_HOST:9555"
echo "Wallet: $RIN_ADDRESS"
echo ""
echo "Press Ctrl+C to stop mining"
echo ""
# Start solo mining (connect to P2P port, not RPC)
sudo docker exec -it amd-strix-halo-llama-rocm bash -c "/mnt/dl/rinhash/cpuminer-opt-rin/cpuminer -a rinhash -o stratum+tcp://$RPC_HOST:9555 -u $RIN_ADDRESS -p x -t 32"

View File

@@ -0,0 +1,76 @@
#!/bin/bash
# RinCoin Solo Mining via RPC
# This script uses RinCoin's RPC interface for solo mining
echo "=== RinCoin Solo Mining via RPC ==="
echo ""
# Configuration
RPC_HOST="127.0.0.1"
RPC_PORT="9556"
RPC_USER="rinrpc"
RPC_PASS="745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90"
# Function to call RPC
call_rpc() {
local method="$1"
local params="$2"
curl -s --user "$RPC_USER:$RPC_PASS" \
-H 'content-type: text/plain' \
--data "{\"jsonrpc\":\"1.0\",\"id\":\"curl\",\"method\":\"$method\",\"params\":$params}" \
"http://$RPC_HOST:$RPC_PORT/"
}
# Wait for node to be ready
echo "Waiting for RinCoin node to be ready..."
while true; do
response=$(call_rpc "getblockchaininfo" "[]")
if [[ $response != *"Loading block index"* ]]; then
break
fi
echo "Node still loading... waiting 10 seconds"
sleep 10
done
echo "✅ Node is ready!"
echo ""
# Get wallet address
echo "Getting wallet address..."
wallet_response=$(call_rpc "getnewaddress" "[]")
rin_address=$(echo "$wallet_response" | grep -o '"result":"[^"]*"' | cut -d'"' -f4)
if [ -z "$rin_address" ]; then
echo "❌ Error: Could not get RinCoin address!"
echo "Response: $wallet_response"
exit 1
fi
echo "✅ RinCoin Address: $rin_address"
echo ""
# Get blockchain info
echo "Blockchain Status:"
blockchain_info=$(call_rpc "getblockchaininfo" "[]")
blocks=$(echo "$blockchain_info" | grep -o '"blocks":[^,]*' | cut -d':' -f2)
headers=$(echo "$blockchain_info" | grep -o '"headers":[^,]*' | cut -d':' -f2)
difficulty=$(echo "$blockchain_info" | grep -o '"difficulty":[^,]*' | cut -d':' -f2)
echo "Blocks: $blocks"
echo "Headers: $headers"
echo "Difficulty: $difficulty"
echo ""
echo "⚠️ IMPORTANT: RinCoin solo mining requires:"
echo "1. A fully synced node (currently at block $blocks of $headers)"
echo "2. Mining software that supports RinCoin's RPC mining protocol"
echo "3. Very high hashpower to find blocks solo"
echo ""
echo "For now, we recommend pool mining for consistent rewards:"
echo ""
echo "Pool Mining Command:"
echo "sudo docker exec -it amd-strix-halo-llama-rocm bash -c \"/mnt/dl/rinhash/cpuminer-opt-rin/cpuminer -a rinhash -o stratum+tcp://rinhash.mine.zergpool.com:7148 -u bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j -p c=BTC,mc=RIN,ID=StrixHalo -t 32\""
echo ""
echo "Your RinCoin address for solo mining: $rin_address"

View File

@@ -0,0 +1,83 @@
#!/bin/bash
# RinCoin Mining Pool Server Startup Script
# Distributes block rewards among multiple miners
echo "=== RinCoin Mining Pool Server ==="
echo ""
# Check if RinCoin node is running
echo "Checking RinCoin node status..."
if ! curl -s -u rinrpc:745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90 \
-H 'content-type: text/plain' \
--data '{"jsonrpc":"1.0","id":"curl","method":"getblockchaininfo","params":[]}' \
http://127.0.0.1:9556/ > /dev/null; then
echo "❌ RinCoin node is not running!"
echo "Start it first with: docker start rincoin-node"
exit 1
fi
echo "✅ RinCoin node is running"
# Check Python dependencies
echo "Checking Python dependencies..."
python3 -c "import requests, sqlite3" 2>/dev/null || {
echo "Installing python3-requests..."
sudo apt-get update && sudo apt-get install -y python3-requests
}
echo "✅ Python dependencies ready"
# Check if port 3333 is already in use
if netstat -tln | grep -q ":3333 "; then
echo ""
echo "⚠️ Port 3333 is already in use!"
echo ""
echo "🔍 Process using port 3333:"
sudo netstat -tlnp | grep ":3333 " || echo "Could not determine process"
echo ""
echo "🛑 To kill existing process:"
echo "sudo lsof -ti:3333 | xargs sudo kill -9"
echo ""
read -p "Kill existing process and continue? (y/N): " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
echo "Killing processes using port 3333..."
sudo lsof -ti:3333 | xargs sudo kill -9 2>/dev/null || echo "No processes to kill"
sleep 2
else
echo "Exiting..."
exit 1
fi
fi
echo ""
echo "🚀 Starting Mining Pool Server..."
echo "This will distribute block rewards among multiple miners"
echo ""
echo "Pool Features:"
echo "- Multiple miner support"
echo "- Share-based reward distribution"
echo "- Pool fee: 1%"
echo "- Real-time statistics"
echo "- Web dashboard on port 8083"
echo ""
echo "After it starts, miners can connect with:"
echo ""
echo "Option 1: Address as username"
echo "./cpuminer -a rinhash -o stratum+tcp://YOUR_IP:3333 -u rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q -p x"
echo ""
echo "Option 2: Address.workername format"
echo "./cpuminer -a rinhash -o stratum+tcp://YOUR_IP:3333 -u rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q.worker1 -p x"
echo ""
echo "Option 3: Traditional username (rewards to pool address)"
echo "./cpuminer -a rinhash -o stratum+tcp://YOUR_IP:3333 -u username.workername -p x"
echo ""
echo "🌐 Web Dashboard: http://YOUR_IP:8083"
echo "📊 View real-time pool statistics, miners, and blocks"
echo ""
echo "Press Ctrl+C to stop the pool"
# Start the mining pool
python3 MINE/rin/stratum_pool.py

View File

@@ -0,0 +1,81 @@
#!/bin/bash
# Start RinCoin Solo Mining with Custom Address
# Usage: ./start_mining_with_address.sh [rincoin_address] [threads]
# Default values
DEFAULT_ADDRESS="rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q"
DEFAULT_THREADS="28"
# Parse arguments
RINCOIN_ADDRESS="${1:-$DEFAULT_ADDRESS}"
THREADS="${2:-$DEFAULT_THREADS}"
echo "=== RinCoin Solo Mining Setup ==="
echo "RinCoin Address: $RINCOIN_ADDRESS"
echo "Threads: $THREADS"
echo ""
# Validate RinCoin address format
if [[ ! "$RINCOIN_ADDRESS" =~ ^rin1[a-zA-Z0-9]{25,}$ ]]; then
echo "❌ Error: Invalid RinCoin address format: $RINCOIN_ADDRESS"
echo "RinCoin addresses should start with 'rin1' and be ~30 characters long"
exit 1
fi
# Check if RinCoin node is running
if ! sudo docker ps | grep -q "rincoin-node"; then
echo "❌ Error: rincoin-node container is not running!"
echo "Please start it first: sudo docker start rincoin-node"
exit 1
fi
echo "✅ RinCoin node is running"
# Check dependencies
if ! command -v python3 &> /dev/null; then
echo "❌ Error: python3 is not installed!"
exit 1
fi
python3 -c "import requests" 2>/dev/null || {
echo "Installing python3-requests..."
sudo apt-get update && sudo apt-get install -y python3-requests
}
echo "✅ Dependencies ready"
echo ""
# Create temporary proxy script with custom address
TEMP_PROXY="/tmp/rincoin_proxy_${RANDOM}.py"
sed "s/target_address='[^']*'/target_address='$RINCOIN_ADDRESS'/" MINE/rin/stratum_proxy.py > "$TEMP_PROXY"
echo "🚀 Starting Stratum Proxy with address: $RINCOIN_ADDRESS"
echo ""
# Start proxy in background
python3 "$TEMP_PROXY" &
PROXY_PID=$!
# Wait for proxy to start
sleep 3
echo "📋 Mining Commands:"
echo ""
echo "1. For Docker container mining:"
echo "sudo docker exec -it amd-strix-halo-llama-rocm bash -c \"/mnt/dl/rinhash/cpuminer-opt-rin/cpuminer -a rinhash -o stratum+tcp://172.17.0.1:3333 -u user -p pass -t $THREADS\""
echo ""
echo "2. For native mining (if cpuminer is installed locally):"
echo "/home/db/Downloads/rinhash/cpuminer-opt-rin/cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u user -p pass -t $THREADS"
echo ""
echo "💡 Tips:"
echo "- Use 172.17.0.1:3333 from Docker containers"
echo "- Use 127.0.0.1:3333 from host system"
echo "- All block rewards will go to: $RINCOIN_ADDRESS"
echo ""
echo "Press Ctrl+C to stop the proxy and mining"
# Wait for user to stop
trap "echo ''; echo 'Stopping proxy...'; kill $PROXY_PID 2>/dev/null; rm -f '$TEMP_PROXY'; exit 0" INT
wait $PROXY_PID

View File

@@ -0,0 +1,69 @@
#!/bin/bash
# Start RinCoin Stratum Proxy Server
# Bridges cpuminer-opt-rin to RinCoin node
echo "=== RinCoin Stratum Proxy Server ==="
echo ""
# Check if RinCoin node is running
if ! sudo docker ps | grep -q "rincoin-node"; then
echo "❌ Error: rincoin-node container is not running!"
echo "Please start it first:"
echo "sudo docker start rincoin-node"
exit 1
fi
echo "✅ RinCoin node is running"
# Check if Python3 and requests are available
if ! command -v python3 &> /dev/null; then
echo "❌ Error: python3 is not installed!"
echo "Please install it: sudo apt-get install python3"
exit 1
fi
# Install requests if not available
python3 -c "import requests" 2>/dev/null || {
echo "Installing python3-requests..."
sudo apt-get update && sudo apt-get install -y python3-requests
}
echo "✅ Python dependencies ready"
# Check if port 3334 is already in use
if netstat -tln | grep -q ":3334 "; then
echo ""
echo "⚠️ Port 3334 is already in use!"
echo ""
echo "🔍 Process using port 3334:"
sudo netstat -tlnp | grep ":3334 " || echo "Could not determine process"
echo ""
echo "🛑 To kill existing process:"
echo "sudo lsof -ti:3334 | xargs sudo kill -9"
echo ""
read -p "Kill existing process and continue? (y/N): " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
echo "Killing processes using port 3334..."
sudo lsof -ti:3334 | xargs sudo kill -9 2>/dev/null || echo "No processes to kill"
sleep 2
else
echo "Exiting..."
exit 1
fi
fi
echo ""
echo "🚀 Starting Stratum Proxy Server..."
echo "This will bridge cpuminer-opt-rin to your RinCoin node"
echo ""
echo "After it starts, connect your miner with:"
echo "sudo docker exec -it amd-strix-halo-llama-rocm bash -c \"/mnt/dl/rinhash/cpuminer-opt-rin/cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3334 -u user -p pass -t 28\""
echo ""
echo "Press Ctrl+C to stop the proxy"
echo ""
# Start the proxy
cd "$(dirname "$0")"
python3 stratum_proxy.py

602
MINE/rin/stratum_pool.py Normal file
View File

@@ -0,0 +1,602 @@
#!/usr/bin/env python3
"""
RinCoin Mining Pool Server
Distributes block rewards among multiple miners based on share contributions
"""
import socket
import threading
import json
import time
import requests
import hashlib
import struct
import sqlite3
from datetime import datetime
from requests.auth import HTTPBasicAuth
# Import web interface
from pool_web_interface import start_web_interface
# Import stratum base class
from stratum_proxy import RinCoinStratumBase
class RinCoinMiningPool(RinCoinStratumBase):
def __init__(self, stratum_host='0.0.0.0', stratum_port=3333,
rpc_host='127.0.0.1', rpc_port=9556,
rpc_user='rinrpc', rpc_password='745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90',
pool_address='rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q',
pool_fee_percent=1.0):
# Initialize base class
super().__init__(stratum_host, stratum_port, rpc_host, rpc_port, rpc_user, rpc_password, pool_address)
self.pool_address = pool_address
self.pool_fee_percent = pool_fee_percent
# Pool statistics
self.total_shares = 0
self.total_blocks = 0
self.pool_hashrate = 0
# SQLite database for miner/share tracking (in-memory here, so it does not persist across restarts)
self.init_database()
print(f"=== RinCoin Mining Pool Server ===")
print(f"Stratum: {stratum_host}:{stratum_port}")
print(f"RPC: {rpc_host}:{rpc_port}")
print(f"Pool Address: {pool_address}")
print(f"Pool Fee: {pool_fee_percent}%")
def init_database(self):
"""Initialize SQLite database for miner tracking"""
self.db = sqlite3.connect(':memory:', check_same_thread=False)
cursor = self.db.cursor()
# Create tables
cursor.execute('''
CREATE TABLE IF NOT EXISTS miners (
id INTEGER PRIMARY KEY,
user TEXT NOT NULL,
worker TEXT NOT NULL,
address TEXT,
shares INTEGER DEFAULT 0,
last_share TIMESTAMP,
last_hashrate REAL DEFAULT 0,
created TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
''')
cursor.execute('''
CREATE TABLE IF NOT EXISTS shares (
id INTEGER PRIMARY KEY,
miner_id INTEGER,
job_id TEXT,
difficulty REAL,
submitted TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (miner_id) REFERENCES miners (id)
)
''')
cursor.execute('''
CREATE TABLE IF NOT EXISTS blocks (
id INTEGER PRIMARY KEY,
block_hash TEXT,
height INTEGER,
reward REAL,
pool_fee REAL,
miner_rewards TEXT, -- JSON of {address: amount}
found_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
''')
# Samples for pool hashrate chart
cursor.execute('''
CREATE TABLE IF NOT EXISTS hashrate_samples (
id INTEGER PRIMARY KEY,
ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
hashrate REAL
)
''')
self.db.commit()
def get_pool_block_template(self):
"""Get new block template and create pool-style job"""
template = super().get_block_template()
if template:
# Convert to pool-style job format if needed
job = self.current_job
if job:
# Add pool-style placeholder fields (the real coinbase is assembled server-side by the base class)
job["coinb1"] = "01000000" + "0" * 60
job["coinb2"] = "ffffffff"
job["merkle_branch"] = []
job["clean_jobs"] = True
return job
return None
def validate_rincoin_address(self, address):
"""Validate if an address is a valid RinCoin address"""
try:
return self.decode_bech32_address(address) is not None
except:
return False
def register_miner(self, user, worker, address=None):
"""Register or update miner in database"""
cursor = self.db.cursor()
# Check if miner exists
cursor.execute('SELECT id, address FROM miners WHERE user = ? AND worker = ?', (user, worker))
result = cursor.fetchone()
if result:
miner_id, existing_address = result
if address and not existing_address:
cursor.execute('UPDATE miners SET address = ? WHERE id = ?', (address, miner_id))
self.db.commit()
return miner_id
else:
# Create new miner
cursor.execute('INSERT INTO miners (user, worker, address) VALUES (?, ?, ?)', (user, worker, address))
self.db.commit()
return cursor.lastrowid
def record_share(self, miner_id, job_id, difficulty):
"""Record a share submission"""
cursor = self.db.cursor()
# Record share
cursor.execute('INSERT INTO shares (miner_id, job_id, difficulty) VALUES (?, ?, ?)',
(miner_id, job_id, difficulty))
# Update miner stats
cursor.execute('UPDATE miners SET shares = shares + 1, last_share = CURRENT_TIMESTAMP WHERE id = ?', (miner_id,))
self.db.commit()
self.total_shares += 1
def distribute_block_reward(self, block_hash, block_height, total_reward):
"""Distribute block reward among miners based on their shares"""
cursor = self.db.cursor()
# Calculate pool fee
pool_fee = total_reward * (self.pool_fee_percent / 100.0)
miner_reward = total_reward - pool_fee
# Get shares from last 24 hours
cursor.execute('''
SELECT m.address, COUNT(s.id) as share_count, SUM(s.difficulty) as total_difficulty
FROM miners m
JOIN shares s ON m.id = s.miner_id
WHERE s.submitted > datetime('now', '-1 day')
GROUP BY m.id, m.address
HAVING share_count > 0
''')
miners = cursor.fetchall()
if not miners:
print("No miners with shares in last 24 hours")
return
# Calculate total difficulty
total_difficulty = sum(row[2] for row in miners)
# Separate miners with and without addresses
miners_with_addresses = []
miners_without_addresses = []
total_difficulty_with_addresses = 0
total_difficulty_without_addresses = 0
for address, share_count, difficulty in miners:
if address:
miners_with_addresses.append((address, share_count, difficulty))
total_difficulty_with_addresses += difficulty
else:
miners_without_addresses.append((address, share_count, difficulty))
total_difficulty_without_addresses += difficulty
# Calculate total difficulty
total_difficulty = total_difficulty_with_addresses + total_difficulty_without_addresses
if total_difficulty == 0:
print("No valid difficulty found")
return
# Distribute rewards
miner_rewards = {}
# First, distribute to miners with valid addresses
if miners_with_addresses:
for address, share_count, difficulty in miners_with_addresses:
reward_share = (difficulty / total_difficulty) * miner_reward
miner_rewards[address] = reward_share
print(f"💰 Miner {address}: {reward_share:.8f} RIN ({difficulty} difficulty)")
# Calculate undistributed rewards (from miners without addresses)
if miners_without_addresses:
undistributed_reward = 0
for address, share_count, difficulty in miners_without_addresses:
undistributed_reward += (difficulty / total_difficulty) * miner_reward
print(f"⚠️ Miner without address: {difficulty} difficulty (reward retained by pool)")
# Keep undistributed rewards for pool (no redistribution)
print(f"💰 Pool keeps {undistributed_reward:.8f} RIN from miners without addresses")
# Record block
cursor.execute('''
INSERT INTO blocks (block_hash, height, reward, pool_fee, miner_rewards)
VALUES (?, ?, ?, ?, ?)
''', (block_hash, block_height, total_reward, pool_fee, json.dumps(miner_rewards)))
self.db.commit()
self.total_blocks += 1
print(f"🎉 Block {block_height} reward distributed!")
print(f"💰 Pool fee: {pool_fee:.8f} RIN")
print(f"💰 Total distributed: {sum(miner_rewards.values()):.8f} RIN")
# Summary
if miners_without_addresses:
print(f"📊 Summary: {len(miners_with_addresses)} miners with addresses, {len(miners_without_addresses)} without (rewards to pool)")
# Use inherited send_stratum_response and send_stratum_notification from base class
def handle_stratum_message(self, client, addr, message):
"""Handle incoming Stratum message from miner"""
try:
data = json.loads(message.strip())
method = data.get("method")
msg_id = data.get("id")
params = data.get("params", [])
print(f"[{addr}] {method}: {params}")
if method == "mining.subscribe":
# Subscribe response
self.send_stratum_response(client, msg_id, [
[["mining.set_difficulty", "subscription_id"], ["mining.notify", "subscription_id"]],
"extranonce1",
4
])
# Send difficulty (lower for CPU mining)
self.send_stratum_notification(client, "mining.set_difficulty", [0.0001])
# Send initial job
if self.get_pool_block_template():
job = self.current_job
self.send_stratum_notification(client, "mining.notify", [
job["job_id"],
job["prevhash"],
job["coinb1"],
job["coinb2"],
job["merkle_branch"],
f"{job['version']:08x}",
job["bits"],
job["ntime"],
job["clean_jobs"]
])
elif method == "mining.extranonce.subscribe":
# Handle extranonce subscription
print(f"[{addr}] Extranonce subscription requested")
self.send_stratum_response(client, msg_id, True)
elif method == "mining.authorize":
# Parse user.worker format
if len(params) >= 2:
user_worker = params[0]
password = params[1] if len(params) > 1 else ""
# Extract user and worker
if '.' in user_worker:
user, worker = user_worker.split('.', 1)
else:
user = user_worker
worker = "default"
# Check if user contains a RinCoin address (starts with 'rin')
miner_address = None
if user.startswith('rin'):
# User is a RinCoin address
if self.validate_rincoin_address(user):
miner_address = user
user = f"miner_{miner_address[:8]}" # Create a user ID from address
print(f"[{addr}] ✅ Miner using valid RinCoin address: {miner_address}")
else:
print(f"[{addr}] ❌ Invalid RinCoin address: {user}")
self.send_stratum_response(client, msg_id, False, "Invalid RinCoin address")
return
elif '.' in user and user.split('.')[0].startswith('rin'):
# Format: rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q.workername
address_part, worker_part = user.split('.', 1)
if address_part.startswith('rin'):
if self.validate_rincoin_address(address_part):
miner_address = address_part
user = f"miner_{miner_address[:8]}"
worker = worker_part
print(f"[{addr}] ✅ Miner using valid RinCoin address format: {miner_address}.{worker}")
else:
print(f"[{addr}] ❌ Invalid RinCoin address: {address_part}")
self.send_stratum_response(client, msg_id, False, "Invalid RinCoin address")
return
# Register miner with address
miner_id = self.register_miner(user, worker, miner_address)
# Store client info
self.clients[addr] = {
'client': client,
'user': user,
'worker': worker,
'miner_id': miner_id,
'address': miner_address,
'shares': 0,
'last_share': time.time(),
'extranonce1': '00000000' # Default extranonce1
}
if miner_address:
print(f"[{addr}] ✅ Authorized: {user}.{worker} -> {miner_address}")
else:
print(f"[{addr}] ⚠️ Authorized: {user}.{worker} (rewards will go to pool address)")
self.send_stratum_response(client, msg_id, True)
else:
self.send_stratum_response(client, msg_id, False, "Invalid authorization")
elif method == "mining.submit":
# Submit share
if addr not in self.clients:
self.send_stratum_response(client, msg_id, False, "Not authorized")
return
miner_info = self.clients[addr]
try:
if self.current_job and len(params) >= 5:
username = params[0]
job_id = params[1]
extranonce2 = params[2]
ntime = params[3]
nonce = params[4]
# Use base class to validate and submit share
extranonce1 = miner_info.get('extranonce1', '00000000')
miner_address = miner_info.get('address')
# For pool mining, always mine to pool address
success, message = self.submit_share(
self.current_job, extranonce1, extranonce2, ntime, nonce,
target_address=self.pool_address
)
if success:
# Record share with estimated difficulty
actual_difficulty = 0.00133 # Estimated for ~381 kH/s
self.record_share(miner_info['miner_id'], job_id, actual_difficulty)
# Update miner stats
now_ts = time.time()
prev_ts = miner_info.get('last_share') or now_ts
dt = max(now_ts - prev_ts, 1e-3)
miner_hashrate = actual_difficulty * (2**32) / dt
if miner_info['shares'] == 0:
miner_hashrate = 381000 # Default estimate
miner_info['shares'] += 1
miner_info['last_share'] = now_ts
# Update database
try:
cursor = self.db.cursor()
cursor.execute('UPDATE miners SET last_share = CURRENT_TIMESTAMP, last_hashrate = ? WHERE id = ?',
(miner_hashrate, miner_info['miner_id']))
self.db.commit()
except Exception as e:
print(f"DB update error: {e}")
print(f"[{addr}] ✅ Share accepted from {miner_info['user']}.{miner_info['worker']} (Total: {miner_info['shares']})")
self.send_stratum_response(client, msg_id, True)
# If block was found, distribute rewards
if "Block found" in message:
print(f"🎉 [{addr}] BLOCK FOUND!")
# Get block info and distribute rewards
total_reward = self.current_job['coinbasevalue'] / 100000000 if self.current_job else 25.0
self.distribute_block_reward("pending", self.current_job['height'] if self.current_job else 0, total_reward)
else:
# Accept as share for pool statistics even if block validation fails
self.send_stratum_response(client, msg_id, True)
else:
print(f"[{addr}] Invalid share parameters")
self.send_stratum_response(client, msg_id, False, "Invalid parameters")
except Exception as e:
print(f"[{addr}] Share processing error: {e}")
# Still accept the share for mining statistics
self.send_stratum_response(client, msg_id, True)
else:
print(f"[{addr}] ⚠️ Unknown method: {method}")
# Send null result for unknown methods (standard Stratum behavior)
self.send_stratum_response(client, msg_id, None, None)
except json.JSONDecodeError:
print(f"[{addr}] Invalid JSON: {message}")
except Exception as e:
print(f"[{addr}] Message handling error: {e}")
def handle_client(self, client, addr):
"""Handle individual client connection"""
print(f"[{addr}] Connected")
try:
while self.running:
data = client.recv(4096)
if not data:
break
# Handle multiple messages in one packet
messages = data.decode('utf-8').strip().split('\n')
for message in messages:
if message:
self.handle_stratum_message(client, addr, message)
except Exception as e:
print(f"[{addr}] Client error: {e}")
finally:
client.close()
if addr in self.clients:
del self.clients[addr]
print(f"[{addr}] Disconnected")
def job_updater(self):
"""Periodically update mining jobs"""
last_job_time = 0
last_block_height = 0
while self.running:
try:
# Check for new blocks every 10 seconds
time.sleep(10)
# Get current blockchain info
blockchain_info = self.rpc_call("getblockchaininfo")
if blockchain_info:
current_height = blockchain_info.get('blocks', 0)
# Create new job if:
# 1. New block detected
# 2. 30+ seconds since last job
# 3. No current job exists
should_create_job = (
current_height != last_block_height or
time.time() - last_job_time > 30 or
not self.current_job
)
if should_create_job:
if self.get_pool_block_template():
job = self.current_job
last_job_time = time.time()
last_block_height = current_height
print(f"📦 New job created: {job['job_id']} (block {current_height})")
# Send to all connected clients
for addr, miner_info in list(self.clients.items()):
try:
self.send_stratum_notification(miner_info['client'], "mining.notify", [
job["job_id"],
job["prevhash"],
job["coinb1"],
job["coinb2"],
job["merkle_branch"],
f"{job['version']:08x}",
job["bits"],
job["ntime"],
job["clean_jobs"]
])
except Exception as e:
print(f"Failed to send job to {addr}: {e}")
except Exception as e:
print(f"Job updater error: {e}")
def stats_updater(self):
"""Periodically update pool statistics"""
while self.running:
try:
time.sleep(60) # Update every minute
cursor = self.db.cursor()
# Pool hashrate is the sum of miners' last hashrates
cursor.execute('SELECT COALESCE(SUM(last_hashrate), 0) FROM miners')
self.pool_hashrate = cursor.fetchone()[0] or 0.0
# Sample for chart
cursor.execute('INSERT INTO hashrate_samples (hashrate) VALUES (?)', (self.pool_hashrate,))
self.db.commit()
print(f"📊 Pool Stats: {len(self.clients)} miners, {self.total_shares} shares, {self.pool_hashrate/1000:.2f} kH/s")
except Exception as e:
print(f"Stats updater error: {e}")
def start(self):
"""Start the mining pool server"""
try:
# Test RPC connection
blockchain_info = self.rpc_call("getblockchaininfo")
if not blockchain_info:
print("❌ Failed to connect to RinCoin node!")
return
print(f"✅ Connected to RinCoin node (block {blockchain_info.get('blocks', 'unknown')})")
# Start background threads
job_thread = threading.Thread(target=self.job_updater, daemon=True)
job_thread.start()
stats_thread = threading.Thread(target=self.stats_updater, daemon=True)
stats_thread.start()
# Start web interface in background
web_thread = threading.Thread(target=start_web_interface,
args=(self.db, '0.0.0.0', 8083,
self.rpc_host, self.rpc_port,
self.rpc_user, self.rpc_password),
daemon=True)
web_thread.start()
print(f"🌐 Web dashboard started on http://0.0.0.0:8083")
# Start Stratum server
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind((self.stratum_host, self.stratum_port))
server_socket.listen(10)
print(f"🚀 Mining pool listening on {self.stratum_host}:{self.stratum_port}")
print("Ready for multiple miners...")
print("")
print(f"💰 Pool address: {self.pool_address}")
print(f"💰 Pool fee: {self.pool_fee_percent}%")
print("")
print("Connect miners with:")
print(f"./cpuminer -a rinhash -o stratum+tcp://{self.stratum_host}:{self.stratum_port} -u username.workername -p x")
print("")
while self.running:
try:
client, addr = server_socket.accept()
client_thread = threading.Thread(target=self.handle_client, args=(client, addr), daemon=True)
client_thread.start()
except KeyboardInterrupt:
print("\n🛑 Shutting down pool...")
self.running = False
break
except Exception as e:
print(f"Server error: {e}")
except OSError as e:
if "Address already in use" in str(e):
print(f"❌ Port {self.stratum_port} is already in use!")
print("")
print("🔍 Check what's using the port:")
print(f"sudo netstat -tlnp | grep :{self.stratum_port}")
print("")
print("🛑 Kill existing process:")
print(f"sudo lsof -ti:{self.stratum_port} | xargs sudo kill -9")
print("")
print("🔄 Or use a different port by editing the script")
else:
print(f"Failed to start server: {e}")
except Exception as e:
print(f"Failed to start server: {e}")
finally:
print("Pool server stopped")
if __name__ == "__main__":
pool = RinCoinMiningPool()
pool.start()

View File

@@ -0,0 +1,550 @@
#!/usr/bin/env python3
"""
RinCoin Stratum Proxy Server - PRODUCTION VERSION
Bridges cpuminer-opt-rin (Stratum protocol) to RinCoin node (RPC protocol)
For real solo mining with actual block construction and submission
"""
import socket
import threading
import json
import time
import requests
import hashlib
import struct
import binascii
from requests.auth import HTTPBasicAuth
class RinCoinStratumProxy:
def __init__(self, stratum_host='0.0.0.0', stratum_port=3333,
rpc_host='127.0.0.1', rpc_port=9556,
rpc_user='rinrpc', rpc_password='745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90',
target_address='rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q'):
self.stratum_host = stratum_host
self.stratum_port = stratum_port
self.rpc_host = rpc_host
self.rpc_port = rpc_port
self.rpc_user = rpc_user
self.rpc_password = rpc_password
self.target_address = target_address
self.clients = {}
self.job_counter = 0
self.current_job = None
self.running = True
self.extranonce1_counter = 0
print(f"🔥 RinCoin PRODUCTION Stratum Proxy Server")
print(f"Stratum: {stratum_host}:{stratum_port}")
print(f"RPC: {rpc_host}:{rpc_port}")
print(f"Target: {target_address}")
def rpc_call(self, method, params=[]):
"""Make RPC call to RinCoin node"""
try:
url = f"http://{self.rpc_host}:{self.rpc_port}/"
headers = {'content-type': 'text/plain'}
auth = HTTPBasicAuth(self.rpc_user, self.rpc_password)
payload = {
"jsonrpc": "1.0",
"id": "stratum_proxy",
"method": method,
"params": params
}
response = requests.post(url, json=payload, headers=headers, auth=auth, timeout=10)
if response.status_code == 200:
result = response.json()
if 'error' in result and result['error'] is not None:
print(f"RPC Error: {result['error']}")
return None
return result.get('result')
else:
print(f"HTTP Error: {response.status_code}")
return None
except Exception as e:
print(f"RPC Call Error: {e}")
return None
def create_coinbase_tx(self, template, extranonce1, extranonce2):
"""Create coinbase transaction"""
try:
# Get coinbase value (block reward + fees)
coinbase_value = template.get('coinbasevalue', 2500000000) # 25 RIN in satoshis
# Create coinbase transaction
# Version (4 bytes)
coinbase_tx = struct.pack('<L', 1)
# Input count (1 byte) - always 1 for coinbase
coinbase_tx += b'\x01'
# Previous output hash (32 bytes of zeros for coinbase)
coinbase_tx += b'\x00' * 32
# Previous output index (4 bytes, 0xffffffff for coinbase)
coinbase_tx += b'\xff\xff\xff\xff'
# Script length and coinbase script
height = template.get('height', 0)
height_stripped = struct.pack('<L', height).rstrip(b'\x00')  # minimal little-endian height
height_push = bytes([len(height_stripped)]) + height_stripped  # BIP34: length-prefixed height push
# Coinbase script: height push + extranonces + arbitrary data
coinbase_script = height_push + extranonce1.encode() + extranonce2.encode()
coinbase_script += b'/RinCoin Stratum Pool/' # Pool signature
# Script length (varint) + script
coinbase_tx += struct.pack('B', len(coinbase_script)) + coinbase_script
# Sequence (4 bytes)
coinbase_tx += b'\xff\xff\xff\xff'
# Output count (1 byte) - 1 output to our address
coinbase_tx += b'\x01'
# Output value (8 bytes)
coinbase_tx += struct.pack('<Q', coinbase_value)
# Output script (simplified - you'd need proper address decoding)
# For now, we'll use a simplified P2WPKH script
script_pubkey = self.address_to_script_pubkey(self.target_address)
coinbase_tx += struct.pack('B', len(script_pubkey)) + script_pubkey
# Lock time (4 bytes)
coinbase_tx += struct.pack('<L', 0)
return coinbase_tx
except Exception as e:
print(f"Coinbase creation error: {e}")
return None
def address_to_script_pubkey(self, address):
"""Convert bech32 address to script pubkey (simplified)"""
# This is a simplified version - in production you'd use proper bech32 decoding
# For now, return a standard P2WPKH template
# TODO: Implement proper bech32 decoding
return b'\x00\x14' + b'\x00' * 20 # OP_0 + 20-byte pubkey hash placeholder
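# One way to avoid hand-rolling bech32 (sketch only, not wired in here): ask the node,
# as the newer stratum_proxy.py does via its decode_bech32_address helper:
#
#   info = self.rpc_call("validateaddress", [address])
#   if info and info.get('isvalid') and info.get('scriptPubKey'):
#       return bytes.fromhex(info['scriptPubKey'])
#
# Field names match the validateaddress response already relied on elsewhere in this repo.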
def calculate_merkle_root(self, transactions):
"""Calculate merkle root from list of transaction hashes"""
if not transactions:
return b'\x00' * 32
# Convert hex strings to bytes
tx_hashes = [bytes.fromhex(tx) if isinstance(tx, str) else tx for tx in transactions]
while len(tx_hashes) > 1:
if len(tx_hashes) % 2 == 1:
tx_hashes.append(tx_hashes[-1]) # Duplicate last hash if odd number
new_level = []
for i in range(0, len(tx_hashes), 2):
combined = tx_hashes[i] + tx_hashes[i + 1]
hash_result = hashlib.sha256(hashlib.sha256(combined).digest()).digest()
new_level.append(hash_result)
tx_hashes = new_level
return tx_hashes[0] if tx_hashes else b'\x00' * 32
def get_block_template(self):
"""Get new block template from RinCoin node"""
try:
template = self.rpc_call("getblocktemplate", [{"rules": ["segwit"]}])
if not template:
return None
self.job_counter += 1
# Calculate target from bits
bits = template.get('bits', '1d00ffff')
target = self.bits_to_target(bits)
# Prepare transaction list (without coinbase)
transactions = template.get('transactions', [])
tx_hashes = [bytes.fromhex(tx['hash'])[::-1] for tx in transactions] # Reverse for little-endian
job = {
"job_id": f"job_{self.job_counter:08x}",
"template": template,
"prevhash": template.get("previousblockhash", "0" * 64),
"version": template.get('version', 1),
"bits": bits,
"ntime": int(time.time()),
"target": target,
"transactions": transactions,
"tx_hashes": tx_hashes,
"height": template.get('height', 0),
"coinbasevalue": template.get('coinbasevalue', 2500000000)
}
self.current_job = job
print(f"📦 New job: {job['job_id']} | Height: {job['height']} | Reward: {job['coinbasevalue']/100000000:.2f} RIN")
return job
except Exception as e:
print(f"Get block template error: {e}")
return None
def bits_to_target(self, bits_hex):
"""Convert bits to target (difficulty)"""
try:
bits = int(bits_hex, 16)
exponent = bits >> 24
mantissa = bits & 0xffffff
target = mantissa * (256 ** (exponent - 3))
return f"{target:064x}"
except:
return "0000ffff00000000000000000000000000000000000000000000000000000000"
def construct_block_header(self, job, extranonce1, extranonce2, ntime, nonce):
"""Construct block header for submission"""
try:
# Create coinbase transaction
coinbase_tx = self.create_coinbase_tx(job['template'], extranonce1, extranonce2)
if not coinbase_tx:
return None, None
# Calculate coinbase hash
coinbase_hash = hashlib.sha256(hashlib.sha256(coinbase_tx).digest()).digest()[::-1] # Reverse for little-endian
# Create full transaction list (coinbase + other transactions)
all_tx_hashes = [coinbase_hash] + job['tx_hashes']
# Calculate merkle root
merkle_root = self.calculate_merkle_root(all_tx_hashes)
# Construct block header (80 bytes)
header = b''
header += struct.pack('<L', job['version']) # Version (4 bytes)
header += bytes.fromhex(job['prevhash'])[::-1] # Previous block hash (32 bytes, reversed)
header += merkle_root[::-1] # Merkle root (32 bytes, reversed)
header += struct.pack('<L', int(ntime, 16)) # Timestamp (4 bytes)
header += bytes.fromhex(job['bits'])[::-1] # Bits (4 bytes, reversed)
header += struct.pack('<L', int(nonce, 16)) # Nonce (4 bytes)
# Construct full block
block = header
# Transaction count (varint)
tx_count = 1 + len(job['transactions'])
if tx_count < 253:
block += struct.pack('B', tx_count)
else:
block += b'\xfd' + struct.pack('<H', tx_count)
# Add coinbase transaction
block += coinbase_tx
# Add other transactions
for tx in job['transactions']:
block += bytes.fromhex(tx['data'])
return header, block
except Exception as e:
print(f"Block construction error: {e}")
return None, None
def validate_and_submit_block(self, job, extranonce1, extranonce2, ntime, nonce):
"""Validate proof of work and submit block if valid"""
try:
# Construct block
header, full_block = self.construct_block_header(job, extranonce1, extranonce2, ntime, nonce)
if not header or not full_block:
return False, "Block construction failed"
# Calculate block hash (double SHA256 of header)
block_hash = hashlib.sha256(hashlib.sha256(header).digest()).digest()
# Convert to hex (reversed for display)
block_hash_hex = block_hash[::-1].hex()
# Check if hash meets target (proof of work validation)
target_int = int(job['target'], 16)
hash_int = int(block_hash_hex, 16)
print(f"🔍 Hash: {block_hash_hex}")
print(f"🎯 Target: {job['target']}")
print(f"✅ Valid PoW: {hash_int < target_int}")
if hash_int < target_int:
# Valid block! Submit to node
block_hex = full_block.hex()
print(f"🚀 Submitting block {block_hash_hex[:16]}...")
result = self.rpc_call("submitblock", [block_hex])
if result is None: # Success
print(f"🎉 BLOCK ACCEPTED! Hash: {block_hash_hex}")
print(f"💰 Reward: {job['coinbasevalue']/100000000:.2f} RIN -> {self.target_address}")
return True, "Block accepted"
else:
print(f"❌ Block rejected: {result}")
return False, f"Block rejected: {result}"
else:
# Valid share but not a block
return True, "Share accepted"
except Exception as e:
print(f"Block submission error: {e}")
return False, f"Submission error: {e}"
def send_stratum_response(self, client, msg_id, result, error=None):
"""Send Stratum response to client"""
try:
response = {
"id": msg_id,
"result": result,
"error": error
}
message = json.dumps(response) + "\n"
client.send(message.encode('utf-8'))
except Exception as e:
print(f"Send response error: {e}")
def send_stratum_notification(self, client, method, params):
"""Send Stratum notification to client"""
try:
notification = {
"id": None,
"method": method,
"params": params
}
message = json.dumps(notification) + "\n"
client.send(message.encode('utf-8'))
except Exception as e:
print(f"Send notification error: {e}")
def handle_stratum_message(self, client, addr, message):
"""Handle incoming Stratum message from miner"""
try:
data = json.loads(message.strip())
method = data.get("method")
msg_id = data.get("id")
params = data.get("params", [])
if method == "mining.subscribe":
# Generate unique extranonce1 for this connection
self.extranonce1_counter += 1
extranonce1 = f"ex{self.extranonce1_counter:06x}"
# Store extranonce1 for this client
if addr not in self.clients:
self.clients[addr] = {}
self.clients[addr]['extranonce1'] = extranonce1
# Subscribe response
self.send_stratum_response(client, msg_id, [
[["mining.set_difficulty", "subscription_id"], ["mining.notify", "subscription_id"]],
extranonce1,
4 # extranonce2 size
])
# Send difficulty (simplified - always 1 for now)
self.send_stratum_notification(client, "mining.set_difficulty", [1])
# Send initial job
if self.current_job:
self.send_job_to_client(client, self.current_job)
else:
# Get new job if none exists
if self.get_block_template():
self.send_job_to_client(client, self.current_job)
elif method == "mining.authorize":
# Authorization (accept any user/pass for now)
username = params[0] if params else "anonymous"
self.clients[addr]['username'] = username
self.send_stratum_response(client, msg_id, True)
print(f"[{addr}] Authorized as {username}")
elif method == "mining.submit":
# Submit share/block
if len(params) >= 5:
username = params[0]
job_id = params[1]
extranonce2 = params[2]
ntime = params[3]
nonce = params[4]
print(f"[{addr}] Submit: job={job_id}, nonce={nonce}")
# Validate submission
if self.current_job and job_id == self.current_job['job_id']:
extranonce1 = self.clients[addr].get('extranonce1', 'ex000000')
# Validate and potentially submit block
success, message = self.validate_and_submit_block(
self.current_job, extranonce1, extranonce2, ntime, nonce
)
if success:
self.send_stratum_response(client, msg_id, True)
if "Block accepted" in message:
# Broadcast new job after block found
threading.Thread(target=self.update_job_after_block, daemon=True).start()
else:
self.send_stratum_response(client, msg_id, False, message)
else:
self.send_stratum_response(client, msg_id, False, "Stale job")
else:
self.send_stratum_response(client, msg_id, False, "Invalid parameters")
else:
print(f"[{addr}] Unknown method: {method}")
self.send_stratum_response(client, msg_id, None, "Unknown method")
except json.JSONDecodeError:
print(f"[{addr}] Invalid JSON: {message}")
except Exception as e:
print(f"[{addr}] Message handling error: {e}")
def send_job_to_client(self, client, job):
"""Send mining job to specific client"""
try:
self.send_stratum_notification(client, "mining.notify", [
job["job_id"],
job["prevhash"],
"", # coinb1 (empty - we handle coinbase internally)
"", # coinb2 (empty - we handle coinbase internally)
[], # merkle_branch (empty - we calculate merkle root)
f"{job['version']:08x}",
job["bits"],
f"{job['ntime']:08x}",
True # clean_jobs
])
except Exception as e:
print(f"Failed to send job: {e}")
def update_job_after_block(self):
"""Update job after a block is found"""
time.sleep(2) # Brief delay to let network propagate
if self.get_block_template():
self.broadcast_new_job()
def broadcast_new_job(self):
"""Broadcast new job to all connected clients"""
if not self.current_job:
return
print(f"📡 Broadcasting job {self.current_job['job_id']} to {len(self.clients)} clients")
for addr, client_data in list(self.clients.items()):
try:
if 'socket' in client_data:
self.send_job_to_client(client_data['socket'], self.current_job)
except Exception as e:
print(f"Failed to send job to {addr}: {e}")
def handle_client(self, client, addr):
"""Handle individual client connection"""
print(f"[{addr}] Connected")
if addr not in self.clients:
self.clients[addr] = {}
self.clients[addr]['socket'] = client
try:
while self.running:
data = client.recv(4096)
if not data:
break
# Handle multiple messages in one packet
messages = data.decode('utf-8').strip().split('\n')
for message in messages:
if message:
self.handle_stratum_message(client, addr, message)
except Exception as e:
print(f"[{addr}] Client error: {e}")
finally:
client.close()
if addr in self.clients:
del self.clients[addr]
print(f"[{addr}] Disconnected")
def job_updater(self):
"""Periodically update mining jobs"""
while self.running:
try:
time.sleep(30) # Update every 30 seconds
old_height = self.current_job['height'] if self.current_job else 0
if self.get_block_template():
new_height = self.current_job['height']
if new_height > old_height:
print(f"🆕 New block detected! Broadcasting new job...")
self.broadcast_new_job()
except Exception as e:
print(f"Job updater error: {e}")
def start(self):
"""Start the Stratum proxy server"""
try:
# Test RPC connection
blockchain_info = self.rpc_call("getblockchaininfo")
if not blockchain_info:
print("❌ Failed to connect to RinCoin node!")
return
print(f"✅ Connected to RinCoin node")
print(f"📊 Current height: {blockchain_info.get('blocks', 'unknown')}")
print(f"⛓️ Chain: {blockchain_info.get('chain', 'unknown')}")
# Get initial block template
if not self.get_block_template():
print("❌ Failed to get initial block template!")
return
# Start job updater thread
job_thread = threading.Thread(target=self.job_updater, daemon=True)
job_thread.start()
# Start Stratum server
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind((self.stratum_host, self.stratum_port))
server_socket.listen(10)
print(f"🚀 PRODUCTION Stratum proxy ready!")
print(f"📡 Listening on {self.stratum_host}:{self.stratum_port}")
print(f"💰 Mining to: {self.target_address}")
print(f"⚡ Current job: {self.current_job['job_id']}")
print("")
print("🔧 Miner command:")
print(f"./cpuminer -a rinhash -o stratum+tcp://{self.stratum_host}:{self.stratum_port} -u worker1 -p x -t 4")
print("")
while self.running:
try:
client, addr = server_socket.accept()
client_thread = threading.Thread(
target=self.handle_client,
args=(client, addr),
daemon=True
)
client_thread.start()
except KeyboardInterrupt:
print("\n🛑 Shutting down...")
self.running = False
break
except Exception as e:
print(f"Server error: {e}")
except Exception as e:
print(f"Failed to start server: {e}")
finally:
print("💤 Server stopped")
if __name__ == "__main__":
proxy = RinCoinStratumProxy()
proxy.start()

693
MINE/rin/stratum_proxy.py Normal file
View File

@@ -0,0 +1,693 @@
#!/usr/bin/env python3
"""
RinCoin Stratum Proxy Server - REAL MINING VERSION
Properly constructs Stratum jobs and validates/submits real blocks
"""
import socket
import threading
import json
import time
import requests
import hashlib
import struct
from requests.auth import HTTPBasicAuth
class RinCoinStratumBase:
def __init__(self, stratum_host='0.0.0.0', stratum_port=3334,
rpc_host='127.0.0.1', rpc_port=9556,
rpc_user='rinrpc', rpc_password='745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90',
target_address='rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q'):
self.stratum_host = stratum_host
self.stratum_port = stratum_port
self.rpc_host = rpc_host
self.rpc_port = rpc_port
self.rpc_user = rpc_user
self.rpc_password = rpc_password
self.target_address = target_address
self.clients = {}
self.job_counter = 0
self.current_job = None
self.running = True
self.extranonce1_counter = 0
print(f"RinCoin Stratum Proxy Server - REAL MINING")
print(f"Stratum: {stratum_host}:{stratum_port}")
print(f"RPC: {rpc_host}:{rpc_port}")
print(f"Target: {target_address}")
def rpc_call(self, method, params=[]):
"""Make RPC call to RinCoin node"""
try:
url = f"http://{self.rpc_host}:{self.rpc_port}/"
headers = {'content-type': 'text/plain'}
auth = HTTPBasicAuth(self.rpc_user, self.rpc_password)
payload = {
"jsonrpc": "1.0",
"id": "stratum_proxy",
"method": method,
"params": params
}
response = requests.post(url, json=payload, headers=headers, auth=auth, timeout=30)
if response.status_code == 200:
result = response.json()
if 'error' in result and result['error'] is not None:
print(f"RPC Error: {result['error']}")
return None
return result.get('result')
else:
print(f"HTTP Error: {response.status_code}")
return None
except Exception as e:
print(f"RPC Call Error: {e}")
return None
def encode_varint(self, n):
"""Encode integer as Bitcoin-style varint"""
if n < 0xfd:
return bytes([n])
elif n <= 0xffff:
return b"\xfd" + struct.pack('<H', n)
elif n <= 0xffffffff:
return b"\xfe" + struct.pack('<I', n)
else:
return b"\xff" + struct.pack('<Q', n)
def decode_bech32_address(self, address):
"""Decode RinCoin bech32 address to script"""
try:
if not address or not address.startswith('rin1'):
raise ValueError("Not a RinCoin bech32 address")
result = self.rpc_call("validateaddress", [address])
if not result or not result.get('isvalid'):
raise ValueError("Address not valid per node")
script_hex = result.get('scriptPubKey')
if not script_hex:
raise ValueError("Node did not return scriptPubKey")
return bytes.fromhex(script_hex)
except Exception as e:
print(f"Address decode error: {e}")
return None
def build_coinbase_transaction(self, template, extranonce1, extranonce2):
"""Build coinbase transaction variants (with and without witness) for default address"""
return self.build_coinbase_transaction_for_address(template, extranonce1, extranonce2, self.target_address)
def build_coinbase_transaction_for_address(self, template, extranonce1, extranonce2, target_address):
"""Build coinbase transaction variants (with and without witness)"""
try:
has_witness_commitment = template.get('default_witness_commitment') is not None
# Common parts
value = template.get('coinbasevalue', 0)
script_pubkey = self.decode_bech32_address(target_address)
if not script_pubkey:
return None, None
witness_commitment = template.get('default_witness_commitment')
# ScriptSig (block height minimal push + tag + extranonces)
height = template.get('height', 0)
height_bytes = struct.pack('<I', height)
height_compact = bytes([len(height_bytes.rstrip(b'\x00'))]) + height_bytes.rstrip(b'\x00')
scriptsig = height_compact + b'/RinCoin/' + extranonce1.encode() + extranonce2.encode()
# Helper to build outputs blob
def build_outputs_blob() -> bytes:
outputs_blob = b''
outputs_list = []
# Main output
outputs_list.append(struct.pack('<Q', value) + self.encode_varint(len(script_pubkey)) + script_pubkey)
# Witness commitment OP_RETURN output if present
if witness_commitment:
commit_script = bytes.fromhex(witness_commitment)
outputs_list.append(struct.pack('<Q', 0) + self.encode_varint(len(commit_script)) + commit_script)
outputs_blob += self.encode_varint(len(outputs_list))
for out in outputs_list:
outputs_blob += out
return outputs_blob
# Build non-witness serialization (txid serialization)
cb_nowit = b''
cb_nowit += struct.pack('<I', 1) # version
cb_nowit += b'\x01' # input count
cb_nowit += b'\x00' * 32 # prevout hash
cb_nowit += b'\xff\xff\xff\xff' # prevout index
cb_nowit += self.encode_varint(len(scriptsig)) + scriptsig
cb_nowit += b'\xff\xff\xff\xff' # sequence
cb_nowit += build_outputs_blob() # outputs
cb_nowit += struct.pack('<I', 0) # locktime
# Build with-witness serialization (block serialization)
if has_witness_commitment:
cb_wit = b''
cb_wit += struct.pack('<I', 1) # version
cb_wit += b'\x00\x01' # segwit marker+flag
cb_wit += b'\x01' # input count
cb_wit += b'\x00' * 32 # prevout hash
cb_wit += b'\xff\xff\xff\xff' # prevout index
cb_wit += self.encode_varint(len(scriptsig)) + scriptsig
cb_wit += b'\xff\xff\xff\xff' # sequence
cb_wit += build_outputs_blob() # outputs
# Witness stack for coinbase (32-byte reserved value)
cb_wit += b'\x01' # witness stack count
cb_wit += b'\x20' # item length
cb_wit += b'\x00' * 32 # reserved value
cb_wit += struct.pack('<I', 0) # locktime
else:
cb_wit = cb_nowit
return cb_wit, cb_nowit
except Exception as e:
print(f"Coinbase construction error: {e}")
return None, None
def calculate_merkle_root(self, coinbase_txid, transactions):
"""Calculate merkle root with coinbase at index 0"""
try:
# Start with all transaction hashes (coinbase + others)
hashes = [coinbase_txid]
for tx in transactions:
hashes.append(bytes.fromhex(tx['hash'])[::-1]) # Reverse for little-endian
# Build merkle tree
while len(hashes) > 1:
if len(hashes) % 2 == 1:
hashes.append(hashes[-1]) # Duplicate last hash if odd
next_level = []
for i in range(0, len(hashes), 2):
combined = hashes[i] + hashes[i + 1]
next_level.append(hashlib.sha256(hashlib.sha256(combined).digest()).digest())
hashes = next_level
return hashes[0] if hashes else b'\x00' * 32
except Exception as e:
print(f"Merkle root calculation error: {e}")
return b'\x00' * 32
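# Example: with three hashes [a, b, c], the odd last entry is duplicated, giving parents
# H(a||b) and H(c||c); the merkle root is then H(parent1||parent2), where H is double SHA-256.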
def bits_to_target(self, bits_hex):
"""Convert bits to target"""
try:
bits = int(bits_hex, 16)
exponent = bits >> 24
mantissa = bits & 0xffffff
target = mantissa * (256 ** (exponent - 3))
return f"{target:064x}"
except:
return "0000ffff00000000000000000000000000000000000000000000000000000000"
def get_block_template(self):
"""Get new block template and create Stratum job"""
try:
template = self.rpc_call("getblocktemplate", [{"rules": ["segwit", "mweb"]}])
if not template:
return None
self.job_counter += 1
job = {
"job_id": f"job_{self.job_counter:08x}",
"template": template,
"prevhash": template.get("previousblockhash", "0" * 64),
"version": template.get('version', 1),
"bits": template.get('bits', '1d00ffff'),
"ntime": f"{int(time.time()):08x}",
"target": self.bits_to_target(template.get('bits', '1d00ffff')),
"height": template.get('height', 0),
"coinbasevalue": template.get('coinbasevalue', 0),
"transactions": template.get('transactions', [])
}
self.current_job = job
timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
network_difficulty = self.calculate_network_difficulty(job['target'])
print(f"[{timestamp}] 🆕 NEW JOB: {job['job_id']} | Height: {job['height']} | Reward: {job['coinbasevalue']/100000000:.2f} RIN")
print(f" 🎯 Network Difficulty: {network_difficulty:.6f} | Bits: {job['bits']}")
print(f" 📍 Target: {job['target'][:16]}... | Transactions: {len(job['transactions'])}")
return job
except Exception as e:
print(f"Get block template error: {e}")
return None
def calculate_share_difficulty(self, hash_hex, target_hex):
"""Calculate actual share difficulty from hash"""
try:
hash_int = int(hash_hex, 16)
target_int = int(target_hex, 16)
if hash_int == 0:
return float('inf') # Perfect hash
# Bitcoin-style difficulty calculation
# Lower hash = higher difficulty
# Difficulty 1.0 = finding hash that meets network target exactly
max_target = 0x00000000FFFF0000000000000000000000000000000000000000000000000000
# Share difficulty = how hard this specific hash was to find
difficulty = max_target / hash_int
return difficulty
except Exception as e:
print(f"Difficulty calculation error: {e}")
return 0.0
def calculate_network_difficulty(self, target_hex):
"""Calculate network difficulty from target"""
try:
target_int = int(target_hex, 16)
# Bitcoin difficulty 1.0 target
max_target = 0x00000000FFFF0000000000000000000000000000000000000000000000000000
# Network difficulty = how much harder than difficulty 1.0
network_difficulty = max_target / target_int
return network_difficulty
except Exception as e:
print(f"Network difficulty calculation error: {e}")
return 1.0
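# Illustrative relationship (numbers assumed, not measured): if the network target yields
# network_difficulty = 0.001 and a submitted hash works out to share_difficulty = 0.0005,
# the progress figure printed in submit_share below is 0.0005 / 0.001 * 100 = 50%.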
def submit_share(self, job, extranonce1, extranonce2, ntime, nonce, target_address=None):
"""Validate share and submit block if valid"""
try:
# Use provided address or default
address = target_address or self.target_address
# Build coinbase (with and without witness)
coinbase_wit, coinbase_nowit = self.build_coinbase_transaction_for_address(
job['template'], extranonce1, extranonce2, address)
if not coinbase_wit or not coinbase_nowit:
return False, "Coinbase construction failed"
# Calculate coinbase txid (non-witness serialization)
coinbase_txid = hashlib.sha256(hashlib.sha256(coinbase_nowit).digest()).digest()[::-1]
# Calculate merkle root
merkle_root = self.calculate_merkle_root(coinbase_txid, job['transactions'])
# Build block header
header = b''
header += struct.pack('<I', job['version']) # Version
header += bytes.fromhex(job['prevhash'])[::-1] # Previous block hash
header += merkle_root[::-1] # Merkle root (reversed for big-endian)
header += struct.pack('<I', int(ntime, 16)) # Timestamp
header += bytes.fromhex(job['bits'])[::-1] # Bits
header += struct.pack('<I', int(nonce, 16)) # Nonce
# Calculate block hash
block_hash = hashlib.sha256(hashlib.sha256(header).digest()).digest()
block_hash_hex = block_hash[::-1].hex()
# Calculate real difficulties
share_difficulty = self.calculate_share_difficulty(block_hash_hex, job['target'])
network_difficulty = self.calculate_network_difficulty(job['target'])
# Check if hash meets target
hash_int = int(block_hash_hex, 16)
target_int = int(job['target'], 16)
# Enhanced logging
timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
difficulty_percentage = (share_difficulty / network_difficulty) * 100 if network_difficulty > 0 else 0
# Progress indicator based on percentage
if difficulty_percentage >= 100:
progress_icon = "🎉" # Block found!
elif difficulty_percentage >= 50:
progress_icon = "🔥" # Very close
elif difficulty_percentage >= 10:
progress_icon = "" # Getting warm
elif difficulty_percentage >= 1:
progress_icon = "💫" # Some progress
else:
progress_icon = "📊" # Low progress
print(f"[{timestamp}] {progress_icon} SHARE: job={job['job_id']} | nonce={nonce} | hash={block_hash_hex[:16]}...")
print(f" 🎯 Share Diff: {share_difficulty:.2e} | Network Diff: {network_difficulty:.6f}")
print(f" 📈 Progress: {difficulty_percentage:.4f}% of network difficulty")
print(f" 📍 Target: {job['target'][:16]}... | Height: {job['height']}")
print(f" ⏰ Time: {ntime} | Extranonce: {extranonce1}:{extranonce2}")
if hash_int > target_int:
# Valid share but not a block - still send to node for validation
print(f" ✅ Share accepted (below network difficulty)")
# Send to node anyway to validate our work
try:
# Build complete block for validation
block = header
tx_count = 1 + len(job['transactions'])
block += self.encode_varint(tx_count)
block += coinbase_wit
for tx in job['transactions']:
block += bytes.fromhex(tx['data'])
block_hex = block.hex()
print(f" 🔍 Sending share to node for validation...")
result = self.rpc_call("submitblock", [block_hex])
if result is None:
print(f" 🎉 SURPRISE BLOCK! Node accepted our 'low difficulty' share as valid block!")
return True, "Block found and submitted"
else:
print(f" 📊 Node rejected as expected: {result}")
return True, "Share validated by node"
except Exception as e:
print(f" ⚠️ Node validation error: {e}")
return True, "Share accepted (node validation failed)"
return True, "Share accepted"
# Valid block! Build full block and submit
print(f" 🎉 BLOCK FOUND! Hash: {block_hash_hex}")
print(f" 💰 Reward: {job['coinbasevalue']/100000000:.2f} RIN -> {address}")
print(f" 📊 Block height: {job['height']}")
print(f" 🔍 Difficulty: {share_difficulty:.6f} (target: {network_difficulty:.6f})")
# Build complete block
block = header
# Transaction count
tx_count = 1 + len(job['transactions'])
block += self.encode_varint(tx_count)
# Add coinbase transaction (witness variant for block body)
block += coinbase_wit
# Add other transactions
for tx in job['transactions']:
block += bytes.fromhex(tx['data'])
# Submit block
block_hex = block.hex()
print(f" 📦 Submitting block of size {len(block_hex)//2} bytes...")
result = self.rpc_call("submitblock", [block_hex])
if result is None:
print(f" ✅ Block accepted by network!")
return True, "Block found and submitted"
else:
print(f" ❌ Block rejected: {result}")
print(f" 🔍 Debug: Block size {len(block_hex)//2} bytes, {len(job['transactions'])} transactions")
return False, f"Block rejected: {result}"
except Exception as e:
print(f"Share submission error: {e}")
return False, f"Submission error: {e}"
def send_stratum_response(self, client, msg_id, result, error=None):
"""Send Stratum response to client"""
try:
response = {
"id": msg_id,
"result": result,
"error": error
}
message = json.dumps(response) + "\n"
client.send(message.encode('utf-8'))
except Exception as e:
print(f"Send response error: {e}")
def send_stratum_notification(self, client, method, params):
"""Send Stratum notification to client"""
try:
notification = {
"id": None,
"method": method,
"params": params
}
message = json.dumps(notification) + "\n"
client.send(message.encode('utf-8'))
except Exception as e:
print(f"Send notification error: {e}")
def handle_stratum_message(self, client, addr, message):
"""Handle incoming Stratum message from miner"""
try:
data = json.loads(message.strip())
method = data.get("method")
msg_id = data.get("id")
params = data.get("params", [])
if method == "mining.subscribe":
# Generate unique extranonce1 for this connection
self.extranonce1_counter += 1
extranonce1 = f"{self.extranonce1_counter:08x}"
# Store extranonce1 for this client
if addr not in self.clients:
self.clients[addr] = {}
self.clients[addr]['extranonce1'] = extranonce1
# Subscribe response
self.send_stratum_response(client, msg_id, [
[["mining.set_difficulty", "subscription_id"], ["mining.notify", "subscription_id"]],
extranonce1,
4 # extranonce2 size
])
# Send difficulty
self.send_stratum_notification(client, "mining.set_difficulty", [0.0001])
# Send initial job
if self.current_job:
self.send_job_to_client(client, self.current_job)
else:
if self.get_block_template():
self.send_job_to_client(client, self.current_job)
elif method == "mining.authorize":
username = params[0] if params else "anonymous"
self.clients[addr]['username'] = username
self.send_stratum_response(client, msg_id, True)
timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
print(f"[{timestamp}] 🔐 [{addr}] Authorized as {username}")
elif method == "mining.extranonce.subscribe":
# Handle extranonce subscription
self.send_stratum_response(client, msg_id, True)
elif method == "mining.submit":
if len(params) >= 5:
username = params[0]
job_id = params[1]
extranonce2 = params[2]
ntime = params[3]
nonce = params[4]
print(f"[{addr}] Submit: {username} | job={job_id} | nonce={nonce}")
# Validate submission
if self.current_job and job_id == self.current_job['job_id']:
extranonce1 = self.clients[addr].get('extranonce1', '00000000')
# Submit share
success, message = self.submit_share(self.current_job, extranonce1, extranonce2, ntime, nonce)
if success:
self.send_stratum_response(client, msg_id, True)
if "Block found" in message:
# Get new job after block found
threading.Thread(target=self.update_job_after_block, daemon=True).start()
else:
self.send_stratum_response(client, msg_id, False, message)
else:
# For stale jobs, still validate for blocks but don't require exact job match
# This prevents missing blocks due to job timing issues
if self.current_job:
extranonce1 = self.clients[addr].get('extranonce1', '00000000')
# Use current job template but allow stale job_id
success, message = self.submit_share(self.current_job, extranonce1, extranonce2, ntime, nonce)
if success:
self.send_stratum_response(client, msg_id, True)
if "Block found" in message:
# Get new job after block found
threading.Thread(target=self.update_job_after_block, daemon=True).start()
else:
# Accept as share even if block validation fails for stale jobs
self.send_stratum_response(client, msg_id, True)
else:
self.send_stratum_response(client, msg_id, True)
else:
self.send_stratum_response(client, msg_id, False, "Invalid parameters")
else:
print(f"[{addr}] Unknown method: {method}")
self.send_stratum_response(client, msg_id, None, "Unknown method")
except json.JSONDecodeError:
print(f"[{addr}] Invalid JSON: {message}")
except Exception as e:
print(f"[{addr}] Message handling error: {e}")
def send_job_to_client(self, client, job):
"""Send mining job to specific client"""
try:
# Send proper Stratum job
self.send_stratum_notification(client, "mining.notify", [
job["job_id"],
job["prevhash"],
"", # coinb1 (empty for now - miner handles coinbase)
"", # coinb2 (empty for now - miner handles coinbase)
[], # merkle_branch (empty for now - we calculate merkle root)
f"{job['version']:08x}",
job["bits"],
job["ntime"],
True # clean_jobs
])
except Exception as e:
print(f"Failed to send job: {e}")
def update_job_after_block(self):
"""Update job after a block is found"""
time.sleep(2) # Brief delay to let network propagate
if self.get_block_template():
self.broadcast_new_job()
def broadcast_new_job(self):
"""Broadcast new job to all connected clients"""
if not self.current_job:
return
print(f"Broadcasting new job to {len(self.clients)} clients")
for addr, client_data in list(self.clients.items()):
try:
if 'socket' in client_data:
self.send_job_to_client(client_data['socket'], self.current_job)
except Exception as e:
print(f"Failed to send job to {addr}: {e}")
def handle_client(self, client, addr):
"""Handle individual client connection"""
print(f"[{addr}] Connected")
if addr not in self.clients:
self.clients[addr] = {}
self.clients[addr]['socket'] = client
try:
while self.running:
data = client.recv(4096)
if not data:
break
# Handle multiple messages in one packet
messages = data.decode('utf-8').strip().split('\n')
for message in messages:
if message:
self.handle_stratum_message(client, addr, message)
except Exception as e:
print(f"[{addr}] Client error: {e}")
finally:
client.close()
if addr in self.clients:
del self.clients[addr]
print(f"[{addr}] Disconnected")
def job_updater(self):
"""Periodically update mining jobs"""
while self.running:
try:
time.sleep(30) # Update every 30 seconds
old_height = self.current_job['height'] if self.current_job else 0
if self.get_block_template():
new_height = self.current_job['height']
if new_height > old_height:
print(f"New block detected! Broadcasting new job...")
self.broadcast_new_job()
except Exception as e:
print(f"Job updater error: {e}")
def start(self):
"""Start the Stratum proxy server"""
try:
# Test RPC connection
blockchain_info = self.rpc_call("getblockchaininfo")
if not blockchain_info:
print("Failed to connect to RinCoin node!")
return
print(f"Connected to RinCoin node")
print(f"Current height: {blockchain_info.get('blocks', 'unknown')}")
print(f"Chain: {blockchain_info.get('chain', 'unknown')}")
# Get initial block template
if not self.get_block_template():
print("Failed to get initial block template!")
return
# Start job updater thread
job_thread = threading.Thread(target=self.job_updater, daemon=True)
job_thread.start()
# Start Stratum server
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind((self.stratum_host, self.stratum_port))
server_socket.listen(10)
timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
print(f"[{timestamp}] 🚀 REAL Mining Stratum proxy ready!")
print(f" 📡 Listening on {self.stratum_host}:{self.stratum_port}")
print(f" 💰 Mining to: {self.target_address}")
print(f" 📊 Current job: {self.current_job['job_id'] if self.current_job else 'None'}")
print("")
print(" 🔧 Miner command:")
print(f" ./cpuminer -a rinhash -o stratum+tcp://{self.stratum_host}:{self.stratum_port} -u worker1 -p x -t 4")
print("")
while self.running:
try:
client, addr = server_socket.accept()
client_thread = threading.Thread(
target=self.handle_client,
args=(client, addr),
daemon=True
)
client_thread.start()
except KeyboardInterrupt:
print("\nShutting down...")
self.running = False
break
except Exception as e:
print(f"Server error: {e}")
except Exception as e:
print(f"Failed to start server: {e}")
finally:
print("Server stopped")
class RinCoinStratumProxy(RinCoinStratumBase):
"""Solo mining stratum proxy implementation"""
def __init__(self, stratum_host='0.0.0.0', stratum_port=3334,
rpc_host='127.0.0.1', rpc_port=9556,
rpc_user='rinrpc', rpc_password='745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90',
target_address='rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q'):
super().__init__(stratum_host, stratum_port, rpc_host, rpc_port, rpc_user, rpc_password, target_address)
if __name__ == "__main__":
proxy = RinCoinStratumProxy()
proxy.start()


@@ -0,0 +1,52 @@
#!/bin/bash
# RinCoin Stratum Proxy for cpuminer-opt-rin
# Bridges cpuminer's Stratum protocol to RinCoin's RPC mining
echo "=== RinCoin Stratum Proxy ==="
echo "This script creates a bridge between cpuminer and RinCoin node"
echo ""
# Configuration
RPC_HOST="127.0.0.1"
RPC_PORT="9556"
RPC_USER="rinrpc"
RPC_PASS="745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90"
STRATUM_PORT="3333"
TARGET_ADDRESS="rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q"
# Function to call RPC
call_rpc() {
local method="$1"
local params="$2"
curl -s --user "$RPC_USER:$RPC_PASS" \
-H 'content-type: text/plain' \
--data "{\"jsonrpc\":\"1.0\",\"id\":\"curl\",\"method\":\"$method\",\"params\":$params}" \
"http://$RPC_HOST:$RPC_PORT/"
}
echo "⚠️ IMPORTANT: This is a simplified proxy demonstration."
echo "For production use, you would need a full Stratum server implementation."
echo ""
echo "Current RinCoin mining options:"
echo "1. Built-in Core Mining (Recommended for solo):"
echo " bash MINE/rin/solo_mining_core.sh -t 28"
echo ""
echo "2. Pool Mining (Recommended for consistent rewards):"
echo " sudo docker exec -it amd-strix-halo-llama-rocm bash -c \"/mnt/dl/rinhash/cpuminer-opt-rin/cpuminer -a rinhash -o stratum+tcp://rinhash.mine.zergpool.com:7148 -u bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j -p c=BTC,mc=RIN,ID=StrixHalo -t 28\""
echo ""
echo "3. Direct RPC Mining (Advanced - requires custom miner):"
echo " Use getblocktemplate RPC calls directly"
echo ""
echo "❌ cpuminer-opt-rin cannot directly mine to RinCoin node because:"
echo " - cpuminer uses Stratum protocol"
echo " - RinCoin node uses RPC protocol"
echo " - No built-in protocol conversion"
echo ""
echo "✅ Recommended approach:"
echo " Use the built-in core mining script for solo mining"
echo " Use pool mining for consistent rewards"


@@ -0,0 +1,78 @@
#!/bin/bash
# Test RinCoin Address Validation and Behavior
echo "=== RinCoin Address Validation Test ==="
echo ""
# Kill any existing processes
./MINE/rin/kill_stratum_proxy.sh
echo "🧪 Testing different address types with RinCoin node:"
echo ""
echo "1⃣ Valid RinCoin address:"
curl -s -u rinrpc:745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90 \
-H 'content-type: text/plain' \
--data '{"jsonrpc":"1.0","id":"curl","method":"validateaddress","params":["rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q"]}' \
http://127.0.0.1:9556/ | jq '.result'
echo ""
echo "2⃣ Invalid BTC address:"
curl -s -u rinrpc:745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90 \
-H 'content-type: text/plain' \
--data '{"jsonrpc":"1.0","id":"curl","method":"validateaddress","params":["bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j"]}' \
http://127.0.0.1:9556/ | jq '.result'
echo ""
echo "3⃣ Invalid Litecoin address:"
curl -s -u rinrpc:745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90 \
-H 'content-type: text/plain' \
--data '{"jsonrpc":"1.0","id":"curl","method":"validateaddress","params":["LQnYyekHhQ7nMUTGJ1ZnYz8s9QJ2mKLM9P"]}' \
http://127.0.0.1:9556/ | jq '.result'
echo ""
echo "4⃣ Test generatetoaddress with invalid address:"
curl -s -u rinrpc:745ce784d5d537fc06105a1b935b7657903cfc71a5fb3b90 \
-H 'content-type: text/plain' \
--data '{"jsonrpc":"1.0","id":"curl","method":"generatetoaddress","params":[1, "bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j", 1]}' \
http://127.0.0.1:9556/ | jq '.error'
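# What to expect from the calls above (illustrative shapes, not exact output):
#   validateaddress returns an object such as {"isvalid": true, "address": "rin1q...", ...};
#   for the foreign BTC/LTC addresses "isvalid" should be false, and
#   generatetoaddress with an invalid address should return a non-null "error" object.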
echo ""
echo "🚀 Starting mining pool to test address validation..."
./MINE/rin/start_mining_pool.sh &
POOL_PID=$!
echo ""
echo "⏳ Waiting for pool to start..."
sleep 5
echo ""
echo "🧪 Testing pool with different address types:"
echo ""
echo "Test 1: Valid RinCoin address"
echo "Expected: ✅ Accept connection"
timeout 5s ./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q -p x -t 1
echo ""
echo "Test 2: Invalid BTC address"
echo "Expected: ❌ Reject connection"
timeout 5s ./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j -p x -t 1
echo ""
echo "Test 3: Traditional username (no address)"
echo "Expected: ⚠️ Accept but warn no address"
timeout 5s ./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u user.worker -p x -t 1
echo ""
echo "🧹 Cleaning up..."
kill $POOL_PID 2>/dev/null
./MINE/rin/kill_stratum_proxy.sh
echo ""
echo "📋 Summary:"
echo "✅ Valid RinCoin addresses (rin1q...) - Accepted"
echo "❌ Invalid addresses (bc1q..., LQnY...) - Rejected"
echo "⚠️ Traditional usernames - Accepted but no rewards"
echo "💰 Block rewards always go to pool address, then distributed"


@@ -0,0 +1,69 @@
#!/bin/bash
# Test different mining pool connection methods
echo "=== Testing Mining Pool Connection Methods ==="
echo ""
# Kill any existing processes
./MINE/rin/kill_stratum_proxy.sh
echo "🚀 Starting mining pool..."
./MINE/rin/start_mining_pool.sh &
POOL_PID=$!
echo ""
echo "⏳ Waiting for pool to start..."
sleep 5
echo ""
echo "🧪 Testing different connection methods:"
echo ""
echo "1⃣ Test 1: Address as username"
echo "Command: ./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q -p x -t 2"
echo "Expected: Pool should recognize this as a RinCoin address"
echo ""
echo "2⃣ Test 2: Address.workername format"
echo "Command: ./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q.worker1 -p x -t 2"
echo "Expected: Pool should recognize address and worker separately"
echo ""
echo "3⃣ Test 3: Traditional username"
echo "Command: ./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u user.worker -p x -t 2"
echo "Expected: Pool should use default pool address for rewards"
echo ""
echo "📊 Pool Status:"
echo "Web Dashboard: http://127.0.0.1:8080"
echo "Pool Address: rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q"
echo ""
echo "Press Enter to run test 1..."
read
echo "Running Test 1..."
timeout 10s ./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q -p x -t 2
echo ""
echo "Press Enter to run test 2..."
read
echo "Running Test 2..."
timeout 10s ./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q.worker1 -p x -t 2
echo ""
echo "Press Enter to run test 3..."
read
echo "Running Test 3..."
timeout 10s ./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u user.worker -p x -t 2
echo ""
echo "🧹 Cleaning up..."
kill $POOL_PID 2>/dev/null
./MINE/rin/kill_stratum_proxy.sh
echo ""
echo "✅ Test complete! Check the pool logs above to see how each connection was handled."


@@ -0,0 +1,75 @@
#!/bin/bash
# Test Reward Redistribution Logic
echo "=== Testing Reward Redistribution Logic ==="
echo ""
# Kill any existing processes
./MINE/rin/kill_stratum_proxy.sh
echo "🧪 Testing reward distribution scenarios:"
echo ""
echo "Scenario 1: All miners have valid addresses"
echo "Expected: Normal distribution"
echo ""
echo "Scenario 2: Some miners without addresses"
echo "Expected: Redistribution of their rewards to miners with addresses"
echo ""
echo "Scenario 3: All miners without addresses"
echo "Expected: All rewards go to pool"
echo ""
echo "🚀 Starting mining pool..."
./MINE/rin/start_mining_pool.sh &
POOL_PID=$!
echo ""
echo "⏳ Waiting for pool to start..."
sleep 5
echo ""
echo "🧪 Test 1: Miner with valid address"
echo "Expected: Gets full share of rewards"
timeout 5s ./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q.worker1 -p x -t 1
echo ""
echo "🧪 Test 2: Miner without address"
echo "Expected: Contributes to difficulty but gets no direct rewards"
timeout 5s ./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u user.worker2 -p x -t 1
echo ""
echo "🧪 Test 3: Another miner with valid address"
echo "Expected: Gets base reward + redistribution from miner without address"
timeout 5s ./cpuminer -a rinhash -o stratum+tcp://127.0.0.1:3333 -u rin1qahvvv9d5f3443wtckeqavwp9950wacxfmwv20q.worker3 -p x -t 1
echo ""
echo "📊 Pool Log Analysis:"
echo "Look for these patterns in the logs above:"
echo "1. '💰 Miner rin1q...: X.XX RIN (difficulty)' - Base rewards"
echo "2. '⚠️ Miner without address: X difficulty -> X.XX RIN to pool' - Undistributed"
echo "3. '💰 Pool keeps X.XX RIN from miners without addresses' - Pool keeps rewards"
echo "4. '📊 Summary: X miners with addresses, Y without (rewards to pool)' - Final summary"
echo ""
echo "🧹 Cleaning up..."
kill $POOL_PID 2>/dev/null
./MINE/rin/kill_stratum_proxy.sh
echo ""
echo "📋 Reward Distribution Logic Summary:"
echo ""
echo "✅ Miners with valid RinCoin addresses:"
echo " - Get reward based on their difficulty"
echo " - Rewards sent directly to their addresses"
echo ""
echo "⚠️ Miners without addresses:"
echo " - Contribute to total difficulty"
echo " - Their reward share goes to pool address"
echo " - No direct rewards received"
echo ""
echo "💰 Pool fee: Always 1% of total block reward"
echo "💰 Pool bonus: Additional rewards from miners without addresses"

MINE/zergBench.sh

@@ -0,0 +1,54 @@
#!/bin/bash
# Test top Zergpool algorithms - save as test_zergpool.sh
BTC_WALLET="bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j"
echo "=== Testing Top Zergpool Algorithms ==="
echo "Wallet: $BTC_WALLET"
echo "======================================"
# Top algorithms by profitability
declare -A algos=(
["rinhash"]="rinhash.mine.zergpool.com:7148 c=RIN"
["kawpow"]="kawpow.mine.zergpool.com:3638 c=BTC"
["evrprogpow"]="evrprogpow.mine.zergpool.com:3002 c=BTC"
["equihash125_4"]="equihash.mine.zergpool.com:2142 c=BTC"
["karlsenhashv2"]="karlsenhashv2.mine.zergpool.com:3200 c=BTC"
)
for algo in "${!algos[@]}"; do
echo ""
echo "Testing: $algo"
echo "------------------------"
# Parse config
read -r server pass <<< "${algos[$algo]}"
echo "Server: $server"
echo "Testing for 30 seconds..."
sudo docker exec -it amdopencl timeout 35s bash -c "/mnt/dl/gminer/miner --algo $algo --server $server --user '$BTC_WALLET' --pass '$pass'"
result=$?
if [ $result -eq 0 ]; then
echo "$algo: SUCCESS"
elif [ $result -eq 124 ]; then
echo "⏱️ $algo: TIMEOUT (likely working)"
else
echo "$algo: FAILED - trying alternative miner..."
# Try lolMiner for failed algorithms
sudo docker exec -it amdopencl timeout 35s bash -c "/mnt/dl/lolMiner_v1.88_Lin64/lolMiner --algo ${algo^^} --pool $server --user '$BTC_WALLET' --pass '$pass'" 2>/dev/null
if [ $? -eq 124 ]; then
echo "⏱️ $algo: WORKS with lolMiner"
else
echo "$algo: Not supported"
fi
fi
sleep 3
done
echo ""
echo "=== Zergpool Testing Complete ==="


@@ -0,0 +1,818 @@
-- Stored procedure used on the /Company/Applications page
IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[dbo].[Web_SearchApplications2]') AND type in (N'P', N'PC')) BEGIN
DROP PROCEDURE dbo.[Web_SearchApplications2]
END
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE PROCEDURE [dbo].[Web_SearchApplications2]
-- Username of the CompanyPerson who's doing the search
@UserName varchar(256),
-- If we want only users who uploaded a profile picture
@HasPortrait bit = NULL,
-- If we want only users who are explicitly searching for an offer
@IsSearchingAppr bit = NULL,
-- If we want only users with whom the company has unread messages
@IsMessageUnread bit = NULL,
-- If we want only users with whom the company has unanswered messages
@IsMessageUnanswered bit = NULL,
-- If we want to hide anonymous Applicants
@HideAnonymousApplicant bit = NULL,
-- Filter for company locations
@CompanyLocationsFilter varchar(MAX) = NULL, -- Combined string of CompanyLocationProfessionIds, like '49601|49602|49600|49599|49592|49598'
-- Filter for ApprenticeshipTypes
@ApprenticeshipTypesFilter varchar(MAX) = NULL, -- Combined string of ProfessionIds, like '1|2'
-- Filter for company professions
@MessageProfessionsFilter varchar(MAX) = NULL, -- Combined string of ProfessionIds, like '101080|102519'
-- Filter for ABCD ratings, using the professions chosen
@RatingProfessionsFilter varchar(MAX) = NULL, -- Combined string of ProfessionIds, like '101080|102519'
-- Filter for ABCD ratings, using the rating itself
@RatingFilter varchar(MAX) = NULL, -- Combined string of ApplicantProfessionRatingTypeId, like '1|2'
-- Filters for Responsible Persons
-- If true, we also want results where there is no RP defined
-- If false, we don't want results where there is no RP
@SearchResponsiblePersonIsNotDefine bit = 0,
-- If set, we'll get the results where these specific CompanyPersons are RP
@ApprenticeshipResponsiblePersonsFilter varchar(MAX) = NULL, -- Combined string of CompanyPersonIds, like '41099|41103|41100'
-- Filter to limit the results to selected companies
@CoCompaniesFilter varchar(MAX) = NULL, -- Combined string of CompanyProfileIds, like '181|1201'
-- Filter for statuses
@WorkflowStatesFilter varchar(MAX) = NULL, -- Combined string of CompanyApplicantStateAvailableIds, like '382|383'
-- Filter for content of possible comments
@CompanySetApplicantComment varchar(MAX) = NULL,
-- Filters for Applicant's personal info
-- Filter for first or last name of the Applicant
@ApplicantNamesFilter varchar(MAX) = NULL,
-- Filter for exact birth date of the Applicant
@ApplicantBirthDate datetime = NULL,
-- Filter for Applicant's language
@SchoolLangFilter varchar(MAX) = NULL,
-- Message filters
-- Filter after and before dates for when the first message was sent between a Company and an Applicant,
-- i.e. when an Application was created.
@FirstMessageStartDate datetime = NULL,
@FirstMessageEndDate datetime = NULL,
-- Apprenticeship date filters
-- Filter after and before dates for when the Apprenticeship is set to begin
@ApprenticeshipStartDate datetime = NULL,
@ApprenticeshipEndDate datetime = NULL,
-- Filters for inactive / deleted data
-- If we want to include Applications related to deleted Apprenticeships, company locations or professions
@IncludeDeletedApprenticeship bit = 0,
-- If we want to include Applicants not visible on gateway
@IncludeInactiveApplicantsFilter bit = 0,
-- If we want to include inactive Applications
@IncludeInactiveApplications bit = 0,
-- If we want to include Applicants created by the company
@IncludeApplicantsCreatedByCompanyFilter bit = 0,
-- Language to use for the translations, for example the profession names
@Language varchar(2) = 'de',
-- Sorting
@SortColumn varchar(50) = NULL,
@SortAsc bit = 1,
-- Paging
@CurrentPage int = 1,
@PageSize int = 20,
-- Company person roles allowed to see applicants
@CompanyPersonRolesAllowedToSeeApplicants varchar(MAX) = NULL -- Combined string roles like '10|20'
AS
BEGIN
LINENO 108 -- To give correct line number in error messages
SET NOCOUNT ON;
IF @UserName = '' OR NOT EXISTS(SELECT CompanyProfileId FROM CompanyPerson WHERE UserName = @UserName)
RAISERROR(N'Parameter "@UserName" must not be empty, and must be a valid CompanyPerson username!', 4, 1);
-- Internal variables
DECLARE @true bit,
@false bit,
@FirstRow int,
@companySetId int,
@companyProfileId int,
@companyPersonId int,
@companyPersonRole int;
SET @true = 1;
SET @false = 0;
SET @FirstRow = (@PageSize * (@CurrentPage - 1)) + 1;
-- Get the CompanyPerson, CompanyProfile and CompanySet info via the logged in CompanyPerson
SELECT @companyPersonId = t.Id,
@companyPersonRole = t.Role,
@companyProfileId = t.CompanyProfileId
FROM (SELECT Id, Role, CompanyProfileId FROM CompanyPerson WHERE UserName = @UserName) t;
SET @companySetId = (SELECT CompanySetId FROM CompanyProfile WHERE Id = @companyProfileId);
-- See if the company has the CanShareDossiers specific permission
DECLARE @SharingEnabled bit;
SET @SharingEnabled = COALESCE((SELECT csp.CanShareDossiers FROM CompanySpecificPermissions csp WHERE csp.CompanyProfileId = @companyProfileId), 0)
----------------------------------------------------------------------
------ 1. CENTRAL TABLE WITH AS MANY FILTERS AS POSSIBLE --------
----------------------------------------------------------------------
-- This temp table is used as a first filter. We use the CompanyApplicantRelationsState table
-- to get all interactions between the CompanySet and the Applicants.
DECLARE @tmpCompanyApplicantRelationState TABLE (
CompanyProfileId INT,
ApplicantProfileId INT,
LastEditDossierDate DATETIME,
DossierId INT,
-- Optional, not always available
ApplicationId INT,
ApplicationCreateDate DATETIME,
ApprenticeshipId INT,
CompanyLocationProfessionId INT,
CompanyLocationProfileId INT,
CompanyPersonId INT NOT NULL DEFAULT 0,
ApplicantProfessionProfileId INT,
ProfessionId INT,
CompanyApplicantStateAvailableId INT,
IsActiveApplication BIT,
-- Type of relation
HasComment BIT,
HasRating BIT,
HasResponsiblePersonForAnyApplication BIT,
-- For the CSV
SocialSecurityNumber NVARCHAR(30));
INSERT INTO @tmpCompanyApplicantRelationState
SELECT DISTINCT cars.CompanyProfileId,
cars.ApplicantProfileId,
cars.LastEditDossierDate,
cars.DossierId,
ap.Id AS ApplicationId,
ap.CreatedDate AS ApplicationCreateDate,
apr.Id AS ApprenticeshipId,
clpro.Id AS CompanyLocationProfessionId,
clocprof.Id AS CompanyLocationProfileId,
ISNULL(csar.CompanyPersonId, 0) AS CompanyPersonId, -- TODO: is ISNULL still necessary?
ap.ApplicantProfessionProfileId,
clpro.ProfessionId,
cas.CompanyApplicantStateAvailableId,
ap.IsActive AS IsActiveApplication,
cars.HasComment,
cars.HasRating,
cars.HasResponsiblePersonForAnyApplication,
csaplntr.SocialSecurityNumber
FROM CompanyApplicantRelationsState cars WITH(NOLOCK)
INNER JOIN CompanyProfile cp WITH(NOLOCK) ON (cp.Id = cars.CompanyProfileId AND cp.CompanySetId = @companySetId)
INNER JOIN CompanyPerson cpers WITH(NOLOCK) ON cpers.CompanyProfileId = cp.Id
INNER JOIN ApplicantProfile apro WITH(NOLOCK) ON cars.ApplicantProfileId = apro.Id
-- Here we try to get some Application-related info, to apply some filters
-- We use outer joins in case there is no Application (just a rating for example)
LEFT OUTER JOIN DossierApplication da WITH(NOLOCK) ON cars.DossierId = da.DossierId
LEFT OUTER JOIN Application ap WITH(NOLOCK) ON ap.Id = da.ApplicationId
LEFT OUTER JOIN Apprenticeship apr WITH(NOLOCK) ON ap.ApprenticeshipId = apr.Id
LEFT OUTER JOIN CompanyLocationProfession clpro WITH(NOLOCK) ON clpro.Id = apr.CompanyLocationProfessionId
LEFT OUTER JOIN CompanyLocationProfile clocprof WITH(NOLOCK) ON clocprof.Id = apr.CompanyLocationProfileId
LEFT OUTER JOIN CompanyApplicantState cas WITH(NOLOCK) ON cas.ApplicationId = ap.Id
-- ABCD Ratings
LEFT OUTER JOIN ApplicantProfessionRating aprofrate WITH (NOLOCK) ON (cars.ApplicantProfileId = aprofrate.ApplicantProfileId AND aprofrate.CompanyPersonId = cpers.Id)
-- Comments
LEFT OUTER JOIN CompanySetApplicantComment csacomm ON (csacomm.ApplicantProfileId = cars.ApplicantProfileId AND csacomm.CompanySetId = cp.CompanySetId)
-- Responsible Person
LEFT OUTER JOIN CompanySetApplicationRelation csar WITH(NOLOCK) ON csar.ApplicationId = ap.Id
-- Messages
LEFT OUTER JOIN Message msg WITH(NOLOCK) ON msg.ApplicationId = ap.Id
-- To get the social security number
LEFT OUTER JOIN CompanySetApplicantRelation csaplntr WITH(NOLOCK) ON (csaplntr.ApplicantProfileId = cars.ApplicantProfileId AND csaplntr.CompanySetId = @companySetId)
-- FILTERS
WHERE cars.IsActive = 1
AND ap.IsDeletedByCompany = 0
-- Inactive data
AND (@IncludeInactiveApplications = 1 OR (ap.Id IS NULL OR ap.IsActive = 1))
AND (@IncludeDeletedApprenticeship = 1 OR (apr.Id IS NULL OR (apr.IsDeleted = 0 AND clocprof.IsDeleted = 0 AND clpro.IsDeleted = 0)))
AND (@IncludeInactiveApplicantsFilter = 1 OR apro.IsVisibleOnGateway = 1)
-- Apprenticeship Type
AND (@ApprenticeshipTypesFilter IS NULL OR apr.ApprenticeshipType IN (SELECT * FROM dbo.CreateInFilter(@ApprenticeshipTypesFilter,'|')))
-- Profession
AND (@MessageProfessionsFilter IS NULL OR clpro.ProfessionId IN (SELECT * FROM dbo.CreateInFilter(@MessageProfessionsFilter,'|')))
-- ABCD ratings
AND (
(@RatingProfessionsFilter IS NULL AND @RatingFilter IS NULL)
OR
(
(@RatingProfessionsFilter IS NULL
OR
EXISTS (
SELECT 1
FROM ApplicantProfessionRating apr
WHERE apr.DossierId = cars.DossierId
AND apr.ProfessionId = clpro.ProfessionId
AND apr.ProfessionId IN (SELECT value FROM dbo.CreateInFilter(@RatingProfessionsFilter,'|'))
))
AND
(@RatingFilter IS NULL
OR
EXISTS (
SELECT 1
FROM ApplicantProfessionRating apr
WHERE apr.DossierId = cars.DossierId
AND apr.ProfessionId = clpro.ProfessionId
AND apr.ApplicantProfessionRatingTypeId IN (SELECT value FROM dbo.CreateInFilter(@RatingFilter,'|'))
)
)
)
)
-- Comments
AND (@CompanySetApplicantComment IS NULL OR EXISTS (
SELECT * FROM CompanySetApplicantComment csacomm
WHERE csacomm.ApplicantProfileId = cars.ApplicantProfileId
AND csacomm.CompanySetId = @companySetId
AND csacomm.Comment LIKE '%' + @CompanySetApplicantComment + '%'))
-- Responsible Person
AND
(
-- No RP filter is selected, we show all results (TODO: this is not possible in UI anymore, and it could be useful, typically with CompanySets)
(
@ApprenticeshipResponsiblePersonsFilter IS NULL
AND
@SearchResponsiblePersonIsNotDefine = 0
)
-- Show results that have a RP selected
OR csar.CompanyPersonId IN (SELECT * FROM dbo.CreateInFilter(@ApprenticeshipResponsiblePersonsFilter,'|'))
-- shared dossiers if the RP is the current user
OR (EXISTS (SELECT * FROM DossierViewPermission dvp
WHERE (dvp.TargetCompanyPersonId
IN (SELECT TOP 1 * FROM dbo.CreateInFilter(@ApprenticeshipResponsiblePersonsFilter,'|') f
WHERE f.value = @companyPersonId )
AND dvp.DossierId IN (SELECT d.Id FROM Dossier d
WHERE d.ApplicantProfileId = cars.ApplicantProfileId
AND d.CompanySetId = @companySetId)
AND dvp.Type = 3 -- Specific permission
AND (
-- Dossier-level permission
dvp.ApplicationId IS NULL
-- Application-level permission for this specific application
OR dvp.ApplicationId = ap.Id
)
))
)
-- Show results that don't have a RP
OR (@SearchResponsiblePersonIsNotDefine = 1 AND csar.CompanyPersonId IS NULL)
)
-- Messages
AND (msg.Id IS NULL OR msg.MessageStatusTypeId IN (1,2,3,5,9)) --(ApplicantApplied, CompanyCvAsked, ApplicantCvAskAccepted, ApplicantDeclined, GenericMessage) TODO: document why we only get these
AND (@FirstMessageStartDate IS NULL OR (CONVERT(DATE, ap.CreatedDate) >= @FirstMessageStartDate))
AND (@FirstMessageEndDate IS NULL OR (CONVERT(DATE, ap.CreatedDate) <= @FirstMessageEndDate))
-- Apprenticeship start date
AND (@ApprenticeshipStartDate IS NULL OR (CONVERT(DATE, apr.StartDate) >= @ApprenticeshipStartDate))
AND (@ApprenticeshipEndDate IS NULL OR (CONVERT(DATE, apr.StartDate) <= @ApprenticeshipEndDate))
-- If the Applicant is searching for any type of offer
AND (@IsSearchingAppr IS NULL
OR (@IsSearchingAppr = 1
AND (apro.IsSearchingApprenticeship = 1
OR apro.IsSearchingInternship = 1
OR apro.IsSearchingTrialApprenticeship = 1
OR apro.IsSearchingProfessionalOffer = 1
OR apro.IsSearchingStudyOffer = 1
OR apro.IsSearchingStage = 1
OR apro.IsSearchingJob = 1)))
-- Applicant's personal info
AND (@ApplicantBirthDate IS NULL OR apro.BornDate = @ApplicantBirthDate)
AND (@ApplicantNamesFilter IS NULL OR (
((dbo.InLike(apro.FirstName, @ApplicantNamesFilter) = 1)
OR (dbo.InLike(apro.LastName, @ApplicantNamesFilter) = 1))
AND EXISTS(SELECT * FROM CompanyApplicantRelationsState c_aRel -- To make sure the Dossier is released
INNER JOIN CompanyProfile cpBoughtDossier ON cpBoughtDossier.Id = c_aRel.CompanyProfileId
WHERE c_aRel.DossierReleased = 1 AND c_aRel.ApplicantProfileId = apro.Id AND cpBoughtDossier.CompanySetId = @companySetId ))
)
-- Company info
AND cars.CompanyProfileId IN (SELECT Id FROM CompanyProfile WHERE CompanySetId = @companySetId) -- Useful to reduce the amount of SQL operations
AND (@CoCompaniesFilter IS NULL OR cars.CompanyProfileId IN (SELECT * FROM dbo.CreateInFilter(@CoCompaniesFilter,'|')))
-- Location
AND (clocprof.Id IS NULL OR clocprof.Id IN (SELECT Id FROM CompanyLocationProfile WHERE CompanyProfileId IN (SELECT Id FROM CompanyProfile WHERE CompanySetId = @companySetId))) -- TODO: do we need SELECT Id FROM... ?
AND (@CompanyLocationsFilter IS NULL OR clocprof.Id IN (SELECT * FROM dbo.CreateInFilter(@CompanyLocationsFilter,'|')))
-- Applicant's language
AND (@SchoolLangFilter IS NULL OR (apro.[Language] IN (SELECT * FROM dbo.CreateInFilter(@SchoolLangFilter,'|'))))
-- Statuses
AND (@WorkflowStatesFilter IS NULL
OR cas.CompanyApplicantStateAvailableId IN (SELECT * FROM dbo.CreateInFilter(@WorkflowStatesFilter,'|'))
OR (cas.CompanyApplicantStateAvailableId IS NULL AND 0 IN (SELECT * FROM dbo.CreateInFilter(@WorkflowStatesFilter,'|'))) -- TODO: is this needed?
)
OPTION (RECOMPILE)
----------------------------------------------------------------------
------ 2. GROUP RESULTS AND FILTER MESSAGE STATES --------
----------------------------------------------------------------------
-- Group data (to remove duplicates) and apply unread/unanswered messages filter
CREATE TABLE #tmpResults2 (
Id int identity(1,1), -- TODO: do we need that?
ApplicantProfileId int,
ApplicantProfessionProfileId int,
CompanyLocationProfileId int,
CompanyPersonId int,
ApprenticeshipId int,
ApplicationId int,
ApplicationCreateDate DATETIME,
CompanyApplicantAvailableStateId int,
DossierPayed BIT, -- Not present in the central temp table above
AverageCompanySetCompanyRatings float, -- Not present in the central temp table above
SocialSecurityNumber varchar(30),
UnreadMsgCount int,
UnansweredMsgCount int,
LastEditDossierDate DATETIME,
IsActiveApplication bit,
LastSentMessageDate DATETIME,
LastReceivedMessageDate DATETIME
);
CREATE CLUSTERED INDEX IDX_tmpResults2_ID ON #tmpResults2 (Id)
CREATE INDEX IDX_tmpResults2_Profile ON #tmpResults2 (Id, ApplicantProfileId, ApprenticeshipId) --TODO: do we need it?
CREATE INDEX IDX_tmpResults2_ApplicationId ON #tmpResults2 (ApplicationId)
INSERT #tmpResults2 (
ApplicantProfileId ,
ApplicantProfessionProfileId ,
CompanyLocationProfileId ,
CompanyPersonId ,
ApprenticeshipId ,
ApplicationId ,
ApplicationCreateDate ,
CompanyApplicantAvailableStateId ,
DossierPayed,
AverageCompanySetCompanyRatings,
SocialSecurityNumber,
LastEditDossierDate,
IsActiveApplication
)
SELECT DISTINCT
r.ApplicantProfileId,
ApplicantProfessionProfileId,
r.CompanyLocationProfileId,
r.CompanyPersonId,
ApprenticeshipId,
r.ApplicationId,
ApplicationCreateDate,
CompanyApplicantStateAvailableId, --caas.Id AS CompanyApplicantAvailableStateId,
CASE WHEN EXISTS(SELECT *
FROM CompanyApplicantRelationsState c_aRel
INNER JOIN CompanyProfile cpBoughtDossier ON cpBoughtDossier.Id = c_aRel.CompanyProfileId
WHERE c_aRel.DossierReleased = 1 AND c_aRel.ApplicantProfileId=r.ApplicantProfileId AND cpBoughtDossier.CompanySetId = @companySetId)
THEN @true
ELSE @false
END,
(SELECT TOP 1 ISNULL(apprating.CompanySetRatingAverage, 0)
FROM ApplicantProfileRatings apprating
WHERE apprating.ApplicantProfileId = r.ApplicantProfileId
AND apprating.CompanySetId = @companySetId),
r.SocialSecurityNumber,
MAX(r.LastEditDossierDate), -- To avoid duplicate results if an Applicant's RP has been chosen in a different company than the Application's one
r.IsActiveApplication
FROM @tmpCompanyApplicantRelationState r
GROUP BY r.ApplicantProfileId,
ApplicantProfessionProfileId,
r.CompanyLocationProfileId,
r.CompanyPersonId,
ApprenticeshipId,
r.ApplicationId,
ApplicationCreateDate,
r.SocialSecurityNumber,
r.CompanyApplicantStateAvailableId,
r.IsActiveApplication
OPTION (RECOMPILE)
UPDATE t
SET UnreadMsgCount = MessageUnread,
UnansweredMsgCount = MessageUnanswered
FROM (
SELECT UnreadMsgCount,
UnansweredMsgCount,
(CASE WHEN (EXISTS (SELECT Id FROM [Message] WHERE IsRead = @false AND Id = msg.Id)) THEN 1 ELSE NULL END) AS MessageUnread,
(CASE WHEN (EXISTS (SELECT Id FROM [Message] WHERE IsReplied = @false AND MessageStatusTypeId <> 5 AND Id = msg.Id)) THEN 1 ELSE NULL END) AS MessageUnanswered
FROM #tmpResults2 t
JOIN Apprenticeship app ON t.ApprenticeshipId = app.Id
JOIN [Message] msg ON msg.ApplicationId IN (
SELECT a.Id
FROM [Application] a
WHERE a.ApprenticeshipId = t.ApprenticeshipId
AND a.ApplicantProfessionProfileId = t.ApplicantProfessionProfileId
AND (@IncludeInactiveApplications = 1 OR a.IsActive = @true)
)
AND msg.SentByCompanyPersonId IS NULL
AND msg.IsDeletedByCompany = @false
) AS t
WHERE (@IsMessageUnread IS NULL OR MessageUnread IS NOT NULL)
AND (@IsMessageUnanswered IS NULL OR MessageUnanswered IS NOT NULL)
IF @IsMessageUnread IS NOT NULL BEGIN
DELETE FROM #tmpResults2 WHERE UnreadMsgCount IS NULL
END
IF @IsMessageUnanswered IS NOT NULL BEGIN
DELETE FROM #tmpResults2 WHERE UnansweredMsgCount IS NULL
END
;WITH DistinctApps AS (
SELECT DISTINCT ApplicationId
FROM #tmpResults2
WHERE ApplicationId IS NOT NULL
), LatestMsgDates AS (
SELECT m.ApplicationId,
MAX(CASE WHEN m.SentByCompanyPersonId IS NOT NULL THEN m.SentDateTime END) AS LastSentMessageDate,
MAX(CASE WHEN m.SentByCompanyPersonId IS NULL THEN m.SentDateTime END) AS LastReceivedMessageDate
FROM [Message] m WITH(NOLOCK)
JOIN DistinctApps da ON da.ApplicationId = m.ApplicationId
GROUP BY m.ApplicationId
)
UPDATE t
SET t.LastSentMessageDate = l.LastSentMessageDate,
t.LastReceivedMessageDate = l.LastReceivedMessageDate
FROM #tmpResults2 t
LEFT JOIN LatestMsgDates l ON l.ApplicationId = t.ApplicationId
----------------------------------------------------------------------
------ 3. SELECT RESULT, APPLY PAGING & ORDERING ---------
----------------------------------------------------------------------
--GET THE RESULT--
;WITH paged AS (
SELECT
d.Id AS DossierID,
res.ApplicantProfileId,
res.ApprenticeshipId,
@false AS Checked,
apro.Id,
res.DossierPayed,
apro.IsVisibleOnGateway,
apro.PlaceName AS ApplicantCity,
apro.IsMale,
apro.LastName,
apro.FirstName,
apro.Email,
apro.Phone,
apro.PortraitImageUrl,
apro.BornDate,
apro.PlaceName AS ApplicantProfilePlaceName,
apro.Street AS ApplicantProfileStreet,
apro.MotherTongue,
apro.PlaceOfOrigin,
apro.WorkPermit,
apr.StartDate AS ApprenticeshipStartDate,
apr.ApplyPeriodType,
apr.ApplyPeriodStartDate,
apr.CompanyLocationProfileId,
res.ApplicationCreateDate,
COALESCE(cpn.FirstName,'') AS CompanyPersonFirstName,
COALESCE(cpn.LastName,'') AS CompanyPersonLastName,
COALESCE(cpn.FirstName + ' ' +cpn.LastName,'') AS CompanyPersonName,
cpn.Salutation AS Salutation,
COALESCE(cpn.CompanyProfileId, cpnapp.CompanyProfileId) AS CompanyProfileId,
cpn.Email AS CompanyPersonEmail,
(SELECT TOP 1 proT.Name
FROM ProfessionTranslation proT
WHERE prot.Language = @Language
AND prot.ProfessionId = clpro.ProfessionId
) AS Profession,
(SELECT TOP 1 aprt.ShortCode
FROM ApplicantProfessionRatingType aprt
JOIN ApplicantProfessionRating apr ON apr.ApplicantProfessionRatingTypeId = aprt.Id
WHERE apr.DossierId = d.Id AND apr.ProfessionId = clpro.ProfessionId) AS RatingTypeResult,
(SELECT COUNT(Id) FROM CompanySetApplicantComment WHERE DossierId = d.Id) AS CommentsCount,
(SELECT TOP 1 Comment FROM CompanySetApplicantComment WHERE DossierId = d.Id ORDER BY LastChangeDate DESC) AS LastComment,
(SELECT TOP 1 CompanySetApplicantCommentTemplateId FROM CompanySetApplicantComment WHERE DossierId = d.Id ORDER BY LastChangeDate DESC) AS CommentTemplateId,
res.ApplicationId AS ApplicationId,
clprof.PlaceName AS CompanyLocationProfilePlaceName,
clprof.Street AS CompanyLocationProfileStreet,
clprof.Plz AS CompanyLocationProfilePlz,
apr.ApprenticeshipType,
res.ApplicantProfessionProfileId,
caas.ShortCode + ': ' + caas.Name AS WorkflowState,
caas.[Description] AS WorkflowStateDescription,
res.AverageCompanySetCompanyRatings AS CompanySetRatingAverage,
res.SocialSecurityNumber,
CASE WHEN EXISTS (
SELECT * FROM CompanySetApplicationRelation csar
WHERE csar.EmailNotificationWhenDocumentsChange = @true
AND csar.ApplicationId IN
(SELECT Id
FROM [Application] a
WHERE a.ApprenticeshipId = res.ApprenticeshipId
AND (@IncludeInactiveApplications = 1 OR a.IsActive = @true)
AND a.ApplicantProfessionProfileId = res.ApplicantProfessionProfileId) -- GAT-3420 must find record for particular ApplicantProfessionProfileId from res table, not only for ApplicationId
AND csar.CompanyPersonId = COALESCE(res.CompanyPersonId, (SELECT a.CompanyPersonId FROM Apprenticeship a WHERE a.Id = res.ApprenticeshipId))
) THEN @true ELSE @false END AS DocChangeNotificationEnabled,
CASE WHEN clprof.Id = (SELECT HeadquarterCompanyLocationProfileId
FROM CompanyProfile c
WHERE c.Id = clprof.CompanyProfileId)
THEN @true ELSE @false END AS IsHeadquarterCompanyLocationProfile,
COALESCE(res.UnreadMsgCount, 0) AS UnreadMsgCount,
COALESCE(res.UnansweredMsgCount, 0) AS UnansweredMsgCount,
@false AS IsCreatedByCompany,
apro.Language AS ApplicantProfileLanguage,
apro.Plz AS ApplicantProfilePlz,
res.LastEditDossierDate,
apro.HealthInsuranceNumber AS HealthInsuranceNumber,
apro.FirstNationalityId AS NationalityId,
apro.CountryId AS CountryId,
apro.Mobile AS Mobile,
res.IsActiveApplication,
clpro.ExternalReferenceId,
apro.CompletionPercentage AS CompletionPercentage,
(SELECT ColorCode FROM CompanyApplicantAvailableStateColor
WHERE Id = caas.ColorId) AS ColorCode,
res.LastSentMessageDate,
res.LastReceivedMessageDate,
clprof.CompanyAppendix AS CompanyLocationProfileAppendix
FROM #tmpResults2 res
LEFT JOIN Dossier d ON d.ApplicantProfileId = res.ApplicantProfileId AND d.CompanySetId = @companySetId
LEFT JOIN ApplicantProfile apro ON apro.Id = res.ApplicantProfileId
LEFT JOIN Apprenticeship apr ON apr.Id = res.ApprenticeshipId
LEFT JOIN CompanyPerson cpnapp ON cpnapp.Id = apr.CompanyPersonId
LEFT JOIN CompanyPerson cpn ON cpn.Id = res.CompanyPersonId
LEFT JOIN CompanyLocationProfession clpro ON clpro.Id = apr.CompanyLocationProfessionId
LEFT JOIN CompanyLocationProfile clprof ON clprof.Id = apr.CompanyLocationProfileId
LEFT JOIN CompanyApplicantAvailableState caas ON caas.Id = res.CompanyApplicantAvailableStateId
WHERE
-- Anonymous Applicants
((@HideAnonymousApplicant IS NULL OR @HideAnonymousApplicant = 0)
OR (@HideAnonymousApplicant=1 AND res.DossierPayed = @true ))
AND (@SharingEnabled = @false
-- Own Dossiers.
OR (res.CompanyPersonId = @companyPersonId)
-- All Admins.
OR EXISTS (SELECT * FROM DossierViewPermission dvp
WHERE (@companyPersonRole IN (SELECT * FROM dbo.CreateInFilter(@CompanyPersonRolesAllowedToSeeApplicants,'|'))) -- 20 "Administration role", 10 "Recruiting role"
AND dvp.DossierId IN (SELECT d.Id FROM Dossier d
WHERE d.ApplicantProfileId = res.ApplicantProfileId
AND d.CompanySetId = @companySetId)
AND dvp.Type = 0 -- CompanySet wide permission (Dossier).
-- Shared to specific CP
) OR EXISTS (SELECT * FROM DossierViewPermission dvp
WHERE (dvp.TargetCompanyPersonId = @companyPersonId
AND dvp.DossierId IN (SELECT d.Id FROM Dossier d
WHERE d.ApplicantProfileId = res.ApplicantProfileId
AND d.CompanySetId = @companySetId)
AND dvp.Type = 3 -- Specific CP
AND (dvp.ApplicationId IS NULL
OR dvp.ApplicationId = res.ApplicationId
)
)
)
)
-- Portrait
-- We need the filter here, because we also don't want to show anonymous pictures (see GAT-4224)
AND (@HasPortrait IS NULL
OR (@HasPortrait = 1 AND apro.PortraitImageUrl IS NOT NULL AND res.DossierPayed = 1))
--------------------------------------------------------------------------------
-- Add the ApplicantProfilesCreatedByCompany to the result set
UNION
--------------------------------------------------------------------------------
SELECT
D.Id AS DossierID,
ISNULL(apcc.ApplicantProfileId, 0) AS ApplicantProfileId, --(because the C# code doesn't expect a NULL) -- res.ApplicantProfileId,
app.Id AS ApprenticeshipId, -- res.ApprenticeshipId
@false AS Checked,
apcc.Id,
@true AS DossierPayed, -- (never anonymous, since it was inserted by the company), --res.DossierPayed,
@true AS IsVisibleOnGateway, -- (always true, since it has to be visible by the company) --apro.IsVisibleOnGateway
apcc.PlaceName AS ApplicantCity, -- apro.PlaceName AS ApplicantCity,
apcc.IsMale,
apcc.LastName,
apcc.FirstName,
apcc.Email,
apcc.Phone, -- apro.Phone,
apcc.PortraitImageUrl, -- apro.PortraitImageUrl,
apcc.BornDate, -- apro.BornDate,
apcc.PlaceName AS ApplicantProfilePlaceName, -- apro.PlaceName as ApplicantProfilePlaceName,
apcc.Street AS ApplicantProfileStreet, -- apro.Street as ApplicantProfileStreet,
'' as MotherTongue,
'' as PlaceOfOrigin,
NULL as WorkPermit,
app.StartDate AS ApprenticeshipStartDate, -- apr.StartDate AS ApprenticeshipStartDate,
app.ApplyPeriodType,
app.ApplyPeriodStartDate,
app.CompanyLocationProfileId, -- apr.CompanyLocationProfileId,
apcc.DateCreated AS ApplicationCreateDate, -- Similar enough to Application.CreateDate -- res.ApplicationCreateDate,
cpers.FirstName AS CompanyPersonFirstName, -- COALESCE(cpn.FirstName,'') AS CompanyPersonFirstName,
cpers.LastName AS CompanyPersonLastName, -- COALESCE(cpn.LastName,'') AS CompanyPersonLastName,
cpers.FirstName + ' ' + cpers.LastName AS CompanyPersonName, --cpn.FirstName + ' '+cpn.LastName as CompanyPersonName,
cpers.Salutation AS Salutation,
cp.Id AS CompanyProfileId, -- COALESCE(cpn.CompanyProfileId, cpnapp.CompanyProfileId) AS CompanyProfileId,
cpers.Email AS CompanyPersonEmail, -- cpn.Email as CompanyPersonEmail,
(SELECT TOP 1 proT.Name
FROM ProfessionTranslation prot
WHERE prot.Language = @Language
AND prot.ProfessionId = cprof.ProfessionId
) AS Profession,
(SELECT TOP 1 aprt.ShortCode
FROM ApplicantProfessionRatingType aprt
JOIN ApplicantProfessionRating apr ON apr.ApplicantProfessionRatingTypeId = aprt.Id
WHERE apr.DossierId = d.Id AND apr.ProfessionId = cprof.ProfessionId) AS RatingTypeResult,
0 AS CommentsCount,
NULL AS LastComment,
NULL AS CommentTemplateId,
NULL AS ApplicationId,
cloc.PlaceName AS CompanyLocationProfilePlaceName, -- clprof.PlaceName AS CompanyLocationProfilePlaceName,
cloc.Street AS CompanyLocationProfileStreet, -- clprof.Street as CompanyLocationProfileStreet,
cloc.Plz AS CompanyLocationProfilePlz, -- clprof.Plz as CompanyLocationProfilePlz,
app.ApprenticeshipType, -- apr.ApprenticeshipType,
NULL, -- res.ApplicantProfessionProfileId,
NULL AS WorkflowState, -- caas.ShortCode + ': ' + caas.Name AS WorkflowState,
NULL AS WorkflowStateDescription,
NULL AS CompanySetRatingAverage, -- res.AverageCompanySetCompanyRatings as CompanySetRatingAverage,
NULL AS SocialSecurityNumber, -- res.SocialSecurityNumber
@false AS DocChangeNotificationEnabled,
@false AS IsHeadquarterCompanyLocationProfile,
0 AS UnreadMsgCount,
0 AS UnansweredMsgCount,
@true AS IsCreatedByCompany,
apcc.Language ApplicantProfileLanguage,
apcc.Plz ApplicantProfilePlz,
apcc.DateModified AS LastEditDossierDate,
NULL AS HealthInsuranceNumber,
NULL AS NationalityId,
apcc.CountryId AS CountryId,
apcc.Mobile AS Mobile,
@true AS IsActiveApplication,
cprof.ExternalReferenceId,
5 AS CompletionPercentage, -- We set CompletionPercentage to 5% for applicant created by company person
NULL AS ColorCode,
NULL AS LastSentMessageDate,
NULL AS LastReceivedMessageDate,
cloc.CompanyAppendix AS CompanyLocationProfileAppendix
FROM ApplicantProfileCreatedByCompany apcc
LEFT JOIN Dossier d ON d.ApplicantProfileId = apcc.ApplicantProfileId AND d.ApplicantProfileCreatedByCompanyId = apcc.Id AND d.CompanySetId = @companySetId
JOIN ApplicantProfileCreatedByCompanyApprenticeship apcc_app ON apcc_app.ApplicantProfileCreatedByCompany_Id = apcc.Id
JOIN Apprenticeship app ON apcc_app.Apprenticeship_Id = app.Id
JOIN CompanyLocationProfile cloc ON app.CompanyLocationProfileId = cloc.Id
JOIN CompanyLocationProfession cprof ON app.CompanyLocationProfessionId = cprof.Id
JOIN CompanyPerson cpers ON app.CompanyPersonId = cpers.Id
JOIN CompanyProfile cp ON cpers.CompanyProfileId = cp.Id
WHERE cp.CompanySetId = @companySetId
AND (@ApplicantNamesFilter IS NULL
OR
(
(dbo.InLike(apcc.FirstName, @ApplicantNamesFilter) = 1)
OR (dbo.InLike(apcc.LastName, @ApplicantNamesFilter) = 1))
)
AND (@ApplicantBirthDate IS NULL OR apcc.BornDate = @ApplicantBirthDate)
AND (
@ApprenticeshipResponsiblePersonsFilter IS NULL
OR app.CompanyPersonId IN
(SELECT * FROM dbo.CreateInFilter(@ApprenticeshipResponsiblePersonsFilter,'|'))
-- Shared dossiers if the RP is the current user
OR (EXISTS (SELECT * FROM DossierViewPermission dvp
WHERE (dvp.TargetCompanyPersonId
IN (SELECT TOP 1 * FROM dbo.CreateInFilter(@ApprenticeshipResponsiblePersonsFilter,'|') f
WHERE f.value = @companyPersonId )
AND dvp.DossierId IN (SELECT d.Id FROM Dossier d
WHERE d.ApplicantProfileId = apcc.ApplicantProfileId
AND d.CompanySetId = @companySetId)
AND dvp.Type = 3 -- Specific permission
AND dvp.ApplicationId IS NULL -- Only dossier-level permissions for ApplicantProfileCreatedByCompany
))
)
)
AND (
@CompanyLocationsFilter IS NULL
OR app.CompanyLocationProfileId IN (SELECT * FROM dbo.CreateInFilter(@CompanyLocationsFilter,'|'))
)
AND (
@ApprenticeshipTypesFilter IS NULL
OR app.ApprenticeshipType IN (SELECT * FROM dbo.CreateInFilter(@ApprenticeshipTypesFilter,'|'))
)
-- AND ( -- TODO: why is this commented?
-- @MessageProfessionsFilter IS NULL
-- OR cprof.ProfessionId IN (SELECT * FROM dbo.CreateInFilter(@MessageProfessionsFilter,'|'))
-- )
-- Date Filter
-- For the Application start and end dates, we use the profile creation dates, it's similar enough
AND (
@FirstMessageStartDate IS NULL
OR (CONVERT(DATE, apcc.DateCreated) >= @FirstMessageStartDate)
)
AND (
@FirstMessageEndDate IS NULL
OR (CONVERT(DATE, apcc.DateCreated) <= @FirstMessageEndDate)
)
AND (
@ApprenticeshipStartDate IS NULL
OR (CONVERT(DATE, app.StartDate) >= @ApprenticeshipStartDate)
)
AND (
@ApprenticeshipEndDate IS NULL
OR (CONVERT(DATE, app.StartDate) <= @ApprenticeshipEndDate)
)
AND (
(@CoCompaniesFilter IS NULL AND apcc.CompanyProfileId IN (SELECT Id FROM CompanyProfile WHERE CompanySetId = @companySetId))
OR apcc.CompanyProfileId IN (SELECT * FROM dbo.CreateInFilter(@CoCompaniesFilter,'|'))
)
-- The filters below can't apply to ApplicantProfileCreatedByCompany,
-- so they have to be null (unused) to return resultset
AND (@RatingProfessionsFilter IS NULL AND @RatingFilter IS NULL)
AND (@SchoolLangFilter IS NULL)
AND (@WorkflowStatesFilter IS NULL)
-- Because we can't have unread or unanswered messages for ApplicantProfilesCreatedByCompany
AND (@IsMessageUnread IS NULL)
AND (@IsMessageUnanswered IS NULL)
AND (@IncludeApplicantsCreatedByCompanyFilter != 0)
),
ordered as
(
SELECT ROW_NUMBER() OVER (ORDER BY
CASE WHEN @SortColumn = 'ApplicantName' THEN DossierPayed END DESC,
CASE WHEN @SortColumn = 'ApplicantName' AND @SortAsc = 1 THEN LastName END ASC,
CASE WHEN @SortColumn = 'ApplicantName' AND @SortAsc = 0 THEN LastName END DESC,
CASE WHEN @SortColumn = 'ApplicantName' AND @SortAsc = 1 THEN FirstName END ASC,
CASE WHEN @SortColumn = 'ApplicantName' AND @SortAsc = 0 THEN FirstName END DESC,
CASE WHEN @SortColumn = 'ApplicantCity' AND @SortAsc = 1 THEN ApplicantCity END ASC,
CASE WHEN @SortColumn = 'ApplicantCity' AND @SortAsc = 0 THEN ApplicantCity END DESC,
CASE WHEN @SortColumn = 'Profession' AND @SortAsc = 1 THEN Profession END ASC,
CASE WHEN @SortColumn = 'Profession' AND @SortAsc = 0 THEN Profession END DESC,
CASE WHEN @SortColumn = 'CompanyLocationProfilePlaceName' AND @SortAsc = 1 THEN CompanyLocationProfilePlaceName END ASC,
CASE WHEN @SortColumn = 'CompanyLocationProfilePlaceName' AND @SortAsc = 0 THEN CompanyLocationProfilePlaceName END DESC,
CASE WHEN @SortColumn = 'CompanyPersonName' AND @SortAsc = 1 THEN CompanyPersonLastName END ASC,
CASE WHEN @SortColumn = 'CompanyPersonName' AND @SortAsc = 0 THEN CompanyPersonLastName END DESC,
CASE WHEN @SortColumn = 'CompanyPersonName' AND @SortAsc = 1 THEN CompanyPersonFirstName END ASC,
CASE WHEN @SortColumn = 'CompanyPersonName' AND @SortAsc = 0 THEN CompanyPersonFirstName END DESC,
CASE WHEN @SortColumn = 'WorkflowState' AND @SortAsc = 1 THEN [WorkflowState] END ASC,
CASE WHEN @SortColumn = 'WorkflowState' AND @SortAsc = 0 THEN [WorkflowState] END DESC,
CASE WHEN @SortColumn = 'CompanySetRatingAverage' AND @SortAsc = 1 THEN RatingTypeResult END ASC,
CASE WHEN @SortColumn = 'CompanySetRatingAverage' AND @SortAsc = 0 THEN RatingTypeResult END DESC,
CASE WHEN @SortColumn = 'CompanySetRatingAverage' AND @SortAsc = 1 THEN [CompanySetRatingAverage] END ASC,
CASE WHEN @SortColumn = 'CompanySetRatingAverage' AND @SortAsc = 0 THEN [CompanySetRatingAverage] END DESC,
CASE WHEN @SortColumn = 'ApplicationCreateDate' AND @SortAsc = 1 THEN [ApplicationCreateDate] END ASC,
CASE WHEN @SortColumn = 'ApplicationCreateDate' AND @SortAsc = 0 THEN [ApplicationCreateDate] END DESC,
CASE WHEN @SortColumn = 'LastEditDossierDate' AND @SortAsc = 1 THEN [LastEditDossierDate] END ASC,
CASE WHEN @SortColumn = 'LastEditDossierDate' AND @SortAsc = 0 THEN [LastEditDossierDate] END DESC,
CASE WHEN @SortColumn = 'LastReceivedMessageDate' AND @SortAsc = 1 THEN LastReceivedMessageDate END ASC,
CASE WHEN @SortColumn = 'LastReceivedMessageDate' AND @SortAsc = 0 THEN LastReceivedMessageDate END DESC,
CASE WHEN @SortColumn = 'LastSentMessageDate' AND @SortAsc = 1 THEN LastSentMessageDate END ASC,
CASE WHEN @SortColumn = 'LastSentMessageDate' AND @SortAsc = 0 THEN LastSentMessageDate END DESC,
Id
) AS RowNumber,
*
FROM paged
)
SELECT (
SELECT COUNT(*)
FROM paged res2
WHERE ((@HideAnonymousApplicant IS NULL OR @HideAnonymousApplicant=0)
OR (@HideAnonymousApplicant=1 AND res2.DossierPayed=@true ))
) AS TotalItems,
cpro.Name AS CompanyName,
cpro.Id AS CompanyProfileId,
ordered.*
FROM ordered
LEFT JOIN CompanyProfile cpro ON cpro.Id = ordered.CompanyProfileId
WHERE (ordered.RowNumber >= @FirstRow)
AND (ordered.RowNumber < (@FirstRow + @PageSize))
ORDER BY ordered.RowNumber
END

1
build.log Normal file
View File

@@ -0,0 +1 @@
make: *** No targets specified and no makefile found. Stop.

View File

@@ -8,8 +8,18 @@ sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubun
sudo apt-get update
sudo apt-get install docker-ce
# set storage
<!-- create or edit /etc/docker/daemon.json -->
{
"data-root":"/mnt/data/docker"
}
systemctl stop docker
systemctl start docker
docker info
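Restarting the daemon is required for the new data-root to take effect; images and containers already stored under /var/lib/docker are not migrated automatically. A quick check that the new root is active (a sketch using the standard docker info template field):
docker info --format '{{ .DockerRootDir }}'
# expect /mnt/data/docker; rsync the old /var/lib/docker content over first if existing images should be kept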
# add portainer
docker run -d -p 8000:8000 -p 9000:9000 --name=portainer --restart=always --pull=always -v /var/run/docker.sock:/var/run/docker.sock -v /mnt/data/docker_vol/portainer:/data portainer/portainer-ce
# change portainer admin password
docker stop portainer

View File

@@ -0,0 +1,4 @@
https://www.youtube.com/watch?v=wCBLMXgk3No
https://github.com/kyuz0/amd-strix-halo-toolboxes?tab=readme-ov-file#211-toolbox-refresh-script-automatic-updates
https://hub.docker.com/r/kyuz0/amd-strix-halo-toolboxes/tags
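Upstream these images are meant to be used as toolbx containers rather than via compose; a minimal sketch, assuming the `vulkan-radv` tag listed on the Docker Hub page above:
```bash
toolbox create llama-vulkan --image docker.io/kyuz0/amd-strix-halo-toolboxes:vulkan-radv
toolbox enter llama-vulkan
```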

View File

@@ -0,0 +1,100 @@
version: '3.8'
services:
amd-strix-halo-llama-rocm:
image: kyuz0/amd-strix-halo-toolboxes:rocm-7rc-rocwmma
container_name: amd-strix-halo-llama-rocm
restart: unless-stopped
privileged: true
devices:
- /dev/kfd:/dev/kfd
- /dev/dri:/dev/dri
group_add:
- video
volumes:
- ./models:/models
- ./data:/data
- /home/${USER}:/home/${USER}:rslave
- /home/db/Downloads/:/mnt/dl
environment:
- DISPLAY=${DISPLAY}
- NVIDIA_VISIBLE_DEVICES=all
- NVIDIA_DRIVER_CAPABILITIES=all
ports:
- "8080:8080" # For web UI if available
working_dir: /models
command: /bin/bash
stdin_open: true
tty: true
# Alternative Vulkan backend
amd-strix-halo-llama-vulkan-radv:
image: kyuz0/amd-strix-halo-toolboxes:vulkan-radv
container_name: amd-strix-halo-llama-vulkan-radv
restart: unless-stopped
privileged: true
devices:
- /dev/dri:/dev/dri
group_add:
- video
volumes:
- ./models:/models
- ./data:/data
- /home/${USER}:/home/${USER}:rslave
- /home/db/Downloads/:/mnt/dl
environment:
- DISPLAY=${DISPLAY}
ports:
- "8081:8080" # Different port to avoid conflicts
working_dir: /models
command: /bin/bash
stdin_open: true
tty: true
# Alternative Vulkan AMDVLK backend
amd-strix-halo-llama-vulkan-amdvlk:
image: kyuz0/amd-strix-halo-toolboxes:vulkan-amdvlk
container_name: amd-strix-halo-llama-vulkan-amdvlk
restart: unless-stopped
privileged: true
devices:
- /dev/dri:/dev/dri
group_add:
- video
volumes:
- ./models:/models
- ./data:/data
- /home/${USER}:/home/${USER}:rslave
# - /home/db/Downloads/xmrig-6.21.0:/mnt/xmrig
- /home/db/Downloads/:/mnt/dl
environment:
- DISPLAY=${DISPLAY}
ports:
- "8082:8080" # Different port to avoid conflicts
working_dir: /models
command: /bin/bash
stdin_open: true
tty: true
amdopencl:
image: pbsprotest/amdopencl:24
container_name: amdopencl
devices:
- /dev/dri
- /dev/kfd
volumes:
- ./workspace:/workspace
# - /home/db/Downloads/xmrig-6.21.0:/mnt/xmrig
- /home/db/Downloads/:/mnt/dl
stdin_open: true
tty: true
volumes:
models:
driver: local
data:
driver: local
networks:
default:
name: amd-strix-halo-network

View File

@@ -0,0 +1,109 @@
# AMD Strix Halo Toolboxes Docker Compose
This Docker Compose setup provides pre-built containers for running LLMs on AMD Ryzen AI Max "Strix Halo" integrated GPUs.
## Prerequisites
- AMD Ryzen AI Max "Strix Halo" system (e.g., Ryzen AI MAX+ 395)
- Docker and Docker Compose installed
- At least 128GB RAM recommended for larger models
- Proper kernel configuration for unified memory
## Kernel Configuration
Add these boot parameters to `/etc/default/grub`:
```bash
amd_iommu=off amdgpu.gttsize=131072 ttm.pages_limit=33554432
```
Then apply:
```bash
sudo grub2-mkconfig -o /boot/grub2/grub.cfg
sudo reboot
```
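After the reboot, the enlarged GTT pool can be sanity-checked from sysfs (a sketch; `card0` is an assumption and may be a different card index on some systems):
```bash
# GTT (system RAM usable by the iGPU) and dedicated VRAM, reported in bytes
cat /sys/class/drm/card0/device/mem_info_gtt_total
cat /sys/class/drm/card0/device/mem_info_vram_total
```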
## Usage
### Start all services
```bash
docker-compose up -d
```
### Start specific backend
```bash
# ROCm backend (best for prompt processing)
docker-compose up -d amd-strix-halo-llama-rocm
# Vulkan RADV backend (fastest token generation)
docker-compose up -d amd-strix-halo-llama-vulkan-radv
# Vulkan AMDVLK backend
docker-compose up -d amd-strix-halo-llama-vulkan-amdvlk
```
### Access containers
```bash
# Enter ROCm container
docker exec -it amd-strix-halo-llama-rocm bash
# Enter Vulkan RADV container
docker exec -it amd-strix-halo-llama-vulkan-radv bash
# Enter Vulkan AMDVLK container
docker exec -it amd-strix-halo-llama-vulkan-amdvlk bash
```
## Directory Structure
```
amd-strix-halo-toolboxes/
├── models/ # Mount point for GGUF models
├── data/ # Mount point for data
└── amd-strix-halo-toolboxes.yml
```
## Download Models
Inside the container, download GGUF models:
```bash
# Example: Download Llama-2-7B
wget https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q4_K_M.gguf
# Run the model
./llama.cpp/main -m llama-2-7b-chat.Q4_K_M.gguf -n 128 --repeat_penalty 1.1
```
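Port 8080 is published on the ROCm container, so a model can also be served over HTTP instead of run interactively, assuming the image ships llama.cpp's `llama-server` binary (a sketch; binary name and flags may differ between image tags):
```bash
llama-server -m llama-2-7b-chat.Q4_K_M.gguf --host 0.0.0.0 --port 8080 -ngl 99
# from the host:
curl http://localhost:8080/health
```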
## Backend Performance
Based on benchmarks (a comparison sketch follows this list):
- **ROCm 6.4.3 + ROCWMMA (hipBLASLt)**: Best for prompt processing
- **Vulkan RADV**: Fastest for token generation
- **Vulkan AMDVLK**: Good balance
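The comparison can be reproduced on your own model by running the same benchmark in each container, assuming the images include llama.cpp's `llama-bench` tool (a sketch with stock llama-bench flags):
```bash
# run inside each container (ROCm, Vulkan RADV, Vulkan AMDVLK) with identical settings
llama-bench -m /models/llama-2-7b-chat.Q4_K_M.gguf -p 512 -n 128 -ngl 99
```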
## Memory Planning
Use the VRAM estimator inside containers:
```bash
python3 gguf-vram-estimator.py your-model.gguf --contexts 4096 32768 1048576
```
## Ports
- ROCm backend: `8080`
- Vulkan RADV backend: `8081`
- Vulkan AMDVLK backend: `8082`
## Troubleshooting
1. **Permission issues**: Ensure your user is in the `video` group (see the command sketch after this list)
2. **GPU not detected**: Check kernel parameters and reboot
3. **Out of memory**: Use the VRAM estimator to plan model sizes
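For item 1, group membership can be fixed as follows (a sketch; on many distros the `render` group is also needed for /dev/kfd and /dev/dri/renderD*):
```bash
sudo usermod -aG video,render "$USER"
# log out and back in (or reboot) for the new groups to take effect
```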
## References
- [Original Repository](https://github.com/kyuz0/amd-strix-halo-toolboxes)
- [Strix Halo Hardware Database](https://strixhalo-homelab.d7.wtf/)

View File

@@ -0,0 +1,100 @@
version: '3.8'
services:
amd-strix-halo-llama-rocm:
image: kyuz0/amd-strix-halo-toolboxes:rocm-7rc-rocwmma
container_name: amd-strix-halo-llama-rocm
restart: unless-stopped
privileged: true
devices:
- /dev/kfd:/dev/kfd
- /dev/dri:/dev/dri
group_add:
- video
volumes:
- ./models:/models
- ./data:/data
- /home/${USER}:/home/${USER}:rslave
- /home/db/Downloads/:/mnt/dl
environment:
- DISPLAY=${DISPLAY}
- NVIDIA_VISIBLE_DEVICES=all
- NVIDIA_DRIVER_CAPABILITIES=all
ports:
- "8080:8080" # For web UI if available
working_dir: /models
command: /bin/bash
stdin_open: true
tty: true
# Alternative Vulkan backend
amd-strix-halo-llama-vulkan-radv:
image: kyuz0/amd-strix-halo-toolboxes:vulkan-radv
container_name: amd-strix-halo-llama-vulkan-radv
restart: unless-stopped
privileged: true
devices:
- /dev/dri:/dev/dri
group_add:
- video
volumes:
- ./models:/models
- ./data:/data
- /home/${USER}:/home/${USER}:rslave
- /home/db/Downloads/:/mnt/dl
environment:
- DISPLAY=${DISPLAY}
ports:
- "8081:8080" # Different port to avoid conflicts
working_dir: /models
command: /bin/bash
stdin_open: true
tty: true
# Alternative Vulkan AMDVLK backend
amd-strix-halo-llama-vulkan-amdvlk:
image: kyuz0/amd-strix-halo-toolboxes:vulkan-amdvlk
container_name: amd-strix-halo-llama-vulkan-amdvlk
restart: unless-stopped
privileged: true
devices:
- /dev/dri:/dev/dri
group_add:
- video
volumes:
- ./models:/models
- ./data:/data
- /home/${USER}:/home/${USER}:rslave
# - /home/db/Downloads/xmrig-6.21.0:/mnt/xmrig
- /home/db/Downloads/:/mnt/dl
environment:
- DISPLAY=${DISPLAY}
ports:
- "8082:8080" # Different port to avoid conflicts
working_dir: /models
command: /bin/bash
stdin_open: true
tty: true
amdopencl:
image: pbsprotest/amdopencl:24
container_name: amdopencl
devices:
- /dev/dri
- /dev/kfd
volumes:
- ./workspace:/workspace
# - /home/db/Downloads/xmrig-6.21.0:/mnt/xmrig
- /home/db/Downloads/:/mnt/dl
stdin_open: true
tty: true
volumes:
models:
driver: local
data:
driver: local
networks:
default:
name: amd-strix-halo-network

View File

@@ -0,0 +1,40 @@
#!/bin/bash
# AMD Strix Halo Toolboxes Startup Script
echo "Starting AMD Strix Halo Toolboxes..."
# Check if Docker is running
if ! docker info > /dev/null 2>&1; then
echo "Error: Docker is not running. Please start Docker first."
exit 1
fi
# Check if we're in the right directory
if [ ! -f "amd-strix-halo-toolboxes.yml" ]; then
echo "Error: amd-strix-halo-toolboxes.yml not found. Please run this script from the amd-strix-halo-toolboxes directory."
exit 1
fi
# Pull the latest images
echo "Pulling latest images..."
docker-compose pull
# Start the services
echo "Starting services..."
docker-compose up -d
echo "Services started successfully!"
echo ""
echo "Available containers:"
echo "- amd-strix-halo-llama-rocm (ROCm backend)"
echo "- amd-strix-halo-llama-vulkan-radv (Vulkan RADV backend)"
echo "- amd-strix-halo-llama-vulkan-amdvlk (Vulkan AMDVLK backend)"
echo ""
echo "To access a container:"
echo "docker exec -it amd-strix-halo-llama-rocm bash"
echo ""
echo "To view logs:"
echo "docker-compose logs -f"

View File

@@ -0,0 +1,103 @@
version: "3.8"
services:
ubuntu-vnc-xfce-firefox-g3: # zelenakravapase
deploy:
replicas: 1
#https://hub.docker.com/r/accetto/ubuntu-vnc-xfce-firefox-g3
#https://github.com/accetto/ubuntu-vnc-xfce-g3/tree/master/docker/xfce-firefox
image: accetto/ubuntu-vnc-xfce-firefox-g3
container_name: desktop-ubuntu
restart: unless-stopped
#network_mode: "host"
ports:
- "6080:80"
- 6901:6901
environment:
- VNC_PW=tixooo! #zelenakravapase
- VNC_RESOLUTION=1900x1000
#1024x768 1600 x 900 HD+ 1920x1080
# - USER_UID=1000
# - USER_GID=1000
devices:
- "/dev/binder:/dev/binder"
- "/dev/ashmem:/dev/ashmem"
privileged: true
cap_add:
- NET_ADMIN
user: "0:0"
volumes:
- /mnt/apps/docker_volumes/DESK/firefox/:/headless/data
- /mnt/apps/docker_volumes/DESK/firefox/desktop:/home/headless/Desktop
- /mnt:/mnt
#- /dev/ppp:/dev/ppp # for VPN
#network_mode: "host"
winehq:
deploy:
replicas: 0
image: ich777/winehq-novnc-baseimage
ports:
- "8089:8080"
- "5909:5900"
volumes:
- /mnt/apps/docker_volumes/DESK/wine:/wine-data
environment:
- TZ=Europe/Sofia
restart: unless-stopped
android-container:
deploy:
replicas: 0
image: budtmo/docker-android:emulator_14.0 #budtmo/docker-android
container_name: android-container
ports:
- "6081:6080"
- "5554:5554"
- "5555:5555"
environment:
EMULATOR_DEVICE: Samsung Galaxy S10
WEB_VNC: "true"
devices:
- "/dev/kvm:/dev/kvm"
volumes:
- "/mnt/storage/DEV/workspace/repos/git.d-popov.com/ai-kevin/agent-mobile/artimobile:/home/androidusr/tmp"
- "/mnt/storage/DEV/workspace/repos/git.d-popov.com/ai-kevin/agent-mobile/jdk:/home/androidusr/download"
#- /mnt/apps/docker_volumes/android_emulator/opt:/opt/android
# - /mnt/apps/docker_volumes/android_emulator/jdk:/home/androidusr/jdk
#- "/mnt/apps/docker_volumes/android_emulator/data:/home/androidusr/.android"
#- /mnt/apps/docker_volumes/android_emulator/.android:/home/androidusr/.android
#- /mnt/apps/docker_volumes/android_emulator/android_emulator:/home/androidusr/emulator
working_dir: /home/androidusr/tmp
command: /bin/bash -c "cd /home/androidusr/tmp && npx react-native run-android"
# https://github.com/dockur/windows
windows:
image: dockurr/windows
container_name: windows
devices:
- /dev/kvm
cap_add:
- NET_ADMIN
ports:
- 8006:8006
- 3389:3389/tcp
- 3389:3389/udp
stop_grace_period: 2m
restart: on-failure
environment:
VERSION: "win11"
RAM_SIZE: "6G"
CPU_CORES: "6"
DISK_SIZE: "64G"
volumes:
- /mnt/apps/docker_volumes/win:/storage
- /mnt/storage/:/data
- /mnt/apps/docker_volumes/DESK/firefox/desktop:/data2
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: all
capabilities: [gpu]
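The GPU reservation on the `windows` service (`driver: nvidia`) only takes effect when the NVIDIA Container Toolkit is installed and registered with Docker on the host; a quick hedged check:
```bash
docker run --rm --gpus all ubuntu nvidia-smi   # lists the GPU when the toolkit is set up correctly
```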

View File

@@ -0,0 +1,22 @@
services:
windows:
image: dockurr/windows # https://github.com/dockur/windows
container_name: windows
environment:
VERSION: "11"
devices:
- /dev/kvm
- /dev/net/tun
cap_add:
- NET_ADMIN
ports:
- 8006:8006
- 3389:3389/tcp
- 3389:3389/udp
volumes:
#- /dev/nvme0n1p7:/disk1 # blind mount - not working for now
- /mnt/data/docker_vol/windows:/storage # storage (img file)location
#- /mnt/data/:/data
restart: always
stop_grace_period: 2m
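dockur/windows relies on KVM passthrough (`/dev/kvm`); before bringing the stack up it is worth confirming the host exposes it (a sketch):
```bash
ls -l /dev/kvm                      # device node must exist and be accessible
lsmod | grep -E 'kvm_(intel|amd)'   # KVM kernel module loaded
```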

10
ssh.md Normal file
View File

@@ -0,0 +1,10 @@
Generate keys
ssh-keygen -t ed25519 -C "vastai-$(whoami)@$(hostname)-$(Get-Date -UFormat %Y%m%d)" -f "D:\Nextcloud\_STORAGE\keys\vastai\id_ed25519"
Get-Content "D:\Nextcloud\_STORAGE\keys\vastai\id_ed25519.pub" -Raw | Set-Clipboard
SSH
ssh -i "D:\Nextcloud\_STORAGE\keys\vastai\id_ed25519" -p 12547 root@ssh8.vast.ai -L 8080:localhost:8080
Port forward only (-N: no remote shell)
ssh -i "D:\Nextcloud\_STORAGE\keys\vastai\id_ed25519" -o IdentitiesOnly=yes -N -L 11434:localhost:21434 -L 7500:localhost:17500 -p 12547 root@ssh8.vast.ai

1
stratum_proxy.log Normal file
View File

@@ -0,0 +1 @@
nohup: ignoring input