Initial commit

Branch: main · Author: vpei · 6 months ago · Commit: 695f6c3008

99 changed files with 110410 additions and 0 deletions
  1. +291 -0  .github/workflows/Bak.yml.bak
  2. +167 -0  .github/workflows/Publish-node-2023-0.yml
  3. +21 -0  LICENSE
  4. +26 -0  README.md
  5. +56 -0  cls/CharDecode.py
  6. +111 -0  cls/IpAddress.py
  7. +66 -0  cls/IsValid.py
  8. +15 -0  cls/ListFile.py
  9. +51 -0  cls/LocalFile.py
  10. +111 -0  cls/NetFile.py
  11. +435 -0  cls/PingIP.py
  12. +58 -0  cls/StrText.py
  13. +627 -0  cls/SubConvert.py
  14. +25 -0  cls/TimeText.py
  15. +15 -0  cls/__init__.py
  16. +117 -0  ipfs.py
  17. +8 -0  ipfs/LICENSE
  18. +5 -0  ipfs/LICENSE-APACHE
  19. +19 -0  ipfs/LICENSE-MIT
  20. +28 -0  ipfs/README.md
  21. BIN  ipfs/config/blocks/6Y/CIQA4T3TD3BP3C2M3GXCGRCRTCCHV7XSGAZPZJOAOHLPOI6IQR3H6YQ.data
  22. +4 -0  ipfs/config/blocks/75/CIQBEM7N2AM5YRAMJY7WDI6TJ4MGYIWVBA7POWSBPYKENY5IKK2I75Y.data
  23. +4 -0  ipfs/config/blocks/BE/CIQCXBHBZAHEHBHU6P7PEA72E7UZQRJALHH7OH2FCWSWMTU7DMWVBEA.data
  24. +3 -0  ipfs/config/blocks/DR/CIQMIFPMESX5YLCWLPHMOSQYMWITJ2BX5SLQ2D7YQDX72SPDAV2DDRY.data
  25. +3 -0  ipfs/config/blocks/HB/CIQMDQRK7B5DSZKBYOX4353TGN5J3JXS5VS6YNSAEJBOXBG26R76HBY.data
  26. +163 -0  ipfs/config/blocks/HO/CIQDUCDJ3AUUMXKOB7E2KVJP5FDHTJ6TPBUHFUNCDWE6ADL57A76HOY.data
  27. +115 -0  ipfs/config/blocks/I2/CIQBZNLCBI3U2I5F7O636DRBO552SCMSK2X2WYVCQ6BMYJN4MJTRI2Q.data
  28. +8 -0  ipfs/config/blocks/IL/CIQJFGRQHQ45VCQLM7AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY.data
  29. BIN  ipfs/config/blocks/IY/CIQB4655YD5GLBB7WWEUAHCO6QONU5ICBONAA5JEPBIOEIVZ5RXTIYY.data
  30. +3 -0  ipfs/config/blocks/JN/CIQPHMHGQLLZXC32FQQW2YVM4KGFORVFJAQYY55VK3WJGLZ2MS4RJNQ.data
  31. +3 -0  ipfs/config/blocks/KE/CIQD44K6LTXM6PHWK2RHB3G2VCYFPMVBTALE572GSMETJGBJTELFKEI.data
  32. +3 -0  ipfs/config/blocks/MJ/CIQHQFRJK4MU2CVNFR3QG6KZB3FZG6OG7EBI4SUNB5K4S4T5UVECMJA.data
  33. BIN  ipfs/config/blocks/N6/CIQGFYPT5OBMRC7ZMUFC2R3ZQPKOGBMHJEDDFEVS5ALYBKIZCXPTN6Y.data
  34. +27 -0  ipfs/config/blocks/OO/CIQBT4N7PS5IZ5IG2ZOUGKFK27IE33WKGJNDW2TY3LSBNQ34R6OVOOQ.data
  35. +3 -0  ipfs/config/blocks/QD/CIQL4QZR6XGWMPEV5Q2FCTDFD7MF3G5OOC5CMEDUHNA5VXYZVDLFQDA.data
  36. +28 -0  ipfs/config/blocks/R3/CIQBED3K6YA5I3QQWLJOCHWXDRK5EXZQILBCKAPEDUJENZ5B5HJ5R3A.data
  37. +3 -0  ipfs/config/blocks/RO/CIQDRD2UT66U4EATJW53PSVWMFFPGNAN42PVWMDLHJD6FA5EVNNZROI.data
  38. +1 -0  ipfs/config/blocks/SHARDING
  39. +3 -0  ipfs/config/blocks/TJ/CIQPYOPHHKZVNLBDOINZWEWHEMTTFWYPPQUQZOF6JAC5XAXQMTG6TJA.data
  40. +55 -0  ipfs/config/blocks/TP/CIQCODPXR5G237BYM7E5JF4A624CLH2TQDLC4QI6HEZK7FUWZQESTPI.data
  41. +5 -0  ipfs/config/blocks/U2/CIQHFTCY7XL57YWLVDQ6UAXUOND3ADYQYJKYXA6G7A5IMD7SMO22U2A.data
  42. +9 -0  ipfs/config/blocks/UC/CIQFKVEG2CPWTPRG5KNRUAWMOABRSTYUFHFK3QF6KN3M67G5E3ILUCY.data
  43. BIN  ipfs/config/blocks/V3/CIQAPZYJAKUKALYI4YTB5PUMEN5BZYZHUQZWGFL4Q3HZUV26SYX2V3Q.data
  44. +3 -0  ipfs/config/blocks/VN/CIQPEOA2TS3RMLOBOF55ZOEZE3TNBQG3HCNFOYC3BATAIJBOIE5FVNY.data
  45. +2 -0  ipfs/config/blocks/X3/CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y.data
  46. BIN  ipfs/config/blocks/XV/CIQGAS6MQJCEC37C2IIH5ZFYJCSTT7TCKJP3F7SLGNVSDVZSMACCXVA.data
  47. +30 -0  ipfs/config/blocks/_README
  48. +1 -0  ipfs/config/blocks/diskUsage.cache
  49. +161 -0  ipfs/config/config
  50. BIN  ipfs/config/datastore/000002.ldb
  51. BIN  ipfs/config/datastore/000005.ldb
  52. BIN  ipfs/config/datastore/000010.ldb
  53. +1 -0  ipfs/config/datastore/CURRENT
  54. +1 -0  ipfs/config/datastore/CURRENT.bak
  55. +0 -0  ipfs/config/datastore/LOCK
  56. +75 -0  ipfs/config/datastore/LOG
  57. BIN  ipfs/config/datastore/MANIFEST-000016
  58. +1 -0  ipfs/config/datastore_spec
  59. +1 -0  ipfs/config/version
  60. +39 -0  ipfs/install.sh
  61. +1 -0  ipfs/tmp/001.out
  62. +1 -0  ipfs/tmp/002.out
  63. +1 -0  ipfs/tmp/003.out
  64. +24880 -0  ipfs/tmp/err.log
  65. +5359 -0  ipfs/tmp/info.log
  66. +1 -0  ipfs/tmp/tmp.out
  67. +903 -0  main.py
  68. +1 -0  o/allnode.txt
  69. +8034 -0  o/clash.yaml
  70. +257 -0  o/clashnode.txt
  71. +1 -0  o/node.txt
  72. +1 -0  o/nodecn.txt
  73. +8034 -0  o/openclash.yaml
  74. +254 -0  o/proxies.txt
  75. +35 -0  res/README.md
  76. +149 -0  res/clash-1.txt
  77. +3554 -0  res/clash-2.txt
  78. BIN  res/d181a7d1ab093.PNG
  79. +305 -0  res/errnode.txt
  80. +9833 -0  res/expire.txt
  81. +29696 -0  res/fakedomain.txt
  82. +15902 -0  res/fakeip.txt
  83. BIN  res/ip.exe
  84. +120 -0  res/ipfs
  85. +1 -0  res/nod-0.txt
  86. +1 -0  res/nod-1.txt
  87. +1 -0  res/nod-10.txt
  88. +1 -0  res/nod-11.txt
  89. +1 -0  res/nod-2.txt
  90. +1 -0  res/nod-3.txt
  91. +1 -0  res/nod-4.txt
  92. +1 -0  res/nod-5.txt
  93. +1 -0  res/nod-6.txt
  94. +1 -0  res/nod-7.txt
  95. +1 -0  res/nod-8.txt
  96. +1 -0  res/nod-9.txt
  97. +58 -0  res/node.json
  98. BIN  res/qqwry.dat
  99. +8 -0  res/requirements.txt

.github/workflows/Bak.yml.bak  (+291, -0)

@@ -0,0 +1,291 @@
# github.com/vpei/Free-Node-Merge
# Description: Automatically Build SSR for OpenWrt
# Cron: min (0 - 59) / hour (0 - 23) / day of month (1 - 31) / month (1 - 12) / day of week (0 - 6)(Sunday - Saturday)
# Source code repository: https://github.com/openwrt/openwrt / Branch: master
#========================================================================================================================
name: Publish-Node-To-Ipfs-2022

# Controls when the action will run.
on:
  push:
    branches: [ main ]
  schedule:
    - cron: '0 */2 * * *'
  workflow_dispatch:

env:
  DELETE_RELEASE: true
  DELETE_ARTIFACTS: true
  KEEP_DAY: 1
  KEEP_MININUM_RUNS: 10
  KEEP_MININUM_RELEASE: 10

jobs:
  Build:
    runs-on: ubuntu-latest
    steps:
      - name: 'Set System Timezone'
        run: sudo timedatectl set-timezone Asia/Shanghai

      - name: Checkout
        uses: actions/checkout@v2

      - name: 'Set up Python'
        uses: actions/setup-python@v1
        with:
          python-version: 3.7

      - name: 'Install requirements'
        run: |
          #pip install requests
          #python -m pip install -U wheel
          #pip install qqwry-py3
          sudo -E apt-get -qq update
          sudo -E apt-get -qq install inetutils-ping
          pip install -r ./res/requirements.txt

      - name: 'Sync resfile from ipfs'
        id: init
        env:
          #RESURL = 'https://cf-ipfs.com/ipns/k2k4r8n10q07nqe02zysssxw1b9qboab0dd3ooljd32i9ro3edry6hv6/'
          RESURL: ${{ secrets.RESURL }}
        run: |
          python main.py init
          echo "::set-output name=status::success"

      - name: 'Update and merge node from url'
        id: update
        env:
          RESURL: ${{ secrets.RESURL }}
        if: steps.init.outputs.status == 'success' && !cancelled()
        run: |
          python main.py update
          echo "::set-output name=status::success"

      - name: 'Update node ip-address info'
        id: ipdomain
        env:
          RESURL: ${{ secrets.RESURL }}
        run: |
          python main.py optnode
          echo "::set-output name=status::success"

      - name: 'Compare new file with old file MD5'
        id: diff
        run: |
          file1=./res/vpei-new.txt
          file2=./res/node-0.txt
          #dataline1=$(< $file1)
          dataline1=$(md5sum $file1|cut -d ' ' -f1)
          echo $dataline1
          #dataline2=$(< $file2)
          dataline2=$(md5sum $file2|cut -d ' ' -f1)
          echo $dataline2
          if [ "$dataline1" == "$dataline2" ];then
            rm $file1
            echo "The two files are identical"
            echo "::set-output name=status::noupdate"
          else
            mv $file1 $file2
            echo "The two files are different"
            echo "::set-output name=status::success"
          fi

      - name: 'Send telegram message on push'
        env:
          TELEGRAM_TO: ${{ secrets.TELEGRAM_TO }}
          TELEGRAM_TOKEN: ${{ secrets.TELEGRAM_TOKEN }}
        uses: appleboy/telegram-action@master
        if: steps.diff.outputs.status == 'success' && env.TELEGRAM_TO != '' && env.TELEGRAM_TOKEN != '' && !cancelled()
        with:
          to: ${{ secrets.TELEGRAM_TO }}
          token: ${{ secrets.TELEGRAM_TOKEN }}
          message: |
            Mixed Nodes-1: http://185.177.124.24:8080/ipns/k51qzi5uqu5dh78dgxj9xt0od8sk97bxdnr84474rctjuln517hnl3v937di61/
            Mixed Nodes-2: https://gateway.ipfs.io/ipns/k51qzi5uqu5dh78dgxj9xt0od8sk97bxdnr84474rctjuln517hnl3v937di61/

      - name: 'Commit files first'
        if: steps.diff.outputs.status == 'success' && !cancelled()
        run: |
          rm -rf ./cls/__pycache__
          #rm -rf ./ip/__pycache__
          git config --global user.email actions@github.com
          git config --global user.name "GitHub Actions"
          git pull
          git add .
          git commit -m "Automatic upgrade by robot at $(date +"%Y.%m.%d.%H%M")" -a

      - name: 'Push changes first'
        uses: ad-m/github-push-action@master
        if: steps.diff.outputs.status == 'success' && !cancelled()
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          branch: main
          #branch: ${{ github.ref }}

      - name: 'Upload Files To Ipfs Network'
        id: publish
        env:
          PEERID: ${{ secrets.PEERID }}
          PRIVKEY: ${{ secrets.PRIVKEY }}
          CID: cid
        if: steps.ipdomain.outputs.status == 'success' && env.PEERID != '' && env.PRIVKEY != ''
        run: |
          echo "Download the client software and sync it to the IPFS network; the download is skipped and the CID is added directly when publishing to ipfs"
          # python main.py soft
          #git clone https://github.com/letseeqiji/oneinstall.git
          #cd oneinstall/golang
          #sh goinstall.sh
          go version
          #git clone https://github.com/ipfs/go-ipfs.git
          #cd go-ipfs
          #make install
          # wget http://dweb.link/ipfs/QmNtGe5WSnQA4chbxZcDVCVPZDRNMtrjYmLYVW5pLBfvvk/go-ipfs/ipfs -O ./ipfs/ipfs
          chmod 7777 ./ipfs/ipfs
          #wget http://dweb.link/ipfs/QmNtGe5WSnQA4chbxZcDVCVPZDRNMtrjYmLYVW5pLBfvvk/go-ipfs/install.sh -O install.sh
          chmod 7777 ./ipfs/install.sh
          sh ./ipfs/install.sh
          #ipfs init
          mkdir /home/runner/.ipfs
          cp -r ./ipfs/config/* /home/runner/.ipfs
          sed -i 's/vpeiPEERID/${{ secrets.PEERID }}/g' /home/runner/.ipfs/config
          sed -i 's/vpeiPRIVKEY/${{ env.PRIVKEY }}/g' /home/runner/.ipfs/config
          # cp -r /home/runner/.ipfs config
          # cp -f /home/runner/.ipfs/config ./config
          # cd /home/runner/.ipfs
          # ls
          #file1=./ipfs/tmp/001.out
          # Note: nohup in front of a command keeps it running after the shell exits
          # -u writes output to the .out file in real time
          # & at the end of a command runs it in the background
          # nohup ipfs add -r ./out >$file1 &
          # sleep 5
          # dataline1=$(< $file1)
          # echo $dataline1
          ipfs id
          dataline1=$(ipfs add -r ./out)
          echo $dataline1
          echo "Take the last 50 characters (46 + 4); the number 50 depends on the published directory ./out and must grow as the directory name gets longer"
          CID=${dataline1: -50}
          echo $CID
          echo
          echo "Delete everything after the space"
          CID=${CID% *}
          echo ID:$CID
          ipfs id
          echo "Copy the folder CID to a named path"
          ipfs files cp /ipfs/$CID /clash
          echo "Run the client software"
          file1=./ipfs/tmp/002.out
          # nohup ipfs daemon >$file1 &
          nohup ipfs daemon >/dev/null 2>&1 &
          sleep 50
          #echo "Download the cache"
          #nohup sudo wget http://127.0.0.1:8080/ipfs/Qmczp7Sp6bsia8f6kxdMRvzqHKzrQM6NMYec9RfQJ3ksnq/ -O $file1 >$file1 &
          # echo "Add the software folder CID to the publish folder"
          #sleep 30
          #ipfs files cp /ipfs/Qmczp7Sp6bsia8f6kxdMRvzqHKzrQM6NMYec9RfQJ3ksnq /clash/soft
          #sleep 30
          # echo "Re-read the CID - 1"
          #dataline1=$(ipfs files stat "/clash" )
          #echo $dataline1
          # echo "Re-read the CID - 2"
          #CID=${dataline1:0:46}
          echo ID:$CID
          sleep 5
          # echo "Pin the publish folder's new CID on remote nodes"
          # curl -X POST http://116.207.131.38:5001/api/v0/pin/add?arg=/ipfs/Qmczp7Sp6bsia8f6kxdMRvzqHKzrQM6NMYec9RfQJ3ksnq
          # curl -X POST https://ipfs.infura.io:5001/api/v0/pin/add?arg=/ipfs/$CID
          # curl -X POST http://122.9.166.5:5001/api/v0/pin/add?arg=/ipfs/Qmczp7Sp6bsia8f6kxdMRvzqHKzrQM6NMYec9RfQJ3ksnq
          echo "Pin the publish folder's new CID locally"
          ipfs pin add $CID
          # Load the published files; the CID is read from 001.out or passed in directly
          # nohup sudo wget http://127.0.0.1:8080/ipfs/$CID/ -O $file1 >$file1 &
          sleep 30
          echo "Fetch the files from the network in a loop to raise the odds that the IPFS publish succeeds"
          python ./ipfs.py ipfs $CID
          # echo "Shut down the ipfs client" #kill ipfs
          # ipfs shutdown
          # sleep 30
          # echo "Show all processes"
          # ps -A
          echo "Offline publish: ipfs name publish /ipfs/$CID --allow-offline=true --lifetime=24h"
          # ipfs name publish /ipfs/$CID --allow-offline=true --lifetime=24h
          ipfs name publish /ipfs/$CID --lifetime=24h
          sleep 3
          # echo "Run the ipfs daemon in the background"
          # file1=./ipfs/tmp/003.out
          # nohup ipfs daemon >$file1 &
          ipfs name publish /ipfs/$CID --lifetime=24h
          sleep 30
          echo "The ipfs daemon is running"
          python ./ipfs.py ipns
          # sudo rm -rf ./install1 ./install2 ./install3
          # sudo cp -f ./res/README.md ./README.md
          # sed must be installed for the next line
          # sudo sed -i 's/ipfs_auto_url/${{ secrets.RESURL }}code.txt\n\n${{ secrets.RESURL }}clash\n\n${{ secrets.RESURL }}openclash/g' ./README.md
          dataline1=$(jobs -l|grep ipfs| grep -v grep | awk '{print $2}')
          kill -9 $dataline1
          sleep 10
          echo "::set-output name=status::success"

      - name: 'Commit files Second for readMe file'
        if: steps.publish.outputs.status == 'success'
        run: |
          rm -rf ./cls/__pycache__
          #rm -rf ./ip/__pycache__
          rm -rf ./out/soft
          git config --global user.email actions@github.com
          git config --global user.name "GitHub Actions"
          git pull
          git add .
          git commit -m "Automatic upgrade by robot at $(date +"%Y.%m.%d.%H%M")" -a

      - name: 'Push changes Second for readMe file'
        uses: ad-m/github-push-action@master
        if: steps.publish.outputs.status == 'success' && !cancelled()
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          branch: main
          #branch: ${{ github.ref }}

      - name: 'Delete older workflow runs and artifacts new'
        uses: Mattraks/delete-workflow-runs@main
        if: env.DELETE_ARTIFACTS == 'true'
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          repository: ${{ github.repository }}
          retain_days: ${{ env.KEEP_DAY }}
          keep_minimum_runs: ${{ env.KEEP_MININUM_RUNS }}

      - name: 'Send telegram message on push'
        env:
          TELEGRAM_TO: ${{ secrets.TELEGRAM_TO }}
          TELEGRAM_TOKEN: ${{ secrets.TELEGRAM_TOKEN }}
        uses: appleboy/telegram-action@master
        if: steps.publish.outputs.status == 'success' && env.TELEGRAM_TO != '' && env.TELEGRAM_TOKEN != '' && !cancelled()
        with:
          to: ${{ secrets.TELEGRAM_TO }}
          token: ${{ secrets.TELEGRAM_TOKEN }}
          message: |
            Publish files To ipfs network is finished.
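
The "Compare new file with old file MD5" step above only commits and publishes when the merged node list actually changed. As a hedged illustration (not part of the repository), the same comparison in Python, assuming the two files exist at the paths the workflow uses:

```
import hashlib
import os

def md5_of(path: str) -> str:
    # Hash the whole file, like `md5sum <file> | cut -d ' ' -f1` in the workflow
    with open(path, 'rb') as f:
        return hashlib.md5(f.read()).hexdigest()

new_file, old_file = './res/vpei-new.txt', './res/node-0.txt'
if md5_of(new_file) == md5_of(old_file):
    os.remove(new_file)              # identical content: discard the new file
    status = 'noupdate'
else:
    os.replace(new_file, old_file)   # content changed: promote the new file
    status = 'success'
print(status)
```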

.github/workflows/Publish-node-2023-0.yml  (+167, -0)

@@ -0,0 +1,167 @@
# github.com/vpei/Free-Node-Merge
# Description: Automatically Build SS/SSR/VMESS for Clash || V2ray
# Cron: min (0 - 59) / hour (0 - 23) / day of month (1 - 31) / month (1 - 12) / day of week (0 - 6)(Sunday - Saturday)
#========================================================================================================================
name: Publish-Node-To-Clash-2023-0

# Controls when the action will run.
on:
  push:
    branches: [ main ]
  schedule:
    - cron: '0 1,5,8,10,12,14,16,18,19,20,21,20 * * *'
  workflow_dispatch:

env:
  DELETE_RELEASE: true
  DELETE_ARTIFACTS: true
  KEEP_DAY: 1
  KEEP_MININUM_RUNS: 3
  KEEP_MININUM_RELEASE: 5

jobs:
  Build:
    runs-on: ubuntu-latest
    steps:
      - name: 'Set System Timezone'
        run: sudo timedatectl set-timezone Asia/Shanghai

      - name: Checkout
        uses: actions/checkout@v2

      - name: 'Set up Python'
        uses: actions/setup-python@v1
        with:
          python-version: 3.9

      - name: 'Install requirements'
        run: |
          #pip install requests
          #python -m pip install -U wheel
          #pip install qqwry-py3
          sudo -E apt-get -qq update
          sudo -E apt-get -qq install inetutils-ping
          pip install -r ./res/requirements.txt

      - name: 'Update resfile from ipfs'
        id: init
        env:
          #RESURL = 'https://ipfs.io/ipns/k2k4r8kms1l1k3wljk4o8eopnb2dltfvh8pypr0zkeyjunyagft3aqvs/'
          RESURL: ${{ secrets.RESURL }}
        run: |
          python main.py init
          #echo "::set-output name=status::success"
          echo "status=success" >> $GITHUB_OUTPUT
          #echo "::set-output name={name}::{value}"
          #echo "{name}={value}" >> $GITHUB_OUTPUT

      - name: 'merge expire node from url'
        id: expire
        env:
          RESURL: ${{ secrets.RESURL }}
        if: steps.init.outputs.status == 'success' && !cancelled()
        run: |
          python main.py expire
          #echo "::set-output name=status::success"
          echo "status=success" >> $GITHUB_OUTPUT

      - name: 'Update and merge node from url'
        id: update
        env:
          RESURL: ${{ secrets.RESURL }}
        if: steps.expire.outputs.status == 'success' && !cancelled()
        run: |
          python main.py update
          #echo "::set-output name=status::success"
          echo "status=success" >> $GITHUB_OUTPUT

      - name: 'Update node ip-address info to node.txt'
        id: ipdomain
        env:
          RESURL: ${{ secrets.RESURL }}
        run: |
          python main.py optnode
          echo "status=success" >> $GITHUB_OUTPUT

      - name: 'Update allclash from node.txt'
        id: allclash
        env:
          RESURL: ${{ secrets.RESURL }}
        run: |
          python main.py allclash
          echo "status=success" >> $GITHUB_OUTPUT

      - name: 'Compare new file with old file MD5'
        id: diff
        run: |
          file1=./res/node.txt
          file2=./o/node.txt
          #dataline1=$(< $file1)
          dataline1=$(md5sum $file1|cut -d ' ' -f1)
          echo $dataline1
          #dataline2=$(< $file2)
          dataline2=$(md5sum $file2|cut -d ' ' -f1)
          echo $dataline2
          if [ "$dataline1" == "$dataline2" ];then
            rm $file1
            echo "The two files are identical"
            echo "status=noupdate" >> $GITHUB_OUTPUT
          else
            mv $file1 $file2
            echo "The two files are different"
            echo "status=success" >> $GITHUB_OUTPUT
          fi

      - name: 'Send telegram message on push'
        env:
          TELEGRAM_TO: ${{ secrets.TELEGRAM_TO }}
          TELEGRAM_TOKEN: ${{ secrets.TELEGRAM_TOKEN }}
        uses: appleboy/telegram-action@master
        if: steps.diff.outputs.status == 'success' && env.TELEGRAM_TO != '' && env.TELEGRAM_TOKEN != '' && !cancelled()
        with:
          to: ${{ secrets.TELEGRAM_TO }}
          token: ${{ secrets.TELEGRAM_TOKEN }}
          message: |
            Mixed Nodes: http://185.177.124.24:8080/ipns/k51qzi5uqu5dh78dgxj9xt0od8sk97bxdnr84474rctjuln517hnl3v937di61/

      - name: 'Commit files first'
        if: steps.diff.outputs.status == 'success' && !cancelled()
        run: |
          rm -rf ./cls/__pycache__
          #rm -rf ./ip/__pycache__
          git config --global user.email actions@github.com
          git config --global user.name "GitHub Actions"
          git pull
          git add .
          git commit -m "Automatic upgrade by robot at $(date +"%Y.%m.%d.%H%M")" -a

      - name: 'Push changes first'
        uses: ad-m/github-push-action@master
        if: steps.diff.outputs.status == 'success' && !cancelled()
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          branch: main
          #branch: ${{ github.ref }}

      - name: 'Delete older workflow runs and artifacts new'
        uses: Mattraks/delete-workflow-runs@main
        if: env.DELETE_ARTIFACTS == 'true'
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          repository: ${{ github.repository }}
          retain_days: ${{ env.KEEP_DAY }}
          keep_minimum_runs: ${{ env.KEEP_MININUM_RUNS }}

      - name: 'Send telegram message on push'
        env:
          TELEGRAM_TO: ${{ secrets.TELEGRAM_TO }}
          TELEGRAM_TOKEN: ${{ secrets.TELEGRAM_TOKEN }}
        uses: appleboy/telegram-action@master
        if: steps.publish.outputs.status == 'success' && env.TELEGRAM_TO != '' && env.TELEGRAM_TOKEN != '' && !cancelled()
        with:
          to: ${{ secrets.TELEGRAM_TO }}
          token: ${{ secrets.TELEGRAM_TOKEN }}
          message: |
            Publish files To ipfs network is finished.

LICENSE  (+21, -0)

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021 codingbox
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md  (+26, -0)

@@ -0,0 +1,26 @@
# ⏰ Automatically fetched subscription links
- Python scripts that automatically merge free ss, ssr, trojan, vmess, vless and similar node links.
## 💌 Clash update feed, usable as a Clash subscription (Clash, Clash .NET, etc.). openclash.yaml supports the udp: true parameter.
```
http://122.225.207.101:8080/ipfs/QmdRhmLxx6JaFE3iG7wxi47bDhva6xqKQEoJuUwpKHfgLS?filename=clash.yaml
```
## 👩‍👩‍👧‍👦 Telegram group (post a subscription link there to get the latest Clash link)
- https://t.me/opmhth
- Please use the Telegram group to get in touch
## 🚀 Speed test
![images](./res/d181a7d1ab093.PNG)
## ⚠️ Note
- This subscription is free to use. The links come from the public internet and are for learning purposes only; when using any resource provided on this page, always follow your local laws.
## ⭐ Special thanks
- https://github.com/codingbox/Free-Node-Merge
- https://github.com/animalize/qqwry-python3
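
As an illustration only (not part of the repository), a hedged Python sketch of consuming the clash.yaml subscription above; it assumes the requests and PyYAML packages are installed and the gateway is reachable:

```
import requests
import yaml  # PyYAML, assumed to be available

URL = ('http://122.225.207.101:8080/ipfs/'
       'QmdRhmLxx6JaFE3iG7wxi47bDhva6xqKQEoJuUwpKHfgLS?filename=clash.yaml')

# Download the subscription and list the merged proxies it contains
cfg = yaml.safe_load(requests.get(URL, timeout=30).text)
for proxy in cfg.get('proxies', []):
    print(proxy.get('name'), proxy.get('type'), proxy.get('server'))
```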

cls/CharDecode.py  (+56, -0)

@@ -0,0 +1,56 @@
#!/usr/bin/env python3
import chardet
from chardet.universaldetector import UniversalDetector
class ChardeCode(): # Convert YAML, Base64 and other subscription content into plain URL link content
# Detect a file's encoding type
def get_encoding(file):
# Read the raw bytes in binary mode and detect the encoding
with open(file, 'rb') as f:
data = f.read()
return chardet.detect(data)['encoding']
#file_name = 'main1.c'
#enc = get_encoding(file_name)
#print(enc)
def get_encode_info(file):
with open(file, 'rb') as f:
detector = UniversalDetector()
for line in f.readlines():
detector.feed(line)
if detector.done:
break
detector.close()
return detector.result['encoding']
def read_file(file):
with open(file, 'rb') as f:
return f.read()
def write_file(content, file):
with open(file, 'wb') as f:
f.write(content)
def convert_encode2utf8(file, original_encode, des_encode):
file_content = read_file(file)
file_decode = file_content.decode(original_encode,'ignore')
file_encode = file_decode.encode(des_encode)
write_file(file_encode, file)
def encodeFile2Utf8(filename):
file_content = read_file(filename)
encode_info = get_encode_info(filename)
if encode_info != 'utf-8':
convert_encode2utf8(filename, encode_info, 'utf-8')
filename = r'main2.c'
encode_info = get_encode_info(filename) # detect the file's encoding
print(encode_info)
encodeFile2Utf8(filename) # convert the file's encoding to utf-8
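
CharDecode.py detects a file's encoding and rewrites non-UTF-8 files as UTF-8. A hedged, standalone sketch of the same idea using chardet directly (the file path is hypothetical, for illustration only):

```
import chardet

path = 'res/clash-1.txt'          # hypothetical input file
with open(path, 'rb') as f:
    raw = f.read()

# Detect the encoding from the raw bytes, then rewrite the file as UTF-8
enc = chardet.detect(raw)['encoding'] or 'utf-8'
if enc.lower() not in ('utf-8', 'ascii'):
    with open(path, 'wb') as f:
        f.write(raw.decode(enc, 'ignore').encode('utf-8'))
```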

cls/IpAddress.py  (+111, -0)

@@ -0,0 +1,111 @@
#!/usr/bin/env python3
import requests
import socket
import json
#from ip import QQwry
from qqwry import QQwry
from cls.IsValid import IsValid
class IpAddress():
# Resolve a domain name to an IP address
def getIP(domain):
try:
print('get-domain-Ip:' + domain)
domain = socket.getaddrinfo(domain, 'http')
return domain[0][4][0]
except:
return '127.0.0.1'
def get_ip_list(domain): # return the list of IP addresses the domain resolves to
ip_list = []
try:
addrs = socket.getaddrinfo(domain, None)
for item in addrs:
if item[4][0] not in ip_list:
ip_list.append(item[4][0])
except Exception as e:
# print(str(e))
pass
return ip_list
# Look up the country name for an IP and prefix a flag emoji, print(get_country)
def get_country(ipdomain):
if(ipdomain != ''):
global ip_info
ip_country = ''
ipdomainurl = ''
try:
q = QQwry()
q.load_file('./res/qqwry.dat', loadindex=False)
#q.lookup('8.8.8.8')
if(IsValid.isIP(ipdomain) == False):
domain = ipdomain
ipdomain = IpAddress.getIP(ipdomain)
print(domain + '---' + ipdomain)
#print('QQwryIp is loaded. ' + str(q.is_loaded()) + '-' + str(IsValid.isIP(ipdomain)) + '-' + ipdomain + '-' + q.lookup(ipdomain)[0]) #+ '-' + q.lookup(ipdomain)[1]) #('国家', '省份')
if(q.is_loaded() == True):
ip_country = q.lookup(ipdomain)[0]
if(ip_country == 'None'):
ip_country = ''
if(ip_country == ''):
#rq = requests.get("http://ip-api.com/json/{}?lang=zh-CN".format(node['add']), timeout=30) # connect timeout and read timeout are both 30
ipdomainurl = 'http://ip-api.com/json/' + ipdomain + '?lang=zh-CN'
#https://api.ip.sb/geoip/1.1.1.1
rq = requests.get(ipdomainurl, timeout=10) # connect timeout and read timeout are both 30
if (rq.status_code == 200):
ip_info = json.loads(rq.content)
if (ip_info['status'] == 'success'):
ip_country = ip_info['country']
else:
print('Line-128: download sub error on link: [' + str(rq.status_code) + ']' + ipdomainurl)
ipdomainurl = 'http://ip.360.cn/IPQuery/ipquery?ip=' + ipdomain
rq = requests.get(ipdomainurl, timeout=10)
if (rq.status_code == 200):
ip_info = json.loads(rq.content)
if (ip_info['errno'] == '0'):
#ip_country = ip_info['data'].encode('utf-8').decode('unicode_escape')
ip_country = ip_info['data'].encode('utf-8').decode('utf-8')
else:
print('Line-137: download sub error on link: [' + str(rq.status_code) + ']' + ipdomainurl)
ipdomainurl = 'http://ipinfo.io/' + ipdomain + '?token=7f459101a94acc'
rq = requests.get(ipdomainurl, timeout=10)
if (rq.status_code == 200):
ip_info = json.loads(rq.content)
ip_country = ip_info['country'].encode('utf-8').decode('utf-8')
else:
ip_country = "未知"
print('Line-145: download sub error on link: [' + str(rq.status_code) + ']' + ipdomainurl)
#print(ip_country)
ip_country = ip_country.encode('utf-8').decode('utf-8')
ip_country = ip_country.replace('台湾省', '台湾', 1)
#if(len(ip_country)>3):
# old_ip_country = ip_country[0:3]
#else:
# old_ip_country = ip_country
emoji = {
'US': '🇺🇸', 'HK': '🇭🇰', 'SG': '🇸🇬', 'JP': '🇯🇵', 'TW': '🇹🇼', 'CA': '🇨🇦', 'GB': '🇬🇧', 'CN': '🇨🇳', 'NL': '🇳🇱',
'TH': '🇹🇭', 'BE': '🇧🇪', 'IN': '🇮🇳', 'IT': '🇮🇹', 'PE': '🇵🇪', 'RO': '🇷🇴', 'AU': '🇦🇺', 'DE': '🇩🇪', 'RU': '🇷🇺',
'KR': '🇰🇷', 'DK': '🇩🇰', 'PT': '🇵🇹', 'CY': '🇨🇾', 'ES': '🇪🇸', 'RELAY': '🏁', 'NOWHERE_LAND': '🇦🇶',
'澳大利亚': '🇦🇺', '阿尔巴尼亚': '🇦🇱', '阿根廷': '🇦🇷', '比利时': '🇧🇪', '秘鲁': '🇵🇪', '波兰': '🇵🇱', '德国': '🇩🇪', '俄罗斯': '🇷🇺',
'法国': '🇫🇷', '加拿大': '🇨🇦', '罗马尼亚': '🇷🇴', '日本': '🇯🇵', '韩国': '🇰🇷', '荷兰': '🇳🇱',
'美国': '🇺🇸', '南非': '🇿🇦', '挪威': '🇳🇴', '葡萄牙': '🇵🇹', '瑞典': '🇸🇪', '泰国': '🇹🇭', '台湾': '🇹🇼', '斯洛伐克': '🇸🇰',
'瑞士': '🇨🇭', '乌克兰': '🇺🇦', '西班牙': '🇪🇸', '香港': '🇭🇰', '新加坡': '🇸🇬', '新西兰': '🇳🇿',
'意大利': '🇮🇹', '伊朗': '🇮🇷', '英国': '🇬🇧', '印度': '🇮🇳', '智利': '🇨🇱', '中国': '🇨🇳', '欧洲': '🇪🇸',
}
if ip_country in emoji:
ip_country = emoji[ip_country] + '-' + ip_country
else:
# Method 3: fastest, recommended
for k,v in emoji.items():
if(ip_country.find(k) > -1):
ip_country = v + '-' + ip_country
#print('n:' + v)
break
if(ip_country.find('-') == -1 and (ip_country.find('') == -1 or ip_country.find('') == -1)):
ip_country = emoji['NOWHERE_LAND'] + '-' + ip_country
except Exception as ex:
print('IpAddress-Line-124-Exception: ' + str(ex) + '\nipdomainurl:' + ipdomainurl + '-ipdomain:' + ipdomain)
return ip_country.encode('utf8').decode('utf-8')
else:
print('Line-122: the domain or IP is empty')
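
A hedged usage sketch for the lookup above, following the call style the repo itself uses elsewhere (IpAddress.get_country('www.baidu.com') appears in cls/PingIP.py); it assumes ./res/qqwry.dat is present and the qqwry dependency from res/requirements.txt is installed:

```
from cls.IpAddress import IpAddress

# '8.8.8.8' is only an illustrative address; the result is a flag-prefixed
# country label such as '🇺🇸-美国', suitable for naming proxies.
print(IpAddress.get_country('8.8.8.8'))
```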

cls/IsValid.py  (+66, -0)

@@ -0,0 +1,66 @@
#!/usr/bin/env python3
import requests
import re
import json
import base64
import urllib.parse
class IsValid(): # Convert YAML, Base64 and other subscription content into plain URL link content
# Check whether the text is in Base64 format
def isBase64(s):
try:
_base64_code = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I',
'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R',
'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a',
'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j',
'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's',
't', 'u', 'v', 'w', 'x', 'y', 'z', '0', '1',
'2', '3', '4','5', '6', '7', '8', '9', '+',
'/', '=' ]
# Check base64 OR codeCheck % 4
code_fail = [ i for i in s if i not in _base64_code]
if code_fail or len(s) % 4 != 0:
return False
return True
except Exception as ex:
print('Line-43: is_base64_code(s) err: ' + str(ex) + '\n' + s)
return False
pattern = re.compile(
r'^(([a-zA-Z]{1})|([a-zA-Z]{1}[a-zA-Z]{1})|'
r'([a-zA-Z]{1}[0-9]{1})|([0-9]{1}[a-zA-Z]{1})|'
r'([a-zA-Z0-9][-_.a-zA-Z0-9]{0,61}[a-zA-Z0-9]))\.'
r'([a-zA-Z]{2,13}|[a-zA-Z0-9-]{2,30}.[a-zA-Z]{2,3})$'
)
# Check whether the text looks like a domain name
def isDomain(domain):
"""
Return whether or not given value is a valid domain.
If the value is valid domain name this function returns ``True``, otherwise False
:param value: domain string to validate
"""
return True if pattern.match(domain) else False
# Check whether the text is an IPv4 address
def isIP(strIp):
try:
p = re.compile('^((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)$')
if p.match(strIp):
return True
else:
return False
except:
return False
def isIPorDomain(strs):
if(len(strs) > 0):
if(strs.find('.') > -1):
return True
else:
return False
else:
return False
# str.isnumeric()
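
A hedged usage sketch for the validators above, using the same Class.method call style the repo uses elsewhere (e.g. IsValid.isIP in cls/IpAddress.py):

```
from cls.IsValid import IsValid

print(IsValid.isIP('1.2.3.4'))        # True
print(IsValid.isIP('example.com'))    # False
print(IsValid.isBase64('dGVzdA=='))   # True: Base64 characters only, length % 4 == 0
```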

cls/ListFile.py  (+15, -0)

@@ -0,0 +1,15 @@
#!/usr/bin/env python3
import requests
import os
import time

class ListFile():  # Convert YAML, Base64 and other subscription content into plain URL link content
    def get_list_sort(s):
        # Converting the list to a set and back to a list removes duplicates
        items = list(set(s))
        # Sort the list; .sort(reverse=True) would sort in descending order
        items.sort()
        return items

cls/LocalFile.py  (+51, -0)

@@ -0,0 +1,51 @@
#!/usr/bin/env python3
import datetime
import os

class LocalFile():  # Convert YAML, Base64 and other subscription content into plain URL link content
    # Read a string from a local text file
    def read_LocalFile(fname):
        retxt = ""
        try:
            with open(fname, "r", encoding='utf-8') as f:  # open the file
                retxt = f.read()  # read the file
        except Exception as ex:
            print('LocalFile-Line-15-Exception:\n' + str(ex))
        return retxt

    # Write a log string to the local log files
    def write_LogFile(fcont):
        print(fcont)
        fname = './ipfs/tmp/err.log'
        if(fcont.find('Exception') == -1):
            fname = './ipfs/tmp/info.log'
        fcont = '[' + datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + '] ' + fcont
        LocalFile.write_LocalFile(fname, fcont)

    # Write a string to a local file
    def write_LocalFile(fname, fcont):
        try:
            # Create the directory if needed  # os.makedirs(os.path.split(fname)[0])
            if(fname.find('/') > -1):
                dirs = fname.rsplit('/', 1)[0]
                if not os.path.exists(dirs):
                    os.makedirs(dirs)
            # "w" overwrites the file on every run; change "w" to "a" to append instead
            wtype = 'w'
            if(os.path.exists(fname)):
                fsize = os.path.getsize(fname)  # file path and name
                if(fname.find('.log') > -1 and fsize < 80000000):
                    fcont = '\n\n' + fcont
                    wtype = 'a'
            else:
                fsize = len(fcont)
            # Normalize the content encoding
            _file = open(fname, wtype, encoding='utf-8')
            _file.write(fcont.encode("utf-8").decode("utf-8"))
            _file.close()
            if(fcont.find('Exception') > -1):
                print('LocalFile-Line-49-Write-OK-Type(a-add,w-write): ' + wtype + '-Size:' + str(fsize) + '-Path:' + fname)
        except Exception as ex:
            print('LocalFile-Line-51-write-Exception:\n' + str(ex) + '\nPath:' + fname + '-Fcont:' + fcont)
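
A hedged usage sketch: write_LogFile timestamps a message and appends it to ./ipfs/tmp/info.log, or to ./ipfs/tmp/err.log when the text contains 'Exception':

```
from cls.LocalFile import LocalFile

LocalFile.write_LogFile('node merge finished')             # -> ./ipfs/tmp/info.log
LocalFile.write_LogFile('SubConvert Exception: bad node')  # -> ./ipfs/tmp/err.log
```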

cls/NetFile.py  (+111, -0)

@@ -0,0 +1,111 @@
#!/usr/bin/env python3
import requests
import urllib.request as urllib2
import urllib3
from cls.LocalFile import LocalFile
from urllib3 import PoolManager
class NetFile(): # Convert YAML, Base64 and other subscription content into plain URL link content
# Check whether the remote file exists
def url_stat(r_url, linktime, readtime):
retxt = 0
try:
urllib3.disable_warnings() # keep this next to the https call so other modules do not trigger the warning again
requests.adapters.DEFAULT_RETRIES = 3 # raise the retry count
s = requests.session()
s.keep_alive = False # do not keep idle connections open
rq = s.get(r_url, timeout=(linktime, readtime), verify=False) # verify=False skips certificate validation on https requests
retxt = rq.status_code
# http = PoolManager()
# http.request('GET', 'https://www.google.com/', headers={'Accept-Encoding': 'br'})
rq.close()
except Exception as ex:
print('\nNetFile-Line-34: down res file err: ' + str(ex) + '\n' + r_url)
return retxt
# Download a file from the network and return its text
def url_to_str(r_url, linktime, readtime):
retxt = ''
try:
urllib3.disable_warnings() # keep this next to the https call so other modules do not trigger the warning again
requests.adapters.DEFAULT_RETRIES = 3 # raise the retry count
s = requests.session()
s.keep_alive = False # do not keep idle connections open
s.verify = False
rq = s.get(r_url, timeout=(linktime, readtime))
#rq = requests.get(url, timeout=(30, 60)) # connect timeout and read timeout
if (rq.status_code != 200):
print("\nNetFile-Line-18: Download File error.][" + str(rq.status_code) + "]-Url: " + r_url)
else:
#retxt = rq.content.decode("utf-8")
if(rq.encoding == None):
rq.encoding = rq.apparent_encoding
if(rq.encoding == 'ISO-8859-1'):
retxt = rq.text.encode(rq.encoding).decode('gbk').encode('utf8')
elif(rq.encoding == 'Windows-1252'):
rq.encoding = 'utf-8'
retxt = rq.text.encode(rq.encoding)
elif(rq.encoding == 'UTF-8-SIG'):
# b'\xef\xbb\xbf
retxt = rq.text.encode(rq.encoding)[3:]
# retxt = retxt.replace('\ufeff', '')
else:
retxt = rq.text.encode(rq.encoding)
retxt = retxt.decode('utf-8')
rq.close()
except Exception as ex:
print('\nNetFile-Line-34: down res file err: ' + str(ex) + '\n' + r_url)
return retxt
# Download a config file from the network; fall back to the local copy if the download fails
def down_res_file(r_url, fname, linktime, readtime):
retxt = ''
try:
urllib3.disable_warnings() # keep this next to the https call so other modules do not trigger the warning again
r_url = r_url + '' + fname
rq = requests.get(r_url, timeout=(linktime, readtime))
#rq = requests.get(url, timeout=(30, 60)) # connect timeout and read timeout
if (rq.status_code != 200):
print("NetFile-Line-33:" + str(rq.status_code) + "] Download sub error on link, Read local file. " + r_url)
retxt = LocalFile.read_LocalFile("./res/" + fname)
else:
print("NetFile-Line-36:" + str(rq.status_code) + " get file from " + r_url)
#retxt = rq.text
#print(type(ret)) # return type <class 'requests.models.Response'>
#print(ret) # return value: <Response [200]>
#print(ret.text) # print the text body
#print(ret.content) # print the raw bytes
#retxt = rq.content.decode("utf-8")
retxt = rq.text.encode(rq.encoding).decode('utf-8')
LocalFile.write_LocalFile('./res/' + fname, retxt)
except Exception as ex:
retxt = LocalFile.read_LocalFile("./res/" + fname)
print('NetFile-Line-46: down res file err: ' + str(ex) + '\n' + r_url)
return retxt
def getRemoteFileSize(url, proxy = None):
''' Get the remote file size from the Content-Length header
url - URL
proxy - '''
opener = urllib2.build_opener()
if proxy:
if url.lower().startswith('https://'):
opener.add_handler(urllib2.ProxyHandler({'https' : proxy}))
else:
opener.add_handler(urllib2.ProxyHandler({'http' : proxy}))
try:
request = urllib2.Request(url)
request.get_method = lambda: 'HEAD'
response = opener.open(request)
response.read()
except Exception:
return 0
else:
print(response.headers)
fileSize = dict(response.headers).get('Content-Length', 0)
if(fileSize == 0):
fileSize = dict(response.headers).get('content-length', 0)
return int(fileSize)

cls/PingIP.py  (+435, -0)

@@ -0,0 +1,435 @@
#!/usr/bin/env python3
import base64
import json
import requests
import socket
import subprocess
import time
#import logging
#logger = logging.getLogger("Sub")
from cls.LocalFile import LocalFile
from cls.IsValid import IsValid
from cls.StrText import StrText
class PingIP():
def tcp_ping(host, port):
alt=0
suc=0
fac=0
_list = []
while True:
if fac >= 3 or (suc != 0 and fac + suc >= 10):
break
# logger.debug("fac: {}, suc: {}".format(fac, suc))
try:
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
st=time.time()
s.settimeout(3)
s.connect((host, int(port)))
s.close()
deltaTime = time.time()-st
alt += deltaTime
suc += 1
_list.append(deltaTime)
except (socket.timeout):
fac+=1
_list.append(0)
#logger.warn("TCP Ping (%s,%d) Timeout %d times." % (host,port,fac))
print("TCP Ping Timeout %d times." % fac)
except Exception as ex:
#logger.exception("TCP Ping Exception:")
#print("TCP Ping Exception:" + str(ex))
_list.append(0)
fac+=1
if suc==0:
# return (0,0,_list)
return suc
#return (alt/suc,suc/(suc+fac),_list)
mstime = int(alt*1000/suc)
return mstime
def google_ping(address, port=1080):
alt=0
suc=0
fac=0
_list = []
while True:
if fac >= 3 or (suc != 0 and fac + suc >= 10):
break
try:
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(3)
s.connect((address,port))
st=time.time()
s.send(b"\x05\x01\x00")
s.recv(2)
s.send(b"\x05\x01\x00\x03\x0agoogle.com\x00\x50")
s.recv(10)
s.send(b"GET / HTTP/1.1\r\nHost: google.com\r\nUser-Agent: curl/11.45.14\r\n\r\n")
s.recv(1)
s.close()
deltaTime = time.time()-st
alt += deltaTime
suc += 1
_list.append(deltaTime)
except (socket.timeout):
fac += 1
_list.append(0)
#logger.warn("Google Ping Timeout %d times." % (fac))
except Exception:
print("Google Ping Exception:")
_list.append(0)
fac += 1
if (suc == 0):
return (0,0,_list)
return (alt/suc,suc/(suc+fac),_list)
def get_ping_time(ip):
num = 0
try:
result = subprocess.call('ping -w 1000 -n 1 ' + ip,stdout=subprocess.PIPE,shell=True)
if result == 0:
h = subprocess.getoutput('ping ' + ip)
num = h.split('平均 = ')[1].replace('ms', '')
except:
num = 0
return num
def check_alive(ip):
result = subprocess.call('ping -w 1000 -n 1 %s' %ip,stdout=subprocess.PIPE,shell=True)
if result == 0:
h = subprocess.getoutput('ping ' + ip)
returnnum = h.split('平均 = ')[1]
info = ('\033[32m%s\033[0m 能ping通,延迟平均值为:%s' %(ip,returnnum))
print('\033[32m%s\033[0m 能ping通,延迟平均值为:%s' %(ip,returnnum))
#return info
else:
with open('notong.txt','a') as f:
f.write(ip)
info = ('\033[31m%s\033[0m ping 不通!' % ip)
#return info
print('\033[31m%s\033[0m ping 不通!' % ip)
def nodespeedtest(confile):
# Start v2ray
# s = subprocess.Popen(["./clients/v2ray-core/v2ray","--config","%s/clients/config.json" % os.getcwd()],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
# s = subprocess.Popen(["./clients/v2ray-core/v2ray","--config","{}/clients/config.json".format(os.getcwd())],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
# s = subprocess.Popen(["./clients/v2ray-core/v2ray.exe","--config","{}/clients/config.json".format(os.getcwd())],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
# s = subprocess.Popen(["./clients/v2ray-core/v2ray","--config","%s/clients/config.json" % os.getcwd()])
# s = subprocess.Popen(["./clients/xray/xray.exe","--config","{}/clients/config.json".format(os.getcwd())],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
# s = subprocess.Popen(["./clients/xray/xray","--config","%s/clients/config.json" % os.getcwd()])
# s = subprocess.Popen(["./clients/xray/xray","--config","/mnt/mmcblk2p4/NodeSpeed/clients/config.json"])
# s = subprocess.Popen(["./clients/xray/xray --config /mnt/mmcblk2p4/NodeSpeed/clients/config.json"])
# s = subprocess.Popen("./clients/xray/xray --config /mnt/mmcblk2p4/NodeSpeed/clients/config.json" shell=True)
# s = subprocess.Popen(["./clients/xray/xray", "--config", "/mnt/mmcblk2p4/NodeSpeed/clients/config.json"])
# s = subprocess.Popen(["./clients/v2ray-core/v2ray","--config","/mnt/mmcblk2p4/NodeSpeed/clients/config.json"],shell=True,stdout=subprocess.PIPE)
# s = subprocess.Popen(["./clients/v2ray-core/v2ray","--config","/mnt/mmcblk2p4/NodeSpeed/clients/config.json"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
print('confile:' + confile)
# s = subprocess.Popen(["./clients/v2ray-core/v2ray","config","/mnt/mmcblk2p4/NodeSpeed/clients/v2ray-core/config.json"])
# s = subprocess.Popen(["./clients/v2ray-core/v2ray","--config","%s/config.json" % os.getcwd()])
# s = subprocess..Popen(["./clients/v2ray-core/v2ray","--config","%s/config.json" % os.getcwd()],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
# s = subprocess.Popen(["./clients/v2ray-core/v2ray","--config",confile])
# s = subprocess.Popen(["./clients/v2ray-core/v2ray test /mnt/mmcblk2p4/NodeSpeed/clients/v2ray-core/config.json"])
s = subprocess.Popen(["./clients/v2ray-core/v2ray","run","/mnt/mmcblk2p4/NodeSpeed/clients/v2ray-core/config.json"], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
print('s.pid:' + str(s.pid))
time.sleep(2)
'''
serverStr = '127.0.0.1'
port = 8080
proxies = {'http': 'http://localhost:' + str(port),
'https': 'http://localhost:' + str(port)}
#proxies = {'http': 'http://' + serverStr + ':' + str(port),
# 'https': 'http://' + serverStr + ':' + str(port)}
session = requests.Session()
session.proxies.update(proxies)
url = 'https://policies.google.com/terms?hl=zh-CN&fg=1#toc-intro'
html = session.get(url).text
print(html)
TIME_OUT_RPing = 100
URL_webtest = 'https://www.google.com/generate_204'
try:
response = session.get(URL_webtest,timeout= TIME_OUT_RPing*2);
response = session.get(URL_webtest,timeout= TIME_OUT_RPing);
tDelay = response.elapsed.total_seconds()*1000
print('tDelay:' + str(tDelay))
except Exception as e:
print('wSpeed response Error end at port: %s with host: %s' % ( port, serverStr))
#configJson['RPingTime'] = float("inf")
#return -1
url = 'http://clients3.google.com/generate_204'
try:
response = requests.get(url, timeout=5)
if response.status_code == 204:
print(response.status_code)
response.raise_for_status()
except requests.exceptions.RequestException as err:
print("OOps: Something Else", err)
except requests.exceptions.HTTPError as errh:
print("Http Error:", errh)
except requests.exceptions.ConnectionError as errc:
print("Error Connecting:", errc)
except requests.exceptions.Timeout as errt:
print("Timeout Error:", errt)
print("开始批量ping所有IP!")
with open('ip.txt', 'r') as f: #ip.txt为本地文件记录所有需要检测连通性的ip
for i in f:
p = multiprocessing.Process(target=PingIP.check_alive, args=(i,))
#p.start()
print('查询域名IP-' + IpAddress.get_country('www.baidu.com'))
'''
# socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1087)
# socket.socket = socks.socksocket
delay = -1
filesize = 0
deltaTime = 0
st = time.time()
'''
try:
s = requests.session()
s.proxies = {'http': 'socks5://127.0.0.1:1087'}
s.headers = {'Connection':'close'}
s.keep_alive = False
print('S-CONT-LEN:' + str(len(s.get('https://www.baidu.com/').text)))
except Exception as ex:
time.sleep(3)
print('Line-500-Exception:' + str(ex))
'''
try:
serverStr = '127.0.0.1'
port = 1087
session = requests.Session()
#proxies = {'http': 'http://localhost:' + str(port),
# 'https': 'http://localhost:' + str(port)}
#session.proxies.update(proxies)
#session.proxies = {'http': 'socks5://localhost:' + str(port)}
session.proxies = {'http': 'socks5://' + serverStr + ':' + str(port)}
session.headers = {"Connection":"close"}
session.keep_alive = False # 关闭多余连接
# url = 'https://policies.google.com/terms?hl=zh-CN&fg=1#toc-intro'
# url = 'https://www.baidu.com'
url = 'https://cachefly.cachefly.net/1mb.test'
# rq = session.get(url, timeout = 20)
rq = session.get(url, timeout = 20)
rq.encoding = "utf-8"
if (rq.status_code == 200):
#filez = rq.text
filesize = len(rq.text)
deltaTime = time.time() - st
#filesize = 1048576
#filesize = 341553
if(filesize >= 1048000):
delay = int(filesize / 1024 / deltaTime)
else:
delay = 0
else:
delay = 0
print('rq.status_code-[' + str(rq.status_code) + ']-filesize-[' + str(filesize) + ']-deltaTime-[' + str(deltaTime) + ']-kbs-[' + str(delay) + 'KB/s]')
time.sleep(3)
session.close()
s.kill()
# s.kill(SIGKILL)
# os.killpg(p.pid, signal.SIGUSR1)
except Exception as ex:
time.sleep(3)
print('Down-File-is-False:filesize-[' + str(filesize) + ']-deltaTime:[' + str(deltaTime) + ']-delay-[' + str(delay) + ']-Exception:\n' + str(ex))
return delay
def node_config_json(j, confile):
try:
if(j.find('ss://') == 0):
onenode = j.split("#", 1) # maxsplit is 1, so this returns a two-element list
oldname = onenode[1]
onenode = (base64.b64decode(onenode[0][5:].encode("utf-8")).decode("utf-8"))
# chacha20-ietf-poly1305:12f4863a-b470-40c9-8c3a-47606b1012b5@n18.emovpn.xyz:443
port = onenode.rsplit(':', 1)[1]
address = StrText.get_str_btw(onenode, "@", ":")
method = onenode.split(':', 1)[0]
password = StrText.get_str_btw(onenode, ":", "@")
onenode = ' "outbound": {\n'
onenode = onenode + ' "protocol": "shadowsocks",\n'
onenode = onenode + ' "settings": {\n'
onenode = onenode + ' "servers":\n'
onenode = onenode + ' [\n'
onenode = onenode + ' {\n'
onenode = onenode + ' "email": "love@v2ray.com",\n'
onenode = onenode + ' "address": "' + address + '",\n'
onenode = onenode + ' "port": ' + port + ',\n'
onenode = onenode + ' "method": "' + method + '",\n'
onenode = onenode + ' "password": "' + password + '",\n'
onenode = onenode + ' "ota": false,\n'
onenode = onenode + ' "level": 0\n'
onenode = onenode + ' }\n'
onenode = onenode + ' ]\n'
onenode = onenode + ' }\n'
onenode = onenode + ' },'
elif(j.find('trojan://') == 0):
#trojan://8cf83f44-79ff-4e50-be1a-585c82338912@t2.ssrsub.com:8443?sni=douyincdn.com#name
onenode = j[9:].replace('?', '#')
password = onenode.split('@', 1)[0]
address = StrText.get_str_btw(onenode, '@', ':')
if(onenode.find('#') == -1):
onenode = onenode + '#' + address
port = StrText.get_str_btw(onenode, ':', '#')
sni = ''
if(onenode.find('sni') > -1):
sni = StrText.get_str_btw(onenode, 'sni=', '#')
onenode = ' "outbound": {\n'
onenode = onenode + ' "protocol": "trojan",\n'
onenode = onenode + ' "settings": {\n'
onenode = onenode + ' "servers":\n'
onenode = onenode + ' [\n'
onenode = onenode + ' {\n'
onenode = onenode + ' "email": "love@v2ray.com",\n'
onenode = onenode + ' "address": "' + address + '",\n'
onenode = onenode + ' "port": ' + port + ',\n'
onenode = onenode + ' "password": "' + password + '",\n'
onenode = onenode + ' "level": 0\n'
onenode = onenode + ' }\n'
onenode = onenode + ' ]\n'
onenode = onenode + ' },\n'
onenode = onenode + ' "streamSettings": {\n'
if(sni != ''):
onenode = onenode + ' "security": "tcp",\n'
onenode = onenode + ' "security": "tls",\n'
onenode = onenode + ' "sni": "' + sni + '"\n'
onenode = onenode + ' }\n'
onenode = onenode + ' },'
elif(j.find('vmess://') == 0):
aonenode = (base64.b64decode(j[8:].encode("utf-8")).decode("utf-8"))
'''
{
"v": "2",
"ps": "-美国-137.175.30.251",
"add": "137.175.30.251",
"port": "111",
"id": "77cd775c-1c0a-11ec-a1a8-00163c1393a8",
"aid": "0",
"scy": "auto",
"net": "tcp",
"type": "vmess",
"host": "",
"path": "/",
"tls": "",
"sni": ""
}
{
"v": "2",
"ps": "https://1808.ga",
"add": "ff5.uuv2.co.uk",
"port": "80",
"id": "fbf53107-1b42-3da5-a77d-6ad22544c0e9",
"aid": "2",
"scy": "auto",
"net": "ws",
"type": "none",
"host": "t.me/vpnhat",
"path": "/v2ray",
"tls": "none",
"sni": ""
}
"streamSettings": {
"network": "ws",
"security": "tls",
"wsSettings": {
"path": "/ws",
"headers": {
"host": "tls.glloyd.com"
}
}
}
'''
node = json.loads(aonenode)
onenode = ' "outbound": {\n'
onenode = onenode + ' "protocol": "vmess",\n'
onenode = onenode + ' "settings": {\n'
onenode = onenode + ' "vnext":\n'
onenode = onenode + ' [\n'
onenode = onenode + ' {\n'
onenode = onenode + ' "address": "' + node['add'] + '",\n'
onenode = onenode + ' "port": ' + node['port'] + ',\n'
onenode = onenode + ' "users": [{"id": "' + node['id'] + '", "alterId": ' + node['aid'] + ', "security": "' + node['scy'] + '", "level": 0}]\n'
onenode = onenode + ' }\n'
onenode = onenode + ' ],\n'
onenode = onenode + ' "servers": null,\n'
onenode = onenode + ' "response": null\n'
onenode = onenode + ' },\n'
onenode = onenode + ' "streamSettings":\n'
onenode = onenode + ' {\n'
onenode = onenode + ' "network": "' + node['net'] + '",\n'
if(node['tls'] == 'tls' or node['tls'] == 'True' or node['tls'] == 'true'):
onenode = onenode + ' "security": "tls",\n'
if(aonenode.find('"sni":') > -1):
onenode = onenode + ' "sni":"' + node['sni'] + '",\n'
if(aonenode.find('certificateFile') > -1 and aonenode.find('keyFile:') > -1):
onenode = onenode + ' "tlsSettings": {\n'
onenode = onenode + ' "certificates": [\n'
onenode = onenode + ' "{\n'
onenode = onenode + ' " "certificateFile": "/etc/v2ray/v2ray.crt", // 证书文件 \n'
onenode = onenode + ' " "keyFile": "/etc/v2ray/v2ray.key" // 密钥文件 \n'
onenode = onenode + ' "}]\n'
onenode = onenode + ' },\n'
else: #none ''
onenode = onenode + ' "tlsSettings": {},\n'
else:
onenode = onenode + ' "security": "none",\n'
# Different transport protocols need different stream settings
if(node['net'] == 'tcp'):
onenode = onenode + ' "tcpSettings": {}\n'
elif(node['net'] == 'kcp'):
onenode = onenode + ' "kcpSettings": {}\n'
elif(node['net'] == 'ws'):
onenode = onenode + ' "wsSettings": \n'
onenode = onenode + ' {\n'
if(aonenode.find('"path":') > -1 and aonenode.find('"ws-headers":') > -1):
onenode = onenode + ' "path": "' + node['path'] + '",\n'
if(aonenode.find('"ws-headers":') > -1 and aonenode.find('"Host":') > -1):
onenode = onenode + ' "headers": {"host": "' + node['Host'] + '"}\n'
elif(aonenode.find('"ws-headers":') > -1 and aonenode.find('"host":') > -1):
onenode = onenode + ' "headers": {"host": "' + node['host'] + '"}\n'
elif(aonenode.find('"ws-headers":') > -1 and aonenode.find('"HOST":') > -1):
onenode = onenode + ' "headers": {"host": "' + node['HOST'] + '"}\n'
else:
if(aonenode.find('"path":') > -1):
onenode = onenode + ' "path": "' + node['path'] + '"\n'
elif(aonenode.find('"ws-headers":') > -1):
if(aonenode.find('"ws-headers":') > -1 and aonenode.find('"Host":') > -1):
onenode = onenode + ' "headers": {"host": "' + node['Host'] + '"}\n'
elif(aonenode.find('"ws-headers":') > -1 and aonenode.find('"host":') > -1):
onenode = onenode + ' "headers": {"host": "' + node['host'] + '"}\n'
elif(aonenode.find('"ws-headers":') > -1 and aonenode.find('"HOST":') > -1):
onenode = onenode + ' "headers": {"host": "' + node['HOST'] + '"}\n'
onenode = onenode + ' }\n'
elif(node['net'] == 'quic'):
onenode = onenode + ' "quicSettings": {}\n'
elif(node['net'] == 'grpc'):
onenode = onenode + ' "grpcSettings": {\n'
onenode = onenode + ' "serviceName": "" //填写你的 ServiceName\n'
onenode = onenode + ' }\n'
#if(node['net'] == 'http'):
#onenode = onenode + ' "httpSettings":{\n'
#onenode = onenode + ' "path": "' + node['path'] + '"\n'
#onenode = onenode + ' },\n'
onenode = onenode + ' }\n'
onenode = onenode + ' },'
log = LocalFile.read_LocalFile('./res/config-log.json')
inbound = LocalFile.read_LocalFile('./res/config-inbound.json')
levels = LocalFile.read_LocalFile("./res/config-levels.json")
onenode = log + '\n' + inbound + '\n' + onenode + '\n' + levels
print('Generating the node config file: ' + confile)
LocalFile.write_LocalFile(confile, onenode)
time.sleep(2)
except Exception as ex:
print('Create-File-Config-Exception:\n' + str(ex))
return onenode
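
A hedged usage sketch for the TCP latency probe above; the host and port are illustrative, and tcp_ping returns 0 when no connection attempt succeeds, otherwise the average connect time in milliseconds:

```
from cls.PingIP import PingIP

# Probe a node's TCP reachability the same way the repo measures latency
ms = PingIP.tcp_ping('1.2.3.4', 443)
print('unreachable' if ms == 0 else '%d ms' % ms)
```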

cls/StrText.py  (+58, -0)

@@ -0,0 +1,58 @@
#!/usr/bin/env python3
class StrText():
    # Pad Base64 text to the standard length
    def get_str_base64(origStr):
        missing_padding = 4 - len(origStr) % 4
        if missing_padding:
            origStr += '=' * missing_padding
        return origStr

    # Take the text between two different substrings, print(sub_link); parameters: text, first substring, second substring, whether to keep the substrings
    def get_str_btw(s, f, b, y):
        par = s.partition(f)
        if(y == 0):
            return (par[2].partition(b))[0][:]
        else:
            return f + '' + (par[2].partition(b))[0][:] + '' + b

    # Size conversion
    def bytes_conversion(self, number: float):
        """
        :param number: size in bytes
        :return:
        """
        symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
        prefix = dict()
        for a, s in enumerate(symbols):
            prefix[s] = 1 << (a + 1) * 10
        for s in reversed(symbols):
            if int(number) >= prefix[s]:
                value = float(number) / prefix[s]
                return '%.1f%s/s' % (value, s)
        return "%sB/s" % number

    def hum_convert(value):
        value = float(value)
        units = ["B", "KB", "MB", "GB", "TB", "PB"]
        size = 1024.0
        for i in range(len(units)):
            if (value / size) < 1:
                return "%.2f%s" % (value, units[i])
            value = value / size

    # Check whether the string is made up entirely of Chinese characters
    def is_all_chinese(strs):
        for _char in strs:
            if not '\u4e00' <= _char <= '\u9fa5':
                return False
        return True

    # Check whether the string contains any Chinese characters
    def is_contains_chinese(strs):
        for _char in strs:
            if '\u4e00' <= _char <= '\u9fa5':
                return True
        return False
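
A hedged usage sketch for the string helpers above (the values in the comments are what the code computes):

```
from cls.StrText import StrText

print(StrText.get_str_base64('dGVzdA'))   # 'dGVzdA==' (padded to a multiple of 4)
print(StrText.get_str_btw('name:US-1,server:1.2.3.4', 'name:', ',', 0))  # 'US-1'
print(StrText.hum_convert(1536))          # '1.50KB'
```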

cls/SubConvert.py  (+627, -0)

@@ -0,0 +1,627 @@
#!/usr/bin/env python3
import base64
import json
from cls.IpAddress import IpAddress
from cls.LocalFile import LocalFile
from cls.StrText import StrText
class SubConvert():
# allvmess-->json-->name-->v2ray or clash (v2ray --> clash or clash --> v2ray)
# Convert a non-standard plaintext vmess address into the standard plaintext JSON vmess format
def clash_all_to_json(onenode):
try:
# print('SubConvert-Line-83-oldnode:' + onenode)
# V2ray format
# onenode = '{name: "211.72.35.110", server: 211.72.35.110, port: 443, type: vmess, uuid: 541ca026-58d3-48f1-d6ef-3a05543ddcb7, alterId: 0, cipher: auto, tls: true, skip-cert-verify: false, network: ws, ws-opts: {path: /, headers: {Host: ru.tzccifq.ga}}, udp: true}'
# onenode = '{"add":"v.ssr.com", "v":"2", "ps":"\'v.ssr.com\'", "port":"168", "id":"e54a480c-77e3-41ca-8f8b-17ffb50dbd08", "aid":"0", "net":"ws", "type":"", "host":"", "path":"/ssrsub", "tls":"tls"}'
# onenode = '{add:v1-asw-sg-14.niaoyun.online,port:666,id:b9cc1e88-5db0-37ff-840a-b882345e22d1,aid:1,scy:auto,net:ws,host:v1-asw-sg-14.niaoyun.online,path:/niaocloud,tls:,sni:,v:2,ps:Relay_新加坡-_7234,type:none,serverPort:0,nation:}'
# Clash format
#- {alterId: 2, cipher: auto, name: '7.y.com', network: ws, port: 80, server: 7.y.com, tls: false, type: vmess, uuid: bac18e70-9964-3f99-805a-d809c4bdc6cb, path: /ny}
#- {name: CA-ss1.ssr.com, server: ss1.ssr.com, port: 10443, type: ss, cipher: aes-128-gcm, password: suo.yt.ssr, plugin: obfs, plugin-opts: {mode: tls, host: n46hm52773.wns.windows.com}, udp: true}
#- {name: US-107.173.157.168, server: 107.173.157.168, port: 443, type: vmess, uuid: 4f6aa0c3-7be1-4eaa-a64c-a23418070422, alterId: 6, cipher: auto, skip-cert-vertify: false, network: ws, path: /b06fde1/, tls: True, headers: {Host: www.shunxin.ml}}
#- {name: "172.67.196.0", server: 172.67.196.0, port: 443, type: vmess, uuid: 4db99e96-3ee3-419c-b1fb-856975801380, alterId: 64, cipher: auto, tls: true, skip-cert-verify: false, network: ws, ws-opts: {path: /ray, headers: {Host: localhoster.ml}}, udp: true}
# onenode = '{name: 35.77.5.55, server: 034.ap.pop.bigairport.net, port: 12356, type: vmess, uuid: a6f82e7d-6e99-4a4e-8981-8e91453c13f7, alterId: 1, cipher: auto, skip-cert-vertify: false, network: ws, path: /, tls: True, headers: {Host: t.me/vpnhat}}'
if(onenode == '' or onenode == '{}'):
return ''
if(len(onenode) < 20):
LocalFile.write_LogFile('SubConvert-Line-42-onenode must be at least 20 characters long, onenode:' + onenode)
return ''
nenode = ''
# Normalize the format into a standard JSON string so it can be loaded into a dict later
onenode = onenode.replace(' ', '').replace('"', '').replace('\'', '')
# Collapse multi-line links into a single line
onenode = onenode.replace('\r', ',').replace('\n', ',')
# Strip the title and advertising text
oname = StrText.get_str_btw(onenode, 'name:', ',', 1)
if(oname != ''):
onenode = onenode.replace(oname, 'name:,')
ops = StrText.get_str_btw(onenode, 'ps:', ',', 1)
if(ops != ''):
onenode = onenode.replace(ops, 'ps:,')
# Passwords containing some of these characters break parsing
onenode = onenode.replace('<', '').replace('>', '')
# Remove the plugin's {} braces and stray commas
onenode = onenode.replace('{', ',').replace('}', ',').replace(',,', ',').replace(',,', ',').strip(',')
# This comma must survive the cleanup: replace it first and restore it later
onenode = onenode.replace('h2,http', 'h2=http')
for i in onenode.split(','):
if(i.find(':') > -1):
a = i.split(':', 1)[0]
if(a != ''):
b = i.split(':', 1)[1]
if(b.find(':') > -1):
nenode = nenode + ',"' + a + '":""'
nenode = nenode + ',"' + b.split(':')[0] + '":"' + b.split(':')[1] + '"'
else:
nenode = nenode + ',"' + a + '":"' + b + '"'
else: