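# Builds allow-lists of Debian mirror IP addresses, intended for use as OPNsense
# alias/firewall list files: fetch the official Debian mirror list, keep mirrors
# located in the target countries below, resolve each mirror hostname with the
# helpers from whatDomain, and write the results as /32 (IPv4) and /128 (IPv6)
# CIDR entries.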
import requests, schedule, time
from bs4 import BeautifulSoup
from whatDomain import *
DEBSECURITYURL = ["https://security.debian.org/debian-security/"]
EXTRASURL = ["https://download.docker.com/linux/debian/",
             # Both Proxmox hosts listed just to be sure, even though they resolve to the same IP.
             "http://download.proxmox.com/debian/",
             "https://enterprise.proxmox.com/debian/pve/",
             # NVIDIA Container Toolkit repositories (NVENC/GPU support inside containers).
             "https://nvidia.github.io/libnvidia-container/stable/deb/",
             "https://nvidia.github.io/libnvidia-container/experimental/deb/"]
DEBMIRRORURL = "https://www.debian.org/mirror/list"
IPv4FILE = "../MirrorListV4"
IPv6FILE = "../MirrorListV6"
# European and American countries whose mirrors we want, plus the pseudo-entries
# "Security" and "Extras" used for the security and third-party repositories above.
target_countries = {
    # Europe
    "Austria", "Belgium", "Bulgaria", "Croatia", "Czech Republic", "Denmark",
    "Estonia", "Finland", "France", "Germany", "Greece", "Hungary", "Iceland",
    "Ireland", "Italy", "Latvia", "Lithuania", "Netherlands", "Norway", "Poland",
    "Portugal", "Romania", "Slovakia", "Slovenia", "Spain", "Sweden", "Switzerland",
    "United Kingdom", "Moldova",
    # America
    "Argentina", "Brazil", "Canada", "Chile", "Colombia", "Costa Rica", "Ecuador",
    "Mexico", "Peru", "United States", "Uruguay", "Venezuela",
    # Others
    "Security", "Extras",
}

def sanitizeURL(inpurl: str):
    # Reduce a mirror URL to its bare hostname: strip the scheme, then the path.
    if inpurl.startswith("https://"):
        outurl = inpurl[8:]
    elif inpurl.startswith("http://"):
        outurl = inpurl[7:]
    else:
        # No scheme at all: assume it is already a bare hostname (possibly with a path).
        outurl = inpurl
    # Keep only the part before the first "/".
    return outurl.split("/", 1)[0]
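# For example, sanitizeURL("https://security.debian.org/debian-security/")
# returns "security.debian.org".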

def getFreshData():
    # Download and parse the Debian mirror list page.
    payload = requests.get(DEBMIRRORURL)
    soup = BeautifulSoup(payload.content, "html.parser")
    return soup

def sanitizeUrlsGodWhatTheFuckIsThis(SoupInput: BeautifulSoup):
    outMirrorDict = {}
    current_country = None
    # Iterate through all table rows
    for table in SoupInput.find_all("table"):
        for row in table.find_all("tr"):
            # Check for country name in a full-row header (<strong><big>)
            strong = row.find("strong")
            if strong:
                country_name = strong.get_text(strip=True)
                if country_name in target_countries:
                    current_country = country_name
                else:
                    current_country = None
                continue  # move to next row
            # Check for inline country name in first column
            cols = row.find_all("td")
            if len(cols) >= 2:
                possible_country = cols[0].get_text(strip=True)
                link_tag = cols[1].find("a", href=True)
                if possible_country in target_countries:
                    current_country = possible_country
                if current_country and link_tag:
                    url = link_tag['href']
                    if current_country not in outMirrorDict:
                        outMirrorDict[current_country] = []
                    outMirrorDict[current_country].append(url)
    outMirrorDict.update({"Security": DEBSECURITYURL})
    outMirrorDict.update({"Extras": EXTRASURL})
    return outMirrorDict
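# The result maps country name -> list of mirror base URLs, e.g. (illustrative):
# {"Germany": ["https://ftp.de.debian.org/debian/", ...],
#  "Security": DEBSECURITYURL, "Extras": EXTRASURL}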

def LeJob():
    print("Starting lookup")
    LeSoup = getFreshData()
    LeMirrorDict = sanitizeUrlsGodWhatTheFuckIsThis(LeSoup)
    # Rewrite the IPv4 list from scratch, skipping duplicate addresses.
    written = set()
    with open(IPv4FILE, "w") as fW:
        for country, urls in LeMirrorDict.items():
            if country not in target_countries:
                continue
            for url in urls:
                goodurl = sanitizeURL(url)
                ip4Dict = ermWhatATheIpFromDomainYaCrazy(goodurl)
                try:
                    for _, ip in ip4Dict.items():
                        if ip not in written:
                            written.add(ip)
                            print(ip)
                            fW.write(ip + "/32" + "\n")
                except AttributeError:
                    # Lookup returned nothing usable for this host; skip it.
                    continue
    # Same again for the IPv6 list.
    written = set()
    with open(IPv6FILE, "w") as fW:
        for country, urls in LeMirrorDict.items():
            if country not in target_countries:
                continue
            for url in urls:
                goodurl = sanitizeURL(url)
                ip6Dict = ermWhatAAAATheIpFromDomainYaCrazy(goodurl)
                try:
                    for _, ip in ip6Dict.items():
                        if ip not in written:
                            written.add(ip)
                            fW.write(ip + "/128" + "\n")
                except AttributeError:
                    continue

# schedule.every().day.at("12:45").do(LeJob)
schedule.every().day.at("17:44").do(LeJob)
while True:
    schedule.run_pending()
    print("Waiting...")
    time.sleep(30)  # Check for pending jobs every 30 seconds
# LeJob()