feat: Added proxy

2025-02-27 22:22:40 +11:00
parent ec59656cad
commit 50275ba482
4 changed files with 106 additions and 5 deletions


@@ -1,4 +1,5 @@
 import random
+from urllib.parse import urlparse
 import dns.resolver
 import subprocess
 import tempfile
@@ -8,10 +9,19 @@ from cryptography.hazmat.backends import default_backend
 import datetime
 from dns import resolver
 import requests
 import re
+from bs4 import BeautifulSoup
+from requests_doh import DNSOverHTTPSSession, add_dns_provider
+import urllib3
+resolver = dns.resolver.Resolver()
+resolver.nameservers = ["194.50.5.28","194.50.5.27","194.50.5.26"]
+resolver.port = 53
+add_dns_provider("HNSDoH", "https://hnsdoh.com/dns-query")
+# Disable warnings
+urllib3.disable_warnings()
 def check_ssl(domain: str):
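
The new module-level resolver pins plain-DNS lookups to the HNSDoH anycast nameservers. A minimal sketch of querying it directly (dnspython 2.x; the name looked up is only an illustration):

# Resolve an A record through the pinned HNSDoH nameservers
answer = resolver.resolve("hnsdoh.com", "A")
print([rdata.address for rdata in answer])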
@@ -179,14 +189,56 @@ def curl(url: str):
         url = "http://" + url
     try:
         # curl --doh-url https://hnsdoh.com/dns-query {url} --insecure
-        commmand = f"curl --doh-url https://hnsdoh.com/dns-query {url} --insecure --silent"
-        response = subprocess.run(commmand, shell=True, capture_output=True, text=True)
+        command = f"curl --doh-url https://hnsdoh.com/dns-query {url} --insecure --silent"
+        response = subprocess.run(command, shell=True, capture_output=True, text=True)
         if response.returncode != 0:
             return {"success": False, "error": response.stderr}
         else:
             return {"success": True, "result": response.stdout}
-    except:
-        return {"success": False, "error": "An error occurred"}
+    except Exception as e:
+        return {"success": False, "error": "An error occurred", "message": str(e)}
+def proxy(url: str) -> requests.Response:
+    # Fetch the URL through a session that resolves hostnames via the HNSDoH provider
+    session = DNSOverHTTPSSession("HNSDoH")
+    r = session.get(url, verify=False)
+    return r
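
proxy() performs the fetch itself, resolving hostnames over the HNSDoH DoH endpoint registered above and skipping certificate verification. For example (URL illustrative):

resp = proxy("https://hnsdoh.com/")
print(resp.status_code, resp.headers.get("content-type"))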
+def cleanProxyContent(htmlContent: str, url: str, proxyHost: str):
+    # Find all instances of the origin URL in the HTML
+    hostUrl = f"{urlparse(url).scheme}://{urlparse(url).netloc}"
+    proxyUrl = f"{proxyHost}proxy/{hostUrl}"
+    # htmlContent = htmlContent.replace(hostUrl,proxyUrl)
+    # Parse the HTML and rewrite resource references
+    soup = BeautifulSoup(htmlContent, 'html.parser')
+    # Find all resources
+    for linkType in ['link', 'img', 'script', 'a']:
+        links = soup.find_all(linkType)
+        for link in links:
+            for attrib in ['src', 'href']:
+                if link.has_attr(attrib):
+                    # Root-relative reference: prefix with the proxied host URL
+                    if str(link[attrib]).startswith('/'):
+                        link.attrs[attrib] = proxyUrl + link[attrib]
+                        continue
+                    # Absolute reference: swap the origin for the proxy
+                    if str(link[attrib]).startswith('http'):
+                        link.attrs[attrib] = str(link[attrib]).replace(hostUrl, proxyUrl)
+                        continue
+                    # Leave non-HTTP schemes untouched
+                    ignored = False
+                    for ignore in ["data:", "mailto:", "tel:", "javascript:", "blob:"]:
+                        if str(link[attrib]).startswith(ignore):
+                            ignored = True
+                            break
+                    # Everything else is treated as a relative path
+                    if not ignored:
+                        link.attrs[attrib] = f"{proxyUrl}/{link[attrib]}"
+    return soup.prettify()
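
Together the two helpers form the proxy flow: fetch the upstream page over DoH, then rewrite its links so the client keeps requesting through the proxy. A minimal sketch, assuming the app is served from the hypothetical origin "http://localhost:5000/" (cleanProxyContent expects proxyHost to end with a slash):

resp = proxy("https://hnsdoh.com/")
html = cleanProxyContent(resp.text, "https://hnsdoh.com/", "http://localhost:5000/")
# Links in html now point at http://localhost:5000/proxy/https://hnsdoh.com/...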
# if __name__ == "__main__":
# print(curl("https://dso.dprofile"))