import os
from os import system

import colorama
import requests
colorama.init(autoreset=True)
# "title" is a Windows cmd builtin; on other platforms the shell would just
# print an error, so only set the console title when actually on Windows.
if os.name == "nt":
    system("title Proxy-Scraper")
def fetch_proxies(url, timeout=15):
    """Download a plain-text proxy list from *url*.

    Args:
        url: Endpoint that returns one proxy (host:port) per line.
        timeout: Seconds to wait for the server before giving up.
            Without a timeout, requests.get can block indefinitely.

    Returns:
        List of proxy strings, one per line of the response body.

    Raises:
        requests.RequestException: On connection failure, timeout, or a
            non-2xx HTTP status (an error page must not be saved as proxies).
    """
    response = requests.get(url, timeout=timeout)
    response.raise_for_status()
    return response.text.splitlines()
def save_proxies(proxies, filename):
    """Write each proxy on its own line to *filename*, overwriting any
    existing file. The file ends with a trailing newline when non-empty."""
    lines = [f"{proxy}\n" for proxy in proxies]
    with open(filename, "w") as out:
        out.writelines(lines)
def main():
    """Fetch proxies of each type from its source URL, save them to
    <type>.txt, and print every proxy found.

    Sources are fetched independently: a failure on one source is reported
    and skipped, so the remaining sources are still downloaded and saved
    (previously a single failed request aborted the whole run).
    """
    proxy_sources = {
        "https": "https://api.proxyscrape.com/?request=displayproxies&proxytype=https&timeout=7000&country=ALL&anonymity=elite&ssl=no",
        "http": "https://api.proxyscrape.com/?request=displayproxies&proxytype=http&timeout=7000&country=ALL&anonymity=elite&ssl=no",
        "socks4": "https://www.proxy-list.download/api/v1/get?type=socks4",
        "socks5": "https://www.proxy-list.download/api/v1/get?type=socks5",
    }
    for ptype, url in proxy_sources.items():
        try:
            proxy_list = fetch_proxies(url)
        except requests.RequestException as exc:
            # Best-effort: report the broken source and move on to the next.
            print(f"[{colorama.Fore.RED}-{colorama.Fore.RESET}] Failed to fetch {ptype.upper()} proxies: {exc}")
            continue
        save_proxies(proxy_list, f"{ptype}.txt")
        for proxy in proxy_list:
            print(f"[{colorama.Fore.GREEN}+{colorama.Fore.RESET}] Successfully Found {ptype.upper()} >> {proxy}")
    print(f"[{colorama.Fore.CYAN}i{colorama.Fore.RESET}] All proxies successfully grabbed & saved")
if __name__ == "__main__":
    main()
06.06.2024
Optimizations:
- Simplified the logic for loading and saving proxies.
- Removed unnecessary variables and functions.
- Used a dictionary to store proxy URLs and types, improving readability and scalability.
- Consolidated common operations into `fetch_proxies` and `save_proxies` functions to reduce code duplication.
Comments (0)
There are no comments here yet, you can be the first!