import os
from os import system

import colorama
import requests

# Enable ANSI colour handling and auto-reset the colour after every print.
colorama.init(autoreset=True)
# The "title" shell command exists only on Windows (cmd.exe); skip it
# elsewhere instead of spawning a shell command that is guaranteed to fail.
if os.name == "nt":
    system("title Proxy-Scraper")

def fetch_proxies(url, timeout=15):
    """Download a plain-text proxy list from *url* and return it as a list.

    Parameters:
        url: Endpoint that returns one proxy ("host:port") per line.
        timeout: Seconds to wait for the server before giving up. New
            parameter with a default, so existing callers are unaffected;
            without it a dead source would hang the script indefinitely.

    Returns:
        List of non-empty, whitespace-stripped proxy strings.

    Raises:
        requests.HTTPError: If the server answers with a 4xx/5xx status.
        requests.RequestException: On connection failure or timeout.
    """
    response = requests.get(url, timeout=timeout)
    # Fail loudly on an error status instead of silently saving an HTML
    # error page as if it were a proxy list.
    response.raise_for_status()
    # Some sources pad their output with blank lines; drop them here so
    # downstream code never sees empty "proxies".
    return [line.strip() for line in response.text.splitlines() if line.strip()]

def save_proxies(proxies, filename):
    """Write one proxy per line to *filename*, overwriting any existing file.

    Parameters:
        proxies: Iterable of proxy strings ("host:port").
        filename: Path of the output text file.
    """
    # Explicit encoding avoids platform-dependent defaults (e.g. cp1252 on
    # Windows) mangling any non-ASCII bytes a source might emit.
    with open(filename, "w", encoding="utf-8") as file:
        file.writelines(f"{proxy}\n" for proxy in proxies)

def main():
    """Fetch proxy lists from every configured source and save each to <type>.txt."""
    # Source endpoints keyed by proxy type; every endpoint returns one
    # proxy per line as plain text.
    proxy_sources = {
        "https": "https://api.proxyscrape.com/?request=displayproxies&proxytype=https&timeout=7000&country=ALL&anonymity=elite&ssl=no",
        "http": "https://api.proxyscrape.com/?request=displayproxies&proxytype=http&timeout=7000&country=ALL&anonymity=elite&ssl=no",
        "socks4": "https://www.proxy-list.download/api/v1/get?type=socks4",
        "socks5": "https://www.proxy-list.download/api/v1/get?type=socks5",
    }

    for ptype, url in proxy_sources.items():
        # One dead or rate-limited source should not abort the whole run:
        # report the failure and move on to the next source.
        try:
            proxy_list = fetch_proxies(url)
        except requests.RequestException as exc:
            print(f"[{colorama.Fore.RED}-{colorama.Fore.RESET}] Failed to fetch {ptype.upper()} proxies: {exc}")
            continue
        save_proxies(proxy_list, f"{ptype}.txt")
        for proxy in proxy_list:
            print(f"[{colorama.Fore.GREEN}+{colorama.Fore.RESET}] Successfully Found {ptype.upper()} >> {proxy}")

    print(f"[{colorama.Fore.CYAN}i{colorama.Fore.RESET}] All proxies successfully grabbed & saved")

# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()

# ---------------------------------------------------------------------------
# NOTE(review): everything below is non-code residue scraped together with
# this script from the web page it was posted on (the author's change notes,
# the page's comment form, and site advertising). It is commented out so the
# file remains valid Python; kept verbatim for reference.
#
# 06.06.2024
#
# Optimizations:
#
#   1. Simplified the logic for loading and saving proxies.
#   2. Removed unnecessary variables and functions.
#   3. Used a dictionary to store proxy URLs and types, improving readability
#      and scalability.
#   4. Consolidated common operations into fetch_proxies and save_proxies
#      functions to reduce code duplication.
#
# Comments (0)
#
# There are no comments here yet; you can be the first!
#
# Leave a Reply
#
# Your email address will not be published. Required fields are marked *
#
# Choose and Buy Proxy
#
# Datacenter Proxies
#
# Rotating Proxies
#
# UDP Proxies
#
# Trusted By 10000+ Customers Worldwide
#
# Proxy Customer
# Proxy Customer
# Proxy Customer flowch.ai
# Proxy Customer
# Proxy Customer
# Proxy Customer
# ---------------------------------------------------------------------------