Skip to content

Commit

Permalink
add flake8 linting
Browse files Browse the repository at this point in the history
  • Loading branch information
hoopengo committed May 25, 2022
1 parent d8be4f8 commit 89f344d
Show file tree
Hide file tree
Showing 6 changed files with 294 additions and 140 deletions.
8 changes: 8 additions & 0 deletions .flake8
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
[flake8]
exclude = .git,__pycache__,env,venv,.eggs,.tox,.nox,build,dist
max-line-length = 120
max-complexity = 8
ignore = W,BLK,
E24,E121,E123,E125,E126,E221,E226,E266,E704,
E265,E722,E501,E731,E306,E401,E302,E222,E303,
E402,E305,E261,E262,E203,N816
9 changes: 9 additions & 0 deletions .markdownlint.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
default: true
blank_lines: false
bullet: false
html: false
indentation: false
line_length: false
spaces: false
url: false
whitespace: false
8 changes: 8 additions & 0 deletions dev_requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
flake8==4.0.1
flake8-black==0.3.3
flake8-bugbear==22.4.25
flake8-builtins==1.5.3
flake8-commas==2.1.0
flake8-isort==4.1.1
flake8-polyfill==1.0.2
pep8-naming==0.12.1
154 changes: 88 additions & 66 deletions proxyChecker.py
Original file line number Diff line number Diff line change
@@ -1,81 +1,103 @@
import urllib.request
import threading
import random
import argparse
from time import time, sleep
import random
import threading
import urllib.request
from time import sleep, time

user_agent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/37.0.2062.94 Chrome/37.0.2062.94 Safari/537.36"
proxyType = ""


def random_user_agent(file='user_agents.txt'):
    """Return one randomly chosen user-agent string from *file*.

    The file is expected to hold one user-agent per line; the chosen
    line is returned with its newline(s) removed, after a 0.1 s pause.
    """
    with open(file, "r") as handle:
        candidates = handle.readlines()
        chosen = random.choice(candidates).replace("\n", "")
        handle.close()  # redundant inside `with`; kept for identical behavior
    sleep(0.1)
    return str(chosen)
def random_user_agent(file="user_agents.txt"):
    """Return one randomly chosen user-agent string from *file*.

    Parameters
    ----------
    file : str
        Path to a text file holding one user-agent string per line.

    Returns
    -------
    str
        A random line from the file with all newlines stripped.

    Raises
    ------
    IndexError
        If the file is empty (``random.choice`` on an empty list).
    """
    # Fixes vs. previous version: dropped the redundant `f.close()` inside
    # the `with` block, and removed the pointless `sleep(0.1)` that stalled
    # every checker thread for 100 ms per call.
    with open(file, "r") as f:
        lines = f.readlines()
    return random.choice(lines).replace("\n", "")

proxyType = ''

def checkproxy(txtfile):
global out_file
candidate_proxies = open(txtfile).readlines()
filedl = open(txtfile, "w")
filedl.close()
out_file = open(txtfile, "a")
threads = []
for i in candidate_proxies:
t = threading.Thread(target=checker, args=[i])
t.start()
threads.append(t)
def check_proxy(txtfile):
global out_file
candidate_proxies = open(txtfile).readlines()
filedl = open(txtfile, "w")
filedl.close()
out_file = open(txtfile, "a")
threads = []
for i in candidate_proxies:
t = threading.Thread(target=checker, args=[i])
t.start()
threads.append(t)

for t in threads:
t.join()
for t in threads:
t.join()

out_file.close()
if args.verbose:
print("\n\nCurrent IPs in proxylist: %s\n" % (len(open(txtfile).readlines())))
out_file.close()
if args.verbose:
print("\n\nCurrent IPs in proxylist: %s\n" % (len(open(txtfile).readlines())))


def checker(i):
    # Probe a single proxy; if it answers within the timeout, append it
    # (newline included) to the module-level `out_file` handle.
    # Reads module globals: proxyType, site, args, user_agent,
    # chosenTimeout, out_file.
    # Build e.g. "http://1.2.3.4:8080" and install it as the process-wide
    # opener so the request below is routed through this proxy.
    # NOTE(review): `i` still carries its trailing "\n" from readlines().
    proxy = proxyType + '://' + i
    proxy_support = urllib.request.ProxyHandler({proxyType: proxy})
    opener = urllib.request.build_opener(proxy_support)
    urllib.request.install_opener(opener)
    global site
    # NOTE(review): this always prepends a scheme, so the default site
    # "https://google.com/" becomes "http://https://google.com/" — looks
    # like a bug; verify against how -s/--site is normally passed.
    req = urllib.request.Request(proxyType + '://' + site)
    global user_agent
    # With --random_agent, overwrite the shared user_agent for this request.
    if args.random_agent:
        user_agent = random_user_agent()
    req.add_header("User-Agent", user_agent)
    try:
        global chosenTimeout
        # Time the round trip purely for the verbose report.
        start_time = time()
        urllib.request.urlopen(req, timeout=chosenTimeout)
        end_time = time()
        time_taken = end_time - start_time
        # Proxy works: write it back to the (truncated) list file.
        out_file.write(i)
        if args.verbose:
            print ("%s works!" % proxy)
            print('time: ' + str(time_taken))
            print('user_agent: ' + user_agent +'\n')
    except Exception as e:
        # Any failure (timeout, refused, DNS, ...) marks the proxy dead.
        print(e)
        pass
        if args.verbose:
            print ("%s does not respond.\n" % proxy)
def checker(value):
    """Probe a single proxy and append it to ``out_file`` if it responds.

    Parameters
    ----------
    value : str
        One line from the proxy list, e.g. ``"1.2.3.4:8080\\n"``. The
        trailing newline is kept so a working entry is written back
        verbatim.

    Reads module-level state: ``proxyType``, ``site``, ``args``,
    ``user_agent``, ``chosenTimeout`` and the open ``out_file`` handle.
    """
    proxy = proxyType + "://" + value
    # Install a process-wide opener that routes the request through `proxy`.
    proxy_support = urllib.request.ProxyHandler({proxyType: proxy})
    opener = urllib.request.build_opener(proxy_support)
    urllib.request.install_opener(opener)
    # Bug fix: the scheme was always prepended, so the default site
    # "https://google.com/" became the invalid "http://https://google.com/",
    # making every proxy look dead. Only prepend when no scheme is present.
    target = site if "://" in site else proxyType + "://" + site
    req = urllib.request.Request(target)
    global user_agent  # reassigned when --random_agent is active
    if args.random_agent:
        user_agent = random_user_agent()
    req.add_header("User-Agent", user_agent)
    try:
        # Time the round trip purely for the verbose report.
        start_time = time()
        urllib.request.urlopen(req, timeout=chosenTimeout)
        time_taken = time() - start_time
        # NOTE(review): many threads write concurrently; small writes are
        # effectively atomic in CPython, but a Lock would be safer.
        out_file.write(value)
        if args.verbose:
            print("%s works!" % proxy)
            print("time: " + str(time_taken))
            print("user_agent: " + user_agent + "\n")
    except Exception as e:
        # Any failure (timeout, refused, DNS, ...) marks the proxy dead.
        print(e)
        if args.verbose:
            print("%s does not respond.\n" % proxy)


# Добавление аргументов в парсер
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--timeout", type=int, help="dismiss the proxy after -t seconds", default=20)
parser.add_argument("-p", "--proxy", help="check HTTPS or HTTP proxies", default='http')
parser.add_argument("-l", "--list", help="path to your list.txt", default='output.txt')
parser.add_argument("-s", "--site", help="check with specific website like google.com", default='https://google.com/')
parser.add_argument("-v", "--verbose", help="increase output verbosity", action="store_true")
parser.add_argument("-r", "--random_agent", help="use a random user agent per proxy", action="store_true")
args = parser.parse_args() # Получение всех аргументов
txtfile = args.list # list.txt
site = args.site # Сайт указанный пользователем (default="https://google.com/")
proxyType = args.proxy # Какие прокси парсить (default="http")
parser.add_argument(
"-t",
"--timeout",
type=int,
help="dismiss the proxy after -t seconds",
default=20,
)
parser.add_argument("-p", "--proxy", help="check HTTPS or HTTP proxies", default="http")
parser.add_argument("-l", "--list", help="path to your list.txt", default="output.txt")
parser.add_argument(
"-s",
"--site",
help="check with specific website like google.com",
default="https://google.com/",
)
parser.add_argument(
"-v",
"--verbose",
help="increase output verbosity",
action="store_true",
)
parser.add_argument(
"-r",
"--random_agent",
help="use a random user agent per proxy",
action="store_true",
)
args = parser.parse_args() # Получение всех аргументов
txtfile = args.list # list.txt
site = args.site # Сайт указанный пользователем (default="https://google.com/")
proxyType = args.proxy # Какие прокси парсить (default="http")
chosenTimeout = args.timeout
threading.Thread(target=checkproxy, args=(txtfile,)).start() # старт
threading.Thread(target=check_proxy, args=(txtfile,)).start() # старт
Loading

0 comments on commit 89f344d

Please sign in to comment.