Commit 8dd7191

Add files via upload

0MeMo07 authored Dec 12, 2023
1 parent a525adc commit 8dd7191
Showing 3 changed files with 218 additions and 54 deletions.
23 changes: 12 additions & 11 deletions requirements.txt
@@ -1,11 +1,12 @@
-asyncio
-beautifulsoup4
-bs4
-certifi
-chardet
-idna
-pystyle
-requests
-tqdm
-urllib3
-aiohttp
+asyncio
+beautifulsoup4
+bs4
+certifi
+chardet
+idna
+pystyle
+requests
+tqdm
+urllib3
+aiohttp
+argparse
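Assuming the project's usual pip-based setup (not shown in this commit), the updated dependency set would be installed with:

pip install -r requirements.txt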
85 changes: 85 additions & 0 deletions src/Sql_injection_seeker.py
@@ -0,0 +1,85 @@
import argparse
import os
import signal
from bs4 import BeautifulSoup
from urllib.parse import urljoin
import requests
from pystyle import Colors, Colorate

R = '\033[31m'
G = '\033[32m'
W = '\033[0m'

class Seeker:
    def __init__(self, base_url):
        self.base_url = base_url
        self.visited_urls = set()
        self.queue = [base_url]
        self.sql_injection_urls = []
        self.sql_patterns = ["'"]

    def extract_links_from_page(self, url):
        try:
            response = requests.get(url)
            response_text = response.text
            soup = BeautifulSoup(response_text, "html.parser")
            links = [urljoin(url, link.get("href")) for link in soup.find_all("a")]
            return links
        except requests.RequestException as e:
            print(R + "[!]Error ->", e)
            return []

    def check_sql_injection(self, url):
        try:
            response = requests.get(url)
            soup = BeautifulSoup(response.content, "html.parser")
            page_text = soup.get_text()

            if "SQL syntax" in page_text or "MySQL Query Error" in page_text or "Fatal error" in page_text or "Uncaught Error" in page_text:
                return True
            else:
                return False
        except requests.RequestException as e:
            print(R + "[!]Error ->", e)
            return False

    def handle_interrupt(self, signum, frame):
        print(Colorate.Vertical(Colors.green_to_blue, "**************************************************************************"))
        if self.sql_injection_urls:
            print(G + "\n[+]" + W + "Links with potential SQL injections:" + W)
            for url in self.sql_injection_urls:
                print(W + url)

        if not self.sql_injection_urls:
            print(R + "\n[-]" + W + "No potential SQL injection found." + W)
        exit()

    def seek_injections(self):
        signal.signal(signal.SIGINT, lambda signum, frame: self.handle_interrupt(signum, frame))

        while self.queue:
            current_url = self.queue.pop(0)
            if current_url in self.visited_urls:
                continue

            print(Colorate.Vertical(Colors.green_to_blue, "**************************************************************************"))
            print(G + "Scanning:" + W, current_url)
            self.visited_urls.add(current_url)

            if self.check_sql_injection(current_url):
                print(G + "[+]" + W + "Potential SQL injection found ->" + G, current_url)
                self.sql_injection_urls.append(current_url)

            links = self.extract_links_from_page(current_url)
            for link in links:
                if link not in self.visited_urls:
                    self.queue.append(link)
                    for pattern in self.sql_patterns:
                        modified_url = link + pattern
                        if self.check_sql_injection(modified_url):
                            print(G + "[+]" + W + "Potential SQL injection found ->" + G, modified_url)
                            self.sql_injection_urls.append(modified_url)
                            with open('Sql_İnjection_found.txt', 'a', encoding='utf-8', errors='ignore') as file:
                                file.write(modified_url + '\n')

        signal.signal(signal.SIGINT, signal.SIG_DFL)
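For quick reference, a minimal way to drive this class directly might look like the following sketch (the target URL is a placeholder; the import mirrors the from src import Sql_injection_seeker line added to url_seeker.py below). Pressing Ctrl+C triggers handle_interrupt, which prints whatever findings have been collected so far.

from src import Sql_injection_seeker

seeker = Sql_injection_seeker.Seeker(base_url="http://example.com/")  # placeholder target
seeker.seek_injections()  # crawl links breadth-first, append "'" to each, log hits to Sql_İnjection_found.txt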
164 changes: 121 additions & 43 deletions url_seeker.py
@@ -6,6 +6,7 @@
import os
import asyncio
import requests
import argparse
from signal import SIGINT, signal
import bs4, tqdm
from glob import glob
@@ -18,14 +19,27 @@
from pystyle import Colors, Colorate
from functools import wraps
import urllib3
from src import Sql_injection_seeker


urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
proxy = False
R = '\033[31m'
G = '\033[32m'
W = '\033[0m'

d0rk = [line.strip() for line in open("src/d0rks.txt", "r", encoding="utf-8")]


def parse_arguments():
    parser = argparse.ArgumentParser(description='URL Seeker - Enhanced Dorking')
    parser.add_argument('--sites', help='Target Domain Ex: .com, .org, .net')
    parser.add_argument('--dorks', type=int, default=0, help='Number of dorks (0 for all)')
    parser.add_argument('--threads', type=int, help='Number of threads')
    parser.add_argument('--pages', type=int, help='Number of search engine pages to crawl per dork')
    parser.add_argument('--rdork', help="'Y' if you want the dorks to be randomly selected, 'N' if not")
    parser.add_argument('--S', help='URL or .txt file')
    parser.add_argument('--O', action='store_true', help='for automatic sql injection scanning --O')
    return parser.parse_args()

def logo():
    os.system('cls' if os.name == 'nt' else 'clear')
@@ -39,23 +53,41 @@ def logo():
░░▒░ ░ ░ ░▒ ░ ▒░░ ░ ▒ ░ ░ ░▒ ░ ░ ░ ░ ░ ░ ░ ░░ ░▒ ▒░ ░ ░ ░ ░▒ ░ ▒░
░░░ ░ ░ ░░ ░ ░ ░ ░ ░ ░ ░ ░ ░ ░░ ░ ░ ░░ ░
░ ░ ░ ░ ░ ░ ░ ░ ░░ ░ ░ ░ ░
-https://github.com/0MeMo07/ URL Seeker < 1.0.0 >
+https://github.com/0MeMo07/ URL Seeker < 2.0.0 >
+Enhanced Dorking
"""))

def f_menu():
    global args
    args = parse_arguments()
    import time
    global proxy
    logo()
    f_scan()

def f_scan():
    print(R + f"[1]{W} Dork Search")
    print(R + f"[2]{W} SQL injection scanning\n")
    select = input(R + "> " + W)

    if select == "1":
        f_scan(args)
    if select == "2":
        input_value = input("\nEnter a URL or txt file to scan: ")

        if input_value.lower().endswith('.txt'):
            with open(input_value, 'r', encoding='iso-8859-9') as file:
                for line in file:
                    url = line.strip()
                    seeker_instance = Sql_injection_seeker.Seeker(base_url=url)
                    seeker_instance.seek_injections()
        else:
            seeker_instance = Sql_injection_seeker.Seeker(base_url=input_value)
            seeker_instance.seek_injections()

def f_scan(args):
    import time

    global pages_pulled_as_one
    global usearch
    global numthreads
    global threads
    global finallist
    global unsorted
    global finallist2
@@ -65,51 +97,75 @@ def f_scan():
    global loaded_Dorks
    global unsorted
    global sites

    threads = []
    finallist = []
    finallist2 = []
    unsorted = []
    col = []
    darkurl = []
    loaded_Dorks = []
    print(W)
    sites = input(
        "\nTarget Domain Ex: .com, .org, .net : "
    )
    sitearray = list(map(str, sites.split(",")))
    dorks = input(
        "Randomly select the number of dorks (0 for all of them... may take some time!) : "
    )
    if int(dorks) == 0:
        i = 0
        while i < len(d0rk):
            loaded_Dorks.append(d0rk[i])
            i += 1

    if args.S:
        input_value = args.S

        if input_value.lower().endswith('.txt'):
            with open(input_value, 'r', encoding='iso-8859-9') as file:
                for line in file:
                    url = line.strip()
                    seeker_instance = Sql_injection_seeker.Seeker(base_url=url)
                    seeker_instance.seek_injections()
        else:
            seeker_instance = Sql_injection_seeker.Seeker(base_url=input_value)
            seeker_instance.seek_injections()
    else:
        i = 0
        while i < int(dorks):
            loaded_Dorks.append(d0rk[i])
            i += 1
    numthreads = input("Enter the number of threads - 50-500 : ")
    pages_pulled_as_one = input(
        "Enter the number of Search Engine Pages to crawl per D0rk, between 25 and 100 @ 25 increments : "
    )
    print(R + "==============================")
    print(W + f"Threads :{G}", numthreads)
    print(W + f"Dorks :{G}", len(loaded_Dorks))
    print(W + f"Pages :{G}", pages_pulled_as_one)
    print(R + "==============================")
    time.sleep(5)
    try:
        loop = asyncio.get_running_loop()
    except RuntimeError:
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
    usearch = loop.run_until_complete(search(pages_pulled_as_one))
    sites = args.sites or input("\nTarget Domain Ex: .com, .org, .net : ")

    sitearray = list(map(str, sites.split(",")))

    dorks = args.dorks or input("Randomly select the number of dorks (0 for all of them... may take some time!) : ")

    random_dorks = args.rdork or input("'Y' if you want the dorks to be randomly selected, 'N' if not : ")

    d0rk = [line.strip() for line in open("src/d0rks.txt", "r", encoding="utf-8")]

    if int(dorks) == 0:
        if random_dorks == 'Y' or random_dorks == 'y':
            loaded_Dorks = random.sample(d0rk, len(d0rk))
        else:
            loaded_Dorks = d0rk
    else:
        if random_dorks == 'Y' or random_dorks == 'y':
            loaded_Dorks = random.sample(d0rk, int(dorks))
        else:
            loaded_Dorks = d0rk[:int(dorks)]


async def search(pages_pulled_as_one):

    numthreads = args.threads or input("Enter the number of threads - 50-500 : ")

    pages_pulled_as_one = args.pages or input("Enter the number of Search Engine Pages to crawl per D0rk, between 25 and 100 @ 25 increments : ")

    print(R + "==============================")
    print(W + f"Sites :{G}", sites)
    print(W + f"Threads :{G}", numthreads)
    print(W + f"Dorks :{G}", len(loaded_Dorks))
    print(W + f"Pages :{G}", pages_pulled_as_one)
    print(W + f"Random Dork :{G}", random_dorks)
    print(R + "==============================")
    time.sleep(5)

    try:
        loop = asyncio.get_running_loop()
    except RuntimeError:
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
    usearch = loop.run_until_complete(search(pages_pulled_as_one, numthreads, loaded_Dorks, sitearray))




async def search(pages_pulled_as_one, numthreads, loaded_Dorks, sitearray):
    random.shuffle(loaded_Dorks)
    urls = []
    urls_len_last = 0
@@ -210,8 +266,8 @@ def output():
print(R + f"[2] {W}Save current UNSORTED URLs to file")
print(R + f"[3] {W}Print all the UNSORTED URLs ")
print(R + f"[4] {W}Print all SORTED URLs")
print(R + f"[5] {W}SQL injection scanning SORTED URLs (Coming Soon)")
print(R + f"[6] {W}SQL injection scanning UNSORTED URLs (Coming Soon)\n")
print(R + f"[5] {W}SQL injection scanning SORTED URLs")
print(R + f"[6] {W}SQL injection scanning UNSORTED URLs\n")
sec = input(R + "> ")
if sec == "1":
print(G + "\nSaving sorted URLs (" + str(len(finallist)) + ") to file\n")
@@ -253,14 +309,36 @@ def output():
            output()
        elif sec == "5":
            logo()
            from src import Sql_injection_seeker
            finallist.sort()
            for url in finallist:
                seeker_instance = Sql_injection_seeker.Seeker(base_url=url)
                seeker_instance.seek_injections()
            output()
        elif sec == "6":
            logo()
            from src import Sql_injection_seeker
            unsorted.sort()
            for url in unsorted:
                seeker_instance = Sql_injection_seeker.Seeker(base_url=url)
                seeker_instance.seek_injections()
            output()

    except KeyboardInterrupt:
        os.system('cls' if os.name == 'nt' else 'clear')
        output()

    if args.O:
        print(f"\n\n{R}[+] {W}URLS (unsorted): {G}", len(urls))
        print(f"{R}[+] {W}URLS (sorted) with rubbish removed: {G}", len(finallist))
        print("")
        from src import Sql_injection_seeker
        finallist.sort()
        for url in finallist:
            seeker_instance = Sql_injection_seeker.Seeker(base_url=url)
            seeker_instance.seek_injections()
        output()
    else:
        output()
    return finallist

def ignoring_get(url):
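Taken together, the new argparse flags make a full run scriptable. Illustrative invocations (the flag names come from parse_arguments above; the values and the targets.txt file name are examples, not part of the commit):

python url_seeker.py --sites .com,.org --dorks 10 --threads 100 --pages 25 --rdork Y --O
python url_seeker.py --S targets.txt

The first runs the dork search and, because of --O, hands the sorted results straight to the SQL injection seeker; the second feeds a URL or a .txt list of URLs directly to the new Seeker class.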
