Merge branch 'Tib3rius:main' into main
commit 8e7127ec3d
@@ -7,8 +7,7 @@ jobs:
 - uses: actions/checkout@v2
 - uses: actions/setup-python@v2
 - run: pip install --upgrade pip poetry
-- run: pip install bandit black codespell flake8 flake8-bugbear
-  flake8-comprehensions isort mypy pytest pyupgrade safety
+- run: pip install bandit black codespell flake8 flake8-bugbear flake8-comprehensions isort mypy pytest pyupgrade safety requests
 - run: bandit --recursive --skip B101 . || true # B101 is assert statements
 - run: black --check . || true
 - run: codespell --skip="./autorecon/wordlists" # --ignore-words-list="" --skip="*.css,*.js,*.lock"

@@ -1,3 +1,4 @@
 __pycache__
 *.pyc
 results/
+poetry.*

README.md
@@ -43,7 +43,7 @@ sudo apt update

 ### Python 3

-AutoRecon requires the usage of Python 3.7+ and pip, which can be installed on Kali Linux using the following commands:
+AutoRecon requires the usage of Python 3.8+ and pip, which can be installed on Kali Linux using the following commands:

 ```bash
 sudo apt install python3

@@ -174,9 +174,7 @@ Assuming you did not modify any of the content in the AutoRecon directory, this

 ### Plugins

-A plugin update process is in the works. Until then, after upgrading, remove the ~/.config/AutoRecon directory and run AutoRecon with any argument to repopulate with the latest files.

-If you depend on the ~/.config/AutoRecon/config.toml file (i.e. you have made modifications to it) then simply remove everything in the ~/.config/AutoRecon apart from the config.toml file (including the VERSION-x.x.x file).
+A plugin update process is in the works. Until then, after upgrading, remove the ~/.local/share/AutoRecon directory and run AutoRecon with any argument to repopulate with the latest files.

 ## Usage

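For readers following the README change above, here is a minimal sketch of the new upgrade flow, assuming a pip-style install with autorecon on the PATH (the `-l` invocation is only there to trigger a run; any invocation repopulates the data directory):

```bash
# Hedged sketch of the upgrade steps described above.
rm -rf ~/.local/share/AutoRecon      # plugins, wordlists and VERSION-x.x.x now live here
autorecon -l                         # any run repopulates ~/.local/share/AutoRecon
ls ~/.config/AutoRecon/config.toml   # per-user config is no longer touched by the wipe
```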
@@ -224,7 +222,7 @@ optional arguments:
 Override --tags / --exclude-tags for the listed ServiceScan plugins (comma separated). Default: None
 --reports PLUGINS Override --tags / --exclude-tags for the listed Report plugins (comma separated). Default: None
 --plugins-dir PLUGINS_DIR
-The location of the plugins directory. Default: ~/.config/AutoRecon/plugins
+The location of the plugins directory. Default: ~/.local/share/AutoRecon/plugins
 --add-plugins-dir PLUGINS_DIR
 The location of an additional plugins directory to add to the main one. Default: None
 -l [TYPE], --list [TYPE]

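A hedged usage sketch for the two plugin-directory options shown above; the extra plugins path and the target IP are placeholders, not values from this commit:

```bash
# Point AutoRecon at the (new default) plugins directory and layer a custom one on top.
autorecon --plugins-dir ~/.local/share/AutoRecon/plugins \
          --add-plugins-dir ~/my-autorecon-plugins \
          10.10.10.10
```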
@@ -266,7 +264,7 @@ plugin arguments:
 The tool to use for directory busting. Default: feroxbuster
 --dirbuster.wordlist VALUE [VALUE ...]
 The wordlist(s) to use when directory busting. Separate multiple wordlists with spaces. Default:
-['~/.config/AutoRecon/wordlists/dirbuster.txt']
+['~/.local/share/AutoRecon/wordlists/dirbuster.txt']
 --dirbuster.threads VALUE
 The number of threads to use when directory busting. Default: 10
 --dirbuster.ext VALUE

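As a hedged illustration of the dirbuster options listed above (the target IP is a placeholder), the per-plugin flags can be combined on one command line:

```bash
# Use ffuf with the wordlist from the new data directory.
autorecon --dirbuster.tool ffuf \
          --dirbuster.wordlist ~/.local/share/AutoRecon/wordlists/dirbuster.txt \
          --dirbuster.threads 10 \
          10.10.10.10
```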
@@ -1,6 +1,7 @@
 import appdirs, os

 config_dir = appdirs.user_config_dir('AutoRecon')
+data_dir = appdirs.user_data_dir('AutoRecon')

 configurable_keys = [
 'ports',

@@ -42,9 +43,10 @@ configurable_boolean_keys = [
 ]

 config = {
-'protected_classes': ['autorecon', 'target', 'service', 'commandstreamreader', 'plugin', 'portscan', 'servicescan', 'global', 'pattern'],
-'service_exceptions': ['mc-nmf', 'ncacn_http', 'smux', 'status', 'tcpwrapped', 'unknown'],
+'protected_classes': ['autorecon', 'target', 'service', 'commandstreamreader', 'plugin', 'portscan', 'report', 'servicescan', 'global', 'pattern'],
+'service_exceptions': ['infocrypt', 'mc-nmf', 'ncacn_http', 'smux', 'status', 'tcpwrapped', 'unknown'],
 'config_dir': config_dir,
+'data_dir': data_dir,
 'global_file': None,
 'ports': None,
 'max_scans': 50,

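The config.py hunk above splits the old single directory into a config dir and a data dir via appdirs. On a typical XDG-style Linux setup (an assumption; other platforms resolve these differently) that maps to the paths the README now references:

```bash
# Where the two appdirs-derived directories land on most Linux systems (assumption).
ls ~/.config/AutoRecon/        # config.toml, global.toml
ls ~/.local/share/AutoRecon/   # plugins/, wordlists/, VERSION-x.x.x
```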
@@ -0,0 +1,17 @@
+from autorecon.plugins import ServiceScan
+
+class BruteforceSMB(ServiceScan):
+
+	def __init__(self):
+		super().__init__()
+		self.name = 'Bruteforce SMB'
+		self.tags = ['default', 'safe', 'active-directory']
+
+	def configure(self):
+		self.match_service('tcp', 445, '^microsoft\-ds')
+		self.match_service('tcp', 139, '^netbios')
+
+	def manual(self, service, plugin_was_run):
+		service.add_manual_command('Bruteforce SMB', [
+			'crackmapexec smb {address} --port={port} -u "' + self.get_global('username_wordlist', default='/usr/share/seclists/Usernames/top-usernames-shortlist.txt') + '" -p "' + self.get_global('password_wordlist', default='/usr/share/seclists/Passwords/darkweb2017-top100.txt') + '"'
+		])

@@ -14,67 +14,74 @@ class DirBuster(ServiceScan):

 def configure(self):
 self.add_choice_option('tool', default='feroxbuster', choices=['feroxbuster', 'gobuster', 'dirsearch', 'ffuf', 'dirb'], help='The tool to use for directory busting. Default: %(default)s')
-self.add_list_option('wordlist', default=[os.path.join(config['config_dir'], 'wordlists', 'dirbuster.txt')], help='The wordlist(s) to use when directory busting. Separate multiple wordlists with spaces. Default: %(default)s')
+self.add_list_option('wordlist', default=[os.path.join(config['data_dir'], 'wordlists', 'dirbuster.txt')], help='The wordlist(s) to use when directory busting. Separate multiple wordlists with spaces. Default: %(default)s')
 self.add_option('threads', default=10, help='The number of threads to use when directory busting. Default: %(default)s')
 self.add_option('ext', default='txt,html,php,asp,aspx,jsp', help='The extensions you wish to fuzz (no dot, comma separated). Default: %(default)s')
 self.add_true_option('recursive', help='Enables recursive searching (where available). Warning: This may cause significant increases to scan times. Default: %(default)s')
+self.add_option('extras', default='', help='Any extra options you wish to pass to the tool when it runs. e.g. --dirbuster.extras=\'-s 200,301 --discover-backup\'')
 self.match_service_name('^http')
 self.match_service_name('^nacn_http$', negative_match=True)

 def check(self):
 tool = self.get_option('tool')
-if tool == 'feroxbuster':
-if which('feroxbuster') is None:
-self.error('The feroxbuster program could not be found. Make sure it is installed. (On Kali, run: sudo apt install feroxbuster)')
-elif tool == 'gobuster':
-if which('gobuster') is None:
-self.error('The gobuster program could not be found. Make sure it is installed. (On Kali, run: sudo apt install gobuster)')
-elif tool == 'dirsearch':
-if which('dirsearch') is None:
-self.error('The dirsearch program could not be found. Make sure it is installed. (On Kali, run: sudo apt install dirsearch)')
+if tool == 'feroxbuster' and which('feroxbuster') is None:
+self.error('The feroxbuster program could not be found. Make sure it is installed. (On Kali, run: sudo apt install feroxbuster)')
+return False
+elif tool == 'gobuster' and which('gobuster') is None:
+self.error('The gobuster program could not be found. Make sure it is installed. (On Kali, run: sudo apt install gobuster)')
+return False
+elif tool == 'dirsearch' and which('dirsearch') is None:
+self.error('The dirsearch program could not be found. Make sure it is installed. (On Kali, run: sudo apt install dirsearch)')
+return False
+elif tool == 'ffuf' and which('ffuf') is None:
+self.error('The ffuf program could not be found. Make sure it is installed. (On Kali, run: sudo apt install ffuf)')
+return False
+elif tool == 'dirb' and which('dirb') is None:
+self.error('The dirb program could not be found. Make sure it is installed. (On Kali, run: sudo apt install dirb)')
+return False

 async def run(self, service):
 dot_extensions = ','.join(['.' + x for x in self.get_option('ext').split(',')])
 for wordlist in self.get_option('wordlist'):
 name = os.path.splitext(os.path.basename(wordlist))[0]
 if self.get_option('tool') == 'feroxbuster':
-await service.execute('feroxbuster -u {http_scheme}://{addressv6}:{port}/ -t ' + str(self.get_option('threads')) + ' -w ' + wordlist + ' -x "' + self.get_option('ext') + '" -v -k ' + ('' if self.get_option('recursive') else '-n ') + '-q -e -o "{scandir}/{protocol}_{port}_{http_scheme}_feroxbuster_' + name + '.txt"')
+await service.execute('feroxbuster -u {http_scheme}://{addressv6}:{port}/ -t ' + str(self.get_option('threads')) + ' -w ' + wordlist + ' -x "' + self.get_option('ext') + '" -v -k ' + ('' if self.get_option('recursive') else '-n ') + '-q -e -r -o "{scandir}/{protocol}_{port}_{http_scheme}_feroxbuster_' + name + '.txt"' + (' ' + self.get_option('extras') if self.get_option('extras') else ''))

 elif self.get_option('tool') == 'gobuster':
-await service.execute('gobuster dir -u {http_scheme}://{addressv6}:{port}/ -t ' + str(self.get_option('threads')) + ' -w ' + wordlist + ' -e -k -x "' + self.get_option('ext') + '" -z -o "{scandir}/{protocol}_{port}_{http_scheme}_gobuster_' + name + '.txt"')
+await service.execute('gobuster dir -u {http_scheme}://{addressv6}:{port}/ -t ' + str(self.get_option('threads')) + ' -w ' + wordlist + ' -e -k -x "' + self.get_option('ext') + '" -z -r -o "{scandir}/{protocol}_{port}_{http_scheme}_gobuster_' + name + '.txt"' + (' ' + self.get_option('extras') if self.get_option('extras') else ''))

 elif self.get_option('tool') == 'dirsearch':
 if service.target.ipversion == 'IPv6':
 service.error('dirsearch does not support IPv6.')
 else:
-await service.execute('dirsearch -u {http_scheme}://{address}:{port}/ -t ' + str(self.get_option('threads')) + ' -e "' + self.get_option('ext') + '" -f -q ' + ('-r ' if self.get_option('recursive') else '') + '-w ' + wordlist + ' --format=plain -o "{scandir}/{protocol}_{port}_{http_scheme}_dirsearch_' + name + '.txt"')
+await service.execute('dirsearch -u {http_scheme}://{address}:{port}/ -t ' + str(self.get_option('threads')) + ' -e "' + self.get_option('ext') + '" -f -q -F ' + ('-r ' if self.get_option('recursive') else '') + '-w ' + wordlist + ' --format=plain -o "{scandir}/{protocol}_{port}_{http_scheme}_dirsearch_' + name + '.txt"' + (' ' + self.get_option('extras') if self.get_option('extras') else ''))

 elif self.get_option('tool') == 'ffuf':
-await service.execute('ffuf -u {http_scheme}://{addressv6}:{port}/FUZZ -t ' + str(self.get_option('threads')) + ' -w ' + wordlist + ' -e "' + dot_extensions + '" -v ' + ('-recursion ' if self.get_option('recursive') else '') + '-noninteractive | tee {scandir}/{protocol}_{port}_{http_scheme}_ffuf_' + name + '.txt')
+await service.execute('ffuf -u {http_scheme}://{addressv6}:{port}/FUZZ -t ' + str(self.get_option('threads')) + ' -w ' + wordlist + ' -e "' + dot_extensions + '" -v -r ' + ('-recursion ' if self.get_option('recursive') else '') + '-noninteractive' + (' ' + self.get_option('extras') if self.get_option('extras') else '') + ' | tee {scandir}/{protocol}_{port}_{http_scheme}_ffuf_' + name + '.txt')

 elif self.get_option('tool') == 'dirb':
-await service.execute('dirb {http_scheme}://{addressv6}:{port}/ ' + wordlist + ' -l ' + ('' if self.get_option('recursive') else '-r ') + '-S -X ",' + dot_extensions + '" -o "{scandir}/{protocol}_{port}_{http_scheme}_dirb_' + name + '.txt"')
+await service.execute('dirb {http_scheme}://{addressv6}:{port}/ ' + wordlist + ' -l ' + ('' if self.get_option('recursive') else '-r ') + '-S -X ",' + dot_extensions + '" -f -o "{scandir}/{protocol}_{port}_{http_scheme}_dirb_' + name + '.txt"' + (' ' + self.get_option('extras') if self.get_option('extras') else ''))

 def manual(self, service, plugin_was_run):
 dot_extensions = ','.join(['.' + x for x in self.get_option('ext').split(',')])
 if self.get_option('tool') == 'feroxbuster':
 service.add_manual_command('(feroxbuster) Multi-threaded recursive directory/file enumeration for web servers using various wordlists:', [
-'feroxbuster -u {http_scheme}://{addressv6}:{port} -t ' + str(self.get_option('threads')) + ' -w /usr/share/wordlists/dirbuster/directory-list-2.3-medium.txt -x "' + self.get_option('ext') + '" -v -k ' + ('' if self.get_option('recursive') else '-n ') + '-e -o {scandir}/{protocol}_{port}_{http_scheme}_feroxbuster_dirbuster.txt'
+'feroxbuster -u {http_scheme}://{addressv6}:{port} -t ' + str(self.get_option('threads')) + ' -w /usr/share/wordlists/dirbuster/directory-list-2.3-medium.txt -x "' + self.get_option('ext') + '" -v -k ' + ('' if self.get_option('recursive') else '-n ') + '-e -r -o {scandir}/{protocol}_{port}_{http_scheme}_feroxbuster_dirbuster.txt' + (' ' + self.get_option('extras') if self.get_option('extras') else '')
 ])
 elif self.get_option('tool') == 'gobuster':
 service.add_manual_command('(gobuster v3) Multi-threaded directory/file enumeration for web servers using various wordlists:', [
-'gobuster dir -u {http_scheme}://{addressv6}:{port}/ -t ' + str(self.get_option('threads')) + ' -w /usr/share/wordlists/dirbuster/directory-list-2.3-medium.txt -e -k -x "' + self.get_option('ext') + '" -o "{scandir}/{protocol}_{port}_{http_scheme}_gobuster_dirbuster.txt"'
+'gobuster dir -u {http_scheme}://{addressv6}:{port}/ -t ' + str(self.get_option('threads')) + ' -w /usr/share/wordlists/dirbuster/directory-list-2.3-medium.txt -e -k -x "' + self.get_option('ext') + '" -r -o "{scandir}/{protocol}_{port}_{http_scheme}_gobuster_dirbuster.txt"' + (' ' + self.get_option('extras') if self.get_option('extras') else '')
 ])
 elif self.get_option('tool') == 'dirsearch':
 if service.target.ipversion == 'IPv4':
 service.add_manual_command('(dirsearch) Multi-threaded recursive directory/file enumeration for web servers using various wordlists:', [
-'dirsearch -u {http_scheme}://{address}:{port}/ -t ' + str(self.get_option('threads')) + ' -e "' + self.get_option('ext') + '" -f ' + ('-r ' if self.get_option('recursive') else '') + '-w /usr/share/wordlists/dirbuster/directory-list-2.3-medium.txt --format=plain --output="{scandir}/{protocol}_{port}_{http_scheme}_dirsearch_dirbuster.txt"'
+'dirsearch -u {http_scheme}://{address}:{port}/ -t ' + str(self.get_option('threads')) + ' -e "' + self.get_option('ext') + '" -f -F ' + ('-r ' if self.get_option('recursive') else '') + '-w /usr/share/wordlists/dirbuster/directory-list-2.3-medium.txt --format=plain --output="{scandir}/{protocol}_{port}_{http_scheme}_dirsearch_dirbuster.txt"' + (' ' + self.get_option('extras') if self.get_option('extras') else '')
 ])
 elif self.get_option('tool') == 'ffuf':
 service.add_manual_command('(ffuf) Multi-threaded recursive directory/file enumeration for web servers using various wordlists:', [
-'ffuf -u {http_scheme}://{addressv6}:{port}/FUZZ -t ' + str(self.get_option('threads')) + ' -w /usr/share/seclists/Discovery/Web-Content/directory-list-2.3-medium.txt -e "' + dot_extensions + '" -v ' + ('-recursion ' if self.get_option('recursive') else '') + '-noninteractive | tee {scandir}/{protocol}_{port}_{http_scheme}_ffuf_dirbuster.txt'
+'ffuf -u {http_scheme}://{addressv6}:{port}/FUZZ -t ' + str(self.get_option('threads')) + ' -w /usr/share/seclists/Discovery/Web-Content/directory-list-2.3-medium.txt -e "' + dot_extensions + '" -v -r ' + ('-recursion ' if self.get_option('recursive') else '') + '-noninteractive' + (' ' + self.get_option('extras') if self.get_option('extras') else '') + ' | tee {scandir}/{protocol}_{port}_{http_scheme}_ffuf_dirbuster.txt'
 ])
 elif self.get_option('tool') == 'dirb':
 service.add_manual_command('(dirb) Recursive directory/file enumeration for web servers using various wordlists:', [
-'dirb {http_scheme}://{addressv6}:{port}/ /usr/share/wordlists/dirbuster/directory-list-2.3-medium.txt -l ' + ('' if self.get_option('recursive') else '-r ') + '-X ",' + dot_extensions + '" -o "{scandir}/{protocol}_{port}_{http_scheme}_dirb_dirbuster.txt"'
+'dirb {http_scheme}://{addressv6}:{port}/ /usr/share/wordlists/dirbuster/directory-list-2.3-medium.txt -l ' + ('' if self.get_option('recursive') else '-r ') + '-X ",' + dot_extensions + '" -f -o "{scandir}/{protocol}_{port}_{http_scheme}_dirb_dirbuster.txt"' + (' ' + self.get_option('extras') if self.get_option('extras') else '')
 ])

@@ -16,6 +16,7 @@ class DnsReconSubdomainBruteforce(ServiceScan):
 def check(self):
 if which('dnsrecon') is None:
 self.error('The program dnsrecon could not be found. Make sure it is installed. (On Kali, run: sudo apt install dnsrecon)')
+return False

 def manual(self, service, plugin_was_run):
 domain_name = '<DOMAIN-NAME>'

@@ -16,6 +16,7 @@ class DnsRecon(ServiceScan):
 def check(self):
 if which('dnsrecon') is None:
 self.error('The program dnsrecon could not be found. Make sure it is installed. (On Kali, run: sudo apt install dnsrecon)')
+return False

 def manual(self, service, plugin_was_run):
 service.add_manual_command('Use dnsrecon to automatically query data from the DNS server. You must specify the target domain name.', [

@@ -1,4 +1,5 @@
 from autorecon.plugins import ServiceScan
+from shutil import which

 class Enum4Linux(ServiceScan):

@@ -8,11 +9,26 @@ class Enum4Linux(ServiceScan):
 self.tags = ['default', 'safe', 'active-directory']

 def configure(self):
+self.add_choice_option('tool', default=('enum4linux-ng' if which('enum4linux-ng') else 'enum4linux'), choices=['enum4linux-ng', 'enum4linux'], help='The tool to use for doing Windows and Samba enumeration. Default: %(default)s')
 self.match_service_name(['^ldap', '^smb', '^microsoft\-ds', '^netbios'])
 self.match_port('tcp', [139, 389, 445])
 self.match_port('udp', 137)
 self.run_once(True)

+def check(self):
+tool = self.get_option('tool')
+if tool == 'enum4linux' and which('enum4linux') is None:
+self.error('The enum4linux program could not be found. Make sure it is installed. (On Kali, run: sudo apt install enum4linux)')
+return False
+elif tool == 'enum4linux-ng' and which('enum4linux-ng') is None:
+self.error('The enum4linux-ng program could not be found. Make sure it is installed. (https://github.com/cddmp/enum4linux-ng)')
+return False

 async def run(self, service):
 if service.target.ipversion == 'IPv4':
-await service.execute('enum4linux -a -M -l -d {address} 2>&1', outfile='enum4linux.txt')
+tool = self.get_option('tool')
+if tool is not None:
+if tool == 'enum4linux':
+await service.execute('enum4linux -a -M -l -d {address} 2>&1', outfile='enum4linux.txt')
+elif tool == 'enum4linux-ng':
+await service.execute('enum4linux-ng -A -d -v {address} 2>&1', outfile='enum4linux-ng.txt')

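A hedged sketch of driving the new tool choice added to the Enum4Linux plugin above; it assumes the option is exposed as --enum4linux.tool (flag name derived from the plugin slug, same pattern as the --dirbuster.* options) and uses a placeholder target:

```bash
# Force classic enum4linux even when enum4linux-ng is installed (assumed flag name).
autorecon --enum4linux.tool enum4linux 10.10.10.10
```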
@@ -12,5 +12,5 @@ class LookupSID(ServiceScan):

 def manual(self, service, plugin_was_run):
 service.add_manual_command('Lookup SIDs', [
-'lookupsid.py [username]:[password]@{address}'
+'impacket-lookupsid \'[username]:[password]@{address}\''
 ])

@@ -11,6 +11,10 @@ class Nikto(ServiceScan):
 self.match_service_name('^http')
 self.match_service_name('^nacn_http$', negative_match=True)

-def manual(self, service, plugin_was_run):
+async def run(self, service):
 if service.target.ipversion == 'IPv4':
+await service.execute('nikto -ask=no -Tuning=x4567890ac -nointeractive -host {http_scheme}://{address}:{port} 2>&1 | tee "{scandir}/{protocol}_{port}_{http_scheme}_nikto.txt"')

+def manual(self, service, plugin_was_run):
+if service.target.ipversion == 'IPv4' and not plugin_was_run:
 service.add_manual_command('(nikto) old but generally reliable web server enumeration tool:', 'nikto -ask=no -h {http_scheme}://{address}:{port} 2>&1 | tee "{scandir}/{protocol}_{port}_{http_scheme}_nikto.txt"')

@@ -0,0 +1,14 @@
+from autorecon.plugins import ServiceScan
+
+class NmapAJP(ServiceScan):
+
+	def __init__(self):
+		super().__init__()
+		self.name = 'Nmap AJP'
+		self.tags = ['default', 'safe', 'ajp']
+
+	def configure(self):
+		self.match_service_name(['^ajp13'])
+
+	async def run(self, service):
+		await service.execute('nmap {nmap_extra} -sV -p {port} --script="banner,(ajp-* or ssl*) and not (brute or broadcast or dos or external or fuzzer)" -oN "{scandir}/{protocol}_{port}_ajp_nmap.txt" -oX "{scandir}/xml/{protocol}_{port}_ajp_nmap.xml" {address}')

@@ -14,6 +14,7 @@ class OracleScanner(ServiceScan):
 def check(self):
 if which('oscanner') is None:
 self.error('The oscanner program could not be found. Make sure it is installed. (On Kali, run: sudo apt install oscanner)')
+return False

 async def run(self, service):
 await service.execute('oscanner -v -s {address} -P {port} 2>&1', outfile='{protocol}_{port}_oracle_scanner.txt')

@@ -14,6 +14,7 @@ class OracleTNScmd(ServiceScan):
 def check(self):
 if which('tnscmd10g') is None:
 self.error('The tnscmd10g program could not be found. Make sure it is installed. (On Kali, run: sudo apt install tnscmd10g)')
+return False

 async def run(self, service):
 if service.target.ipversion == 'IPv4':

@@ -1,5 +1,6 @@
 from autorecon.plugins import PortScan
 from autorecon.config import config
+import requests

 class QuickTCPPortScan(PortScan):

@@ -14,6 +14,7 @@ class RedisCli(ServiceScan):
 def check(self):
 if which('redis-cli') is None:
 self.error('The redis-cli program could not be found. Make sure it is installed. (On Kali, run: sudo apt install redis-tools)')
+return False

 async def run(self, service):
 if which('redis-cli') is not None:

@@ -11,14 +11,8 @@ class SMBVuln(ServiceScan):
 self.match_service_name(['^smb', '^microsoft\-ds', '^netbios'])

 async def run(self, service):
-await service.execute('nmap {nmap_extra} -sV -p {port} --script="smb-vuln-ms06-025" --script-args="unsafe=1" -oN "{scandir}/{protocol}_{port}_smb_ms06-025.txt" -oX "{scandir}/xml/{protocol}_{port}_smb_ms06-025.xml" {address}')
-await service.execute('nmap {nmap_extra} -sV -p {port} --script="smb-vuln-ms07-029" --script-args="unsafe=1" -oN "{scandir}/{protocol}_{port}_smb_ms07-029.txt" -oX "{scandir}/xml/{protocol}_{port}_smb_ms07-029.xml" {address}')
-await service.execute('nmap {nmap_extra} -sV -p {port} --script="smb-vuln-ms08-067" --script-args="unsafe=1" -oN "{scandir}/{protocol}_{port}_smb_ms08-067.txt" -oX "{scandir}/xml/{protocol}_{port}_smb_ms08-067.xml" {address}')
+await service.execute('nmap {nmap_extra} -sV -p {port} --script="smb-vuln-*" --script-args="unsafe=1" -oN "{scandir}/{protocol}_{port}_smb_vulnerabilities.txt" -oX "{scandir}/xml/{protocol}_{port}_smb_vulnerabilities.xml" {address}')

 def manual(self, service, plugin_was_run):
 if not plugin_was_run: # Only suggest these if they weren't run.
-service.add_manual_commands('Nmap scans for SMB vulnerabilities that could potentially cause a DoS if scanned (according to Nmap). Be careful:', [
-'nmap {nmap_extra} -sV -p {port} --script="smb-vuln-ms06-025" --script-args="unsafe=1" -oN "{scandir}/{protocol}_{port}_smb_ms06-025.txt" -oX "{scandir}/xml/{protocol}_{port}_smb_ms06-025.xml" {address}',
-'nmap {nmap_extra} -sV -p {port} --script="smb-vuln-ms07-029" --script-args="unsafe=1" -oN "{scandir}/{protocol}_{port}_smb_ms07-029.txt" -oX "{scandir}/xml/{protocol}_{port}_smb_ms07-029.xml" {address}',
-'nmap {nmap_extra} -sV -p {port} --script="smb-vuln-ms08-067" --script-args="unsafe=1" -oN "{scandir}/{protocol}_{port}_smb_ms08-067.txt" -oX "{scandir}/xml/{protocol}_{port}_smb_ms08-067.xml" {address}'
-])
+service.add_manual_commands('Nmap scans for SMB vulnerabilities that could potentially cause a DoS if scanned (according to Nmap). Be careful:', 'nmap {nmap_extra} -sV -p {port} --script="smb-vuln-* and dos" --script-args="unsafe=1" -oN "{scandir}/{protocol}_{port}_smb_vulnerabilities.txt" -oX "{scandir}/xml/{protocol}_{port}_smb_vulnerabilities.xml" {address}')

@@ -1,6 +1,7 @@
 from autorecon.plugins import ServiceScan
 from shutil import which
-import os, random, string
+import os, requests, random, string, urllib3
+urllib3.disable_warnings()

 class VirtualHost(ServiceScan):

@@ -30,10 +31,9 @@ class VirtualHost(ServiceScan):
 for wordlist in self.get_option('wordlist'):
 name = os.path.splitext(os.path.basename(wordlist))[0]
 for hostname in hostnames:
-_, stdout, _ = await service.execute('curl -sk -o /dev/null -H "Host: ' + ''.join(random.choice(string.ascii_letters) for i in range(20)) + '.' + hostname + '" {http_scheme}://' + hostname + ':{port}/ -w "%{{size_download}}"')
+wildcard = requests.get(('https' if service.secure else 'http') + '://' + service.target.address + ':' + str(service.port) + '/', headers={'Host':''.join(random.choice(string.ascii_letters) for i in range(20)) + '.' + hostname}, verify=False)

-size = ''.join(await stdout.readlines())

-await service.execute('ffuf -u {http_scheme}://' + hostname + ':{port}/ -t ' + str(self.get_option('threads')) + ' -w ' + wordlist + ' -H "Host: FUZZ.' + hostname + '" -fs ' + size + ' -noninteractive -s | tee "{scandir}/{protocol}_{port}_{http_scheme}_' + hostname + '_vhosts_' + name + '.txt"')
+size = str(len(wildcard.content))
+await service.execute('ffuf -u {http_scheme}://' + hostname + ':{port}/ -t ' + str(self.get_option('threads')) + ' -w ' + wordlist + ' -H "Host: FUZZ.' + hostname + '" -fs ' + size + ' -r -noninteractive -s | tee "{scandir}/{protocol}_{port}_{http_scheme}_' + hostname + '_vhosts_' + name + '.txt"')
 else:
 service.info('The target was not a hostname, nor was a hostname provided as an option. Skipping virtual host enumeration.')

@@ -19,14 +19,14 @@ class WinRMDetection(ServiceScan):

 def manual(self, service, plugin_was_run):
 service.add_manual_commands('Bruteforce logins:', [
-'crackmapexec winrm {address} -d ' + self.get_global('domain', default='<domain>') + ' -u ' + self.get_global('username_wordlist', default='/usr/share/seclists/Usernames/top-usernames-shortlist.txt') + ' -p ' + self.get_global('password_wordlist', default='/usr/share/seclists/Passwords/darkweb2017-top100.txt')
+'crackmapexec winrm {address} -d \'' + self.get_global('domain', default='<domain>') + '\' -u \'' + self.get_global('username_wordlist', default='/usr/share/seclists/Usernames/top-usernames-shortlist.txt') + '\' -p \'' + self.get_global('password_wordlist', default='/usr/share/seclists/Passwords/darkweb2017-top100.txt') + '\''
 ])

 service.add_manual_commands('Check login (requires credentials):', [
-'crackmapexec winrm {address} -d ' + self.get_global('domain', default='<domain>') + ' -u <username> -p <password> -x "whoami"'
+'crackmapexec winrm {address} -d \'' + self.get_global('domain', default='<domain>') + '\' -u \'<username>\' -p \'<password>\''
 ])

 service.add_manual_commands('Evil WinRM (gem install evil-winrm):', [
-'evil-winrm -u <user> -p <password> -i {address}',
-'evil-winrm -u <user> -H <hash> -i {address}'
+'evil-winrm -u \'<user>\' -p \'<password>\' -i {address}',
+'evil-winrm -u \'<user>\' -H \'<hash>\' -i {address}'
 ])

@@ -15,6 +15,7 @@ class WkHTMLToImage(ServiceScan):
 def check(self):
 if which('wkhtmltoimage') is None:
 self.error('The wkhtmltoimage program could not be found. Make sure it is installed. (On Kali, run: sudo apt install wkhtmltopdf)')
+return False

 async def run(self, service):
 if which('wkhtmltoimage') is not None:

@@ -4,7 +4,7 @@ import argparse, asyncio, importlib.util, inspect, ipaddress, math, os, re, sele
 from datetime import datetime

 try:
-import appdirs, colorama, toml, unidecode
+import appdirs, colorama, impacket, psutil, requests, toml, unidecode
 from colorama import Fore, Style
 except ModuleNotFoundError:
 print('One or more required modules was not installed. Please run or re-run: ' + ('sudo ' if os.getuid() == 0 else '') + 'python3 -m pip install -r requirements.txt')

@@ -17,7 +17,7 @@ from autorecon.io import slugify, e, fformat, cprint, debug, info, warn, error,
 from autorecon.plugins import Pattern, PortScan, ServiceScan, Report, AutoRecon
 from autorecon.targets import Target, Service

-VERSION = "2.0.25"
+VERSION = "2.0.32"

 if not os.path.exists(config['config_dir']):
 shutil.rmtree(config['config_dir'], ignore_errors=True, onerror=None)

@@ -25,22 +25,32 @@ if not os.path.exists(config['config_dir']):
 open(os.path.join(config['config_dir'], 'VERSION-' + VERSION), 'a').close()
 shutil.copy(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.toml'), os.path.join(config['config_dir'], 'config.toml'))
 shutil.copy(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'global.toml'), os.path.join(config['config_dir'], 'global.toml'))
-shutil.copytree(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'default-plugins'), os.path.join(config['config_dir'], 'plugins'))
-shutil.copytree(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'wordlists'), os.path.join(config['config_dir'], 'wordlists'))
 else:
 if not os.path.exists(os.path.join(config['config_dir'], 'config.toml')):
 shutil.copy(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.toml'), os.path.join(config['config_dir'], 'config.toml'))
 if not os.path.exists(os.path.join(config['config_dir'], 'global.toml')):
 shutil.copy(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'global.toml'), os.path.join(config['config_dir'], 'global.toml'))
-if not os.path.exists(os.path.join(config['config_dir'], 'plugins')):
-shutil.copytree(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'default-plugins'), os.path.join(config['config_dir'], 'plugins'))
-if not os.path.exists(os.path.join(config['config_dir'], 'wordlists')):
-shutil.copytree(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'wordlists'), os.path.join(config['config_dir'], 'wordlists'))
 if not os.path.exists(os.path.join(config['config_dir'], 'VERSION-' + VERSION)):
-warn('It looks like the config/plugins in ' + config['config_dir'] + ' are outdated. Please remove the ' + config['config_dir'] + ' directory and re-run AutoRecon to rebuild them.')
+warn('It looks like the config in ' + config['config_dir'] + ' is outdated. Please remove the ' + config['config_dir'] + ' directory and re-run AutoRecon to rebuild it.')

-# Save current terminal settings so we can restore them.
-terminal_settings = termios.tcgetattr(sys.stdin.fileno())

+if not os.path.exists(config['data_dir']):
+shutil.rmtree(config['data_dir'], ignore_errors=True, onerror=None)
+os.makedirs(config['data_dir'], exist_ok=True)
+open(os.path.join(config['data_dir'], 'VERSION-' + VERSION), 'a').close()
+shutil.copytree(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'default-plugins'), os.path.join(config['data_dir'], 'plugins'))
+shutil.copytree(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'wordlists'), os.path.join(config['data_dir'], 'wordlists'))
+else:
+if not os.path.exists(os.path.join(config['data_dir'], 'plugins')):
+shutil.copytree(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'default-plugins'), os.path.join(config['data_dir'], 'plugins'))
+if not os.path.exists(os.path.join(config['data_dir'], 'wordlists')):
+shutil.copytree(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'wordlists'), os.path.join(config['data_dir'], 'wordlists'))
+if not os.path.exists(os.path.join(config['data_dir'], 'VERSION-' + VERSION)):
+warn('It looks like the plugins in ' + config['data_dir'] + ' are outdated. Please remove the ' + config['data_dir'] + ' directory and re-run AutoRecon to rebuild them.')

+# Saves current terminal settings so we can restore them.
+terminal_settings = None

 autorecon = AutoRecon()

@@ -82,21 +92,39 @@ def calculate_elapsed_time(start_time, short=False):
 else:
 return ', '.join(elapsed_time)

-def cancel_all_tasks(signal, frame):
+# sig and frame args are only present so the function
+# works with signal.signal() and handles Ctrl-C.
+# They are not used for any other purpose.
+def cancel_all_tasks(sig, frame):
 for task in asyncio.all_tasks():
 task.cancel()

+processes = []

 for target in autorecon.scanning_targets:
 for process_list in target.running_tasks.values():
 for process_dict in process_list['processes']:
 try:
-process_dict['process'].kill()
-except ProcessLookupError: # Will get raised if the process finishes before we get to killing it.
+parent = psutil.Process(process_dict['process'].pid)
+processes.extend(parent.children(recursive=True))
+processes.append(parent)
+except psutil.NoSuchProcess:
 pass

+for process in processes:
+try:
+process.send_signal(signal.SIGKILL)
+except psutil.NoSuchProcess: # Will get raised if the process finishes before we get to killing it.
+pass

+_, alive = psutil.wait_procs(processes, timeout=10)
+if len(alive) > 0:
+error('The following process IDs could not be killed: ' + ', '.join([str(x.pid) for x in sorted(alive, key=lambda x: x.pid)]))

 if not config['disable_keyboard_control']:
 # Restore original terminal settings.
-termios.tcsetattr(sys.stdin.fileno(), termios.TCSADRAIN, terminal_settings)
+if terminal_settings is not None:
+termios.tcsetattr(sys.stdin.fileno(), termios.TCSADRAIN, terminal_settings)

 async def start_heartbeat(target, period=60):
 while True:

@@ -104,9 +132,28 @@ async def start_heartbeat(target, period=60):
 async with target.lock:
 count = len(target.running_tasks)

-tasks_list = ''
+tasks_list = []
 if config['verbose'] >= 1:
-tasks_list = ': {bblue}' + ', '.join(target.running_tasks.keys()) + '{rst}'
+for tag, task in target.running_tasks.items():
+task_str = tag

+if config['verbose'] >= 2:
+processes = []
+for process_dict in task['processes']:
+if process_dict['process'].returncode is None:
+processes.append(str(process_dict['process'].pid))
+try:
+for child in psutil.Process(process_dict['process'].pid).children(recursive=True):
+processes.append(str(child.pid))
+except psutil.NoSuchProcess:
+pass

+if processes:
+task_str += ' (PID' + ('s' if len(processes) > 1 else '') + ': ' + ', '.join(processes) + ')'

+tasks_list.append(task_str)

+tasks_list = ': {bblue}' + ', '.join(tasks_list) + '{rst}'

 current_time = datetime.now().strftime('%H:%M:%S')

@@ -143,24 +190,42 @@ async def keyboard():
 if len(input) > 0 and input[0] == 's':
 input = input[1:]
 for target in autorecon.scanning_targets:
-count = len(target.running_tasks)
+async with target.lock:
+count = len(target.running_tasks)

-tasks_list = []
-if config['verbose'] >= 1:
-for key, value in target.running_tasks.items():
-elapsed_time = calculate_elapsed_time(value['start'], short=True)
-tasks_list.append('{bblue}' + key + '{rst}' + ' (elapsed: ' + elapsed_time + ')')
+tasks_list = []
+if config['verbose'] >= 1:
+for tag, task in target.running_tasks.items():
+elapsed_time = calculate_elapsed_time(task['start'], short=True)

-tasks_list = ':\n ' + '\n '.join(tasks_list)
-else:
-tasks_list = ''
+task_str = '{bblue}' + tag + '{rst}' + ' (elapsed: ' + elapsed_time + ')'

-current_time = datetime.now().strftime('%H:%M:%S')
+if config['verbose'] >= 2:
+processes = []
+for process_dict in task['processes']:
+if process_dict['process'].returncode is None:
+processes.append(str(process_dict['process'].pid))
+try:
+for child in psutil.Process(process_dict['process'].pid).children(recursive=True):
+processes.append(str(child.pid))
+except psutil.NoSuchProcess:
+pass

+if processes:
+task_str += ' (PID' + ('s' if len(processes) > 1 else '') + ': ' + ', '.join(processes) + ')'

+tasks_list.append(task_str)

-if count > 1:
-info('{bgreen}' + current_time + '{rst} - There are {byellow}' + str(count) + '{rst} scans still running against {byellow}' + target.address + '{rst}' + tasks_list)
-elif count == 1:
-info('{bgreen}' + current_time + '{rst} - There is {byellow}1{rst} scan still running against {byellow}' + target.address + '{rst}' + tasks_list)
+tasks_list = ':\n ' + '\n '.join(tasks_list)
+else:
+tasks_list = ''

+current_time = datetime.now().strftime('%H:%M:%S')

+if count > 1:
+info('{bgreen}' + current_time + '{rst} - There are {byellow}' + str(count) + '{rst} scans still running against {byellow}' + target.address + '{rst}' + tasks_list)
+elif count == 1:
+info('{bgreen}' + current_time + '{rst} - There is {byellow}1{rst} scan still running against {byellow}' + target.address + '{rst}' + tasks_list)
 else:
 input = input[1:]
 await asyncio.sleep(0.1)

@@ -786,30 +851,24 @@ async def scan_target(target):

 async def run():
 # Find config file.
-if os.path.isfile(os.path.join(os.getcwd(), 'config.toml')):
-config_file = os.path.join(os.getcwd(), 'config.toml')
-elif os.path.isfile(os.path.join(config['config_dir'], 'config.toml')):
+if os.path.isfile(os.path.join(config['config_dir'], 'config.toml')):
 config_file = os.path.join(config['config_dir'], 'config.toml')
 else:
 config_file = None

 # Find global file.
-if os.path.isfile(os.path.join(os.getcwd(), 'global.toml')):
-config['global_file'] = os.path.join(os.getcwd(), 'global.toml')
-elif os.path.isfile(os.path.join(config['config_dir'], 'global.toml')):
+if os.path.isfile(os.path.join(config['config_dir'], 'global.toml')):
 config['global_file'] = os.path.join(config['config_dir'], 'global.toml')
 else:
 config['global_file'] = None

 # Find plugins.
-if os.path.isdir(os.path.join(os.getcwd(), 'plugins')):
-config['plugins_dir'] = os.path.join(os.getcwd(), 'plugins')
-elif os.path.isdir(os.path.join(config['config_dir'], 'plugins')):
-config['plugins_dir'] = os.path.join(config['config_dir'], 'plugins')
+if os.path.isdir(os.path.join(config['data_dir'], 'plugins')):
+config['plugins_dir'] = os.path.join(config['data_dir'], 'plugins')
 else:
 config['plugins_dir'] = None

-parser = argparse.ArgumentParser(add_help=False, description='Network reconnaissance tool to port scan and automatically enumerate services found on multiple targets.')
+parser = argparse.ArgumentParser(add_help=False, allow_abbrev=False, description='Network reconnaissance tool to port scan and automatically enumerate services found on multiple targets.')
 parser.add_argument('targets', action='store', help='IP addresses (e.g. 10.0.0.1), CIDR notation (e.g. 10.0.0.1/24), or resolvable hostnames (e.g. foo.bar) to scan.', nargs='*')
 parser.add_argument('-t', '--target-file', action='store', type=str, default='', help='Read targets from file.')
 parser.add_argument('-p', '--ports', action='store', type=str, help='Comma separated list of ports / port ranges to scan. Specify TCP/UDP ports by prepending list with T:/U: To scan both TCP/UDP, put port(s) at start or specify B: e.g. 53,T:21-25,80,U:123,B:123. Default: %(default)s')

@@ -1081,6 +1140,7 @@ async def run():
 autorecon.argparse.set_defaults(**{key: val})

 parser.add_argument('-h', '--help', action='help', default=argparse.SUPPRESS, help='Show this help message and exit.')
+parser.error = lambda s: fail(s[0].upper() + s[1:])
 args = parser.parse_args()

 args_dict = vars(args)

@@ -1138,7 +1198,7 @@ async def run():
 else:
 error('Invalid value provided to --max-plugin-global-instances. Values must be in the format PLUGIN:NUMBER.')

-for plugin in autorecon.plugins.values():
+for slug, plugin in autorecon.plugins.items():
 if hasattr(plugin, 'max_target_instances') and plugin.slug in max_plugin_target_instances:
 plugin.max_target_instances = max_plugin_target_instances[plugin.slug]

@@ -1147,7 +1207,9 @@ async def run():

 for member_name, _ in inspect.getmembers(plugin, predicate=inspect.ismethod):
 if member_name == 'check':
-plugin.check()
+if plugin.check() == False:
+autorecon.plugins.pop(slug)
+continue
 continue

 if config['ports']:

@@ -1315,6 +1377,7 @@ async def run():
 error('The target file ' + args.target_file + ' could not be read.')
 sys.exit(1)

+unresolvable_targets = False
 for target in raw_targets:
 try:
 ip = ipaddress.ip_address(target)

@@ -1394,8 +1457,12 @@ async def run():

 autorecon.pending_targets.append(Target(target, ip, 'IPv6', 'hostname', autorecon))
 except socket.gaierror:
+unresolvable_targets = True
 error(target + ' does not appear to be a valid IP address, IP range, or resolvable hostname.')
-errors = True

+if not args.disable_sanity_checks and unresolvable_targets == True:
+error('AutoRecon will not run if any targets are invalid / unresolvable. To override this, re-run with the --disable-sanity-checks option.')
+errors = True

 if len(autorecon.pending_targets) == 0:
 error('You must specify at least one target to scan!')

@@ -1446,6 +1513,9 @@ async def run():

 start_time = time.time()

+if not config['disable_keyboard_control']:
+terminal_settings = termios.tcgetattr(sys.stdin.fileno())

 pending = []
 i = 0
 while autorecon.pending_targets:

@@ -1507,19 +1577,23 @@ async def run():
 # If there's only one target we don't need a combined report
 if len(autorecon.completed_targets) > 1:
 for plugin in autorecon.plugin_types['report']:
-plugin_tag_set = set(plugin.tags)
+if config['reports'] and plugin.slug in config['reports']:
+matching_tags = True
+excluded_tags = False
+else:
+plugin_tag_set = set(plugin.tags)

-matching_tags = False
-for tag_group in autorecon.tags:
-if set(tag_group).issubset(plugin_tag_set):
-matching_tags = True
-break
+matching_tags = False
+for tag_group in autorecon.tags:
+if set(tag_group).issubset(plugin_tag_set):
+matching_tags = True
+break

-excluded_tags = False
-for tag_group in autorecon.excluded_tags:
-if set(tag_group).issubset(plugin_tag_set):
-excluded_tags = True
-break
+excluded_tags = False
+for tag_group in autorecon.excluded_tags:
+if set(tag_group).issubset(plugin_tag_set):
+excluded_tags = True
+break

 if matching_tags and not excluded_tags:
 pending.add(asyncio.create_task(generate_report(plugin, autorecon.completed_targets)))

@@ -1545,7 +1619,8 @@ async def run():

 if not config['disable_keyboard_control']:
 # Restore original terminal settings.
-termios.tcsetattr(sys.stdin, termios.TCSADRAIN, terminal_settings)
+if terminal_settings is not None:
+termios.tcsetattr(sys.stdin, termios.TCSADRAIN, terminal_settings)

 def main():
 # Capture Ctrl+C and cancel everything.

@@ -1,54 +0,0 @@
-[[package]]
-name = "appdirs"
-version = "1.4.4"
-description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-category = "main"
-optional = false
-python-versions = "*"

-[[package]]
-name = "colorama"
-version = "0.4.4"
-description = "Cross-platform colored terminal text."
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

-[[package]]
-name = "toml"
-version = "0.10.2"
-description = "Python Library for Tom's Obvious, Minimal Language"
-category = "main"
-optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"

-[[package]]
-name = "unidecode"
-version = "1.3.2"
-description = "ASCII transliterations of Unicode text"
-category = "main"
-optional = false
-python-versions = ">=3.5"

-[metadata]
-lock-version = "1.1"
-python-versions = "^3.7"
-content-hash = "681db41aa556d6d3f79e1e8ee0107bccd078e39c8db7e6e0159860c96ea93c5b"

-[metadata.files]
-appdirs = [
-{file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
-{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
-]
-colorama = [
-{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
-{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
-]
-toml = [
-{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
-{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
-]
-unidecode = [
-{file = "Unidecode-1.3.2-py3-none-any.whl", hash = "sha256:215fe33c9d1c889fa823ccb66df91b02524eb8cc8c9c80f9c5b8129754d27829"},
-{file = "Unidecode-1.3.2.tar.gz", hash = "sha256:669898c1528912bcf07f9819dc60df18d057f7528271e31f8ec28cc88ef27504"},
-]

@@ -1,2 +0,0 @@
-[virtualenvs]
-create = true

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "autorecon"
-version = "2.0.25"
+version = "2.0.32"
 description = "A multi-threaded network reconnaissance tool which performs automated enumeration of services."
 authors = ["Tib3rius"]
 license = "GNU GPL v3"

@@ -14,6 +14,7 @@ python = "^3.8"
 appdirs = "^1.4.4"
 colorama = "^0.4.5"
 impacket = "^0.10.0"
+psutil = "^5.9.4"
 requests = "^2.28.1"
 toml = "^0.10.2"
 Unidecode = "^1.3.1"

@@ -1,6 +1,7 @@
-appdirs
-colorama
-impacket
-requests
-toml
-unidecode
+appdirs>=1.4.4
+colorama>=0.4.5
+impacket>=0.10.0
+psutil>=5.9.4
+requests>=2.28.1
+toml>=0.10.2
+Unidecode>=1.3.1
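After pulling a change like this, the pinned minimums (including the new psutil dependency) can be picked up with the same command main.py prints when an import fails; shown here as a sketch for pip-based installs:

```bash
# Refresh dependencies to satisfy the new minimum versions.
python3 -m pip install -r requirements.txt
```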