Merge pull request #335 from 13ph03nix/fix/nuclei-module
Bug fixes & improvements
13ph03nix authored Nov 9, 2022
2 parents cf6b358 + 13e751f commit 774a58d
Showing 13 changed files with 110 additions and 81 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,11 @@
# version 2.0.1
----------------
* fix words matcher expression execution #333, thanks @soapffz
* fix catch binascii ValueError Exception #334, thanks @izj007
* improve template robustness
* support digest_username and digest_password
* support negative matchers

# version 2.0.0
----------------
* yaml poc support, compatible with nuclei
2 changes: 1 addition & 1 deletion manpages/poc-console.1
@@ -31,7 +31,7 @@ is maintained at:
.I https://pocsuite.org
.PP
.SH VERSION
This manual page documents pocsuite3 version 2.0.0
This manual page documents pocsuite3 version 2.0.1
.SH AUTHOR
.br
(c) 2014-present by Knownsec 404 Team
2 changes: 1 addition & 1 deletion manpages/pocsuite.1
@@ -286,7 +286,7 @@ is maintained at:
.I https://pocsuite.org
.PP
.SH VERSION
This manual page documents pocsuite3 version 2.0.0
This manual page documents pocsuite3 version 2.0.1
.SH AUTHOR
.br
(c) 2014-present by Knownsec 404 Team
2 changes: 1 addition & 1 deletion pocsuite3/__init__.py
@@ -1,5 +1,5 @@
__title__ = 'pocsuite3'
__version__ = '2.0.0'
__version__ = '2.0.1'
__author__ = 'Knownsec 404 Team'
__author_email__ = '[email protected]'
__license__ = 'GPLv2'
5 changes: 3 additions & 2 deletions pocsuite3/lib/yaml/nuclei/__init__.py
@@ -1,4 +1,5 @@
import binascii
import json
import re
from collections import OrderedDict

@@ -67,7 +68,7 @@ def __init__(self, template, target=''):
self.yaml_template = template
try:
self.yaml_template = binascii.unhexlify(self.yaml_template).decode()
except binascii.Error:
except ValueError:
pass
self.json_template = yaml.safe_load(expand_preprocessors(self.yaml_template))
self.template = dacite.from_dict(
Expand Down Expand Up @@ -143,7 +144,7 @@ def __str__(self):
if k in key_convert:
k = key_convert.get(k)
if type(v) in [str]:
v = f'\'\'\'{v.strip()}\'\'\''
v = json.dumps(v.strip())

info.append(f' {k} = {v}')

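Two small behaviours change in this file: the unhexlify guard now catches ValueError instead of binascii.Error, and the template's `__str__` serializes string values with json.dumps instead of triple-quoting. A standalone sketch of why (function name is illustrative, not the module's API):

```python
import binascii
import json

# Both failure modes of `binascii.unhexlify(s).decode()` are ValueError
# subclasses, so a single `except ValueError` covers them:
#   binascii.Error     -- the input is not valid hex at all
#   UnicodeDecodeError -- valid hex, but the bytes are not valid UTF-8
assert issubclass(binascii.Error, ValueError)
assert issubclass(UnicodeDecodeError, ValueError)

def maybe_unhexlify(template: str) -> str:
    """Return the decoded template if it was hex-encoded, else leave it alone."""
    try:
        return binascii.unhexlify(template).decode()
    except ValueError:
        return template

print(maybe_unhexlify('69643a2064656d6f'))  # hex of 'id: demo' -> decoded
print(maybe_unhexlify('id: demo'))          # plain YAML -> returned unchanged
print(maybe_unhexlify('ff'))                # valid hex, invalid UTF-8 -> unchanged

# json.dumps escapes double quotes, backslashes and newlines, so the value
# stays a single valid string literal regardless of its content.
print(json.dumps("a 'tricky' value\nwith a newline"))
```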
14 changes: 11 additions & 3 deletions pocsuite3/lib/yaml/nuclei/model/__init__.py
@@ -5,7 +5,15 @@
StrSlice = NewType('StrSlice', Union[str, list])


class Severify(Enum):
class CaseInsensitiveEnum(Enum):
@classmethod
def _missing_(cls, value: str):
for member in cls:
if member.value == value.lower():
return member


class Severify(CaseInsensitiveEnum):
Info = 'info'
Low = 'low'
Medium = 'medium'
@@ -26,14 +34,14 @@ class Classification:

@dataclass
class Info:
"""Info contains metadata information abount a template
"""Info contains metadata information about a template
"""
name: str = ''
author: StrSlice = field(default_factory=list)
tags: StrSlice = field(default_factory=list)
description: str = ''
reference: StrSlice = field(default_factory=list)
severity: Severify = 'unknown'
severity: Severify = Severify.Unknown
metadata: dict = field(default_factory=dict)
classification: Classification = field(default_factory=Classification)
remediation: str = ''
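The `_missing_` hook is what makes the enum lookup case-insensitive: Enum only calls it after the normal by-value lookup fails. A minimal, self-contained sketch of the same pattern (member list abbreviated from the diff):

```python
from enum import Enum

class CaseInsensitiveEnum(Enum):
    @classmethod
    def _missing_(cls, value: str):
        # Retry the failed lookup with a lower-cased value so 'HIGH',
        # 'High' and 'high' all resolve to the same member.
        for member in cls:
            if member.value == value.lower():
                return member

class Severify(CaseInsensitiveEnum):
    Info = 'info'
    Low = 'low'
    Medium = 'medium'
    High = 'high'

print(Severify('HIGH'))   # Severify.High
print(Severify('high'))   # Severify.High
# An unknown value still raises ValueError, because _missing_ returns None.
```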
6 changes: 3 additions & 3 deletions pocsuite3/lib/yaml/nuclei/operators/extrators/__init__.py
@@ -1,17 +1,17 @@
import json
import re
from dataclasses import dataclass, field
from enum import Enum
from typing import List

from lxml import etree
from requests.structures import CaseInsensitiveDict

from pocsuite3.lib.core.log import LOGGER as logger
from pocsuite3.lib.yaml.nuclei.model import CaseInsensitiveEnum
from pocsuite3.lib.yaml.nuclei.protocols.common.expressions import evaluate, UNRESOLVED_VARIABLE, Marker


class ExtractorType(Enum):
class ExtractorType(CaseInsensitiveEnum):
RegexExtractor = "regex"
KValExtractor = "kval"
XPathExtractor = "xpath"
@@ -27,7 +27,7 @@ class Extractor:
name: str = ''

# Type is the type of the extractor.
type: ExtractorType = 'regex'
type: ExtractorType = ExtractorType.RegexExtractor

# Regex contains the regular expression patterns to extract from a part.
regex: List[str] = field(default_factory=list)
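Besides the case-insensitive base class, the default for `type` moves from the raw string 'regex' to the enum member. A trimmed sketch of why that matters for the `==` comparisons done later against ExtractorType (the Old/New class names are illustrative, not the module's code):

```python
from dataclasses import dataclass
from enum import Enum

class ExtractorType(Enum):
    RegexExtractor = 'regex'
    KValExtractor = 'kval'

@dataclass
class ExtractorOld:
    type: ExtractorType = 'regex'                        # annotation says enum, default is a str

@dataclass
class ExtractorNew:
    type: ExtractorType = ExtractorType.RegexExtractor   # default is the enum member itself

print(ExtractorOld().type == ExtractorType.RegexExtractor)  # False -- str vs enum member
print(ExtractorNew().type == ExtractorType.RegexExtractor)  # True
```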
13 changes: 10 additions & 3 deletions pocsuite3/lib/yaml/nuclei/operators/matchers/__init__.py
@@ -1,13 +1,13 @@
import binascii
import re
from dataclasses import dataclass, field
from enum import Enum
from typing import List

from pocsuite3.lib.yaml.nuclei.model import CaseInsensitiveEnum
from pocsuite3.lib.yaml.nuclei.protocols.common.expressions import evaluate, Marker


class MatcherType(Enum):
class MatcherType(CaseInsensitiveEnum):
StatusMatcher = "status"
SizeMatcher = "size"
WordsMatcher = "word"
@@ -22,7 +22,7 @@ class Matcher:
"""

# Type is the type of the matcher.
type: MatcherType = 'word'
type: MatcherType = MatcherType.WordsMatcher

# Condition is the optional condition between two matcher variables. By default, the condition is assumed to be OR.
condition: str = 'or'
@@ -86,6 +86,13 @@ def match_words(matcher: Matcher, corpus: str, data: dict) -> (bool, list):
matched_words = []
for i, word in enumerate(matcher.words):
word = evaluate(word, data)
if matcher.encoding == 'hex':
try:
word = binascii.unhexlify(word).decode()
except (ValueError, UnicodeDecodeError):
pass
if matcher.case_insensitive:
word = word.lower()

if word not in corpus:
if matcher.condition == 'and':
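The new per-word handling decodes hex-encoded words and lower-cases them for case-insensitive matchers. A simplified, standalone sketch of that normalisation (the helper name and the explicit corpus lower-casing are illustrative, not the module's code):

```python
import binascii

def normalize_word(word: str, encoding: str = '', case_insensitive: bool = False) -> str:
    if encoding == 'hex':
        try:
            word = binascii.unhexlify(word).decode()
        except (ValueError, UnicodeDecodeError):
            pass  # not valid hex / not valid UTF-8: keep the word as written
    if case_insensitive:
        word = word.lower()
    return word

corpus = '<title>Admin Console</title>'.lower()  # compare both sides lower-cased
word = normalize_word('41646d696e', encoding='hex', case_insensitive=True)  # hex of 'Admin'
print(word, word in corpus)  # admin True
```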
pocsuite3/lib/yaml/nuclei/protocols/common/generators/__init__.py
@@ -1,11 +1,11 @@
import itertools
from collections import OrderedDict
from enum import Enum

from pocsuite3.lib.core.common import check_file, get_file_items
from pocsuite3.lib.yaml.nuclei.model import CaseInsensitiveEnum


class AttackType(Enum):
class AttackType(CaseInsensitiveEnum):
BatteringRamAttack = "batteringram"
PitchForkAttack = "pitchfork"
ClusterBombAttack = "clusterbomb"
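The hunk itself only swaps the Enum base class, but for context, the three attack types follow nuclei's documented payload-combination semantics: batteringram reuses one payload list for every position, pitchfork walks the lists in parallel, clusterbomb takes the full cartesian product. A rough itertools illustration of the latter two (this is not the module's payload_generator):

```python
import itertools

payloads = {'user': ['admin', 'guest'], 'pass': ['admin', '123456']}

# pitchfork: one list per position, iterated in lock-step
pitchfork = [dict(zip(payloads, combo)) for combo in zip(*payloads.values())]

# clusterbomb: every combination of every list
clusterbomb = [dict(zip(payloads, combo)) for combo in itertools.product(*payloads.values())]

print(len(pitchfork), pitchfork)      # 2 combinations
print(len(clusterbomb), clusterbomb)  # 4 combinations
```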
73 changes: 38 additions & 35 deletions pocsuite3/lib/yaml/nuclei/protocols/http/__init__.py
@@ -1,12 +1,13 @@
from collections import OrderedDict
from dataclasses import dataclass, field
from enum import Enum
from typing import Union, List
from typing import Union, List, Optional

from requests_toolbelt.utils import dump

from pocsuite3.lib.core.data import AttribDict
from pocsuite3.lib.core.log import LOGGER as logger
from pocsuite3.lib.request import requests
from pocsuite3.lib.yaml.nuclei.model import CaseInsensitiveEnum
from pocsuite3.lib.yaml.nuclei.operators import (Extractor, ExtractorType,
Matcher, MatcherType,
extract_dsl, extract_json,
@@ -16,11 +17,12 @@
match_size, match_status_code,
match_words)
from pocsuite3.lib.yaml.nuclei.protocols.common.generators import AttackType, payload_generator
from pocsuite3.lib.yaml.nuclei.protocols.common.interactsh import InteractshClient
from pocsuite3.lib.yaml.nuclei.protocols.common.replacer import (
UnresolvedVariableException, UNRESOLVED_VARIABLE, marker_replace, Marker)


class HTTPMethod(Enum):
class HTTPMethod(CaseInsensitiveEnum):
HTTPGet = "GET"
HTTPHead = "HEAD"
HTTPPost = "POST"
@@ -55,10 +57,10 @@ class HttpRequest:

name: str = ''
# Attack is the type of payload combinations to perform.
attack: AttackType = 'batteringram'
attack: AttackType = AttackType.BatteringRamAttack

# Method is the HTTP Request Method.
method: HTTPMethod = 'GET'
method: Optional[HTTPMethod] = HTTPMethod.HTTPGet

# Body is an optional parameter which contains HTTP Request body.
body: str = ''
@@ -76,10 +78,10 @@ class HttpRequest:
max_redirects: int = 0

# PipelineConcurrentConnections is number of connections to create during pipelining.
pipeline_concurrent_connections = 0
pipeline_concurrent_connections: int = 0

# PipelineRequestsPerConnection is number of requests to send per connection when pipelining.
pipeline_requests_per_connection = 0
pipeline_requests_per_connection: int = 0

# Threads specifies number of threads to use sending requests. This enables Connection Pooling.
threads: int = 0
@@ -110,7 +112,7 @@ class HttpRequest:
def http_response_to_dsl_map(resp: requests.Response):
"""Converts an HTTP response to a map for use in DSL matching
"""
data = {}
data = AttribDict()
if not isinstance(resp, requests.Response):
return data
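`http_response_to_dsl_map` now seeds an AttribDict instead of a plain dict. Assuming AttribDict behaves like pocsuite3's usual attribute-accessible dict (an assumption, not shown in this diff), a rough stand-in looks like this:

```python
class AttribDictSketch(dict):
    """Rough stand-in: a dict whose keys are also reachable as attributes."""
    def __getattr__(self, item):
        try:
            return self[item]
        except KeyError as exc:
            raise AttributeError(item) from exc

    def __setattr__(self, key, value):
        self[key] = value

data = AttribDictSketch()
data['status_code'] = 200
print(data.status_code)      # 200 -- attribute-style access
print(data.get('body', ''))  # ''  -- still a normal dict, .get() keeps working
```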

@@ -142,25 +144,27 @@ def http_response_to_dsl_map(resp: requests.Response):


def http_get_match_part(part: str, resp_data: dict, interactsh=None, return_bytes: bool = False) -> str:
result = ''
if part == '':
part = 'body'

if part in resp_data:
result = resp_data[part]
elif part == 'interactsh_protocol':
interactsh.poll()
result = '\n'.join(interactsh.interactsh_protocol)
elif part == 'interactsh_request':
interactsh.poll()
result = '\n'.join(interactsh.interactsh_request)
elif part == 'interactsh_response':
interactsh.poll()
result = '\n'.join(interactsh.interactsh_response)
else:
result = ''
elif part.startswith('interactsh'):
if not isinstance(interactsh, InteractshClient):
result = ''
# poll oob data
else:
interactsh.poll()
if part == 'interactsh_protocol':
result = '\n'.join(interactsh.interactsh_protocol)
elif part == 'interactsh_request':
result = '\n'.join(interactsh.interactsh_request)
elif part == 'interactsh_response':
result = '\n'.join(interactsh.interactsh_response)

if return_bytes and not isinstance(result, bytes):
result = result.encode()
result = str(result).encode()
elif not return_bytes and isinstance(result, bytes):
try:
result = result.decode()
@@ -178,28 +182,27 @@ def http_match(request: HttpRequest, resp_data: dict, interactsh=None):
item = http_get_match_part(matcher.part, resp_data, interactsh, matcher.type == MatcherType.BinaryMatcher)

if matcher.type == MatcherType.StatusMatcher:
matcher_res = match_status_code(matcher, resp_data['status_code'])
logger.debug(f'[+] {matcher} -> {matcher_res}')
matcher_res = match_status_code(matcher, resp_data.get('status_code', 0))

elif matcher.type == MatcherType.SizeMatcher:
matcher_res = match_size(matcher, len(item))
logger.debug(f'[+] {matcher} -> {matcher_res}')

elif matcher.type == MatcherType.WordsMatcher:
matcher_res, _ = match_words(matcher, item, {})
logger.debug(f'[+] {matcher} -> {matcher_res}')
matcher_res, _ = match_words(matcher, item, resp_data)

elif matcher.type == MatcherType.RegexMatcher:
matcher_res, _ = match_regex(matcher, item)
logger.debug(f'[+] {matcher} -> {matcher_res}')

elif matcher.type == MatcherType.BinaryMatcher:
matcher_res, _ = match_binary(matcher, item)
logger.debug(f'[+] {matcher} -> {matcher_res}')

elif matcher.type == MatcherType.DSLMatcher:
matcher_res = match_dsl(matcher, resp_data)
logger.debug(f'[+] {matcher} -> {matcher_res}')

if matcher.negative:
matcher_res = not matcher_res

logger.debug(f'[+] {matcher} -> {matcher_res}')

if not matcher_res:
if request.matchers_condition == 'and':
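The per-matcher debug line is factored out and, new in this release, `matcher.negative` inverts the individual result before it is combined with the request-level condition. A simplified illustration of that flow (plain words-only matchers, not the module's Matcher objects):

```python
def evaluate_matchers(matchers, corpus, matchers_condition='and'):
    results = []
    for words, negative in matchers:
        res = all(w in corpus for w in words)  # stand-in for match_words & co.
        if negative:
            res = not res                      # negative matcher: flip the result
        results.append(res)
    return all(results) if matchers_condition == 'and' else any(results)

corpus = 'HTTP/1.1 200 OK\nX-Powered-By: PHP/7.4.3'
matchers = [
    (['PHP/7.4'], False),       # must be present
    (['Access denied'], True),  # negative: must be absent
]
print(evaluate_matchers(matchers, corpus))  # True
```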
@@ -228,20 +231,16 @@ def http_extract(request: HttpRequest, resp_data: dict):
res = None
if extractor.type == ExtractorType.RegexExtractor:
res = extract_regex(extractor, item)
logger.debug(f'[+] {extractor} -> {res}')
elif extractor.type == ExtractorType.KValExtractor:
res = extract_kval(extractor, resp_data['kval_extractor_dict'])
logger.debug(f'[+] {extractor} -> {res}')
res = extract_kval(extractor, resp_data.get('kval_extractor_dict', {}))
elif extractor.type == ExtractorType.XPathExtractor:
res = extract_xpath(extractor, item)
logger.debug(f'[+] {extractor} -> {res}')
elif extractor.type == ExtractorType.JSONExtractor:
res = extract_json(extractor, item)
logger.debug(f'[+] {extractor} -> {res}')
elif extractor.type == ExtractorType.DSLExtractor:
res = extract_dsl(extractor, resp_data)
logger.debug(f'[+] {extractor} -> {res}')

logger.debug(f'[+] {extractor} -> {res}')
extractors_result['internal'].update(res['internal'])
extractors_result['external'].update(res['external'])
extractors_result['extra_info'] += res['extra_info']
@@ -264,6 +263,7 @@ def http_request_generator(request: HttpRequest, dynamic_values: OrderedDict):
current_index += 1
method, url, headers, data, kwargs = '', '', '', '', OrderedDict()
# base request
username, password = request.digest_username, request.digest_password
if path.startswith(Marker.ParenthesisOpen):
method = request.method.value
headers = request.headers
@@ -295,13 +295,16 @@ def http_request_generator(request: HttpRequest, dynamic_values: OrderedDict):
kwargs.setdefault('allow_redirects', request.redirects)
kwargs.setdefault('data', data)
kwargs.setdefault('headers', headers)

if username or password:
kwargs.setdefault('auth', (username, password))
try:
url = marker_replace(url, dynamic_values)
kwargs = marker_replace(kwargs, dynamic_values)
except UnresolvedVariableException:
continue

if 'auth' in kwargs:
kwargs['auth'] = tuple(kwargs['auth'])
yield method, url, kwargs, payload_instance, request_count, current_index


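Credentials from digest_username/digest_password are now passed through the request kwargs, and the auth value is forced back to a tuple after marker replacement. That restore suggests the replacement step can hand the pair back as a list; a standalone sketch of that failure mode, assuming a JSON-style round trip (an assumption, the diff does not show marker_replace's internals):

```python
import json

kwargs = {'auth': ('admin', 's3cr3t'), 'timeout': 10}

# A JSON-style round trip has no tuple type, so the credential pair comes
# back as a list.
round_tripped = json.loads(json.dumps(kwargs))
print(type(round_tripped['auth']))  # <class 'list'>

# requests only special-cases a two-item *tuple* as shorthand credentials,
# hence the explicit conversion before the request is issued.
if 'auth' in round_tripped:
    round_tripped['auth'] = tuple(round_tripped['auth'])
print(round_tripped['auth'])        # ('admin', 's3cr3t')
```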