Merge branch 'hashlist'
alpatron committed Aug 12, 2024
2 parents 49979f4 + a2dc70a commit b43bb08
Showing 87 changed files with 491 additions and 316 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -0,0 +1,2 @@
.env
fitcrack-data/*
24 changes: 24 additions & 0 deletions update_be.sh
@@ -0,0 +1,24 @@
#!/bin/bash

CONTAINER=fc-fitcrack_server-1
ROOT_INSIDE=/var/www/html
PATH_TO_API=$1

# check if cwd is the root of the project
if [ ! -f "Dockerfile" ]; then
echo "You must run this script from the root of the project"
exit 1
fi

# Set default value for PATH_TO_API if not provided
if [ -z "$PATH_TO_API" ]; then
PATH_TO_API="fitcrackAPI/src/src"
echo "No path provided, using default path: $PATH_TO_API"
fi

# remove the old directory inside the container
docker exec -t $CONTAINER rm -rf $ROOT_INSIDE/$PATH_TO_API
# copy the new directory inside the container
docker cp webadmin/$PATH_TO_API $CONTAINER:$ROOT_INSIDE/$PATH_TO_API
# restart the server inside the container
docker exec -t $CONTAINER service apache2 restart
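Usage note: run the script from the project root. With no argument it syncs the default API directory, e.g. `./update_be.sh`; to sync a different subtree, pass it as the first argument, e.g. `./update_be.sh fitcrackAPI/src/src` (the path is resolved relative to webadmin/ on the host and to /var/www/html inside the container).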
1 change: 1 addition & 0 deletions webadmin/AUTHORS
@@ -2,3 +2,4 @@ Matus Mucka
Adam Horak
David Bolvansky
Radek Hranicky
Viktor Rucky
src/api/fitcrack/endpoints/hashlists/functions.py
@@ -43,37 +43,39 @@ def hash_to_bytes(hashObj:str|bytes) -> bytes:

if hash_list.is_locked:
abort(400,'Hash list is locked for editing.')

new_hashes_bin_list = list(map(hash_to_bytes, new_hashes))

new_hashes_bin = set(new_hashes_bin_list)

if not binary_hash:
validation_result = validate_hash_list(new_hashes_bin,hash_type,False)
else:
if len(new_hashes_bin) != 1:
abort(500,'If input is binary, then hash list may only contain one hash.')
base_64 = base64.encodebytes(list(new_hashes_bin)[0]).decode("ascii")
validation_result = validate_hash_list(new_hashes_bin,hash_type,False, f'BASE64<{base_64[0:64]}{"..." if len(base_64) > 64 else ""}>') #Placeholder is the first 64 characters of BASE 64 representation

errored_validation_results = [x for x in validation_result['items'] if x['result'] != 'OK']
error_output = {bytes(x['hash'],encoding='ascii'):x['result'] for x in errored_validation_results}

if validation_result['error'] == True:
if validation_mode == 'fail_invalid':
return {
'result' : 'FUCKED',
'id' : hash_list.id,
'name' : hash_list.name,
'hashCount' : hash_list.hash_count,
'addedCount' : 0,
'erroredCount' : len(errored_validation_results),
'errors' : [{
'order_in_input': new_hashes_bin_list.index(hash),
'error' : error}
for (hash,error) in error_output.items()]
},400
if validation_mode == 'skip_invalid':
new_hashes_bin = new_hashes_bin - error_output.keys()
if validation_mode != 'no_validate':
if not binary_hash:
validation_result = validate_hash_list(new_hashes_bin,hash_type,False)
else:
if len(new_hashes_bin) != 1:
abort(500,'If input is binary, then hash list may only contain one hash.')
base_64 = base64.encodebytes(list(new_hashes_bin)[0]).decode("ascii")
validation_result = validate_hash_list(new_hashes_bin,hash_type,False, f'BASE64<{base_64[0:64]}{"..." if len(base_64) > 64 else ""}>') #Placeholder is the first 64 characters of BASE 64 representation

errored_validation_results = [x for x in validation_result['items'] if x['result'] != 'OK']
error_output = {bytes(x['hash'],encoding='ascii'):x['result'] for x in errored_validation_results}

if validation_result['error'] == True:
if validation_mode == 'fail_invalid':
return {
'result' : 'FUCKED',
'id' : hash_list.id,
'name' : hash_list.name,
'hashCount' : hash_list.hash_count,
'addedCount' : 0,
'erroredCount' : len(errored_validation_results),
'errors' : [{
'order_in_input': new_hashes_bin_list.index(hash),
'error' : error}
for (hash,error) in error_output.items()]
},400
if validation_mode == 'skip_invalid':
new_hashes_bin = new_hashes_bin - error_output.keys()
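# Validation modes accepted by this code path:
#   'fail_invalid' - return an error response if any hash fails validation
#   'skip_invalid' - drop the failing hashes (the set difference above) and keep the rest
#   'no_validate'  - skip validation entirely (the /extract endpoint uses this,
#                    since hash extraction already validates the hash)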

if hash_list.hash_type is None:
hash_list.hash_type = hash_type
@@ -89,7 +91,7 @@ def hash_to_bytes(hashObj:str|bytes) -> bytes:
db.session.add(hash)

#WARNING: This function is called at the end of endpoints, so the db session might contain
#more objects than just the hashhes added in this function. For example, the adding endpoints
#more objects than just the hashes added in this function. For example, the adding endpoints
#have a "new" variant that also adds a new hash list.
db.session.commit()

@@ -105,7 +107,7 @@ def hash_to_bytes(hashObj:str|bytes) -> bytes:
'error' : error}
for (hash,error) in error_output.items()
]
}
},200


def validate_hash_list(hash_list:list[bytes],hash_type:str,valid_only:bool,binary_placeholder:Optional[str]=None):
@@ -171,6 +173,7 @@ def acquire_hash_list(id:str,name:Optional[str]=None) -> FcHashList:
if id == 'new':
hash_list = FcHashList(name=name if name is not None else f'Unnamed automatically created hash list')
db.session.add(hash_list)
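# flush (without committing) so the freshly added hash list is assigned its database id before it is used further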
db.session.flush()
else:
try:
id = int(id)
src/api/fitcrack/endpoints/hashlists/hashLists.py
@@ -12,7 +12,7 @@

from src.api.apiConfig import api
from src.api.fitcrack.endpoints.hashlists.argumentsParser import make_empty_hash_list_parser, hash_list_parser, hash_list_add_hash_list_parser, hash_list_add_hash_file_parser, hash_list_hashes_parser_paginated, hash_list_hashes_parser, hash_list_add_protected_file_parser
from src.api.fitcrack.endpoints.hashlists.responseModels import empty_hash_list_created_model, page_of_hash_lists_model, hash_addition_result_model, page_of_hashes_model, hash_list_model_long
from src.api.fitcrack.endpoints.hashlists.responseModels import empty_hash_list_created_model, page_of_hash_lists_model, hash_addition_result_model, page_of_hashes_model, hash_list_model_long, hash_extraction_result_model
from src.api.fitcrack.endpoints.hashlists.functions import upload_hash_list, build_hash_query, acquire_hash_list
from src.api.fitcrack.endpoints.protectedFile.functions import addProtectedFile
from src.api.fitcrack.endpoints.job.functions import editable_jobs_ids, kill_job
@@ -61,7 +61,7 @@ def get(self):
else:
hash_list_query = hash_list_query.order_by(FcHashList.id.asc())

hash_list_page = hash_list_query.paginate(page,per_page,error_out=True)
hash_list_page = hash_list_query.paginate(page,per_page if per_page != -1 else 10000,error_out=True) # This is an ugly ugly hack, but I don't think anyone will run Fitcrack with more than 10000 hashlists.

return hash_list_page

@@ -165,7 +165,7 @@ def get(self, id:int):


@ns.route('/<id>/hashes')
class hashListUploadList(Resource):
class HashListUploadList(Resource):

@api.expect(hash_list_add_hash_list_parser)
@api.marshal_with(hash_addition_result_model)
@@ -187,7 +187,7 @@ def post(self,id:str):


@ns.route('/<id>/file')
class hashListUploadHashFile(Resource):
class HashListExtractHash(Resource):
@api.expect(hash_list_add_hash_file_parser)
@api.marshal_with(hash_addition_result_model)
def post(self,id:str):
@@ -215,9 +215,9 @@ def post(self,id:str):


@ns.route('/<id>/extract')
class hashListUploadHashFile(Resource):
class HashListExtractHash(Resource):
@api.expect(hash_list_add_protected_file_parser)
@api.marshal_with(hash_addition_result_model)
@api.marshal_with(hash_extraction_result_model)
def post(self,id:str):
"""
Extracts the hash from the given protected file and adds it to the hash list with the given id.
@@ -232,13 +232,17 @@ def post(self,id:str):
if args['file'].filename == '': #I don't know if we still need this... since we are using this new and fancy way...? I suppose we do?
abort(500, 'No selected file')

result = addProtectedFile(args.file)
extraction_result = addProtectedFile(args.file)

return upload_hash_list([result['hash']],hash_list,int(result['hash_type']),'fail_invalid',False)
#No need to validate the input to here because the extraction function already does validation for us.
upload_result, exit_code = upload_hash_list([extraction_result['hash']],hash_list,int(extraction_result['hash_type']),'no_validate',False)

upload_result['extracted_easy_hash'] = extraction_result['easy_hash']
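# upload_hash_list returns a (response dict, HTTP status) pair; returning both preserves the status code.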
return upload_result, exit_code


@ns.route('/<int:id>/purge')
class hashListPurge(Resource):
class HashListPurge(Resource):
@api.response(200, 'Hash list purged.')
@api.response(403, 'Hash list contains jobs that you do not have rights to; cannot perform purge.')
def post(self,id:int):
src/api/fitcrack/endpoints/hashlists/responseModels.py
@@ -18,19 +18,13 @@
'cracked_hash_count' : fields.Integer(readOnly=True, required=True),
'added': fields.DateTime(readOnly=True, required=True),
'job_count' : fields.Integer(readOnly=True, required=True),
'is_locked' : fields.Boolean(readOnly=True, required=True)
'is_locked' : fields.Boolean(readOnly=True, required=True),
'deleted' : fields.Boolean(readOnly=True, required=True)
})


hash_list_model_long = api.model('short hash list', {
'id': fields.Integer(readOnly=True, required=False),
'name' : fields.String(readOnly=True,required=True),
'hash_type_name' : fields.String('hash_type',readOnly=True),
'added': fields.DateTime(readOnly=True, required=True),
'hash_count' : fields.Integer(readOnly=True, required=True),
'cracked_hash_count' : fields.Integer(readOnly=True, required=True),
hash_list_model_long = api.inherit('long hash list', hash_list_model_short, {
'jobs' : fields.List(fields.Nested(job_nano_model)),
'is_locked' : fields.Boolean(readOnly=True, required=True)
})


@@ -66,3 +60,8 @@
'erroredCount' : fields.Integer(),
'errors': fields.List(fields.Nested(error_model))
})


hash_extraction_result_model = api.inherit('extracted hashes', hash_addition_result_model, {
'extracted_easy_hash' : fields.Boolean()
})
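The refactor above works because flask-restx models support inheritance: api.inherit copies every field of the parent model and adds the new ones, so hash_list_model_long and hash_extraction_result_model no longer duplicate their parents' field lists. A minimal self-contained sketch of the pattern (the 'base item'/'detailed item' model names are illustrative, not from Fitcrack):

from flask_restx import Api, fields

api = Api()

base_model = api.model('base item', {
    'id': fields.Integer(readOnly=True),
    'name': fields.String(required=True),
})

# 'detailed item' carries every field of 'base item' plus 'description';
# later changes to the parent model propagate to the child automatically.
detailed_model = api.inherit('detailed item', base_model, {
    'description': fields.String(),
})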
src/api/fitcrack/functions.py
@@ -207,7 +207,11 @@ def verifyHashFormat(hash, hash_type, abortOnFail=False, binaryHash=False) -> ve
"{} -m {} {} --show --machine-readable".format(HASHCAT_PATH, hash_type, hash), getReturnCode=True
)

if binaryHash:
if result['returnCode'] not in (0, 255): # 0 means success (at least one hash passes); 255 means error (no hash passes); everything else is bad and validation failed completely.
with open(hash, "r") as hashFile:
for h in hashFile.readlines():
hashes.append((h.strip(),"Hashcat crashed or exited unexpectedly when validating; no hashes could be validated."))
elif binaryHash:
hashes = [('HASH','OK') if result['returnCode'] == 0 else ('HASH', 'Token length exception')]
else:
hash_validity = {}
src/api/fitcrack/endpoints/protectedFile/functions.py
@@ -11,15 +11,16 @@

from settings import XTOHASHCAT_PATH, XTOHASHCAT_EXECUTABLE, PROTECTEDFILES_DIR
from src.api.fitcrack.functions import shellExec, fileUpload
from src.api.fitcrack.endpoints.hashlists.functions import validate_hash_list
from src.database import db
from src.database.models import FcEncryptedFile


ALLOWED_EXTENSIONS = set(["doc", "docx", "xls", "xlsx", "ppt", "pptx", "pdf", "rar", "zip", "7z"])


def getHashFromFile(filename, path):
res = shellExec('python3 ' + XTOHASHCAT_EXECUTABLE + ' ' + os.path.join(PROTECTEDFILES_DIR, path), cwd=XTOHASHCAT_PATH, getReturnCode=True)
def getHashFromFile(filename, path, extract_easy_hash:bool=False):
res = shellExec('python3 ' + XTOHASHCAT_EXECUTABLE + (' -e ' if extract_easy_hash else ' ') + os.path.join(PROTECTEDFILES_DIR, path), cwd=XTOHASHCAT_PATH, getReturnCode=True)
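# The '-e' switch asks XtoHashcat for the shorter "easy hash" variant (see addProtectedFile's docstring below).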
if res['returnCode'] == 2:
abort(500, 'Hashcat doesn\'t support PKZIP.')
if res['returnCode'] != 0:
@@ -32,9 +33,32 @@ def getHashFromFile(filename, path):


def addProtectedFile(file:typing.IO):
"""
Extracts and verifies a hash from a protected file.
On success, returns a dict for outputting to the API user.
The protected file and its hash are added to the FcEncryptedFile table.
If at first the verification of the extracted hash fails, the function
tries extracting the hash again with the "easy hash" option, which
produces a potentially shorter hash (with an increased chance of false
positives). Due to its shorter length, this hash may pass verification;
this is because hashcat has some limits on hash length. If the hash still
cannot be verified, this function aborts.
"""

uploadedFile = fileUpload(file, PROTECTEDFILES_DIR, ALLOWED_EXTENSIONS, withTimestamp=True)
if uploadedFile:
loadedHash = getHashFromFile(filename=uploadedFile['filename'], path=uploadedFile['path'])
easyHash = False
loadedHash = getHashFromFile(filename=uploadedFile['filename'], path=uploadedFile['path'], extract_easy_hash=False)
verifyResult = validate_hash_list([loadedHash['hash'].encode('ascii')],loadedHash['hash_type'],False)
if verifyResult['error']:
easyHash = True
loadedHash = getHashFromFile(filename=uploadedFile['filename'], path=uploadedFile['path'], extract_easy_hash=True)
verifyResult = validate_hash_list([loadedHash['hash'].encode('ascii')],loadedHash['hash_type'],False)
if verifyResult['error']:
abort(500, 'Could not extract hash from file.')

encFile = FcEncryptedFile(name=uploadedFile['filename'], path=uploadedFile['path'], hash=loadedHash['hash'].encode(),
hash_type=loadedHash['hash_type'])
try:
@@ -49,7 +73,8 @@
'hash': loadedHash['hash'],
'hash_type': loadedHash['hash_type'],
'hash_type_name': encFile.hash_type_name,
'file_id': encFile.id
'file_id': encFile.id,
'easy_hash': easyHash
}
else:
abort(500, 'We only support ' + ', '.join(str(x) for x in ALLOWED_EXTENSIONS) + '.')
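End to end, the new /extract flow can be exercised with any HTTP client. A hypothetical sketch using the requests library (the base URL and route prefix are assumptions; the 'file' form field and the 'extracted_easy_hash' response field come from the code and models above):

import requests

# Assumed server address and route prefix; per acquire_hash_list, the id
# segment may be 'new' to create a hash list on the fly.
resp = requests.post(
    'http://localhost:5000/hashlists/new/extract',
    files={'file': open('protected.docx', 'rb')},
)
data = resp.json()

# 'extracted_easy_hash' reports whether the shorter fallback hash was used;
# per addProtectedFile's docstring it carries a higher false-positive risk.
if data.get('extracted_easy_hash'):
    print('easy hash extracted for hash list', data.get('id'))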