Merge pull request #473 from owasp-noir/llm-integration

Add AI Integration (LLM)

hahwul authored Jan 2, 2025
2 parents 2b5775b + bccdc99 commit 6d3cfb7
Showing 12 changed files with 170 additions and 4 deletions.
5 changes: 5 additions & 0 deletions .github/labeler.yml
@@ -52,3 +52,8 @@
🐳 docker:
- changed-files:
- any-glob-to-any-file: Dockerfile
🤖 llm:
- changed-files:
- any-glob-to-any-file:
- src/llm/**
- src/analyzer/analyzers/llm_analyzers/**
4 changes: 2 additions & 2 deletions shard.lock
@@ -2,7 +2,7 @@ version: 2.0
shards:
crest:
git: https://github.com/mamantoha/crest.git
version: 1.3.13
version: 1.4.1

har:
git: https://github.com/neuralegion/har.git
@@ -14,5 +14,5 @@ shards:

http_proxy:
git: https://github.com/mamantoha/http_proxy.git
version: 0.10.3
version: 0.12.1

2 changes: 1 addition & 1 deletion shard.yml
@@ -20,6 +20,6 @@ targets:
dependencies:
crest:
github: mamantoha/crest
version: ~> 1.3.13
version: ~> 1.4.0
har:
github: NeuraLegion/har
9 changes: 9 additions & 0 deletions spec/functional_test/fixtures/hahwul/for_llm.hahwul
@@ -0,0 +1,9 @@
get '/' do
puts param['query']
puts cookies[:cookie1]
puts cookies["cookie2"]
end

post "/update" do
puts "update"
end
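
For reference (not part of this commit), the JSON that the new Ollama analyzer is prompted to return for this fixture would look roughly like the sketch below; the concrete values are assumptions about what a well-behaved model would emit for the routes above.

require "json"

# Hypothetical expected extraction for for_llm.hahwul: one GET endpoint with a
# query parameter and two cookies, plus one POST endpoint with no parameters.
expected = JSON.parse <<-RESPONSE
[
  {"url": "/", "method": "GET", "params": [
    {"name": "query",   "param_type": "query",  "value": ""},
    {"name": "cookie1", "param_type": "cookie", "value": ""},
    {"name": "cookie2", "param_type": "cookie", "value": ""}
  ]},
  {"url": "/update", "method": "POST", "params": []}
]
RESPONSE
puts expected.as_a.size # => 2
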
6 changes: 6 additions & 0 deletions src/analyzer/analyzer.cr
@@ -44,6 +44,7 @@ def initialize_analyzers(logger : NoirLogger)
{"rust_axum", Rust::Axum},
{"rust_rocket", Rust::Rocket},
{"rust_actix_web", Rust::ActixWeb},
{"ai_ollama", AI::Ollama},
])

logger.success "#{analyzers.size} Analyzers initialized"
@@ -67,6 +68,11 @@ def analysis_endpoints(options : Hash(String, YAML::Any), techs, logger : NoirLo
logger.info "Analysis Started"
logger.sub "➔ Code Analyzer: #{techs.size} in use"

if (options["ollama"].to_s != "") && (options["ollama_model"].to_s != "")
logger.sub "➔ AI Analyzer: Ollama in use"
techs << "ai_ollama"
end

techs.each do |tech|
if analyzer.has_key?(tech)
if NoirTechs.similar_to_tech(options["exclude_techs"].to_s).includes?(tech)
97 changes: 97 additions & 0 deletions src/analyzer/analyzers/llm_analyzers/ollama.cr
@@ -0,0 +1,97 @@
require "../../../utils/utils.cr"
require "../../../models/analyzer"
require "../../../llm/ollama"

module Analyzer::AI
class Ollama < Analyzer
@llm_url : String
@model : String

def initialize(options : Hash(String, YAML::Any))
super(options)
@llm_url = options["ollama"].as_s
@model = options["ollama_model"].as_s
end

def analyze
# Init LLM Instance
ollama = LLM::Ollama.new(@llm_url, @model)

# Source Analysis
begin
Dir.glob("#{base_path}/**/*") do |path|
next if File.directory?(path)

relative_path = get_relative_path(base_path, path)

if File.exists?(path) && !(ignore_extensions().includes? File.extname(path))
File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
content = file.gets_to_end

begin
prompt = <<-PROMPT
!! Respond only in JSON format. Do not include explanations, comments, or any additional text. !!
---
Analyze the given source code and extract the endpoint and parameter details. Strictly follow this JSON structure:
[
{
"url": "string / e.g., /api/v1/users",
"method": "string / e.g., GET, POST, PUT, DELETE",
"params": [
{
"name": "string / e.g., id",
"param_type": "string / one of: query, json, form, header, cookie, path",
"value": "string / optional, default empty"
}
]
}
]
- Ensure `param_type` uses only these values: `query`, `json`, `form`, `header`, `cookie`, `path`.
- If no endpoints are found in the code, respond with an empty array `[]`.
- Do not deviate from the specified JSON structure.
Input Code:
#{content}
PROMPT

response = ollama.request(prompt)
logger.debug "Ollama response (#{relative_path}):"
logger.debug_sub response

response_json = JSON.parse(response.to_s)
response_json.as_a.each do |endpoint|
url = endpoint["url"].as_s
method = endpoint["method"].as_s
params = endpoint["params"].as_a.map do |param|
Param.new(
param["name"].as_s,
param["value"].as_s,
param["param_type"].as_s
)
end
details = Details.new(PathInfo.new(path))
@result << Endpoint.new(url, method, params, details)
end
rescue ex : Exception
puts "Error processing file: #{path}"
puts "Error: #{ex.message}"
end
end
end
end
rescue e
logger.debug e
end
Fiber.yield

@result
end

def ignore_extensions
[".js", ".css", ".html", ".xml", ".json", ".yml", ".yaml", ".md", ".jpg", ".jpeg", ".png", ".gif", ".svg", ".ico", ".eot", ".ttf", ".woff", ".woff2", ".otf", ".mp3", ".mp4", ".avi", ".mov", ".webm", ".zip", ".tar", ".gz", ".7z", ".rar", ".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt", ".pptx", ".txt", ".csv", ".log", ".sql", ".bak", ".swp"]
end
end
end
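
As a quick illustration of the mapping performed in analyze above (not part of this commit), the sketch below turns one hypothetical model reply into Noir's Endpoint/Param objects using the same constructor calls as the analyzer; the sample JSON and file path are made up, and the snippet assumes Noir's model classes are already loaded (e.g. when run inside the Noir source tree).

require "json"

# Hypothetical model reply following the prompt's JSON contract.
sample = %([{"url": "/api/v1/users", "method": "GET",
             "params": [{"name": "id", "param_type": "query", "value": ""}]}])

JSON.parse(sample).as_a.each do |ep|
  params = ep["params"].as_a.map do |p|
    Param.new(p["name"].as_s, p["value"].as_s, p["param_type"].as_s)
  end
  details = Details.new(PathInfo.new("app/example.cr")) # placeholder source path
  puts Endpoint.new(ep["url"].as_s, ep["method"].as_s, params, details).inspect
end
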
8 changes: 7 additions & 1 deletion src/completions.cr
@@ -40,6 +40,8 @@ _arguments \\
'-d[Show debug messages]' \\
'-v[Show version]' \\
'--build-info[Show version and Build info]' \\
'--ollama[Specify the Ollama server URL]:URL:_urls' \\
'--ollama-model[Specify the Ollama model name]:model:' \\
'-h[Show help]'
SCRIPT
end
@@ -86,6 +88,8 @@ _noir_completions() {
--config-file
--concurrency
--generate-completion
--ollama
--ollama-model
-d --debug
-v --version
--build-info
@@ -97,7 +101,7 @@
COMPREPLY=( $(compgen -W "plain yaml json jsonl markdown-table curl httpie oas2 oas3 only-url only-param only-header only-cookie only-tag" -- "${cur}") )
return 0
;;
--send-proxy|--send-es|--with-headers|--use-matchers|--use-filters|--diff-path|--config-file|--set-pvalue|--techs|--exclude-techs|-o|-b|-u)
--send-proxy|--send-es|--with-headers|--use-matchers|--use-filters|--diff-path|--config-file|--set-pvalue|--techs|--exclude-techs|--ollama|--ollama-model|-o|-b|-u)
COMPREPLY=( $(compgen -f -- "${cur}") )
return 0
;;
@@ -161,6 +165,8 @@ complete -c noir -n '__fish_noir_needs_command' -a '--list-techs' -d 'Show all t
complete -c noir -n '__fish_noir_needs_command' -a '--config-file' -d 'Specify the path to a configuration file in YAML format'
complete -c noir -n '__fish_noir_needs_command' -a '--concurrency' -d 'Set concurrency'
complete -c noir -n '__fish_noir_needs_command' -a '--generate-completion' -d 'Generate Zsh/Bash/Fish completion script'
complete -c noir -n '__fish_noir_needs_command' -a '--ollama' -d 'Specify the Ollama server URL'
complete -c noir -n '__fish_noir_needs_command' -a '--ollama-model' -d 'Specify the Ollama model name'
complete -c noir -n '__fish_noir_needs_command' -a '-d' -d 'Show debug messages'
complete -c noir -n '__fish_noir_needs_command' -a '-v' -d 'Show version'
complete -c noir -n '__fish_noir_needs_command' -a '--build-info' -d 'Show version and Build info'
2 changes: 2 additions & 0 deletions src/config_initializer.cr
@@ -107,6 +107,8 @@ class ConfigInitializer
"diff" => YAML::Any.new(""),
"passive_scan" => YAML::Any.new(false),
"passive_scan_path" => YAML::Any.new([] of YAML::Any),
"ollama" => YAML::Any.new(""),
"ollama_model" => YAML::Any.new(""),
}

noir_options
30 changes: 30 additions & 0 deletions src/llm/ollama/ollama.cr
@@ -0,0 +1,30 @@
module LLM
class Ollama
def initialize(url : String, model : String)
@url = url
@api = @url + "/api/generate"
@model = model
end

def request(prompt : String)
body = {
:model => @model,
:prompt => prompt,
:stream => false,
}

response = Crest.post(@api, body, json: true)
response_json = JSON.parse response.body

response_json["response"]
rescue ex : Exception
puts "Error: #{ex.message}"

""
end

def query(code : String)
request(PROMPT + "\n" + code)
end
end
end
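
A minimal usage sketch of this client (not part of the commit): the server URL and model name below are placeholders and assume a locally running Ollama instance with that model pulled.

require "crest"
require "json"
# Assumes src/llm/ollama/ollama.cr (above) and src/llm/prompt.cr are loaded.

ollama = LLM::Ollama.new("http://localhost:11434", "llama3")
puts ollama.request("Reply with the single word: pong")
puts ollama.query("get '/' do; end") # prepends LLM::PROMPT to the code snippet
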
5 changes: 5 additions & 0 deletions src/llm/prompt.cr
@@ -0,0 +1,5 @@
module LLM
PROMPT = <<-PROMPT
What is 4*4? ONLY ANSWER.
PROMPT
end
2 changes: 2 additions & 0 deletions src/noir.cr
@@ -94,6 +94,8 @@ if app.techs.size == 0
app.logger.sub "➔ Please check tech lists using the --list-techs flag."
if app.options["url"] != ""
app.logger.info "Start file-based analysis as the -u flag has been used."
elsif (app.options["ollama"] != "") && (app.options["ollama_model"] != "")
app.logger.info "Start AI-based analysis as the --ollama and --ollama-model flags have been used."
elsif app.passive_results.size > 0
app.logger.info "Noir found #{app.passive_results.size} passive results."
app.report
4 changes: 4 additions & 0 deletions src/options.cr
@@ -108,6 +108,10 @@ def run_options_parser
append_to_yaml_array(noir_options, use_filters, var)
end

parser.separator "\n AI Integration:".colorize(:blue)
parser.on "--ollama http://localhost:11434", "Specify the Ollama server URL" { |var| noir_options["ollama"] = YAML::Any.new(var) }
parser.on "--ollama-model MODEL", "Specify the Ollama model name" { |var| noir_options["ollama_model"] = YAML::Any.new(var) }

parser.separator "\n DIFF:".colorize(:blue)
parser.on "--diff-path ./app2", "Specify the path to the old version of the source code for comparison" { |var| noir_options["diff"] = YAML::Any.new(var) }

