From 2e909bdd5de7db3e6f71b2b626edf87ba5ff782a Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Tue, 24 Dec 2024 23:22:30 +0900
Subject: [PATCH 01/16] feat(llm): add Ollama integration with configuration options

Signed-off-by: HAHWUL
---
 src/config_initializer.cr |  2 ++
 src/llm/ollama/ollama.cr  | 26 ++++++++++++++++++++++++++
 src/llm/prompt.cr         |  5 +++++
 src/options.cr            |  4 ++++
 4 files changed, 37 insertions(+)
 create mode 100644 src/llm/ollama/ollama.cr
 create mode 100644 src/llm/prompt.cr

diff --git a/src/config_initializer.cr b/src/config_initializer.cr
index 797382d6..79a5e8d5 100644
--- a/src/config_initializer.cr
+++ b/src/config_initializer.cr
@@ -107,6 +107,8 @@ class ConfigInitializer
       "diff" => YAML::Any.new(""),
       "passive_scan" => YAML::Any.new(false),
       "passive_scan_path" => YAML::Any.new([] of YAML::Any),
+      "ollama" => YAML::Any.new(""),
+      "ollama_model" => YAML::Any.new(""),
     }

     noir_options
diff --git a/src/llm/ollama/ollama.cr b/src/llm/ollama/ollama.cr
new file mode 100644
index 00000000..1932c7d4
--- /dev/null
+++ b/src/llm/ollama/ollama.cr
@@ -0,0 +1,26 @@
+module LLM
+  class Ollama
+    def initialize(url : String, model : String)
+      @url = url
+      @api = @url + "/api/generate"
+      @model = model
+    end
+
+    def request(prompt : String)
+      body = {
+        "model": @model,
+        "prompt": prompt,
+      }
+
+      Crest::Request.execute(
+        method: "POST",
+        url: @api,
+        form: body,
+        json: true
+      )
+    end
+
+    def query(code : String)
+    end
+  end
+end
diff --git a/src/llm/prompt.cr b/src/llm/prompt.cr
new file mode 100644
index 00000000..4422dc7f
--- /dev/null
+++ b/src/llm/prompt.cr
@@ -0,0 +1,5 @@
+module LLM
+  PROMPT = <<-PROMPT
+  What is 4*4? ONLY ANSWER.
+  PROMPT
+end
diff --git a/src/options.cr b/src/options.cr
index 73a9445d..ffb06c00 100644
--- a/src/options.cr
+++ b/src/options.cr
@@ -108,6 +108,10 @@ def run_options_parser
       append_to_yaml_array(noir_options, use_filters, var)
     end

+    parser.separator "\n LLM Integration:".colorize(:blue)
+    parser.on "--ollama http://localhost:11434", "Specify the Ollama server URL" { |var| noir_options["ollama"] = YAML::Any.new(var) }
+    parser.on "--ollama-model MODEL", "Specify the Ollama model name" { |var| noir_options["ollama_model"] = YAML::Any.new(var) }
+
     parser.separator "\n DIFF:".colorize(:blue)
     parser.on "--diff-path ./app2", "Specify the path to the old version of the source code for comparison" { |var| noir_options["diff"] = YAML::Any.new(var) }

From ca97abf3217da08e3a56622a46e7b63ca73a0b9a Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Tue, 24 Dec 2024 23:24:59 +0900
Subject: [PATCH 02/16] feat(labeler): add LLM label for changes in src/llm directory

Signed-off-by: HAHWUL
---
 .github/labeler.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.github/labeler.yml b/.github/labeler.yml
index a75ec91b..2592ead6 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -52,3 +52,6 @@
 🐳 docker:
   - changed-files:
       - any-glob-to-any-file: Dockerfile
+🧠 llm:
+  - changed-files:
+      - any-glob-to-any-file: src/llm/**

From b931fe928e55231049ca7a314f1305785e6e5356 Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Tue, 24 Dec 2024 23:25:25 +0900
Subject: [PATCH 03/16] fix(labeler): update llm label emoji from brain to robot

Signed-off-by: HAHWUL
---
 .github/labeler.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/labeler.yml b/.github/labeler.yml
index 2592ead6..559473bd 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -52,6 +52,6 @@
 🐳 docker:
   - changed-files:
       - any-glob-to-any-file: Dockerfile
-🧠 llm:
+🤖 llm:
   - changed-files:
       - any-glob-to-any-file: src/llm/**
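
[Editor's note — not part of the patch series] The LLM::Ollama class from
PATCH 01 can already be exercised on its own. A minimal sketch follows; the
server URL and the model name "llama3" are assumptions for illustration, not
project defaults:

    # Sketch only: drive the PATCH 01 client directly.
    ollama = LLM::Ollama.new("http://localhost:11434", "llama3")

    # At this stage `request` performs the POST to /api/generate and
    # returns the raw Crest response; later patches parse the JSON body.
    response = ollama.request(LLM::PROMPT) # PROMPT is "What is 4*4? ONLY ANSWER."
    puts response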
From 390af2cd5e2612b5e042712689ccab876b2ae7 Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Tue, 24 Dec 2024 23:40:53 +0900
Subject: [PATCH 04/16] chore(dependencies): update crest to version 1.4.1 and http_proxy to 0.12.1

fix(ollama): refactor request method to use Crest.post and handle exceptions

Signed-off-by: HAHWUL
---
 shard.lock               |  4 ++--
 shard.yml                |  2 +-
 src/llm/ollama/ollama.cr | 17 ++++++++---------
 3 files changed, 11 insertions(+), 12 deletions(-)

diff --git a/shard.lock b/shard.lock
index d0b3cf71..632e6260 100644
--- a/shard.lock
+++ b/shard.lock
@@ -2,7 +2,7 @@ version: 2.0
 shards:
   crest:
     git: https://github.com/mamantoha/crest.git
-    version: 1.3.13
+    version: 1.4.1

   har:
     git: https://github.com/neuralegion/har.git
@@ -14,5 +14,5 @@ shards:

   http_proxy:
     git: https://github.com/mamantoha/http_proxy.git
-    version: 0.10.3
+    version: 0.12.1
diff --git a/shard.yml b/shard.yml
index 7c089e58..ef876d68 100644
--- a/shard.yml
+++ b/shard.yml
@@ -20,6 +20,6 @@ targets:
 dependencies:
   crest:
     github: mamantoha/crest
-    version: ~> 1.3.13
+    version: ~> 1.4.0
   har:
     github: NeuraLegion/har
\ No newline at end of file
diff --git a/src/llm/ollama/ollama.cr b/src/llm/ollama/ollama.cr
index 1932c7d4..185df7b3 100644
--- a/src/llm/ollama/ollama.cr
+++ b/src/llm/ollama/ollama.cr
@@ -8,19 +8,18 @@ module LLM

     def request(prompt : String)
       body = {
-        "model": @model,
-        "prompt": prompt,
+        :model => @model,
+        :prompt => prompt
       }

-      Crest::Request.execute(
-        method: "POST",
-        url: @api,
-        form: body,
-        json: true
-      )
+      response = Crest.post(@api, body, json: true)
+      response.body
+    rescue ex : Exception
+      puts "Error: #{ex.message}"
     end

     def query(code : String)
+      request(PROMPT + "\n" + code)
     end
   end
-end
+end
\ No newline at end of file

From ec0f27ef2aec8f1ba4c27f2c7806c963884af069 Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Wed, 25 Dec 2024 00:06:33 +0900
Subject: [PATCH 05/16] feat(ollama): enhance request method to include streaming option and parse response

Signed-off-by: HAHWUL
---
 src/llm/ollama/ollama.cr | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/src/llm/ollama/ollama.cr b/src/llm/ollama/ollama.cr
index 185df7b3..76746dd8 100644
--- a/src/llm/ollama/ollama.cr
+++ b/src/llm/ollama/ollama.cr
@@ -9,13 +9,18 @@ def request(prompt : String)
       body = {
         :model => @model,
-        :prompt => prompt
+        :prompt => prompt,
+        :stream => false
       }

       response = Crest.post(@api, body, json: true)
-      response.body
+      response_json = JSON.parse response.body
+
+      response_json["response"]
     rescue ex : Exception
       puts "Error: #{ex.message}"
+
+      ""
     end

     def query(code : String)
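
[Editor's note — not part of the patch series] After PATCH 04 and PATCH 05,
`request` is a blocking, non-streaming call that unwraps Ollama's JSON
envelope. A sketch of the exchange, with the same assumed URL and model:

    ollama = LLM::Ollama.new("http://localhost:11434", "llama3")

    # Sends {"model": ..., "prompt": ..., "stream": false} to /api/generate,
    # parses the reply, and returns its "response" field. If the HTTP call
    # or the JSON parsing raises, the rescue prints the error and the
    # method falls through to the empty-string fallback.
    answer = ollama.request("What is 4*4? ONLY ANSWER.")
    puts answer # e.g. "16", depending on the model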
From 804755f66da7904d00ffaac0ff7a9c310390a5d3 Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Wed, 25 Dec 2024 23:24:48 +0900
Subject: [PATCH 06/16] feat(analyzer): add Ollama AI analyzer and integrate with analysis endpoints

Signed-off-by: HAHWUL
---
 src/analyzer/analyzer.cr                  |  6 ++
 .../analyzers/llm_analyzers/ollama.cr     | 56 +++++++++++++++++++
 2 files changed, 62 insertions(+)
 create mode 100644 src/analyzer/analyzers/llm_analyzers/ollama.cr

diff --git a/src/analyzer/analyzer.cr b/src/analyzer/analyzer.cr
index a86cf608..c366351d 100644
--- a/src/analyzer/analyzer.cr
+++ b/src/analyzer/analyzer.cr
@@ -44,6 +44,7 @@ def initialize_analyzers(logger : NoirLogger)
     {"rust_axum", Rust::Axum},
     {"rust_rocket", Rust::Rocket},
     {"rust_actix_web", Rust::ActixWeb},
+    {"ai_ollama", AI::Ollama},
   ])

   logger.success "#{analyzers.size} Analyzers initialized"
@@ -67,6 +68,11 @@ def analysis_endpoints(options : Hash(String, YAML::Any), techs, logger : NoirLogger)
   logger.info "Analysis Started"
   logger.sub "➔ Code Analyzer: #{techs.size} in use"

+  if (options["ollama"].to_s != "") && (options["ollama_model"].to_s != "")
+    logger.sub "➔ AI Analyzer: Ollama in use"
+    techs << "ai_ollama"
+  end
+
   techs.each do |tech|
     if analyzer.has_key?(tech)
       if NoirTechs.similar_to_tech(options["exclude_techs"].to_s).includes?(tech)
diff --git a/src/analyzer/analyzers/llm_analyzers/ollama.cr b/src/analyzer/analyzers/llm_analyzers/ollama.cr
new file mode 100644
index 00000000..92ce87db
--- /dev/null
+++ b/src/analyzer/analyzers/llm_analyzers/ollama.cr
@@ -0,0 +1,56 @@
+require "../../../utils/utils.cr"
+require "../../../models/analyzer"
+require "../../../llm/ollama"
+
+module Analyzer::AI
+  class Ollama < Analyzer
+    @llm_url : String
+    @model : String
+
+    def initialize(options : Hash(String, YAML::Any))
+      super(options)
+      @llm_url = options["ollama"].as_s
+      @model = options["ollama_model"].as_s
+    end
+
+    def analyze
+      # Init LLM Instance
+      ollama = LLM::Ollama.new(@llm_url, @model)
+
+      # Source Analysis
+      begin
+        Dir.glob("#{base_path}/**/*") do |path|
+          next if File.directory?(path)
+
+          relative_path = get_relative_path(base_path, path)
+
+          if File.exists?(path) && !(ingnore_extensions.includes? File.extname(path))
+            File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+              params_query = [] of Param
+              params_body = [] of Param
+              methods = [] of String
+
+              file.each_line do |line|
+                # TODO
+                # puts ollama.request("Hi! This is prompt text.")
+                # details = Details.new(PathInfo.new(path))
+                # result << Endpoint.new("/#{relative_path}", method, params_body, details)
+              rescue
+                next
+              end
+            end
+          end
+        end
+      rescue e
+        logger.debug e
+      end
+      Fiber.yield
+
+      result
+    end
+
+    def ingnore_extensions
+      [".js", ".css", ".html", ".xml", ".json", ".yml", ".yaml", ".md", ".jpg", ".jpeg", ".png", ".gif", ".svg", ".ico", ".eot", ".ttf", ".woff", ".woff2", ".otf", ".mp3", ".mp4", ".avi", ".mov", ".webm", ".zip", ".tar", ".gz", ".7z", ".rar", ".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt", ".pptx", ".txt", ".csv", ".log", ".sql", ".bak", ".swp"]
+    end
+  end
+end
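
[Editor's note — not part of the patch series] PATCH 06 registers the new
analyzer under the "ai_ollama" key. Unlike the other analyzers it is not tied
to a detected tech: it is appended whenever both Ollama options are set. A
condensed sketch of the activation path — `options`, `techs`, and `logger`
come from Noir's runtime, and the values are illustrative:

    options["ollama"] = YAML::Any.new("http://localhost:11434")
    options["ollama_model"] = YAML::Any.new("llama3")

    # Both values are non-empty, so analysis_endpoints logs
    # "➔ AI Analyzer: Ollama in use", appends "ai_ollama" to techs,
    # and Analyzer::AI::Ollama#analyze runs alongside the code analyzers.
    endpoints = analysis_endpoints(options, techs, logger)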
From 6cbcd267f74c98410fb9506d0643517c0e6a3ced Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Wed, 25 Dec 2024 23:29:36 +0900
Subject: [PATCH 07/16] feat(labeler): update LLM label to include llm_analyzers directory

Signed-off-by: HAHWUL
---
 .github/labeler.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/labeler.yml b/.github/labeler.yml
index 559473bd..14b24090 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -54,4 +54,4 @@
       - any-glob-to-any-file: Dockerfile
 🤖 llm:
   - changed-files:
-      - any-glob-to-any-file: src/llm/**
+      - any-glob-to-any-file: [src/llm/**, src/analyzer/analyzers/llm_analyzers/**]

From 3145bad65b825502dbe120eb800ef1b578a44433 Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Wed, 25 Dec 2024 23:31:49 +0900
Subject: [PATCH 08/16] yaml linting

Signed-off-by: HAHWUL
---
 .github/labeler.yml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/.github/labeler.yml b/.github/labeler.yml
index 14b24090..b902b3ee 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -54,4 +54,6 @@
       - any-glob-to-any-file: Dockerfile
 🤖 llm:
   - changed-files:
-      - any-glob-to-any-file: [src/llm/**, src/analyzer/analyzers/llm_analyzers/**]
+      - any-glob-to-any-file:
+          - src/llm/**
+          - src/analyzer/analyzers/llm_analyzers/**

From 0e9974c3002954cb652a29d76b27be1a40b402f4 Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Wed, 25 Dec 2024 23:35:59 +0900
Subject: [PATCH 09/16] fix(options): rename LLM Integration to AI Integration in options parser

Signed-off-by: HAHWUL
---
 src/options.cr | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/options.cr b/src/options.cr
index ffb06c00..e9a7a8b1 100644
--- a/src/options.cr
+++ b/src/options.cr
@@ -108,7 +108,7 @@ def run_options_parser
       append_to_yaml_array(noir_options, use_filters, var)
     end

-    parser.separator "\n LLM Integration:".colorize(:blue)
+    parser.separator "\n AI Integration:".colorize(:blue)
     parser.on "--ollama http://localhost:11434", "Specify the Ollama server URL" { |var| noir_options["ollama"] = YAML::Any.new(var) }
     parser.on "--ollama-model MODEL", "Specify the Ollama model name" { |var| noir_options["ollama_model"] = YAML::Any.new(var) }

From 7e9718313dd02f0c67a76b65295f8505348fb780 Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Tue, 31 Dec 2024 23:16:46 +0900
Subject: [PATCH 10/16] feat(completions): add options for specifying Ollama server URL and model name

Signed-off-by: HAHWUL
---
 src/completions.cr | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/src/completions.cr b/src/completions.cr
index 9ee29faf..6f2431cc 100644
--- a/src/completions.cr
+++ b/src/completions.cr
@@ -40,6 +40,8 @@ _arguments \\
   '-d[Show debug messages]' \\
   '-v[Show version]' \\
   '--build-info[Show version and Build info]' \\
+  '--ollama[Specify the Ollama server URL]:URL:_urls' \\
+  '--ollama-model[Specify the Ollama model name]:model:' \\
   '-h[Show help]'
 SCRIPT
 end
@@ -86,6 +88,8 @@ _noir_completions() {
         --config-file
         --concurrency
         --generate-completion
+        --ollama
+        --ollama-model
         -d --debug
         -v --version
         --build-info
@@ -97,7 +101,7 @@ _noir_completions() {
             COMPREPLY=( $(compgen -W "plain yaml json jsonl markdown-table curl httpie oas2 oas3 only-url only-param only-header only-cookie only-tag" -- "${cur}") )
             return 0
             ;;
-        --send-proxy|--send-es|--with-headers|--use-matchers|--use-filters|--diff-path|--config-file|--set-pvalue|--techs|--exclude-techs|-o|-b|-u)
+        --send-proxy|--send-es|--with-headers|--use-matchers|--use-filters|--diff-path|--config-file|--set-pvalue|--techs|--exclude-techs|--ollama|--ollama-model|-o|-b|-u)
             COMPREPLY=( $(compgen -f -- "${cur}") )
             return 0
             ;;
@@ -161,9 +165,11 @@
 complete -c noir -n '__fish_noir_needs_command' -a '--list-techs' -d 'Show all techs'
 complete -c noir -n '__fish_noir_needs_command' -a '--config-file' -d 'Specify the path to a configuration file in YAML format'
 complete -c noir -n '__fish_noir_needs_command' -a '--concurrency' -d 'Set concurrency'
 complete -c noir -n '__fish_noir_needs_command' -a '--generate-completion' -d 'Generate Zsh/Bash/Fish completion script'
+complete -c noir -n '__fish_noir_needs_command' -a '--ollama' -d 'Specify the Ollama server URL'
+complete -c noir -n '__fish_noir_needs_command' -a '--ollama-model' -d 'Specify the Ollama model name'
 complete -c noir -n '__fish_noir_needs_command' -a '-d' -d 'Show debug messages'
 complete -c noir -n '__fish_noir_needs_command' -a '-v' -d 'Show version'
 complete -c noir -n '__fish_noir_needs_command' -a '--build-info' -d 'Show version and Build info'
 complete -c noir -n '__fish_noir_needs_command' -a '-h' -d 'Show help'
 SCRIPT
-end
+end
\ No newline at end of file
From b08bb3af4d617fe3d3feb0ba977e2d602e53ada2 Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Tue, 31 Dec 2024 23:24:15 +0900
Subject: [PATCH 11/16] Linting

Signed-off-by: HAHWUL
---
 src/analyzer/analyzers/llm_analyzers/ollama.cr | 4 +++-
 src/completions.cr                             | 2 +-
 src/llm/ollama/ollama.cr                       | 8 ++++----
 3 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/src/analyzer/analyzers/llm_analyzers/ollama.cr b/src/analyzer/analyzers/llm_analyzers/ollama.cr
index 92ce87db..1f4167a5 100644
--- a/src/analyzer/analyzers/llm_analyzers/ollama.cr
+++ b/src/analyzer/analyzers/llm_analyzers/ollama.cr
@@ -30,11 +30,13 @@ module Analyzer::AI
               params_body = [] of Param
               methods = [] of String

-              file.each_line do |line|
+              file.each_line do |_|
                 # TODO
                 # puts ollama.request("Hi! This is prompt text.")
                 # details = Details.new(PathInfo.new(path))
                 # result << Endpoint.new("/#{relative_path}", method, params_body, details)
+
+
               rescue
                 next
               end
diff --git a/src/completions.cr b/src/completions.cr
index 6f2431cc..52683832 100644
--- a/src/completions.cr
+++ b/src/completions.cr
@@ -172,4 +172,4 @@
 complete -c noir -n '__fish_noir_needs_command' -a '-v' -d 'Show version'
 complete -c noir -n '__fish_noir_needs_command' -a '--build-info' -d 'Show version and Build info'
 complete -c noir -n '__fish_noir_needs_command' -a '-h' -d 'Show help'
 SCRIPT
-end
\ No newline at end of file
+end
diff --git a/src/llm/ollama/ollama.cr b/src/llm/ollama/ollama.cr
index 76746dd8..4b7e3065 100644
--- a/src/llm/ollama/ollama.cr
+++ b/src/llm/ollama/ollama.cr
@@ -8,14 +8,14 @@ module LLM

     def request(prompt : String)
       body = {
-        :model => @model,
+        :model  => @model,
         :prompt => prompt,
-        :stream => false
+        :stream => false,
       }

       response = Crest.post(@api, body, json: true)
-      response_json = JSON.parse response.body
+      response_json = JSON.parse response.body

       response_json["response"]
     rescue ex : Exception
       puts "Error: #{ex.message}"
@@ -27,4 +27,4 @@ module LLM
       request(PROMPT + "\n" + code)
     end
   end
-end
\ No newline at end of file
+end
From 5e529b664ba094d9d7e670dfc8b5931428b07201 Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Wed, 1 Jan 2025 00:07:23 +0900
Subject: [PATCH 12/16] feat(ollama): enhance endpoint extraction with JSON response handling and fix typo in ignore_extensions method

Signed-off-by: HAHWUL
---
 .../analyzers/llm_analyzers/ollama.cr | 60 +++++++++++++++----
 1 file changed, 49 insertions(+), 11 deletions(-)

diff --git a/src/analyzer/analyzers/llm_analyzers/ollama.cr b/src/analyzer/analyzers/llm_analyzers/ollama.cr
index 1f4167a5..8027da0f 100644
--- a/src/analyzer/analyzers/llm_analyzers/ollama.cr
+++ b/src/analyzer/analyzers/llm_analyzers/ollama.cr
@@ -24,21 +24,59 @@ module Analyzer::AI

           relative_path = get_relative_path(base_path, path)

-          if File.exists?(path) && !(ingnore_extensions.includes? File.extname(path))
+          if File.exists?(path) && !(ignore_extensions().includes? File.extname(path))
             File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
+              content = file.gets_to_end
               params_query = [] of Param
               params_body = [] of Param
               methods = [] of String
+              result = [] of Endpoint
+
+              begin
+                prompt = <<-PROMPT
+                !! You must only report JSON results. Don't explain anything and don't decorate it with Markdown. !!
+                ---
+                Analyze the following code and extract endpoint and parameter information.
+                Return the result strictly as a JSON object with the following structure:
+                [
+                  {
+                    "url": "string / e.g. /api/v1/users",
+                    "method": "string / e.g. GET, POST, PUT, DELETE",
+                    "params": [
+                      {
+                        "name": "string / e.g. id",
+                        "param_type": "string / e.g. query, json, form, header, cookie, path",
+                        "value": "string / e.g. hahwul"
+                      }
+                    ]
+                  }
+                ]
+
+                Code:
+                #{content}
+                PROMPT
+
+                response = ollama.request(prompt)
+                logger.debug "Ollama response (#{relative_path}):"
+                logger.debug_sub response
+
+                response_json = JSON.parse(response.to_s)
+                response_json.as_a.each do |endpoint|
+                  url = endpoint["url"].as_s
+                  method = endpoint["method"].as_s
+                  params = endpoint["params"].as_a.map do |param|
+                    Param.new(
+                      param["name"].as_s,
+                      param["value"].as_s,
+                      param["param_type"].as_s
+                    )
+                  end
+                  details = Details.new(PathInfo.new(path))
+                  result << Endpoint.new(url, method, params, details)
+                end
+              rescue ex : Exception
+                puts "Error processing file: #{path}"
+                puts "Error: #{ex.message}"
+              end
-
-              file.each_line do |_|
-                # TODO
-                # puts ollama.request("Hi! This is prompt text.")
-                # details = Details.new(PathInfo.new(path))
-                # result << Endpoint.new("/#{relative_path}", method, params_body, details)
-
-              rescue
-                next
-              end
             end
           end
         end
@@ -51,7 +89,7 @@ module Analyzer::AI
       rescue e
         logger.debug e
       end
       Fiber.yield

      result
    end

-    def ingnore_extensions
+    def ignore_extensions
       [".js", ".css", ".html", ".xml", ".json", ".yml", ".yaml", ".md", ".jpg", ".jpeg", ".png", ".gif", ".svg", ".ico", ".eot", ".ttf", ".woff", ".woff2", ".otf", ".mp3", ".mp4", ".avi", ".mov", ".webm", ".zip", ".tar", ".gz", ".7z", ".rar", ".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt", ".pptx", ".txt", ".csv", ".log", ".sql", ".bak", ".swp"]
     end
   end
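
[Editor's note — not part of the patch series] PATCH 12 makes the analyzer
functional end to end, and JSON.parse(response.to_s) is its fragile point:
any conversational preamble from the model raises, the rescue fires, and the
file's endpoints are lost. That failure mode is what PATCH 15 and PATCH 16
keep chipping away at by tightening the prompt. An illustrative sketch — both
reply strings below are invented, not recorded model output:

    good = %([{"url": "/update", "method": "POST", "params": []}])
    bad  = %(Sure! Here is the JSON you asked for: [{"url": ...}])

    JSON.parse(good) # parses; endpoints are collected
    JSON.parse(bad)  # raises JSON::ParseException; the file is skipped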
From 789eaf535d147032367f4f9daf2e233afa577ea7 Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Wed, 1 Jan 2025 00:07:51 +0900
Subject: [PATCH 13/16] Linting

Signed-off-by: HAHWUL
---
 src/analyzer/analyzers/llm_analyzers/ollama.cr | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/src/analyzer/analyzers/llm_analyzers/ollama.cr b/src/analyzer/analyzers/llm_analyzers/ollama.cr
index 8027da0f..0adc5276 100644
--- a/src/analyzer/analyzers/llm_analyzers/ollama.cr
+++ b/src/analyzer/analyzers/llm_analyzers/ollama.cr
@@ -27,11 +27,8 @@ module Analyzer::AI
           if File.exists?(path) && !(ignore_extensions().includes? File.extname(path))
             File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
               content = file.gets_to_end
-              params_query = [] of Param
-              params_body = [] of Param
-              methods = [] of String
               result = [] of Endpoint
-
+
               begin
                 prompt = <<-PROMPT
                 !! You must only report JSON results. Don't explain anything and don't decorate it with Markdown. !!
@@ -55,11 +52,11 @@ module Analyzer::AI
                 Code:
                 #{content}
                 PROMPT
-
+
                 response = ollama.request(prompt)
                 logger.debug "Ollama response (#{relative_path}):"
                 logger.debug_sub response
-
+
                 response_json = JSON.parse(response.to_s)
                 response_json.as_a.each do |endpoint|
                   url = endpoint["url"].as_s

From 306d1b78acb0867ebd1221681f5bfda42cfe7e77 Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Wed, 1 Jan 2025 00:16:31 +0900
Subject: [PATCH 14/16] feat(tests): add functional test fixture for LLM and enhance logging for AI-based analysis

Signed-off-by: HAHWUL
---
 spec/functional_test/fixtures/hahwul/for_llm.hahwul | 9 +++++++++
 src/noir.cr                                         | 2 ++
 2 files changed, 11 insertions(+)
 create mode 100644 spec/functional_test/fixtures/hahwul/for_llm.hahwul

diff --git a/spec/functional_test/fixtures/hahwul/for_llm.hahwul b/spec/functional_test/fixtures/hahwul/for_llm.hahwul
new file mode 100644
index 00000000..55556567
--- /dev/null
+++ b/spec/functional_test/fixtures/hahwul/for_llm.hahwul
@@ -0,0 +1,9 @@
+get '/' do
+  puts param['query']
+  puts cookies[:cookie1]
+  puts cookies["cookie2"]
+end
+
+post "/update" do
+  puts "update"
+end
\ No newline at end of file
diff --git a/src/noir.cr b/src/noir.cr
index 57ca4fcc..1b430e0a 100644
--- a/src/noir.cr
+++ b/src/noir.cr
@@ -94,6 +94,8 @@ if app.techs.size == 0
   app.logger.sub "➔ Please check tech lists using the --list-techs flag."
   if app.options["url"] != ""
     app.logger.info "Start file-based analysis as the -u flag has been used."
+  elsif (app.options["ollama"] != "") && (app.options["ollama_model"] != "")
+    app.logger.info "Start AI-based analysis as the --ollama and --ollama-model flags have been used."
   elsif app.passive_results.size > 0
     app.logger.info "Noir found #{app.passive_results.size} passive results."
     app.report
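
[Editor's note — not part of the patch series] The fixture gives the AI
analyzer a concrete target. Under the PATCH 12 schema, a well-behaved model
would be expected to answer for for_llm.hahwul with something like the
following — expected shape only, not a recorded response:

    [
      {
        "url": "/",
        "method": "GET",
        "params": [
          {"name": "query", "param_type": "query", "value": ""},
          {"name": "cookie1", "param_type": "cookie", "value": ""},
          {"name": "cookie2", "param_type": "cookie", "value": ""}
        ]
      },
      {"url": "/update", "method": "POST", "params": []}
    ]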
From a2dc0154a9aaecb72f12dbf2c73cb008b54e837b Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Wed, 1 Jan 2025 14:35:46 +0900
Subject: [PATCH 15/16] feat(ollama): refine JSON response requirements and enhance parameter type validation in endpoint extraction

Signed-off-by: HAHWUL
---
 .../analyzers/llm_analyzers/ollama.cr | 20 ++++++++++---------
 1 file changed, 11 insertions(+), 9 deletions(-)

diff --git a/src/analyzer/analyzers/llm_analyzers/ollama.cr b/src/analyzer/analyzers/llm_analyzers/ollama.cr
index 0adc5276..981e5ece 100644
--- a/src/analyzer/analyzers/llm_analyzers/ollama.cr
+++ b/src/analyzer/analyzers/llm_analyzers/ollama.cr
@@ -27,14 +27,13 @@ module Analyzer::AI
           if File.exists?(path) && !(ignore_extensions().includes? File.extname(path))
             File.open(path, "r", encoding: "utf-8", invalid: :skip) do |file|
               content = file.gets_to_end
-              result = [] of Endpoint

               begin
                 prompt = <<-PROMPT
-                !! You must only report JSON results. Don't explain anything and don't decorate it with Markdown. !!
+                !! Return results strictly as a JSON object. Do not include any explanations, comments, or additional text. !!
                 ---
-                Analyze the following code and extract endpoint and parameter information.
-                Return the result strictly as a JSON object with the following structure:
+                Analyze the provided source code and extract the endpoint and parameter information. The response must follow the exact JSON structure specified below, without any deviations:
+
                 [
                   {
                     "url": "string / e.g. /api/v1/users",
@@ -42,14 +41,17 @@ module Analyzer::AI
                     "params": [
                       {
                         "name": "string / e.g. id",
-                        "param_type": "string / e.g. query, json, form, header, cookie, path",
-                        "value": "string / e.g. hahwul"
+                        "param_type": "string / one of: query, json, form, header, cookie, path",
+                        "value": "string / optional, default empty"
                       }
                     ]
                   }
                 ]

-                Code:
+                The `param_type` field must strictly use one of the following values: `query`, `json`, `form`, `header`, `cookie`, `path`.
+
+                Input Code:
+
                 #{content}
                 PROMPT
@@ -69,7 +71,7 @@ module Analyzer::AI
                     )
                   end
                   details = Details.new(PathInfo.new(path))
-                  result << Endpoint.new(url, method, params, details)
+                  @result << Endpoint.new(url, method, params, details)
                 end
               rescue ex : Exception
                 puts "Error processing file: #{path}"
@@ -83,7 +85,7 @@ module Analyzer::AI
       end
       Fiber.yield

-      result
+      @result
     end

     def ignore_extensions

From bccdc99f6fa66aa2d454dc1447fd7fe0e990d83c Mon Sep 17 00:00:00 2001
From: HAHWUL
Date: Thu, 2 Jan 2025 23:55:02 +0900
Subject: [PATCH 16/16] Enhanced Prompt for Ollama

Signed-off-by: HAHWUL
---
 src/analyzer/analyzers/llm_analyzers/ollama.cr | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/src/analyzer/analyzers/llm_analyzers/ollama.cr b/src/analyzer/analyzers/llm_analyzers/ollama.cr
index 981e5ece..b9b82d91 100644
--- a/src/analyzer/analyzers/llm_analyzers/ollama.cr
+++ b/src/analyzer/analyzers/llm_analyzers/ollama.cr
@@ -30,17 +30,17 @@ module Analyzer::AI

               begin
                 prompt = <<-PROMPT
-                !! Return results strictly as a JSON object. Do not include any explanations, comments, or additional text. !!
+                !! Respond only in JSON format. Do not include explanations, comments, or any additional text. !!
                 ---
-                Analyze the provided source code and extract the endpoint and parameter information. The response must follow the exact JSON structure specified below, without any deviations:
+                Analyze the given source code and extract the endpoint and parameter details. Strictly follow this JSON structure:

                 [
                   {
-                    "url": "string / e.g. /api/v1/users",
-                    "method": "string / e.g. GET, POST, PUT, DELETE",
+                    "url": "string / e.g., /api/v1/users",
+                    "method": "string / e.g., GET, POST, PUT, DELETE",
                     "params": [
                       {
-                        "name": "string / e.g. id",
+                        "name": "string / e.g., id",
                         "param_type": "string / one of: query, json, form, header, cookie, path",
                         "value": "string / optional, default empty"
                       }
@@ -48,7 +48,9 @@ module Analyzer::AI
                     }
                   ]

-                The `param_type` field must strictly use one of the following values: `query`, `json`, `form`, `header`, `cookie`, `path`.
+                - Ensure `param_type` uses only these values: `query`, `json`, `form`, `header`, `cookie`, `path`.
+                - If no endpoints are found in the code, respond with an empty array `[]`.
+                - Do not deviate from the specified JSON structure.

                 Input Code: