diff --git a/.gitignore b/.gitignore index b3f43cf0..d781e951 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ helk-logstash/.DS_Store .vscode/ .idea/ .testing/ +docs/_build # Backup files / swap .bak .BAK diff --git a/README.md b/README.md index 2b5d3632..cef932ec 100644 --- a/README.md +++ b/README.md @@ -56,19 +56,3 @@ There are a few things that I would like to accomplish with the HELK as shown in # License: GPL-3.0 [ HELK's GNU General Public License](https://github.com/Cyb3rWard0g/HELK/blob/master/LICENSE) - -# TO-Do - -- [ ] Kubernetes Cluster Migration -- [ ] OSQuery Data Ingestion -- [ ] MITRE ATT&CK mapping to logs or dashboards -- [ ] Cypher for Apache Spark Integration (Adding option for Zeppelin Notebook) -- [ ] Test and integrate neo4j spark connectors with build -- [ ] Add more network data sources (i.e Bro) -- [ ] Research & integrate spark structured direct streaming -- [ ] Packer Images -- [ ] Terraform integration (AWS, Azure, GC) -- [ ] Add more Jupyter Notebooks to teach the basics -- [ ] Auditd beat intergation - -More coming soon... diff --git a/docs/_build/.doctrees/architecture/elasticsearch.doctree b/docs/_build/.doctrees/architecture/elasticsearch.doctree deleted file mode 100644 index 4d37aba3..00000000 Binary files a/docs/_build/.doctrees/architecture/elasticsearch.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/architecture/kibana.doctree b/docs/_build/.doctrees/architecture/kibana.doctree deleted file mode 100644 index 9127f995..00000000 Binary files a/docs/_build/.doctrees/architecture/kibana.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/architecture/logstash.doctree b/docs/_build/.doctrees/architecture/logstash.doctree deleted file mode 100644 index cf2e5f62..00000000 Binary files a/docs/_build/.doctrees/architecture/logstash.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/environment.pickle b/docs/_build/.doctrees/environment.pickle deleted file mode 100644 index 17408ce3..00000000 Binary files a/docs/_build/.doctrees/environment.pickle and /dev/null differ diff --git a/docs/_build/.doctrees/glue_cache.json b/docs/_build/.doctrees/glue_cache.json deleted file mode 100644 index 9e26dfee..00000000 --- a/docs/_build/.doctrees/glue_cache.json +++ /dev/null @@ -1 +0,0 @@ -{} \ No newline at end of file diff --git a/docs/_build/.doctrees/how-to/docker/export-images.doctree b/docs/_build/.doctrees/how-to/docker/export-images.doctree deleted file mode 100644 index ae30631c..00000000 Binary files a/docs/_build/.doctrees/how-to/docker/export-images.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/how-to/docker/intro.doctree b/docs/_build/.doctrees/how-to/docker/intro.doctree deleted file mode 100644 index fb46c189..00000000 Binary files a/docs/_build/.doctrees/how-to/docker/intro.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/how-to/docker/load-images.doctree b/docs/_build/.doctrees/how-to/docker/load-images.doctree deleted file mode 100644 index 13302f4d..00000000 Binary files a/docs/_build/.doctrees/how-to/docker/load-images.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/how-to/kafka/intro.doctree b/docs/_build/.doctrees/how-to/kafka/intro.doctree deleted file mode 100644 index e6fe1ac5..00000000 Binary files a/docs/_build/.doctrees/how-to/kafka/intro.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/how-to/kafka/topic-ingestion.doctree b/docs/_build/.doctrees/how-to/kafka/topic-ingestion.doctree deleted file mode 100644 index 420affad..00000000 
Binary files a/docs/_build/.doctrees/how-to/kafka/topic-ingestion.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/how-to/kafka/update-ip-address.doctree b/docs/_build/.doctrees/how-to/kafka/update-ip-address.doctree deleted file mode 100644 index ad7ff46e..00000000 Binary files a/docs/_build/.doctrees/how-to/kafka/update-ip-address.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/how-to/ksql/deploy-locally.doctree b/docs/_build/.doctrees/how-to/ksql/deploy-locally.doctree deleted file mode 100644 index f09e1a12..00000000 Binary files a/docs/_build/.doctrees/how-to/ksql/deploy-locally.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/how-to/ksql/intro.doctree b/docs/_build/.doctrees/how-to/ksql/intro.doctree deleted file mode 100644 index 2ae2eea6..00000000 Binary files a/docs/_build/.doctrees/how-to/ksql/intro.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/how-to/logstash/create-plugins-offline.doctree b/docs/_build/.doctrees/how-to/logstash/create-plugins-offline.doctree deleted file mode 100644 index 29d18e00..00000000 Binary files a/docs/_build/.doctrees/how-to/logstash/create-plugins-offline.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/how-to/logstash/intro.doctree b/docs/_build/.doctrees/how-to/logstash/intro.doctree deleted file mode 100644 index 94b5b4d5..00000000 Binary files a/docs/_build/.doctrees/how-to/logstash/intro.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/how-to/winlogbeat/check-log-shipping.doctree b/docs/_build/.doctrees/how-to/winlogbeat/check-log-shipping.doctree deleted file mode 100644 index 4121b957..00000000 Binary files a/docs/_build/.doctrees/how-to/winlogbeat/check-log-shipping.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/how-to/winlogbeat/intro.doctree b/docs/_build/.doctrees/how-to/winlogbeat/intro.doctree deleted file mode 100644 index 67f70b66..00000000 Binary files a/docs/_build/.doctrees/how-to/winlogbeat/intro.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/installation.doctree b/docs/_build/.doctrees/installation.doctree deleted file mode 100644 index eaa6523a..00000000 Binary files a/docs/_build/.doctrees/installation.doctree and /dev/null differ diff --git a/docs/_build/.doctrees/intro.doctree b/docs/_build/.doctrees/intro.doctree deleted file mode 100644 index 36329db8..00000000 Binary files a/docs/_build/.doctrees/intro.doctree and /dev/null differ diff --git a/docs/_build/html/.buildinfo b/docs/_build/html/.buildinfo deleted file mode 100644 index ebe69d0c..00000000 --- a/docs/_build/html/.buildinfo +++ /dev/null @@ -1,4 +0,0 @@ -# Sphinx build info version 1 -# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 
-config: 2d9ddf55eebc47ddae22ab2383ed9635
-tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/docs/_build/html/_images/ELASTICSEARCH-Design.png b/docs/_build/html/_images/ELASTICSEARCH-Design.png
deleted file mode 100644
index 8c629d17..00000000
Binary files a/docs/_build/html/_images/ELASTICSEARCH-Design.png and /dev/null differ
diff --git a/docs/_build/html/_images/HELK-Design.png b/docs/_build/html/_images/HELK-Design.png
deleted file mode 100644
index 38ef642c..00000000
Binary files a/docs/_build/html/_images/HELK-Design.png and /dev/null differ
diff --git a/docs/_build/html/_images/KAFKA-producer1.png b/docs/_build/html/_images/KAFKA-producer1.png
deleted file mode 100644
index 0c93b6d6..00000000
Binary files a/docs/_build/html/_images/KAFKA-producer1.png and /dev/null differ
diff --git a/docs/_build/html/_images/KAFKA-producer2.png b/docs/_build/html/_images/KAFKA-producer2.png
deleted file mode 100644
index b7ea04b2..00000000
Binary files a/docs/_build/html/_images/KAFKA-producer2.png and /dev/null differ
diff --git a/docs/_build/html/_images/KIBANA-Design.png b/docs/_build/html/_images/KIBANA-Design.png
deleted file mode 100644
index 4ac0562d..00000000
Binary files a/docs/_build/html/_images/KIBANA-Design.png and /dev/null differ
diff --git a/docs/_build/html/_images/LOGSTASH-Design.png b/docs/_build/html/_images/LOGSTASH-Design.png
deleted file mode 100644
index 4773bc65..00000000
Binary files a/docs/_build/html/_images/LOGSTASH-Design.png and /dev/null differ
diff --git a/docs/_build/html/_sources/architecture/elasticsearch.md.txt b/docs/_build/html/_sources/architecture/elasticsearch.md.txt
deleted file mode 100644
index 8cc8f119..00000000
--- a/docs/_build/html/_sources/architecture/elasticsearch.md.txt
+++ /dev/null
@@ -1,131 +0,0 @@
-# Elasticsearch
-
-![](../images/ELASTICSEARCH-Design.png)
-
-## HELK's Elasticsearch Heap Size
-
-Elasticsearch uses heap (i.e. memory/RAM) to perform various functions, including the following (keep in mind this is not an exhaustive list):
-* Keeping track of indexes
-* Running aggregations, such as calculating sums, mathematical variations, sub-aggregations of aggregations, etc.
-* Running certain searches
-* Keeping track of offsets of the tokens/terms of indexed values (aka events/logs/data)
-
-As you can see, heap and the amount of it are important to a healthy setup. The HELK installation process uses various functions to try to set the "perfect" amount of heap; however, there are thousands of variables in all the different ways people use/install HELK. We are unable to account for them all, so our logic will never be perfect and unfortunately may not work best for you. However, you are able to set your own heap, and we describe below the logic HELK uses if you choose to let it determine what to set.
-
-Heap can be set in one of four ways, as detailed below.
-
-### 1) Allow HELK to calculate how much to assign.
-
-This is based on the available memory and the variables shown in the code block below.
-It is very important to note that this means `available memory`, not the amount of memory the host has.
-Here is an example of why this is critical to understand: if you have a 100GB RAM server, but the server is actively using 90GB of RAM, then you will NOT get the max 31GB heap/memory for elasticsearch. In this example you would actually end up getting roughly 3GB for the heap, because with only 10GB of available/free memory, locking up all of the remaining memory could cause drastic issues!
-
-```
-if available memory >= 1000 MBs and <= 5999 MBs:
-    then set to 2000 MBs
-else if available memory >= 6000 MBs and <= 8999 MBs:
-    then set to 3200 MBs
-else if available memory >= 9000 MBs and <= 12999 MBs:
-    then set to 5000 MBs
-else if available memory >= 13000 MBs and <= 16000 MBs:
-    then set to 7100 MBs
-else:
-    if available memory >= 31 GBs:
-        then set to 31 GBs
-    else:
-        set to available memory in GBs
-```
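-
-For reference, here is a minimal bash sketch of the same selection logic. It is hypothetical (not the actual installer code) and assumes GNU `free -m`, whose second line reports available memory in its seventh column:
-
-```bash
-#!/bin/bash
-# Hypothetical re-implementation of the heap-selection logic above
-AVAILABLE_MB=$(free -m | awk 'NR==2{print $7}')  # available memory in MBs
-
-if   [ "$AVAILABLE_MB" -ge 1000 ] && [ "$AVAILABLE_MB" -le 5999 ]; then HEAP="2000m"
-elif [ "$AVAILABLE_MB" -ge 6000 ] && [ "$AVAILABLE_MB" -le 8999 ]; then HEAP="3200m"
-elif [ "$AVAILABLE_MB" -ge 9000 ] && [ "$AVAILABLE_MB" -le 12999 ]; then HEAP="5000m"
-elif [ "$AVAILABLE_MB" -ge 13000 ] && [ "$AVAILABLE_MB" -le 16000 ]; then HEAP="7100m"
-elif [ "$AVAILABLE_MB" -ge 31744 ]; then HEAP="31g"      # cap at 31 GBs
-else HEAP="$((AVAILABLE_MB / 1024))g"                    # whatever is available, in GBs
-fi
-
-echo "ES_JAVA_OPTS=-Xms${HEAP} -Xmx${HEAP}"
-```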
-
-### 2) Set your own heap
-In order to define your own heap settings, edit the following two lines in the file `HELK/docker/helk-elasticsearch/config/jvm.options`, which begin with:
-
-`#-Xms`
-`#-Xmx`
-
-Then make sure to restart elasticsearch.
-**Always set the min and max JVM heap size to the same value.
-Also, you will be restarting elasticsearch, so your cluster will temporarily be down while the elasticsearch service/database comes back online.**
-
-Here is an example of how to perform the above:
-
-```
-# Edit the jvm options file
-sudo nano HELK/docker/helk-elasticsearch/config/jvm.options
-# The resulting lines (the ones mentioned above that you should edit)
-# should look something like the following if you wanted to set the heap to 16GBs
--Xms16g
--Xmx16g
-# Restart elasticsearch
-docker restart helk-elasticsearch
-```
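-
-If you prefer a non-interactive edit, here is a sketch using GNU sed that sets both flags (it assumes the lines either still start with `#-Xms`/`#-Xmx` or are already uncommented):
-
-```bash
-# Uncomment (if necessary) and set both heap flags to 16g, then restart
-sudo sed -i -e 's/^#\?-Xms.*/-Xms16g/' -e 's/^#\?-Xmx.*/-Xmx16g/' \
-  HELK/docker/helk-elasticsearch/config/jvm.options
-docker restart helk-elasticsearch
-```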
-
-### 3) Add `ES_JAVA_OPTS` to the docker config file
-
-Which docker config file to use is shown later.
-You will add this value under `services.helk-elasticsearch.environment`.
-For example, if I used the option for ELK + Kafka with no license and no alerting, and I wanted to set the heap to 16GBs,
-then I would edit `HELK/docker/helk-kibana-analysis-basic.yml` and add the following line under the environment section:
-`- "ES_JAVA_OPTS=-Xms16g -Xmx16g"`
-
-Then make sure to rebuild the elasticsearch docker container.
-**Always set the min and max JVM heap size to the same value.
-Also, you will be restarting elasticsearch, so your cluster will temporarily be down while the elasticsearch service/database comes back online.**
-**Note: if you are using the (elastic) license you will need to set your ELASTIC_PASSWORD and KIBANA_UI_PASSWORD variables (and logstash password if applicable).**
-
-Here is how to perform the above:
-
-```
-# Example config (only showing the beginning lines). Note that these settings may not match your config exactly; the important thing is to have the value under the environment section
-version: '3.5'
-
-services:
-  helk-elasticsearch:
-    image: docker.elastic.co/elasticsearch/elasticsearch:7.3.1
-    container_name: helk-elasticsearch
-    secrets:
-      - source: elasticsearch.yml
-        target: /usr/share/elasticsearch/config/elasticsearch.yml
-    volumes:
-      - esdata:/usr/share/elasticsearch/data
-      - ./helk-elasticsearch/scripts:/usr/share/elasticsearch/scripts
-      - ./helk-elasticsearch/config/jvm.options:/usr/share/elasticsearch/config/jvm.options
-    entrypoint: /usr/share/elasticsearch/scripts/elasticsearch-entrypoint.sh
-    environment:
-      - cluster.name=helk-cluster
-      - node.name=helk-1
-      - xpack.license.self_generated.type=basic
-      - xpack.security.enabled=false
-      - "ES_JAVA_OPTS= -Xms16g -Xmx16g"
-    ulimits:
-      memlock:
-        soft: -1
-        hard: -1
-      nproc: 20480
-      nofile:
-        soft: 160000
-        hard: 160000
-    restart: always
-    networks:
-      helk:
-# Rebuild the elasticsearch docker container
-docker-compose -f HELK/docker/helk-kibana-analysis-basic.yml up --build -d
-```
-
-### 4) Set at run time using a custom bash variable
-
-Export a bash variable such as:
-
-```bash
-export ES_JAVA_OPTS="-Xms16g -Xmx16g"
-```
-
-Then run the following using your own docker config file:
-
-```bash
-docker-compose -f $PlaceDockerConfigFileNameHere up --build -d
-```
-
-**Only use this option if you explicitly need to. Please know what you're getting into ;)**
\ No newline at end of file
diff --git a/docs/_build/html/_sources/architecture/kibana.md.txt b/docs/_build/html/_sources/architecture/kibana.md.txt
deleted file mode 100644
index 35f83ffb..00000000
--- a/docs/_build/html/_sources/architecture/kibana.md.txt
+++ /dev/null
@@ -1,61 +0,0 @@
-# Kibana
-
-![](../images/KIBANA-Design.png)
-
-## Visualize your logs
-
-### Discover
-
-Make sure you have logs being sent to your HELK first (at least Windows Security and Sysmon events). Then, go to `https://<HELK-IP>` in your preferred browser. If you don't see logs right away, update your time picker (in the top right) to include a window farther back. Additionally, if you just started sending logs, wait a minute and check again.
-
-Currently, HELK automatically creates 7 index patterns for you and sets **logs-endpoint-winevent-sysmon-*** as your default one:
-
-* "logs-*"
-* "logs-endpoint-winevent-sysmon-*"
-* "logs-endpoint-winevent-security-*"
-* "logs-endpoint-winevent-application-*"
-* "logs-endpoint-winevent-system-*"
-* "logs-endpoint-winevent-powershell-*"
-* "logs-endpoint-winevent-wmiactivity-*"
-
-## Dashboards
-
-Currently, the HELK comes with 3 dashboards:
-
-### Global_Dashboard
-
-### Network_Dashboard
-
-### Sysmon_Dashboard
-
-## Monitoring Views (X-Pack Basic Free License)
-
-### Kibana Initial Overview
-
-### Elasticsearch Overview
-
-### Logstash Overview
-
-## Troubleshooting
-
-Apart from running `docker ps` and `docker logs --follow --tail 25 helk-kibana`, you can additionally look at the logs located at `/usr/share/kibana/config/kibana_logs.log`.
-
-Example: `docker exec helk-kibana tail -f /usr/share/kibana/config/kibana_logs.log`
-
-Many times Kibana will not be "working" because elasticsearch is still starting up or has run into an error.
\ No newline at end of file
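-
-Before assuming Kibana itself is broken, a quick sketch for checking elasticsearch's status (this assumes `curl` is available inside the helk-elasticsearch container):
-
-```bash
-# If the cluster is still starting, this will fail or report a "red" status,
-# and Kibana will keep waiting on it
-docker exec helk-elasticsearch curl -s 'http://localhost:9200/_cluster/health?pretty'
-```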
diff --git a/docs/_build/html/_sources/architecture/logstash.md.txt b/docs/_build/html/_sources/architecture/logstash.md.txt
deleted file mode 100644
index fe52dfb4..00000000
--- a/docs/_build/html/_sources/architecture/logstash.md.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-# Logstash
-
-![](../images/LOGSTASH-Design.png)
\ No newline at end of file
diff --git a/docs/_build/html/_sources/how-to/docker/export-images.md.txt b/docs/_build/html/_sources/how-to/docker/export-images.md.txt
deleted file mode 100644
index bcab3d4a..00000000
--- a/docs/_build/html/_sources/how-to/docker/export-images.md.txt
+++ /dev/null
@@ -1,89 +0,0 @@
-# Export Docker Images Locally
-
-If the system where you are planning to install HELK is isolated from the Internet, you can run HELK on another system that has access to the Internet and then export the built/downloaded images to .tar files. You can then load those image files on the system that is isolated from the Internet.
-
-* List all the images available on the non-isolated system via the docker images command:
-
-```bash
-sudo docker images
-```
-
-```
-REPOSITORY                                      TAG     IMAGE ID      CREATED       SIZE
-cyb3rward0g/helk-jupyter                        0.1.1   efa46ecc8d32  2 days ago    2.18GB
-confluentinc/cp-ksql-server                     5.1.2   f57298019757  6 days ago    514MB
-confluentinc/cp-ksql-cli                        5.1.2   bd411ce0ba9f  6 days ago    510MB
-docker.elastic.co/logstash/logstash             6.6.1   3e7fbb7964ee  11 days ago   786MB
-docker.elastic.co/kibana/kibana                 6.6.1   b94222148a00  11 days ago   710MB
-docker.elastic.co/elasticsearch/elasticsearch   6.6.1   c6ffcb0ee97e  11 days ago   842MB
-cyb3rward0g/helk-elastalert                     0.2.1   569f588a22fc  3 weeks ago   758MB
-cyb3rward0g/helk-kafka-broker                   2.1.0   7b3e7f9ce732  2 months ago  388MB
-cyb3rward0g/helk-zookeeper                      2.1.0   abb732da3e50  2 months ago  388MB
-cyb3rward0g/helk-spark-worker                   2.4.0   b1545b0582db  2 months ago  579MB
-cyb3rward0g/helk-spark-master                   2.4.0   70fc61de3445  2 months ago  579MB
-cyb3rward0g/helk-nginx                          0.0.7   280d044b6719  6 months ago  329MB
-```
-
-* List all the containers running on the non-isolated system via the docker ps command:
-
-```bash
-sudo docker ps
-```
-
-```
-CONTAINER ID  IMAGE                                                COMMAND                 CREATED      STATUS      PORTS                                                     NAMES
-de048c88dc7f  confluentinc/cp-ksql-cli:5.1.2                       "/bin/sh"               6 hours ago  Up 6 hours                                                            helk-ksql-cli
-69e06070c14c  confluentinc/cp-ksql-server:5.1.2                    "/etc/confluent/dock…"  6 hours ago  Up 6 hours  0.0.0.0:8088->8088/tcp                                    helk-ksql-server
-d57967977c9c  cyb3rward0g/helk-kafka-broker:2.1.0                  "./kafka-entrypoint.…"  6 hours ago  Up 6 hours  0.0.0.0:9092->9092/tcp                                    helk-kafka-broker
-4889e917d76d  cyb3rward0g/helk-spark-worker:2.4.0                  "./spark-worker-entr…"  6 hours ago  Up 6 hours                                                            helk-spark-worker
-c0a29d8b18a7  cyb3rward0g/helk-nginx:0.0.7                         "/opt/helk/scripts/n…"  6 hours ago  Up 6 hours  0.0.0.0:80->80/tcp, 0.0.0.0:443->443/tcp                  helk-nginx
-6a887d693a31  cyb3rward0g/helk-elastalert:0.2.1                    "./elastalert-entryp…"  6 hours ago  Up 6 hours                                                            helk-elastalert
-a32be7a399c7  cyb3rward0g/helk-zookeeper:2.1.0                     "./zookeeper-entrypo…"  6 hours ago  Up 6 hours  2181/tcp, 2888/tcp, 3888/tcp                              helk-zookeeper
-c636a8a1e8f7  cyb3rward0g/helk-spark-master:2.4.0                  "./spark-master-entr…"  6 hours ago  Up 6 hours  7077/tcp, 0.0.0.0:8080->8080/tcp                          helk-spark-master
-ef1b8d8015ab  cyb3rward0g/helk-jupyter:0.1.1                       "./jupyter-entrypoin…"  6 hours ago  Up 6 hours  8000/tcp                                                  helk-jupyter
-bafeeb1587cf  docker.elastic.co/logstash/logstash:6.6.1            "/usr/share/logstash…"  6 hours ago  Up 6 hours  0.0.0.0:5044->5044/tcp, 0.0.0.0:8531->8531/tcp, 9600/tcp  helk-logstash
-29b57e5c71e5  docker.elastic.co/kibana/kibana:6.6.1                "/usr/share/kibana/s…"  6 hours ago  Up 6 hours  5601/tcp                                                  helk-kibana
-48499aa83917  docker.elastic.co/elasticsearch/elasticsearch:6.6.1  "/usr/share/elastics…"  6 hours ago  Up 6 hours  9200/tcp, 9300/tcp                                        helk-elasticsearch
-```
-
-* Export images as tar files:
-
-```bash
-sudo docker save -o /home/helk/helk-ksql-cli.tar confluentinc/cp-ksql-cli:5.1.2
-sudo docker save -o /home/helk/helk-ksql-server.tar confluentinc/cp-ksql-server:5.1.2
-sudo docker save -o /home/helk/helk-kafka-broker.tar cyb3rward0g/helk-kafka-broker:2.1.0
-sudo docker save -o /home/helk/helk-spark-worker.tar cyb3rward0g/helk-spark-worker:2.4.0
-sudo docker save -o /home/helk/helk-nginx.tar cyb3rward0g/helk-nginx:0.0.7
-sudo docker save -o /home/helk/helk-elastalert.tar cyb3rward0g/helk-elastalert:0.2.1
-sudo docker save -o /home/helk/helk-zookeeper.tar cyb3rward0g/helk-zookeeper:2.1.0
-sudo docker save -o /home/helk/helk-spark-master.tar cyb3rward0g/helk-spark-master:2.4.0
-sudo docker save -o /home/helk/helk-logstash.tar docker.elastic.co/logstash/logstash:6.6.1
-sudo docker save -o /home/helk/helk-kibana.tar docker.elastic.co/kibana/kibana:6.6.1
-sudo docker save -o /home/helk/helk-elasticsearch.tar docker.elastic.co/elasticsearch/elasticsearch:6.6.1
-sudo docker save -o /home/helk/helk-jupyter.tar cyb3rward0g/helk-jupyter:0.1.1
-```
-
-* Check if the images exist locally:
-
-```bash
-ls -l
-```
-
-```
-total 10810584
-drwxrwxr-x 9 helk helk       4096 Feb 24 21:01 HELK
--rw------- 1 root root  778629632 Feb 25 03:07 helk-elastalert.tar
--rw------- 1 root root  854236160 Feb 25 03:12 helk-elasticsearch.tar
--rw------- 1 root root 2254629888 Feb 25 03:14 helk-jupyter.tar
--rw------- 1 root root  395871744 Feb 25 03:04 helk-kafka-broker.tar
--rw------- 1 root root  767277568 Feb 25 03:11 helk-kibana.tar
--rw------- 1 root root  521177600 Feb 25 03:00 helk-ksql-cli.tar
--rw------- 1 root root  525901824 Feb 25 03:02 helk-ksql-server.tar
--rw------- 1 root root  810578944 Feb 25 03:09 helk-logstash.tar
--rw------- 1 root root  335945728 Feb 25 03:06 helk-nginx.tar
--rw------- 1 root root  587616768 Feb 25 03:08 helk-spark-master.tar
--rw------- 1 root root  587616768 Feb 25 03:05 helk-spark-worker.tar
--rw------- 1 root root  395854848 Feb 25 03:08 helk-zookeeper.tar
-
-helk@ubuntu:~$
-```
\ No newline at end of file
diff --git a/docs/_build/html/_sources/how-to/docker/intro.md.txt b/docs/_build/html/_sources/how-to/docker/intro.md.txt
deleted file mode 100644
index 68714083..00000000
--- a/docs/_build/html/_sources/how-to/docker/intro.md.txt
+++ /dev/null
@@ -1 +0,0 @@
-# Docker
\ No newline at end of file
diff --git a/docs/_build/html/_sources/how-to/docker/load-images.md.txt b/docs/_build/html/_sources/how-to/docker/load-images.md.txt
deleted file mode 100644
index aa175cb0..00000000
--- a/docs/_build/html/_sources/how-to/docker/load-images.md.txt
+++ /dev/null
@@ -1,95 +0,0 @@
-# Load Local Docker Images
-
-If you followed [this document](docker-export-images) to export your docker images locally, you should be ready to load them into an isolated system that cannot access the dockerhub registry. A condensed sketch of the whole transfer appears below, followed by the individual steps.
- -* Copy images to the isolated (10.0.10.102) system - -```bash -for f in /home/helk/*.tar; do scp $f helk@10.0.10.102:/tmp/; done -``` - -``` -helk-spark-worker.tar 100% 560MB 24.4MB/s 00:23 -helk-ksql-server.tar 100% 502MB 29.5MB/s 00:17 -helk-logstash.tar 100% 773MB 28.6MB/s 00:27 -helk-ksql-cli.tar 100% 497MB 21.6MB/s 00:23 -helk-elasticsearch.tar 100% 815MB 29.1MB/s 00:28 -``` - -* Check if images exist in the isolated system - -```bash -ls /tmp/ -``` - -``` -helk-elastalert.tar helk-jupyter.tar -helk-kibana.tar helk-ksql-server.tar helk-nginx.tar -helk-spark-worker.tar helk-elasticsearch.tar -helk-kafka-broker.tar helk-ksql-cli.tar helk-logstash.tar -helk-spark-master.tar helk-zookeeper.tar -``` - -* Load images with the docker load commands: - -```bash -for i in /tmp/*.tar; do sudo docker load --input $i; done -``` - -``` -f49017d4d5ce: Loading layer [==================================================>] 85.96MB/85.96MB -8f2b771487e9: Loading layer [==================================================>] 15.87kB/15.87kB -ccd4d61916aa: Loading layer [==================================================>] 10.24kB/10.24kB -c01d74f99de4: Loading layer [==================================================>] 5.632kB/5.632kB -268a067217b5: Loading layer [==================================================>] 3.072kB/3.072kB -831fff32e4f2: Loading layer [==================================================>] 65.02kB/65.02kB -c89f4fbc01f8: Loading layer [==================================================>] 103.4MB/103.4MB -adfd094c5517: Loading layer [==================================================>] 3.245MB/3.245MB -c73538215c3e: Loading layer [==================================================>] 567.6MB/567.6MB -080f01d1ecbc: Loading layer [==================================================>] 13.31kB/13.31kB -60bbd38a907e: Loading layer [==================================================>] 3.584kB/3.584kB -9affd17eb100: Loading layer [==================================================>] 5.632kB/5.632kB -0561c04cbf7e: Loading layer [==================================================>] 7.168kB/7.168kB -ba0201512417: Loading layer [==================================================>] 18.29MB/18.29MB -Loaded image: cyb3rward0g/helk-elastalert:0.2.1 -071d8bd76517: Loading layer [==================================================>] 210.2MB/210.2MB -a175339dcf83: Loading layer [==================================================>] 310.5MB/310.5MB -9a70a6f483f7: Loading layer [==================================================>] 95.68MB/95.68MB -f4db77828c81: Loading layer [==================================================>] 311.3kB/311.3kB -be48c67e9d13: Loading layer [==================================================>] 237.5MB/237.5MB -432cb712190e: Loading layer [==================================================>] 7.68kB/7.68kB -a512981fd597: Loading layer [==================================================>] 9.728kB/9.728kB -Loaded image: docker.elastic.co/elasticsearch/elasticsearch:6.6.1 -49778752e7ec: Loading layer [==================================================>] 394.9MB/394.9MB -5f3913b1d541: Loading layer [==================================================>] 1.667GB/1.667GB -77fa3a9c5ff6: Loading layer [==================================================>] 7.168kB/7.168kB -cbc15b984e03: Loading layer [==================================================>] 10.24kB/10.24kB -38c44d7a52f6: Loading layer [==================================================>] 5.12kB/5.12kB -0ec2dbbfd6c7: Loading 
layer [==================================================>]  3.584kB/3.584kB
-Loaded image: cyb3rward0g/helk-jupyter:0.1.1
-4e31d8c1cf96: Loading layer [==================================================>]  203.1MB/203.1MB
-efb23c49455d: Loading layer [==================================================>]  11.26kB/11.26kB
-```
-
-* Check if the images are loaded via the docker images command:
-
-```bash
-sudo docker images
-```
-
-```
-REPOSITORY                                      TAG     IMAGE ID      CREATED       SIZE
-cyb3rward0g/helk-jupyter                        0.1.1   efa46ecc8d32  2 days ago    2.18GB
-confluentinc/cp-ksql-server                     5.1.2   f57298019757  6 days ago    514MB
-confluentinc/cp-ksql-cli                        5.1.2   bd411ce0ba9f  6 days ago    510MB
-docker.elastic.co/logstash/logstash             6.6.1   3e7fbb7964ee  11 days ago   786MB
-docker.elastic.co/kibana/kibana                 6.6.1   b94222148a00  11 days ago   710MB
-docker.elastic.co/elasticsearch/elasticsearch   6.6.1   c6ffcb0ee97e  11 days ago   842MB
-cyb3rward0g/helk-elastalert                     0.2.1   569f588a22fc  3 weeks ago   758MB
-cyb3rward0g/helk-kafka-broker                   2.1.0   7b3e7f9ce732  2 months ago  388MB
-cyb3rward0g/helk-zookeeper                      2.1.0   abb732da3e50  2 months ago  388MB
-cyb3rward0g/helk-spark-worker                   2.4.0   b1545b0582db  2 months ago  579MB
-cyb3rward0g/helk-spark-master                   2.4.0   70fc61de3445  2 months ago  579MB
-cyb3rward0g/helk-nginx                          0.0.7   280d044b6719  6 months ago  329MB
-helk@helk:~$
-```
\ No newline at end of file
diff --git a/docs/_build/html/_sources/how-to/kafka/intro.md.txt b/docs/_build/html/_sources/how-to/kafka/intro.md.txt
deleted file mode 100644
index 96fba0ab..00000000
--- a/docs/_build/html/_sources/how-to/kafka/intro.md.txt
+++ /dev/null
@@ -1 +0,0 @@
-# Kafka
\ No newline at end of file
diff --git a/docs/_build/html/_sources/how-to/kafka/topic-ingestion.md.txt b/docs/_build/html/_sources/how-to/kafka/topic-ingestion.md.txt
deleted file mode 100644
index 0dccd7c9..00000000
--- a/docs/_build/html/_sources/how-to/kafka/topic-ingestion.md.txt
+++ /dev/null
@@ -1,36 +0,0 @@
-# Check Kafka Topic Ingestion
-
-There are a few ways you can accomplish this.
-
-## HELK's Kafka broker container
-
-Access your kafka broker container by running the following command:
-
-```bash
-sudo docker exec -ti helk-kafka-broker bash
-```
-
-Run the kafka-console-consumer.sh script available in the container:
-
-```bash
-/opt/helk/kafka/bin/kafka-console-consumer.sh --bootstrap-server helk-kafka-broker:9092 --topic winlogbeat --from-beginning
-```
-
-Or simply run the script without an interactive shell:
-
-```bash
-sudo docker exec -ti helk-kafka-broker /opt/helk/kafka/bin/kafka-console-consumer.sh --bootstrap-server helk-kafka-broker:9092 --topic winlogbeat --from-beginning
-```
-
-## Kafkacat
-
-Kafkacat is a generic non-JVM producer and consumer for Apache Kafka >=0.8; think of it as a netcat for Kafka. You can install it by following the [instructions](https://github.com/edenhill/kafkacat#install) from the Kafkacat repo.
-
-```bash
-kafkacat -b 10.0.10.100:9092 -t winlogbeat -C
-```
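-
-If you are not sure which topics exist or whether the broker is reachable, a quick sketch using Kafkacat's metadata mode (same broker address as above):
-
-```bash
-# -L lists broker metadata: brokers, topics, and partitions
-kafkacat -b 10.0.10.100:9092 -L
-```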
-
-## References
-
-* [Kafka Consumer Example](https://kafka.apache.org/quickstart#quickstart_consume)
-* [Kafkacat](https://github.com/edenhill/kafkacat)
diff --git a/docs/_build/html/_sources/how-to/kafka/update-ip-address.md.txt b/docs/_build/html/_sources/how-to/kafka/update-ip-address.md.txt
deleted file mode 100644
index c7b2a134..00000000
--- a/docs/_build/html/_sources/how-to/kafka/update-ip-address.md.txt
+++ /dev/null
@@ -1,35 +0,0 @@
-# Update Kafka Broker IP
-
-For the docker deployment, you will have to update the environment variable ADVERTISED_LISTENER first. You can do this on the system hosting the entire HELK, or on the Kafka broker's own system if you distributed your docker containers across other systems.
-
-```bash
-export ADVERTISED_LISTENER=10.0.10.104
-```
-
-Then, you can simply run docker-compose the same way it was used to build the HELK. This will re-create the system with the new value assigned to the environment variable `ADVERTISED_LISTENER`.
-
-```bash
-sudo -E docker-compose -f helk-kibana-notebook-analysis-basic.yml up -d
-```
-
-If you just restart your containers, it will not update the environment variable in the Kafka broker. You have to re-create the container. Not re-creating the broker would still show you messages like the ones below:
-
-```
-[2019-01-25 05:35:21,026] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:35:24,194] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:35:27,362] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:35:30,530] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:35:33,698] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:35:36,866] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:35:40,034] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:35:43,238] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:35:46,306] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:35:49,382] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:35:52,450] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:35:55,522] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:35:58,594] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient)
-[2019-01-25 05:36:01,714] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available.
(org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:36:04,770] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:36:08,450] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -[2019-01-25 05:36:11,650] WARN [Controller id=1, targetBrokerId=1] Connection to node 1 (/10.0.10.104:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient) -``` \ No newline at end of file diff --git a/docs/_build/html/_sources/how-to/ksql/deploy-locally.md.txt b/docs/_build/html/_sources/how-to/ksql/deploy-locally.md.txt deleted file mode 100644 index 3808af70..00000000 --- a/docs/_build/html/_sources/how-to/ksql/deploy-locally.md.txt +++ /dev/null @@ -1 +0,0 @@ -# Deploy KSQL Locally diff --git a/docs/_build/html/_sources/how-to/ksql/intro.md.txt b/docs/_build/html/_sources/how-to/ksql/intro.md.txt deleted file mode 100644 index 2b4b8213..00000000 --- a/docs/_build/html/_sources/how-to/ksql/intro.md.txt +++ /dev/null @@ -1,173 +0,0 @@ -# KSQL - -You can use KSQL CLI to connect to the HELK's KSQL Server from a different system. You will have to download the self-managed software Confluent platform and then run KSQL - -* Download the self-managed software Confluent platform in a .tar.gz format from: https://www.confluent.io/download/#popup_form_3109 -* Decompress the folder: - -``` -tar -xvzf confluent-5.1.2-2.11.tar.gz -``` - -``` -x confluent-5.1.2/ -x confluent-5.1.2/src/ -x confluent-5.1.2/src/avro-cpp-1.8.0-confluent5.1.2.tar.gz -x confluent-5.1.2/src/librdkafka-0.11.6-confluent5.1.2.tar.gz -x confluent-5.1.2/src/confluent-libserdes-5.1.2.tar.gz -x confluent-5.1.2/src/avro-c-1.8.0-confluent5.1.2.tar.gz -x confluent-5.1.2/lib/ -``` - -* Access the KSQL scripts: - -``` -cd confluent-5.1.2 -ls -``` - -``` -README bin etc lib logs share src -``` - -``` -cd bin/ -ls -``` - -``` -confluent kafka-acls kafka-mirror-maker kafka-server-stop schema-registry-start -confluent-hub kafka-api-start kafka-mqtt-run-class kafka-streams-application-reset schema-registry-stop -confluent-rebalancer kafka-avro-console-consumer kafka-mqtt-start kafka-topics schema-registry-stop-service -connect-distributed kafka-avro-console-producer kafka-mqtt-stop kafka-verifiable-consumer security-plugins-run-class -connect-standalone kafka-broker-api-versions kafka-preferred-replica-election kafka-verifiable-producer sr-acl-cli -control-center-3_0_0-reset kafka-configs kafka-producer-perf-test ksql support-metrics-bundle -control-center-3_0_1-reset kafka-console-consumer kafka-reassign-partitions ksql-datagen windows -control-center-console-consumer kafka-console-producer kafka-replica-verification ksql-print-metrics zookeeper-security-migration -control-center-export kafka-consumer-groups kafka-rest-run-class ksql-run-class zookeeper-server-start -control-center-reset kafka-consumer-perf-test kafka-rest-start ksql-server-start zookeeper-server-stop -control-center-run-class kafka-delegation-tokens kafka-rest-stop ksql-server-stop zookeeper-shell -control-center-set-acls kafka-delete-records kafka-rest-stop-service ksql-stop -control-center-start kafka-dump-log kafka-run-class replicator -control-center-stop kafka-log-dirs kafka-server-start schema-registry-run-class -Robertos-MBP:bin wardog$ -``` - -* Check the options for KSQL: 
- -``` -./ksql --help -``` - -``` -NAME - ksql - KSQL CLI - -SYNOPSIS - ksql [ --config-file ] [ {-h | --help} ] - [ --output ] - [ --query-row-limit ] - [ --query-timeout ] [--] - -OPTIONS - --config-file - A file specifying configs for Ksql and its underlying Kafka Streams - instance(s). Refer to KSQL documentation for a list of available - configs. - - -h, --help - Display help information - - --output - The output format to use (either 'JSON' or 'TABULAR'; can be - changed during REPL as well; defaults to TABULAR) - - --query-row-limit - An optional maximum number of rows to read from streamed queries - - This options value must fall in the following range: value >= 1 - - - --query-timeout - An optional time limit (in milliseconds) for streamed queries - - This options value must fall in the following range: value >= 1 - - - -- - This option can be used to separate command-line options from the - list of arguments (useful when arguments might be mistaken for - command-line options) - - - The address of the Ksql server to connect to (ex: - http://confluent.io:9098) - - This option may occur a maximum of 1 times - -Robertos-MBP:bin wardog$ -``` - -* Connect to the HELK KSQL Server. You will just need to point to the IP address of your HELK Docker environment over port 8088 - -``` -./ksql http://192.168.64.138:8088 -``` - -``` - =========================================== - = _ __ _____ ____ _ = - = | |/ // ____|/ __ \| | = - = | ' /| (___ | | | | | = - = | < \___ \| | | | | = - = | . \ ____) | |__| | |____ = - = |_|\_\_____/ \___\_\______| = - = = - = Streaming SQL Engine for Apache Kafka® = - =========================================== - -Copyright 2017-2018 Confluent Inc. - -CLI v5.1.2, Server v5.1.0 located at http://192.168.64.138:8088 - -Having trouble? Type 'help' (case-insensitive) for a rundown of how things work! - -ksql> -``` - -* Verify that you can see the topics available in the HELK Kafka broker - -``` -./ksql http://192.168.64.138:8088 -``` - -``` - =========================================== - = _ __ _____ ____ _ = - = | |/ // ____|/ __ \| | = - = | ' /| (___ | | | | | = - = | < \___ \| | | | | = - = | . \ ____) | |__| | |____ = - = |_|\_\_____/ \___\_\______| = - = = - = Streaming SQL Engine for Apache Kafka® = - =========================================== - -Copyright 2017-2018 Confluent Inc. - -CLI v5.1.2, Server v5.1.0 located at http://192.168.64.138:8088 - -Having trouble? Type 'help' (case-insensitive) for a rundown of how things work! 
-
-ksql> SHOW TOPICS;
-
- Kafka Topic | Registered | Partitions | Partition Replicas | Consumers | ConsumerGroups
------------------------------------------------------------------------------------------
- filebeat    | false      | 1          | 1                  | 0         | 0
- SYSMON_JOIN | false      | 1          | 1                  | 0         | 0
- winlogbeat  | false      | 1          | 1                  | 0         | 0
- winsecurity | false      | 1          | 1                  | 0         | 0
- winsysmon   | false      | 1          | 1                  | 0         | 0
------------------------------------------------------------------------------------------
-ksql>
-```
\ No newline at end of file
diff --git a/docs/_build/html/_sources/how-to/logstash/create-plugins-offline.md.txt b/docs/_build/html/_sources/how-to/logstash/create-plugins-offline.md.txt
deleted file mode 100644
index 09bf2eb4..00000000
--- a/docs/_build/html/_sources/how-to/logstash/create-plugins-offline.md.txt
+++ /dev/null
@@ -1,43 +0,0 @@
-# Create Plugins Offline Package
-
-If you are installing HELK and the helk-logstash extra plugins are still being installed over the Internet, you can use the following steps to export them as a zipped offline package. That package can then be loaded on a system that does not have access to the Internet and is stuck installing plugins.
-
-Remember that you will need to do this on a system where HELK is already installed and the plugins were installed successfully.
-
-* Access your helk-logstash docker container on the system where HELK was already successfully installed:
-
-```bash
-sudo docker exec -ti helk-logstash bash
-```
-
-```
-bash-4.2$
-```
-
-* Using the logstash-plugin script, prepare and export the plugins offline package:
-
-```bash
-bin/logstash-plugin prepare-offline-pack logstash-filter-translate logstash-filter-dns logstash-filter-cidr logstash-filter-geoip logstash-filter-dissect logstash-output-kafka logstash-input-kafka logstash-filter-alter logstash-filter-fingerprint logstash-filter-prune logstash-codec-gzip_lines logstash-codec-netflow logstash-filter-i18n logstash-filter-environment logstash-filter-de_dot logstash-input-wmi logstash-filter-clone
-```
-
-```
-Offline package created at: /usr/share/logstash/logstash-offline-plugins-6.6.1.zip
-
-You can install it with this command
-bin/logstash-plugin install file:///usr/share/logstash/logstash-offline-plugins-6.6.1.zip
-```
-
-* Copy the offline package from your helk-logstash container to your local system:
-
-```bash
-sudo docker cp helk-logstash:/usr/share/logstash/logstash-offline-plugins-6.6.1.zip .
-```
-
-* Copy the logstash-offline-plugins-6.6.1.zip to the OFFLINE-ISOLATED (i.e. 10.0.10.102) system. You must be authorized to SSH to it.
-
-```bash
-scp logstash-offline-plugins-6.6.1.zip helk@10.0.10.102:/home/helk/
-```
-
-Now you should be able to use it on the offline-isolated HELK system.
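-
-On that offline-isolated system, a sketch of loading the pack (the `file://` install command comes from the logstash-plugin output above; paths assume the copy step above):
-
-```bash
-# Copy the offline pack into the helk-logstash container and install it from file://
-sudo docker cp /home/helk/logstash-offline-plugins-6.6.1.zip helk-logstash:/usr/share/logstash/
-sudo docker exec -ti helk-logstash bin/logstash-plugin install file:///usr/share/logstash/logstash-offline-plugins-6.6.1.zip
-```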
diff --git a/docs/_build/html/_sources/how-to/logstash/intro.md.txt b/docs/_build/html/_sources/how-to/logstash/intro.md.txt
deleted file mode 100644
index 6c5a0573..00000000
--- a/docs/_build/html/_sources/how-to/logstash/intro.md.txt
+++ /dev/null
@@ -1 +0,0 @@
-# Logstash
\ No newline at end of file
diff --git a/docs/_build/html/_sources/how-to/winlogbeat/check-log-shipping.md.txt b/docs/_build/html/_sources/how-to/winlogbeat/check-log-shipping.md.txt
deleted file mode 100644
index 41302c4d..00000000
--- a/docs/_build/html/_sources/how-to/winlogbeat/check-log-shipping.md.txt
+++ /dev/null
@@ -1,35 +0,0 @@
-# Check Winlogbeat Log Shipping
-
-If you believe logs are not being sent to HELK from winlogbeat, then there are a couple of things that could be going on. For this document we will stick to looking only at winlogbeat itself. If your logs contain what is shown below, then winlogbeat is shipping/sending the logs properly; the issue could therefore be somewhere else, and you should consult the wiki for additional resources.
-
-## Installed as a Service
-
-If winlogbeat is installed as a service, then the logs will be located within:
-
-```
-%PROGRAMDATA%\winlogbeat\logs\winlogbeat
-```
-
-## Manually Running the Executable
-
-If you are manually running winlogbeat, then the logs will be within the directory where you are running winlogbeat.exe, at the path:
-
-```
-.\logs\winlogbeat
-```
-
-## Viewing Logs
-
-To view the logs you can simply use Notepad or Notepad++, or use a PowerShell command such as:
-
-```
-Get-Content C:\ProgramData\winlogbeat\logs\winlogbeat -Tail 10 -Wait
-```
-
-## Log Output
-
-Your logs should contain entries with the verbiage "successfully published #NUMBER events", similar to the output shown below:
-
-![](../../images/KAFKA-producer1.png)
-
-![](../../images/KAFKA-producer2.png)
\ No newline at end of file
diff --git a/docs/_build/html/_sources/how-to/winlogbeat/intro.md.txt b/docs/_build/html/_sources/how-to/winlogbeat/intro.md.txt
deleted file mode 100644
index 815a4c05..00000000
--- a/docs/_build/html/_sources/how-to/winlogbeat/intro.md.txt
+++ /dev/null
@@ -1 +0,0 @@
-# Winlogbeat
\ No newline at end of file
diff --git a/docs/_build/html/_sources/installation.md.txt b/docs/_build/html/_sources/installation.md.txt
deleted file mode 100644
index c52bb4f3..00000000
--- a/docs/_build/html/_sources/installation.md.txt
+++ /dev/null
@@ -1,292 +0,0 @@
-# Installation
-
-# Requirements (Please Read Carefully)
-* **Operating System:**
-  * Ubuntu 18.04 (preferred)
-  * Ubuntu 16
-  * CentOS 7 with or without SELinux in enforcement mode
-  * CentOS 8 with or without SELinux in enforcement mode
-* **Docker:**
-  * HELK uses the official Docker Community Edition (CE) bash script (Edge Version) to install Docker for you. The Docker CE Edge script supports the following distros: **ubuntu**, **debian**, **raspbian**, **centos**, and **fedora**.
-  * You can see the specific distro versions supported in the script [here](https://get.docker.com/).
-  * If you have Docker & Docker-Compose already installed on your system, make sure you uninstall them to avoid old incompatible versions. Let HELK use the official Docker CE Edge script execution to install Docker.
-* **Processor/OS Architecture:**
-  * 64-bit, also known as x64, x86_64, AMD64 or Intel 64.
-  * FYI: old processors don't support the SSE3 instructions needed to start ML (Machine Learning) on elasticsearch. Since version 6.1, Elastic has been compiling the ML programs on the assumption that SSE4.2 instructions are available (see: https://github.com/Cyb3rWard0g/HELK/issues/321 and https://discuss.elastic.co/t/failed-to-start-machine-learning-on-elasticsearch-7-0-0/178216/7)
-* **Cores:** Minimum of 4 cores (whether logical or physical)
-* **Network Connection:** NAT or Bridge
-  * IP version 4 address. IPv6 has not been tested yet.
-  * If using a proxy, documentation is yet to come, so use a proxy at your own expense. However, open a GitHub issue and we will try to help until it is officially documented/supported.
-  * If using a VM, then NAT or Bridge will work.
-  * Internet access
-  * A list of required domains/IPs will be provided in future documentation.
-* **RAM:** There are four options, and the following are minimum requirements (include more if you are able).
- * **Option 1: 5GB** includes `KAFKA + KSQL + ELK + NGNIX.` - * **Option 2: 5GB** includes `KAFKA + KSQL + ELK + NGNIX + ELASTALERT` - * **Option 3: 7GB** includes `KAFKA + KSQL + ELK + NGNIX + SPARK + JUPYTER`. - * **Option 4: 8GB** includes `KAFKA + KSQL + ELK + NGNIX + SPARK + JUPYTER + ELASTALERT`. -* **Disk:** 20GB for testing purposes and 100GB+ for production (minimum) -* **Applications:** - * Docker: 18.06.1-ce+ & Docker-Compose (HELK INSTALLS THIS FOR YOU) - * [Winlogbeat](https://www.elastic.co/downloads/beats/winlogbeat) running on your endpoints or centralized WEF server (that your endpoints are forwarding to). - * You can install Winlogbeat by following one of [@Cyb3rWard0g](https://twitter.com/Cyb3rWard0g) posts [here](https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_87.html). - * [Winlogbeat config](https://github.com/Cyb3rWard0g/HELK/blob/master/configs/winlogbeat/winlogbeat.yml) recommended by the HELK since it uses the [Kafka output plugin](https://www.elastic.co/guide/en/beats/winlogbeat/current/kafka-output.html) and it is already pointing to the right ports with recommended options. You will just have to add your HELK's IP address. - -# HELK Download -Run the following commands to clone the HELK repo via git. - -```bash -git clone https://github.com/Cyb3rWard0g/HELK.git -``` - -# HELK Install -In order to make the installation of the HELK easy for everyone, the project comes with an install script named **helk_install.sh**. This script builds and runs everything for HELK automatically. During the installation process, the script will allow you to set up the following: -* Set the components/applications for the HELK' -* Set the Kibana User's password. Default user is **helk** -* Set the HELK's IP. By default you can confirm that you want to use your HOST IP address for the HELK, unless you want to use a different one. Press \[Return\] or let the script continue on its own (90 Seconds sleep). -* Set the HELK's License Subscription. By default the HELK has the **basic** subscription selected. You can set it to **trial** if you want and will be valid for 30 days. If you want to learn more about subscriptions go [here](https://www.elastic.co/subscriptions) - * If the license is set to **trial**, HELK asks you to set the password for the **elastic** account. - -**To install HELK:** -Change your current directory location to the new HELK directory, and run the **helk_install.sh** bash script as shown: - -```bash -cd HELK/docker -sudo ./helk_install.sh -``` - -**Here is an example output of installing the HELK using Option 2** - -``` -********************************************** -** HELK - THE HUNTING ELK ** -** ** -** Author: Roberto Rodriguez (@Cyb3rWard0g) ** -** HELK build version: v0.1.8-alpha01032020 ** -** HELK ELK version: 7.6.2 ** -** License: GPL-3.0 ** -********************************************** - -[HELK-INSTALLATION-INFO] HELK hosted on a Linux box -[HELK-INSTALLATION-INFO] Available Memory: 8345 MBs -[HELK-INSTALLATION-INFO] You're using ubuntu version bionic - -***************************************************** -* HELK - Docker Compose Build Choices * -***************************************************** - -1. KAFKA + KSQL + ELK + NGNIX -2. KAFKA + KSQL + ELK + NGNIX + ELASTALERT -3. KAFKA + KSQL + ELK + NGNIX + SPARK + JUPYTER -4. KAFKA + KSQL + ELK + NGNIX + SPARK + JUPYTER + ELASTALERT - -Enter build choice [ 1 - 4]: 2 -[HELK-INSTALLATION-INFO] HELK build set to 2 -[HELK-INSTALLATION-INFO] Set HELK IP. 
Default value is your current IP: 10.66.6.35
-
-[HELK-INSTALLATION-INFO] Please make sure to create a custom Kibana password and store it securely for future use.
-[HELK-INSTALLATION-INFO] Set HELK Kibana UI Password: Mmh3QAvQm3535F4f4VZQD
-[HELK-INSTALLATION-INFO] Verify HELK Kibana UI Password: Mmh3QAvQm3535F4f4VZQD
-[HELK-INSTALLATION-INFO] Docker already installed
-[HELK-INSTALLATION-INFO] Making sure you assigned enough disk space to the current Docker base directory
-[HELK-INSTALLATION-INFO] Available Docker Disk: 107 GBs
-[HELK-INSTALLATION-INFO] Checking local vm.max_map_count variable and setting it to 4120294
-[HELK-INSTALLATION-INFO] Setting local vm.swappiness variable to 25
-[HELK-INSTALLATION-INFO] Building & running HELK from helk-kibana-analysis-alert-basic.yml file..
-[HELK-INSTALLATION-INFO] Waiting for some services to be up .....
-
-
-***********************************************************************************
-** [HELK-INSTALLATION-INFO] HELK WAS INSTALLED SUCCESSFULLY                      **
-** [HELK-INSTALLATION-INFO] USE THE FOLLOWING SETTINGS TO INTERACT WITH THE HELK **
-***********************************************************************************
-
-HELK KIBANA URL: https://10.66.6.35
-HELK KIBANA USER: helk
-HELK KIBANA PASSWORD: Mmh3QAvQm3535F4f4VZQD
-HELK ZOOKEEPER: 10.66.6.35:2181
-HELK KSQL SERVER: 10.66.6.35:8088
-
-IT IS HUNTING SEASON!!!!!
-
-You can stop all the HELK docker containers by running the following command:
-  [+] sudo docker-compose -f helk-kibana-analysis-alert-basic.yml stop
-
-```
-# Monitor HELK installation Logs (Always)
-Once the installation kicks in, it will start showing you pre-defined messages about the installation, but not all the details of what is actually happening in the background. It is designed that way to keep your main screen clean and let you know where it is in the installation process.
-
-What I recommend doing every time is to open another shell and monitor the HELK installation logs by using the **tail** command, pointing it to the **/var/log/helk-install.log** file that gets created by the **helk_install** script as soon as it is run. This log file is available on your local host even if you are deploying the HELK via Docker (I want to make sure it is clear that it is a local file).
-
-```bash
-tail -f /var/log/helk-install.log
-```
-
-```
-Adding password for user helk
-Creating network "docker_helk" with driver "bridge"
-Creating volume "docker_esdata" with local driver
-Pulling helk-elasticsearch (docker.elastic.co/elasticsearch/elasticsearch:7.6.2)...
-7.6.2: Pulling from elasticsearch/elasticsearch
-Digest: sha256:771240a8e1c76cc6ac6aa740d2b82de94d4b8b7dbcca5ad0cf49d12b88a3b8e7
-Status: Downloaded newer image for docker.elastic.co/elasticsearch/elasticsearch:7.6.2
-Pulling helk-kibana (docker.elastic.co/kibana/kibana:7.6.2)...
-7.6.2: Pulling from kibana/kibana
-Digest: sha256:fb0ac36c40de29b321a30805bcbda4cbe486e1c5979780647458ad77b5ee2f98
-Status: Downloaded newer image for docker.elastic.co/kibana/kibana:7.6.2
-Pulling helk-logstash (otrf/helk-logstash:7.6.2)...
-7.6.2: Pulling from otrf/helk-logstash
-Digest: sha256:c54057ff1d02d7ebae23e49835060c0b4012844312c674ce2264d8bbaee64f1a
-Status: Downloaded newer image for otrf/helk-logstash:7.6.2
-Pulling helk-nginx (otrf/helk-nginx:0.3.0)...
-0.0.8: Pulling from otrf/helk-nginx -Digest: sha256:83e86d3ee3891b8a06173f4278ddc9f85cbba9b2dfceada48fb311411e236341 -Status: Downloaded newer image for otrf/helk-nginx:0.3.0 -Pulling helk-zookeeper (otrf/helk-zookeeper:2.4.0)... -2.3.0: Pulling from otrf/helk-zookeeper -Digest: sha256:3e7a0f3a73bcffeac4f239083618c362017005463dd747392a9b43db99535a68 -Status: Downloaded newer image for otrf/helk-zookeeper:2.4.0 -Pulling helk-kafka-broker (otrf/helk-kafka-broker:2.4.0)... -2.3.0: Pulling from otrf/helk-kafka-broker -Digest: sha256:03569d98c46028715623778b4adf809bf417a055c3c19d21f426db4e1b2d6f55 -Status: Downloaded newer image for otrf/helk-kafka-broker:2.4.0 -Pulling helk-ksql-server (confluentinc/cp-ksql-server:5.1.3)... -5.1.3: Pulling from confluentinc/cp-ksql-server -Digest: sha256:063add111cc93b1a0118f88b577e31303045d4cc08eb1d21458429f05cba4b02 -Status: Downloaded newer image for confluentinc/cp-ksql-server:5.1.3 -Pulling helk-ksql-cli (confluentinc/cp-ksql-cli:5.1.3)... -5.1.3: Pulling from confluentinc/cp-ksql-cli -Digest: sha256:18c0ccb00fbf87679e16e9e0da600548fcb236a2fd173263b09e89b2d3a42cc3 -Status: Downloaded newer image for confluentinc/cp-ksql-cli:5.1.3 -Pulling helk-elastalert (otrf/helk-elastalert:0.3.0)... -0.2.6: Pulling from otrf/helk-elastalert -Digest: sha256:ae1096829aacbadce42bd4024b36da3a9636f1901ef4e9e62a12b881cfc23cf5 -Status: Downloaded newer image for otrf/helk-elastalert:0.3.0 -Creating helk-elasticsearch ... done -Creating helk-kibana ... done -Creating helk-logstash ... done -Creating helk-nginx ... done -Creating helk-zookeeper ... done -Creating helk-elastalert ... done -Creating helk-kafka-broker ... done -Creating helk-ksql-server ... done -Creating helk-ksql-cli ... done -``` -Once you see that the containers have been created you can check all the containers running by executing the following: - -```bash -sudo docker ps -``` - -``` -CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES -2caa7d86bc9e confluentinc/cp-ksql-cli:5.1.3 "/bin/sh" 5 minutes ago Up 5 minutes helk-ksql-cli -1ee3c0d90b2a confluentinc/cp-ksql-server:5.1.3 "/etc/confluent/dock…" 5 minutes ago Up 5 minutes 0.0.0.0:8088->8088/tcp helk-ksql-server -e753a811ffd2 otrf/helk-kafka-broker:2.4.0 "./kafka-entrypoint.…" 5 minutes ago Up 5 minutes 0.0.0.0:9092->9092/tcp helk-kafka-broker -f93239de7d95 otrf/helk-zookeeper:2.4.0 "./zookeeper-entrypo…" 5 minutes ago Up 5 minutes 2181/tcp, 2888/tcp, 3888/tcp helk-zookeeper -229ea8467075 otrf/helk-elastalert:0.3.0 "./elastalert-entryp…" 5 minutes ago Up 5 minutes helk-elastalert -f6fd290d2a9d otrf/helk-nginx:0.3.0 "/opt/helk/scripts/n…" 5 minutes ago Up 5 minutes 0.0.0.0:80->80/tcp, 0.0.0.0:443->443/tcp helk-nginx -d4f2b6d7d21e otrf/helk-logstash:7.6.2 "/usr/share/logstash…" 5 minutes ago Up 5 minutes 0.0.0.0:3515->3515/tcp, 0.0.0.0:5044->5044/tcp, 0.0.0.0:8531->8531/tcp, 9600/tcp helk-logstash -c5ae143741ea docker.elastic.co/kibana/kibana:7.6.2 "/usr/share/kibana/s…" 5 minutes ago Up 5 minutes 5601/tcp helk-kibana -1729e3234b91 docker.elastic.co/elasticsearch/elasticsearch:7.6.2 "/usr/share/elastics…" 5 minutes ago Up 5 minutes 9200/tcp, 9300/tcp helk-elasticsearch -``` - -If you want to monitor the resources being utilized (Memory, CPU, etc), you can run the following: - -``` -user@HELK-vm:~$ sudo docker stats --all - -CONTAINER ID NAME CPU % MEM USAGE / LIMIT MEM % NET I/O BLOCK I/O PIDS -2caa7d86bc9e helk-ksql-cli 0.00% 840KiB / 8.703GiB 0.01% 26.3kB / 0B 98.3kB / 0B 1 -1ee3c0d90b2a helk-ksql-server 0.29% 222.6MiB / 8.703GiB 2.50% 177kB / 125kB 
147kB / 197kB 31 -e753a811ffd2 helk-kafka-broker 1.71% 366.4MiB / 8.703GiB 4.11% 381kB / 383kB 823kB / 2.14MB 74 -f93239de7d95 helk-zookeeper 0.18% 74.24MiB / 8.703GiB 0.83% 109kB / 67.2kB 111kB / 1.39MB 48 -229ea8467075 helk-elastalert 10.71% 53.78MiB / 8.703GiB 0.60% 2.34MB / 3.39MB 3.62MB / 1.87MB 12 -f6fd290d2a9d helk-nginx 0.02% 6.562MiB / 8.703GiB 0.07% 28.7kB / 1.54kB 61.4kB / 12.3kB 7 -d4f2b6d7d21e helk-logstash 10.46% 1.337GiB / 8.703GiB 15.36% 632kB / 154MB 430MB / 31.5MB 81 -c5ae143741ea helk-kibana 1.10% 359.7MiB / 8.703GiB 4.04% 345kB / 1.18MB 458MB / 12.3kB 13 -1729e3234b91 helk-elasticsearch 43.62% 3.524GiB / 8.703GiB 40.49% 159MB / 3.14MB 609MB / 600MB 77 -``` - -You should also monitor the logs of each container while they are being initialized: - -Just run the following: - -``` -user@HELK-vm:~$ sudo docker logs --follow --tail 20 helk-elasticsearch - -[HELK-ES-DOCKER-INSTALLATION-INFO] Setting ES_JAVA_OPTS to -Xms3200m -Xmx3200m from custom HELK "algorithm" -[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elastic license to basic -[HELK-ES-DOCKER-INSTALLATION-INFO] Running docker-entrypoint script.. -{"type": "server", "timestamp": "2020-01-25T04:26:19,448Z", "level": "INFO", "component": "o.e.e.NodeEnvironment", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "using [1] data paths, mounts [[/usr/share/elasticsearch/data (/dev/mapper/ubuntu--vg-root)]], net usable_space [102.2gb], net total_space [116.6gb], types [ext4]" } -{"type": "server", "timestamp": "2020-01-25T04:26:19,451Z", "level": "INFO", "component": "o.e.e.NodeEnvironment", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "heap size [3gb], compressed ordinary object pointers [true]" } -{"type": "server", "timestamp": "2020-01-25T04:26:19,458Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "node name [helk-1], node ID [Ed3L9UydShyLmPCbP3GLxw], cluster name [helk-cluster]" } -{"type": "server", "timestamp": "2020-01-25T04:26:19,459Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "version[7.6.2], pid[16], build[default/docker/8bec50e1e0ad29dad5653712cf3bb580cd1afcdf/2020-01-15T12:11:52.313576Z], OS[Linux/4.15.0-74-generic/amd64], JVM[AdoptOpenJDK/OpenJDK 64-Bit Server VM/13.0.1/13.0.1+9]" } -{"type": "server", "timestamp": "2020-01-25T04:26:19,459Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "JVM home [/usr/share/elasticsearch/jdk]" } -{"type": "server", "timestamp": "2020-01-25T04:26:19,460Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "JVM arguments [-Des.networkaddress.cache.ttl=60, -Des.networkaddress.cache.negative.ttl=10, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -XX:-OmitStackTraceInFastThrow, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dio.netty.allocator.numDirectArenas=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Djava.locale.providers=COMPAT, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -Des.networkaddress.cache.ttl=60, -Des.networkaddress.cache.negative.ttl=10, -XX:+AlwaysPreTouch, -Djava.io.tmpdir=/tmp/elasticsearch-3812421782724323797, -XX:+HeapDumpOnOutOfMemoryError, -XX:HeapDumpPath=data, 
-
-You should also monitor the logs of each container while it initializes. For example, to follow the Elasticsearch logs, run the following:
-
-```
-user@HELK-vm:~$ sudo docker logs --follow --tail 20 helk-elasticsearch
-
-[HELK-ES-DOCKER-INSTALLATION-INFO] Setting ES_JAVA_OPTS to -Xms3200m -Xmx3200m from custom HELK "algorithm"
-[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elastic license to basic
-[HELK-ES-DOCKER-INSTALLATION-INFO] Running docker-entrypoint script..
-{"type": "server", "timestamp": "2020-01-25T04:26:19,448Z", "level": "INFO", "component": "o.e.e.NodeEnvironment", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "using [1] data paths, mounts [[/usr/share/elasticsearch/data (/dev/mapper/ubuntu--vg-root)]], net usable_space [102.2gb], net total_space [116.6gb], types [ext4]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:19,451Z", "level": "INFO", "component": "o.e.e.NodeEnvironment", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "heap size [3gb], compressed ordinary object pointers [true]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:19,458Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "node name [helk-1], node ID [Ed3L9UydShyLmPCbP3GLxw], cluster name [helk-cluster]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:19,459Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "version[7.6.2], pid[16], build[default/docker/8bec50e1e0ad29dad5653712cf3bb580cd1afcdf/2020-01-15T12:11:52.313576Z], OS[Linux/4.15.0-74-generic/amd64], JVM[AdoptOpenJDK/OpenJDK 64-Bit Server VM/13.0.1/13.0.1+9]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:19,459Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "JVM home [/usr/share/elasticsearch/jdk]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:19,460Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "JVM arguments [-Des.networkaddress.cache.ttl=60, -Des.networkaddress.cache.negative.ttl=10, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -XX:-OmitStackTraceInFastThrow, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dio.netty.allocator.numDirectArenas=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Djava.locale.providers=COMPAT, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -Des.networkaddress.cache.ttl=60, -Des.networkaddress.cache.negative.ttl=10, -XX:+AlwaysPreTouch, -Djava.io.tmpdir=/tmp/elasticsearch-3812421782724323797, -XX:+HeapDumpOnOutOfMemoryError, -XX:HeapDumpPath=data, -XX:ErrorFile=logs/hs_err_pid%p.log, -Xlog:gc*,gc+age=trace,safepoint:file=logs/gc.log:utctime,pid,tags:filecount=32,filesize=64m, -Djava.locale.providers=COMPAT, -Des.cgroups.hierarchy.override=/, -Xms3200m, -Xmx3200m, -XX:MaxDirectMemorySize=1677721600, -Des.path.home=/usr/share/elasticsearch, -Des.path.conf=/usr/share/elasticsearch/config, -Des.distribution.flavor=default, -Des.distribution.type=docker, -Des.bundled_jdk=true]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,523Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [aggs-matrix-stats]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,523Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [analysis-common]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,524Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [flattened]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,524Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [frozen-indices]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,524Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [ingest-common]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,524Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [ingest-geoip]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,526Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [ingest-user-agent]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,526Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [lang-expression]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,526Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [lang-mustache]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,526Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [lang-painless]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,526Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [mapper-extras]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,526Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [parent-join]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,526Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [percolator]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,527Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [rank-eval]" }
-{"type": "server", "timestamp": "2020-01-25T04:26:21,527Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [reindex]" }
-
-..
-....
-```
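-
-If you would rather tail every HELK container at once, docker-compose can aggregate the logs for you. This is a sketch that assumes you run it from the directory holding the compose file you deployed with (the trial compose file named in the installation output further below):
-
-```bash
-# Follow the last 20 log lines of every service defined in the compose file
-sudo docker-compose -f helk-kibana-analysis-alert-trial.yml logs --follow --tail=20
-```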
"o.e.p.PluginsService", "cluster.name": "helk-cluster", "node.name": "helk-1", "message": "loaded module [reindex]" } - -.. -.... -``` - -All you need to do now for the other ones is just replace helk-elasticsearch with the specific containers name: - -```bash -sudo docker logs --follow -``` - -Remember that you can also access your docker images by running the following commands: - -```bash -sudo docker exec -ti helk-elasticsearch bash -[root@1729e3234b91 elasticsearch]# -``` - -# Final Details -Once your HELK installation ends, you will be presented with information that you will need to access the HELK and all its other components. - -You will get the following information: - -``` -*********************************************************************************** -** [HELK-INSTALLATION-INFO] HELK WAS INSTALLED SUCCESSFULLY ** -** [HELK-INSTALLATION-INFO] USE THE FOLLOWING SETTINGS TO INTERACT WITH THE HELK ** -*********************************************************************************** - -HELK KIBANA URL: https://192.168.1.35 -HELK KIBANA USER: helk -HELK KIBANA PASSWORD: Mmh3QAvQm3535F4f4VZQD -HELK ZOOKEEPER: 192.168.1.35:2181 -HELK KSQL SERVER: 192.168.1.35:8088 - -IT IS HUNTING SEASON!!!!! - -You can stop all the HELK docker containers by running the following command: - [+] sudo docker-compose -f helk-kibana-analysis-alert-trial.yml stop - -``` - -| Type | Description | -|--------|---------| -| HELK KIBANA URL | URL to access the Kibana server. You will need to copy that and paste it in your browser to access Kibana. Make sure you use **https** since Kibana is running behind NGINX via port 443 with a self-signed certificate| -| HELK KIBANA USER & PASSWORD | Credentials used to access Kibana | -| HELK SPARK MASTER UI | URL to access the Spark Master server (Spark Standalone). That server manages the Spark Workers used during execution of code by Jupyter Notebooks. Spark Master acts as a proxy to Spark Workers and applications running | -| HELK JUPYTER SERVER URL | URL to access the Jupyter notebook server. | -| HELK JUPYTER CURRENT TOKEN | Jupyter token to log in instead of providing a password | -| ZOOKEEPER | URL for the kafka cluster zookeeper | -| KSQL SERVER| URL to access the KSQL server and send SQL queries to the data in the kafka brokers| \ No newline at end of file diff --git a/docs/_build/html/_sources/intro.md.txt b/docs/_build/html/_sources/intro.md.txt deleted file mode 100644 index 54479f53..00000000 --- a/docs/_build/html/_sources/intro.md.txt +++ /dev/null @@ -1,63 +0,0 @@ -# Introduction - -[![](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0) -[![](https://img.shields.io/twitter/follow/THE_HELK.svg?style=social&label=Follow)](https://twitter.com/THE_HELK) -[![](https://img.shields.io/github/issues-closed/Cyb3rward0g/HELK.svg)](https://GitHub.com/Cyb3rWard0g/HELK/issues?q=is%3Aissue+is%3Aclosed) -[![](https://badges.frapsoft.com/os/v3/open-source.svg?v=103)](https://github.com/ellerbrock/open-source-badges/) - -![](images/HELK-Design.png) - -The Hunting ELK or simply the HELK is one of the first open source hunt platforms with advanced analytics capabilities such as SQL declarative language, graphing, structured streaming, and even machine learning via Jupyter notebooks and Apache Spark over an ELK stack. This project was developed primarily for research, but due to its flexible design and core components, it can be deployed in larger environments with the right configurations and scalable infrastructure. 
-
-# Final Details
-Once your HELK installation finishes, you will be presented with the information you need to access the HELK and its components. The exact set of values depends on the build you chose; the trial build used here prints the following:
-
-```
-***********************************************************************************
-** [HELK-INSTALLATION-INFO] HELK WAS INSTALLED SUCCESSFULLY                      **
-** [HELK-INSTALLATION-INFO] USE THE FOLLOWING SETTINGS TO INTERACT WITH THE HELK **
-***********************************************************************************
-
-HELK KIBANA URL: https://192.168.1.35
-HELK KIBANA USER: helk
-HELK KIBANA PASSWORD: Mmh3QAvQm3535F4f4VZQD
-HELK ZOOKEEPER: 192.168.1.35:2181
-HELK KSQL SERVER: 192.168.1.35:8088
-
-IT IS HUNTING SEASON!!!!!
-
-You can stop all the HELK docker containers by running the following command:
- [+] sudo docker-compose -f helk-kibana-analysis-alert-trial.yml stop
-
-```
-
-| Type | Description |
-|--------|---------|
-| HELK KIBANA URL | URL to access the Kibana server. Copy it into your browser, and make sure you use **https**, since Kibana runs behind NGINX on port 443 with a self-signed certificate |
-| HELK KIBANA USER & PASSWORD | Credentials used to access Kibana |
-| HELK SPARK MASTER UI | URL to access the Spark Master server (Spark Standalone). That server manages the Spark Workers used while executing code from Jupyter Notebooks, and acts as a proxy to the Spark Workers and running applications |
-| HELK JUPYTER SERVER URL | URL to access the Jupyter notebook server |
-| HELK JUPYTER CURRENT TOKEN | Jupyter token to log in instead of providing a password |
-| ZOOKEEPER | URL for the Kafka cluster's ZooKeeper |
-| KSQL SERVER | URL to access the KSQL server and run SQL queries against the data in the Kafka brokers |
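-
-Before logging in, you can quickly probe those endpoints from your workstation. A sketch, assuming the sample values above (curl's -k flag accepts the self-signed certificate, and NGINX may answer 401 until you supply the Kibana credentials; /info is the KSQL server's REST status endpoint):
-
-```bash
-# Probe Kibana behind NGINX over https (self-signed certificate)
-curl -k -I https://192.168.1.35
-# Ask the KSQL server for its version and status
-curl -s http://192.168.1.35:8088/info
-```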
\ No newline at end of file
diff --git a/docs/_build/html/_sources/intro.md.txt b/docs/_build/html/_sources/intro.md.txt
deleted file mode 100644
index 54479f53..00000000
--- a/docs/_build/html/_sources/intro.md.txt
+++ /dev/null
@@ -1,63 +0,0 @@
-# Introduction
-
-[![](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0)
-[![](https://img.shields.io/twitter/follow/THE_HELK.svg?style=social&label=Follow)](https://twitter.com/THE_HELK)
-[![](https://img.shields.io/github/issues-closed/Cyb3rward0g/HELK.svg)](https://GitHub.com/Cyb3rWard0g/HELK/issues?q=is%3Aissue+is%3Aclosed)
-[![](https://badges.frapsoft.com/os/v3/open-source.svg?v=103)](https://github.com/ellerbrock/open-source-badges/)
-
-![](images/HELK-Design.png)
-
-The Hunting ELK or simply the HELK is one of the first open source hunt platforms with advanced analytics capabilities such as SQL declarative language, graphing, structured streaming, and even machine learning via Jupyter notebooks and Apache Spark over an ELK stack. This project was developed primarily for research, but due to its flexible design and core components, it can be deployed in larger environments with the right configurations and scalable infrastructure.
-
-## Goals
-
-* Provide an open source hunting platform to the community and share the basics of Threat Hunting.
-* Expedite the time it takes to deploy a hunt platform.
-* Improve the testing and development of hunting use cases in an easier and more affordable way.
-* Enable Data Science capabilities while analyzing data via Apache Spark, GraphFrames & Jupyter Notebooks.
-
-## Main Features
-
-* **Kafka**: A distributed publish-subscribe messaging system that is designed to be fast, scalable, fault-tolerant, and durable.
-* **Elasticsearch**: A highly scalable open-source full-text search and analytics engine.
-* **Logstash**: A data collection engine with real-time pipelining capabilities.
-* **Kibana**: An open source analytics and visualization platform designed to work with Elasticsearch.
-* **ES-Hadoop**: An open-source, stand-alone, self-contained, small library that allows Hadoop jobs (whether using Map/Reduce or libraries built upon it such as Hive, Pig or Cascading, or newer libraries like Apache Spark) to interact with Elasticsearch.
-* **Spark**: A fast and general-purpose cluster computing system. It provides high-level APIs in Java, Scala, Python and R, and an optimized engine that supports general execution graphs.
-* **Jupyter Notebooks**: An open-source web application that allows you to create and share documents that contain live code, equations, visualizations and narrative text.
-
-## Optional Features
-
-* **KSQL**: Confluent KSQL is the open source, streaming SQL engine that enables real-time data processing against Apache Kafka®. It provides an easy-to-use, yet powerful interactive SQL interface for stream processing on Kafka, without the need to write code in a programming language such as Java or Python.
-* **Elastalert**: ElastAlert is a simple framework for alerting on anomalies, spikes, or other patterns of interest from data in Elasticsearch.
-* **Sigma**: Sigma is a generic and open signature format that allows you to describe relevant log events in a straightforward manner.
-
-## Author
-
-* Roberto Rodriguez [@Cyb3rWard0g](https://twitter.com/Cyb3rWard0g) [@THE_HELK](https://twitter.com/THE_HELK)
-
-## Current Committers
-
-* Nate Guagenti [@neu5ron](https://twitter.com/neu5ron)
-
-## Contributing
-
-There are a few things that I would like to accomplish with the HELK, as shown in the To-Do list below. I would love to make the HELK a stable build for everyone in the community. If you are interested in making this build more robust and adding some cool features to it, PLEASE feel free to submit a pull request. #SharingIsCaring
-
-# TO-Do
-
-- [ ] Kubernetes Cluster Migration
-- [ ] OSQuery Data Ingestion
-- [ ] MITRE ATT&CK mapping to logs or dashboards
-- [ ] Cypher for Apache Spark Integration (Adding option for Zeppelin Notebook)
-- [ ] Test and integrate neo4j spark connectors with build
-- [ ] Add more network data sources (i.e., Bro)
-- [ ] Research & integrate spark structured direct streaming
-- [ ] Packer Images
-- [ ] Terraform integration (AWS, Azure, GC)
-- [ ] Add more Jupyter Notebooks to teach the basics
-- [ ] Auditd beat integration
-
-## License: GPL-3.0
-
-[HELK's GNU General Public License](https://github.com/Cyb3rWard0g/HELK/blob/master/LICENSE)
\ No newline at end of file
diff --git a/docs/_build/html/_static/basic.css b/docs/_build/html/_static/basic.css
deleted file mode 100644
index 9f935245..00000000
--- a/docs/_build/html/_static/basic.css
+++ /dev/null
@@ -1,768 +0,0 @@
-/* - * basic.css - * ~~~~~~~~~ - * - * Sphinx stylesheet -- basic theme. - * - * :copyright: Copyright 2007-2020 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. - * - */ - -/* -- main layout ----------------------------------------------------------- */ - -div.clearer { - clear: both; -} - -/* -- relbar ---------------------------------------------------------------- */ - -div.related { - width: 100%; - font-size: 90%; -} - -div.related h3 { - display: none; -} - -div.related ul { - margin: 0; - padding: 0 0 0 10px; - list-style: none; -} - -div.related li { - display: inline; -} - -div.related li.right { - float: right; - margin-right: 5px; -} - -/* -- sidebar --------------------------------------------------------------- */ - -div.sphinxsidebarwrapper { - padding: 10px 5px 0 10px; -} - -div.sphinxsidebar { - float: left; - width: 270px; - margin-left: -100%; - font-size: 90%; - word-wrap: break-word; - overflow-wrap : break-word; -} - -div.sphinxsidebar ul { - list-style: none; -} - -div.sphinxsidebar ul ul, -div.sphinxsidebar ul.want-points { - margin-left: 20px; - list-style: square; -} - -div.sphinxsidebar ul ul { - margin-top: 0; - margin-bottom: 0; -} - -div.sphinxsidebar form { - margin-top: 10px; -} - -div.sphinxsidebar input { - border: 1px solid #98dbcc; - font-family: sans-serif; - font-size: 1em; -} - -div.sphinxsidebar #searchbox form.search { - overflow: hidden; -} - -div.sphinxsidebar #searchbox input[type="text"] { - float: left; - width: 80%; - padding: 0.25em; - box-sizing: border-box; -} - -div.sphinxsidebar #searchbox input[type="submit"] { - float: left; - width: 20%; - border-left: none; - padding: 0.25em; - box-sizing: border-box; -} - - -img { - border: 0; - max-width: 100%; -} - -/* -- search page ----------------------------------------------------------- */ - -ul.search { - margin: 10px 0 0 20px; - padding: 0; -} - -ul.search li { - padding: 5px 0 5px 20px; - background-image: url(file.png); - background-repeat: no-repeat; - background-position: 0 7px; -} - -ul.search li a { - font-weight: bold; -} - -ul.search li div.context { - color: #888; - margin: 2px 0 0 30px; - text-align: left; -} - -ul.keywordmatches li.goodmatch a { - font-weight: bold; -} - -/* -- index page ------------------------------------------------------------ */ - -table.contentstable { - width: 90%; - margin-left: auto; - margin-right: auto; -} - -table.contentstable p.biglink { - line-height: 150%; -} - -a.biglink { - font-size: 1.3em; -} - -span.linkdescr { - font-style: italic; - padding-top: 5px; - font-size: 90%; -} - -/* -- general index
--------------------------------------------------------- */ - -table.indextable { - width: 100%; -} - -table.indextable td { - text-align: left; - vertical-align: top; -} - -table.indextable ul { - margin-top: 0; - margin-bottom: 0; - list-style-type: none; -} - -table.indextable > tbody > tr > td > ul { - padding-left: 0em; -} - -table.indextable tr.pcap { - height: 10px; -} - -table.indextable tr.cap { - margin-top: 10px; - background-color: #f2f2f2; -} - -img.toggler { - margin-right: 3px; - margin-top: 3px; - cursor: pointer; -} - -div.modindex-jumpbox { - border-top: 1px solid #ddd; - border-bottom: 1px solid #ddd; - margin: 1em 0 1em 0; - padding: 0.4em; -} - -div.genindex-jumpbox { - border-top: 1px solid #ddd; - border-bottom: 1px solid #ddd; - margin: 1em 0 1em 0; - padding: 0.4em; -} - -/* -- domain module index --------------------------------------------------- */ - -table.modindextable td { - padding: 2px; - border-collapse: collapse; -} - -/* -- general body styles --------------------------------------------------- */ - -div.body { - min-width: 450px; - max-width: 800px; -} - -div.body p, div.body dd, div.body li, div.body blockquote { - -moz-hyphens: auto; - -ms-hyphens: auto; - -webkit-hyphens: auto; - hyphens: auto; -} - -a.headerlink { - visibility: hidden; -} - -a.brackets:before, -span.brackets > a:before{ - content: "["; -} - -a.brackets:after, -span.brackets > a:after { - content: "]"; -} - -h1:hover > a.headerlink, -h2:hover > a.headerlink, -h3:hover > a.headerlink, -h4:hover > a.headerlink, -h5:hover > a.headerlink, -h6:hover > a.headerlink, -dt:hover > a.headerlink, -caption:hover > a.headerlink, -p.caption:hover > a.headerlink, -div.code-block-caption:hover > a.headerlink { - visibility: visible; -} - -div.body p.caption { - text-align: inherit; -} - -div.body td { - text-align: left; -} - -.first { - margin-top: 0 !important; -} - -p.rubric { - margin-top: 30px; - font-weight: bold; -} - -img.align-left, .figure.align-left, object.align-left { - clear: left; - float: left; - margin-right: 1em; -} - -img.align-right, .figure.align-right, object.align-right { - clear: right; - float: right; - margin-left: 1em; -} - -img.align-center, .figure.align-center, object.align-center { - display: block; - margin-left: auto; - margin-right: auto; -} - -img.align-default, .figure.align-default { - display: block; - margin-left: auto; - margin-right: auto; -} - -.align-left { - text-align: left; -} - -.align-center { - text-align: center; -} - -.align-default { - text-align: center; -} - -.align-right { - text-align: right; -} - -/* -- sidebars -------------------------------------------------------------- */ - -div.sidebar { - margin: 0 0 0.5em 1em; - border: 1px solid #ddb; - padding: 7px 7px 0 7px; - background-color: #ffe; - width: 40%; - float: right; -} - -p.sidebar-title { - font-weight: bold; -} - -/* -- topics ---------------------------------------------------------------- */ - -div.topic { - border: 1px solid #ccc; - padding: 7px 7px 0 7px; - margin: 10px 0 10px 0; -} - -p.topic-title { - font-size: 1.1em; - font-weight: bold; - margin-top: 10px; -} - -/* -- admonitions ----------------------------------------------------------- */ - -div.admonition { - margin-top: 10px; - margin-bottom: 10px; - padding: 7px; -} - -div.admonition dt { - font-weight: bold; -} - -div.admonition dl { - margin-bottom: 0; -} - -p.admonition-title { - margin: 0px 10px 5px 0px; - font-weight: bold; -} - -div.body p.centered { - text-align: center; - margin-top: 25px; -} - -/* -- tables 
---------------------------------------------------------------- */ - -table.docutils { - border: 0; - border-collapse: collapse; -} - -table.align-center { - margin-left: auto; - margin-right: auto; -} - -table.align-default { - margin-left: auto; - margin-right: auto; -} - -table caption span.caption-number { - font-style: italic; -} - -table caption span.caption-text { -} - -table.docutils td, table.docutils th { - padding: 1px 8px 1px 5px; - border-top: 0; - border-left: 0; - border-right: 0; - border-bottom: 1px solid #aaa; -} - -table.footnote td, table.footnote th { - border: 0 !important; -} - -th { - text-align: left; - padding-right: 5px; -} - -table.citation { - border-left: solid 1px gray; - margin-left: 1px; -} - -table.citation td { - border-bottom: none; -} - -th > p:first-child, -td > p:first-child { - margin-top: 0px; -} - -th > p:last-child, -td > p:last-child { - margin-bottom: 0px; -} - -/* -- figures --------------------------------------------------------------- */ - -div.figure { - margin: 0.5em; - padding: 0.5em; -} - -div.figure p.caption { - padding: 0.3em; -} - -div.figure p.caption span.caption-number { - font-style: italic; -} - -div.figure p.caption span.caption-text { -} - -/* -- field list styles ----------------------------------------------------- */ - -table.field-list td, table.field-list th { - border: 0 !important; -} - -.field-list ul { - margin: 0; - padding-left: 1em; -} - -.field-list p { - margin: 0; -} - -.field-name { - -moz-hyphens: manual; - -ms-hyphens: manual; - -webkit-hyphens: manual; - hyphens: manual; -} - -/* -- hlist styles ---------------------------------------------------------- */ - -table.hlist td { - vertical-align: top; -} - - -/* -- other body styles ----------------------------------------------------- */ - -ol.arabic { - list-style: decimal; -} - -ol.loweralpha { - list-style: lower-alpha; -} - -ol.upperalpha { - list-style: upper-alpha; -} - -ol.lowerroman { - list-style: lower-roman; -} - -ol.upperroman { - list-style: upper-roman; -} - -li > p:first-child { - margin-top: 0px; -} - -li > p:last-child { - margin-bottom: 0px; -} - -dl.footnote > dt, -dl.citation > dt { - float: left; -} - -dl.footnote > dd, -dl.citation > dd { - margin-bottom: 0em; -} - -dl.footnote > dd:after, -dl.citation > dd:after { - content: ""; - clear: both; -} - -dl.field-list { - display: grid; - grid-template-columns: fit-content(30%) auto; -} - -dl.field-list > dt { - font-weight: bold; - word-break: break-word; - padding-left: 0.5em; - padding-right: 5px; -} - -dl.field-list > dt:after { - content: ":"; -} - -dl.field-list > dd { - padding-left: 0.5em; - margin-top: 0em; - margin-left: 0em; - margin-bottom: 0em; -} - -dl { - margin-bottom: 15px; -} - -dd > p:first-child { - margin-top: 0px; -} - -dd ul, dd table { - margin-bottom: 10px; -} - -dd { - margin-top: 3px; - margin-bottom: 10px; - margin-left: 30px; -} - -dt:target, span.highlighted { - background-color: #fbe54e; -} - -rect.highlighted { - fill: #fbe54e; -} - -dl.glossary dt { - font-weight: bold; - font-size: 1.1em; -} - -.optional { - font-size: 1.3em; -} - -.sig-paren { - font-size: larger; -} - -.versionmodified { - font-style: italic; -} - -.system-message { - background-color: #fda; - padding: 5px; - border: 3px solid red; -} - -.footnote:target { - background-color: #ffa; -} - -.line-block { - display: block; - margin-top: 1em; - margin-bottom: 1em; -} - -.line-block .line-block { - margin-top: 0; - margin-bottom: 0; - margin-left: 1.5em; -} - -.guilabel, .menuselection { - 
font-family: sans-serif; -} - -.accelerator { - text-decoration: underline; -} - -.classifier { - font-style: oblique; -} - -.classifier:before { - font-style: normal; - margin: 0.5em; - content: ":"; -} - -abbr, acronym { - border-bottom: dotted 1px; - cursor: help; -} - -/* -- code displays --------------------------------------------------------- */ - -pre { - overflow: auto; - overflow-y: hidden; /* fixes display issues on Chrome browsers */ -} - -span.pre { - -moz-hyphens: none; - -ms-hyphens: none; - -webkit-hyphens: none; - hyphens: none; -} - -td.linenos pre { - padding: 5px 0px; - border: 0; - background-color: transparent; - color: #aaa; -} - -table.highlighttable { - margin-left: 0.5em; -} - -table.highlighttable td { - padding: 0 0.5em 0 0.5em; -} - -div.code-block-caption { - padding: 2px 5px; - font-size: small; -} - -div.code-block-caption code { - background-color: transparent; -} - -div.code-block-caption + div > div.highlight > pre { - margin-top: 0; -} - -div.doctest > div.highlight span.gp { /* gp: Generic.Prompt */ - user-select: none; -} - -div.code-block-caption span.caption-number { - padding: 0.1em 0.3em; - font-style: italic; -} - -div.code-block-caption span.caption-text { -} - -div.literal-block-wrapper { - padding: 1em 1em 0; -} - -div.literal-block-wrapper div.highlight { - margin: 0; -} - -code.descname { - background-color: transparent; - font-weight: bold; - font-size: 1.2em; -} - -code.descclassname { - background-color: transparent; -} - -code.xref, a code { - background-color: transparent; - font-weight: bold; -} - -h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { - background-color: transparent; -} - -.viewcode-link { - float: right; -} - -.viewcode-back { - float: right; - font-family: sans-serif; -} - -div.viewcode-block:target { - margin: -1px -10px; - padding: 0 10px; -} - -/* -- math display ---------------------------------------------------------- */ - -img.math { - vertical-align: middle; -} - -div.body div.math p { - text-align: center; -} - -span.eqno { - float: right; -} - -span.eqno a.headerlink { - position: relative; - left: 0px; - z-index: 1; -} - -div.math:hover a.headerlink { - visibility: visible; -} - -/* -- printout stylesheet --------------------------------------------------- */ - -@media print { - div.document, - div.documentwrapper, - div.bodywrapper { - margin: 0 !important; - width: 100%; - } - - div.sphinxsidebar, - div.related, - div.footer, - #top-link { - display: none; - } -} \ No newline at end of file diff --git a/docs/_build/html/_static/clipboard.min.js b/docs/_build/html/_static/clipboard.min.js deleted file mode 100644 index 02c549e3..00000000 --- a/docs/_build/html/_static/clipboard.min.js +++ /dev/null @@ -1,7 +0,0 @@ -/*! 
- * clipboard.js v2.0.4 - * https://zenorocha.github.io/clipboard.js - * - * Licensed MIT © Zeno Rocha - */ -!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return function(n){var o={};function r(t){if(o[t])return o[t].exports;var e=o[t]={i:t,l:!1,exports:{}};return n[t].call(e.exports,e,e.exports,r),e.l=!0,e.exports}return r.m=n,r.c=o,r.d=function(t,e,n){r.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:n})},r.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)r.d(n,o,function(t){return e[t]}.bind(null,o));return n},r.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return r.d(e,"a",e),e},r.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},r.p="",r(r.s=0)}([function(t,e,n){"use strict";var r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},i=function(){function o(t,e){for(var n=0;n