Skip to content

Commit

Permalink
Merge pull request #6 from snelis/master
Browse files Browse the repository at this point in the history
NEW elasticsearch backend
  • Loading branch information
snelis committed Feb 1, 2016
2 parents 09f97bd + 2fb93b9 commit 8315577
Show file tree
Hide file tree
Showing 16 changed files with 510 additions and 177 deletions.
16 changes: 9 additions & 7 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,9 @@ TOX=$(shell "$(CMD_FROM_VENV)" "tox")
PYTHON=$(shell "$(CMD_FROM_VENV)" "python")
TOX_PY_LIST="$(shell $(TOX) -l | grep ^py | xargs | sed -e 's/ /,/g')"

.PHONY: clean docsclean pyclean test lint isort docs docker setup.py
.PHONY: clean docsclean pyclean test lint isort docs docker setup.py requirements

tox: clean venv
tox: clean requirements
$(TOX)

pyclean:
Expand All @@ -29,10 +29,12 @@ docsclean:

clean: pyclean docsclean
@rm -rf venv
@rm -rf .tox

venv:
@virtualenv -p python2.7 venv
@$(PIP) install -U "pip>=7.0" -q
@$(PIP) install -U "pip>=7.0" -q
@$(PIP) install -r $(DEPS)

test: venv pyclean
Expand All @@ -52,16 +54,16 @@ docs: venv docsclean
@$(TOX) -e docs

docker:
$(DOCKER_COMPOSE) run --rm app bash
$(DOCKER_COMPOSE) run --rm --service-ports app bash

docker/%:
$(DOCKER_COMPOSE) run --rm app make $*
$(DOCKER_COMPOSE) run --rm --service-ports app make $*

setup.py: venv
$(PYTHON) setup_gen.py
$(PYTHON) setup.py check --restructuredtext
@$(PYTHON) setup_gen.py
@$(PYTHON) setup.py check --restructuredtext

publish: setup.py
$(PYTHON) setup.py sdist upload
@$(PYTHON) setup.py sdist upload

build: clean venv tox setup.py
76 changes: 73 additions & 3 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,10 @@ Time Execution
:target: http://py-timeexecution.readthedocs.org/en/latest/?badge=latest


This package is designed to record metrics of the application into a backend.
With the help of grafana_, you can easily create dashboards with them.


Features
--------

Expand All @@ -25,6 +29,7 @@ Backends
--------

- InfluxDB 0.8
- Elasticsearch 2.1


Installation
Expand All @@ -50,12 +55,14 @@ See the following example
from time_execution import configure, time_execution
from time_execution.backends.influxdb import InfluxBackend
from time_execution.backends.elasticsearch import ElasticsearchBackend
# Setup the desired backend
influx = InfluxBackend(host='localhost', database='metrics', use_udp=False)
influx = InfluxBackend(host='influx', database='metrics', use_udp=False)
elasticsearch = ElasticsearchBackend('elasticsearch', index='metrics')
# Configure the time_execution decorator
configure(backends=[influx])
configure(backends=[influx, elasticsearch])
# Wrap the methods where you want the metrics
@time_execution
Expand Down Expand Up @@ -89,6 +96,28 @@ This will result in an entry in the influxdb
}
]
And the following in Elasticsearch

.. code-block:: json
[
{
"_index": "metrics-2016.01.28",
"_type": "metric",
"_id": "AVKIp9DpnPWamvqEzFB3",
"_score": null,
"_source": {
"timestamp": "2016-01-28T14:34:05.416968",
"hostname": "dfaa4928109f",
"name": "__main__.hello",
"value": 312
},
"sort": [
1453991645416
]
}
]
Hooks
-----
Expand Down Expand Up @@ -125,7 +154,7 @@ See the following example how to setup hooks.
)
# Configure the time_execution decorator, but now with hooks
configure(backends=[influx], hooks=[my_hook])
configure(backends=[backend], hooks=[my_hook])
Manually sending metrics
------------------------
Expand All @@ -141,3 +170,44 @@ See the following example.
write_metric('cpu.load.1m', value=loadavg[0])
write_metric('cpu.load.5m', value=loadavg[1])
write_metric('cpu.load.15m', value=loadavg[2])
.. _grafana: http://grafana.org/


Custom Backend
--------------

Writing a custom backend is very simple: all you need to do is create a class
with a `write` method. It is not required to extend `BaseMetricsBackend`,
but to make future upgrades easier, it is recommended that you do.

.. code-block:: python
from time_execution.backends.base import BaseMetricsBackend
class MetricsPrinter(BaseMetricsBackend):
def write(self, name, **data):
print(name, data)
Contribute
----------

Do you have something to contribute? Great!
Here are a few things that may come in handy.

Testing in this project is done via docker. There is a docker-compose to easily
get all the required containers up and running.

There is a Makefile with a few targets that we use often:

- `make test`
- `make isort`
- `make lint`
- `make build`
- `make setup.py`

All of these make targets can be prefixed by `docker/`. This will execute
the target inside the docker container instead of on your local machine.
For example `make docker/build`.
20 changes: 18 additions & 2 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,16 @@ influx:
environment:
- PRE_CREATE_DB=metrics
- UDP_DB=metrics
# volumes:
# - data:/data
elasticsearch:
image: elasticsearch:2.1
ports:
- "9200:9200"
kibana:
image: kibana
ports:
- "5601:5601"
links:
- elasticsearch
grafana:
image: grafana/grafana
ports:
Expand All @@ -17,11 +25,17 @@ grafana:
- GF_SECURITY_ADMIN_PASSWORD=admin
links:
- influx
- elasticsearch
influx_wait:
image: kpndigital/tox
links:
- influx
command: sh -c "while ! nc -w1 -z influx 8086; do sleep 1; done"
elasticsearch_wait:
image: kpndigital/tox
links:
- elasticsearch
command: sh -c "while ! nc -w1 -z elasticsearch 9200; do sleep 1; done"
app:
build: .
volumes:
Expand All @@ -30,3 +44,5 @@ app:
links:
- influx_wait
- influx
- elasticsearch_wait
- elasticsearch
8 changes: 8 additions & 0 deletions docs/api/time_execution.backends.rst
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,14 @@ time_execution.backends.base module
:undoc-members:
:show-inheritance:

time_execution.backends.elasticsearch module
--------------------------------------------

.. automodule:: time_execution.backends.elasticsearch
:members:
:undoc-members:
:show-inheritance:

time_execution.backends.influxdb module
---------------------------------------

Expand Down
3 changes: 2 additions & 1 deletion requirements/requirements-base.txt
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
influxdb
influxdb>=2.11
elasticsearch>=2.2.0
18 changes: 18 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
from time_execution import time_execution


@time_execution
def go(*args, **kwargs):
    """Timed no-op test helper; accepts any arguments and returns True."""
    return True


@time_execution
def fqn_test():
    """Timed no-op used only to exercise fully-qualified-name resolution."""
    pass


@time_execution
class Dummy(object):
    """Decorated dummy class for testing metrics on classes and methods."""

    @time_execution
    def go(self, *args, **kwargs):
        """Timed no-op method; accepts any arguments and returns None."""
        pass
10 changes: 10 additions & 0 deletions tests/test_base_backend.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
import unittest

from time_execution.backends.base import BaseMetricsBackend


class TestBaseBackend(unittest.TestCase):
    """Verify the abstract backend contract: ``write`` must be overridden."""

    def test_write_method(self):
        base = BaseMetricsBackend()
        # The base class only defines the interface, so calling write()
        # on it directly must raise NotImplementedError.
        self.assertRaises(NotImplementedError, base.write, 'test')
87 changes: 87 additions & 0 deletions tests/test_elasticsearch.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
from tests.conftest import Dummy, go
from tests.test_base_backend import TestBaseBackend
from time_execution import configure
from time_execution.backends.elasticsearch import ElasticsearchBackend


class TestTimeExecution(TestBaseBackend):
    """Integration tests for the Elasticsearch metrics backend.

    Requires a reachable Elasticsearch node at host ``elasticsearch``;
    each test writes metrics through the ``time_execution`` decorator
    and reads them back through the Elasticsearch search API.
    """

    def setUp(self):
        super(TestTimeExecution, self).setUp()

        # Dedicated throw-away index so tests never touch real metrics.
        self.backend = ElasticsearchBackend(
            'elasticsearch',
            index='unittest',
        )

        self._clear()
        configure(backends=[self.backend])

    def tearDown(self):
        self._clear()

    def _clear(self):
        # Drop the base index plus any suffixed variants; ignore=404 keeps
        # this a no-op when nothing has been created yet.
        self.backend.client.indices.delete(self.backend.index, ignore=404)
        self.backend.client.indices.delete("{}*".format(self.backend.index), ignore=404)

    def _query_backend(self, name):
        # Refresh first so documents written a moment ago are searchable.
        self.backend.client.indices.refresh(self.backend.get_index())
        return self.backend.client.search(
            index=self.backend.get_index(),
            body={
                "query": {
                    "term": {"name": name}
                },
            }
        )

    def test_time_execution(self):
        expected = 4

        for _ in range(expected):
            go()

        result = self._query_backend(go.fqn)
        self.assertEqual(result['hits']['total'], expected)

        # Every stored metric must carry the measured duration.
        for hit in result['hits']['hits']:
            self.assertIn('value', hit['_source'])

    def test_duration_field(self):
        # Re-configure with a custom name for the duration field.
        configure(backends=[self.backend], duration_field='my_duration')

        go()

        for hit in self._query_backend(go.fqn)['hits']['hits']:
            self.assertIn('my_duration', hit['_source'])

    def test_with_arguments(self):
        go('hello', world='world')
        Dummy().go('hello', world='world')

        # One metric per wrapped call, keyed by fully qualified name.
        self.assertEqual(self._query_backend(go.fqn)['hits']['total'], 1)
        self.assertEqual(self._query_backend(Dummy().go.fqn)['hits']['total'], 1)

    def test_hook(self):

        def test_args(**kwargs):
            # Hooks receive the wrapped call's outcome and the metric dict.
            self.assertIn('response', kwargs)
            self.assertIn('exception', kwargs)
            self.assertIn('metric', kwargs)
            return dict()

        def test_metadata(*args, **kwargs):
            # Hook return values are merged into the stored metric.
            return dict(test_key='test value')

        configure(backends=[self.backend], hooks=[test_args, test_metadata])

        go()

        for hit in self._query_backend(go.fqn)['hits']['hits']:
            self.assertEqual(hit['_source']['test_key'], 'test value')
11 changes: 11 additions & 0 deletions tests/test_fqn.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
import unittest

from tests.conftest import Dummy, fqn_test


class TestFQN(unittest.TestCase):
    """Check that @time_execution exposes fully qualified names via ``fqn``."""

    def test_fqn(self):
        cases = [
            (fqn_test, 'tests.conftest.fqn_test'),
            (Dummy, 'tests.conftest.Dummy'),
            (Dummy().go, 'tests.conftest.Dummy.go'),
        ]
        for wrapped, expected in cases:
            self.assertEqual(wrapped.fqn, expected)
Loading

0 comments on commit 8315577

Please sign in to comment.