Skip to content

Commit

Permalink
Merge pull request #46 from kammala/fix/refactor-makefile
Browse files — browse the repository at this point in the history
OPT: use black and optimize build process
  • Loading branch information
kammala authored Aug 2, 2019
2 parents 6935c19 + 6520cef commit 730986e
Show file tree
Hide file tree
Showing 19 changed files with 140 additions and 285 deletions.
69 changes: 37 additions & 32 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,71 +1,76 @@
# This Makefile requires the following commands to be available:
# * virtualenv
# * python2.7
# * python3.6
# * docker
# * docker-compose

DEPS:=requirements.txt
DOCKER_COMPOSE:=$(shell which docker-compose)

PIP:="venv/bin/pip"
CMD_FROM_VENV:=". venv/bin/activate; which"
TOX=$(shell "$(CMD_FROM_VENV)" "tox")
PYTHON=$(shell "$(CMD_FROM_VENV)" "python")
TOX_PY_LIST="$(shell $(TOX) -l | grep ^py | xargs | sed -e 's/ /,/g')"

.PHONY: clean docsclean pyclean test lint isort docs docker setup.py requirements

tox: clean requirements
$(TOX)

.PHONY: pyclean
pyclean:
@find . -name *.pyc -delete
@rm -rf *.egg-info build
@rm -rf coverage.xml .coverage
find . -name "*.pyc" -delete
rm -rf *.egg-info build
rm -rf coverage.xml .coverage

.PHONY: docsclean
docsclean:
@rm -fr docs/_build/
@rm -fr docs/api/
rm -rf docs/_build/
rm -rf docs/api/

.PHONY: clean
clean: pyclean docsclean
@rm -rf venv
@rm -rf .tox

venv:
@python3.6 -m venv venv
@$(PIP) install -U "pip>=7.0" -q
@$(PIP) install -r $(DEPS)
python3.6 -m venv venv
venv/bin/pip install -U pip -q
venv/bin/pip install -r requirements.txt

.PHONY: test
test: venv pyclean
$(TOX) -e $(TOX_PY_LIST)
venv/bin/tox

test/%: venv pyclean
$(TOX) -e $(TOX_PY_LIST) -- $*
venv/bin/tox -- $*

.PHONY: lint
lint: venv
@$(TOX) -e lint
@$(TOX) -e isort-check
venv/bin/flake8 time_execution tests
venv/bin/isort -rc -c time_execution tests
venv/bin/black --check time_execution tests

isort: venv
@$(TOX) -e isort-fix
.PHONY: format
format: venv
venv/bin/isort -rc time_execution tests
venv/bin/black --verbose time_execution tests

.PHONY: docs
docs: venv docsclean
@$(TOX) -e docs
venv/bin/python docs/apidoc.py -T -M -d 2 -o docs/api time_execution
venv/bin/sphinx-build -W -b html docs docs/_build/html

.PHONY: docker
docker:
$(DOCKER_COMPOSE) run --rm --service-ports app bash

docker/%:
$(DOCKER_COMPOSE) run --rm --service-ports app make $*

setup.py: venv
@$(PYTHON) setup_gen.py
@$(PYTHON) setup.py check --restructuredtext
venv/bin/python setup_gen.py
venv/bin/python setup.py sdist
venv/bin/twine check dist/*

.PHONY: publish
publish: setup.py
@$(PYTHON) setup.py sdist upload
-rm setup.py
$(MAKE) setup.py
venv/bin/twine upload dist/*

build: clean venv tox setup.py
.PHONY: build
build: clean venv lint test setup.py

changelog:
gitchangelog
venv/bin/gitchangelog
1 change: 1 addition & 0 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ Time Execution
.. image:: https://img.shields.io/pypi/l/timeexecution.svg
:target: https://pypi.org/project/timeexecution

.. image:: https://img.shields.io/badge/code%20style-black-000000.svg

This package is designed to record application metrics into specific backends.
With the help of Grafana_ or Kibana_ you can easily use these metrics to create meaningful monitoring dashboards.
Expand Down
15 changes: 0 additions & 15 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,21 +13,6 @@ services:
image: elasticsearch:5
ports:
- "9200:9200"
kibana:
image: kibana:5
ports:
- "5601:5601"
links:
- elasticsearch
grafana:
image: grafana/grafana
ports:
- "3000:3000"
environment:
- GF_SECURITY_ADMIN_PASSWORD=admin
links:
- influx
- elasticsearch
zookeeper:
image: confluentinc/cp-zookeeper:5.0.0
environment:
Expand Down
4 changes: 4 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
[tool.black]
line-length = 119
target_version = ['py27', 'py36', 'py37']
skip-string-normalization = true
3 changes: 3 additions & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
# Additional packages
-r requirements/requirements-base.txt
-r requirements/requirements-elasticsearch.txt
-r requirements/requirements-influxdb.txt
-r requirements/requirements-kafka.txt
-r requirements/requirements-testing.txt
4 changes: 3 additions & 1 deletion requirements/requirements-testing.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
tox
isort
flake8==3.6.0
black; python_version >= "3.6"
flake8
twine
pkgversion

freezegun>=0.3.7,<0.4
Expand Down
10 changes: 8 additions & 2 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,15 @@ combine_as_imports = true
default_section = THIRDPARTY
include_trailing_comma = true
known_first_party = de
multi_line_output = 5
sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
multi_line_output = 3
sections = FUTURE,STDLIB,THIRDPARTY,OTHER,FIRSTPARTY,LOCALFOLDER
not_skip = __init__.py
from_first = false
length_sort = false
order_by_type = true
use_parenthesis = true
line_length = 120
force_grid_wrap = 0

[wheel]
universal = 1
59 changes: 11 additions & 48 deletions tests/test_elasticsearch.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@


class TestConnectionErrors(TestBaseBackend):

@mock.patch('time_execution.backends.elasticsearch.logger')
def test_error_resilience(self, mocked_logger):
backend = ElasticsearchBackend(hosts=['non-existant-domain'])
Expand All @@ -23,7 +22,6 @@ def test_error_resilience(self, mocked_logger):


class ElasticTestMixin(object):

@staticmethod
def _clear(backend):
backend.client.indices.delete(backend.index, ignore=404)
Expand All @@ -32,25 +30,15 @@ def _clear(backend):
@staticmethod
def _query_backend(backend, name):
backend.client.indices.refresh(backend.get_index())
metrics = backend.client.search(
index=backend.get_index(),
body={
"query": {
"term": {"name": name}
},
}
)
metrics = backend.client.search(index=backend.get_index(), body={"query": {"term": {"name": name}}})
return metrics


class TestTimeExecution(TestBaseBackend):
def setUp(self):
super(TestTimeExecution, self).setUp()

self.backend = ElasticsearchBackend(
'elasticsearch',
index='unittest',
)
self.backend = ElasticsearchBackend('elasticsearch', index='unittest')
settings.configure(backends=[self.backend])
self._clear()

Expand Down Expand Up @@ -94,7 +82,6 @@ def test_with_arguments(self):
self.assertEqual(metrics['hits']['total'], 1)

def test_hook(self):

def test_args(**kwargs):
self.assertIn('response', kwargs)
self.assertIn('exception', kwargs)
Expand All @@ -114,21 +101,16 @@ def test_error_warning(self, mocked_logger):

transport_error = TransportError('mocked error')
es_index_error_ctx = mock.patch(
'time_execution.backends.elasticsearch.Elasticsearch.index',
side_effect=transport_error
'time_execution.backends.elasticsearch.Elasticsearch.index', side_effect=transport_error
)
frozen_time_ctx = freeze_time('2016-07-13')

with es_index_error_ctx, frozen_time_ctx:
self.backend.write(name='test:metric', value=None)
mocked_logger.warning.assert_called_once_with(
'writing metric %r failure %r',
{
'timestamp': datetime(2016, 7, 13),
'value': None,
'name': 'test:metric'
},
transport_error
{'timestamp': datetime(2016, 7, 13), 'value': None, 'name': 'test:metric'},
transport_error,
)

def test_with_origin(self):
Expand All @@ -141,40 +123,21 @@ def test_with_origin(self):

def test_bulk_write(self):
metrics = [
{
'name': 'metric.name',
'value': 1,
'timestamp': 1,
},
{
'name': 'metric.name',
'value': 2,
'timestamp': 2,
},
{
'name': 'metric.name',
'value': 3,
'timestamp': 3,
}
{'name': 'metric.name', 'value': 1, 'timestamp': 1},
{'name': 'metric.name', 'value': 2, 'timestamp': 2},
{'name': 'metric.name', 'value': 3, 'timestamp': 3},
]
self.backend.bulk_write(metrics)
query_result = self._query_backend('metric.name')
self.assertEqual(
len(metrics),
query_result['hits']['total']
)
self.assertEqual(len(metrics), query_result['hits']['total'])

@mock.patch('time_execution.backends.elasticsearch.logger')
def test_bulk_write_error(self, mocked_logger):
transport_error = TransportError('mocked error')
es_index_error_ctx = mock.patch(
'time_execution.backends.elasticsearch.Elasticsearch.bulk',
side_effect=transport_error
'time_execution.backends.elasticsearch.Elasticsearch.bulk', side_effect=transport_error
)
metrics = [1, 2, 3]
with es_index_error_ctx:
self.backend.bulk_write(metrics)
mocked_logger.warning.assert_called_once_with(
'bulk_write metrics %r failure %r',
metrics,
transport_error)
mocked_logger.warning.assert_called_once_with('bulk_write metrics %r failure %r', metrics, transport_error)
9 changes: 3 additions & 6 deletions tests/test_hooks.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,8 +182,7 @@ def test_metadata(*args, **kwargs):
def asserts(name, **data):
assert data['test_key'] == 'test value'

with settings(backends=[AssertBackend(asserts)],
hooks=[test_args, test_metadata]):
with settings(backends=[AssertBackend(asserts)], hooks=[test_args, test_metadata]):
go()

def test_hook_exception(self):
Expand All @@ -203,8 +202,7 @@ def asserts(name, **data):
def go():
raise TimeExecutionException(message)

with settings(backends=[AssertBackend(asserts)],
hooks=[exception_hook]):
with settings(backends=[AssertBackend(asserts)], hooks=[exception_hook]):
with pytest.raises(TimeExecutionException):
go()

Expand All @@ -226,8 +224,7 @@ def asserts(name, **data):
def go():
raise TimeExecutionException(message, True)

with settings(backends=[AssertBackend(asserts)],
hooks=[exception_hook]):
with settings(backends=[AssertBackend(asserts)], hooks=[exception_hook]):
with pytest.raises(TimeExecutionException):
go()

Expand Down
7 changes: 1 addition & 6 deletions tests/test_influxdb.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,7 @@ def setUp(self):
super(TestTimeExecution, self).setUp()

self.database = 'unittest'
self.backend = InfluxBackend(
host='influx',
database=self.database,
use_udp=False
)
self.backend = InfluxBackend(host='influx', database=self.database, use_udp=False)

try:
self.backend.client.create_database(self.database)
Expand Down Expand Up @@ -65,7 +61,6 @@ def test_with_arguments(self):
self.assertEqual(len(metrics), 1)

def test_hook(self):

def test_args(**kwargs):
self.assertIn('response', kwargs)
self.assertIn('exception', kwargs)
Expand Down
Loading

0 comments on commit 730986e

Please sign in to comment.