
Commit b5b33ef
Merge pull request #1 from NarrativeScience/pre-commit-path
Fix path and copy over changes
jdrake authored Jan 30, 2020
2 parents 3e6aa81 + 2a563fc commit b5b33ef
Showing 3 changed files with 13 additions and 6 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -50,7 +50,7 @@ repos:

   - id: flake8
     name: Lint Python (flake8)
-    entry: flake8 --config py2sfn-task-tools/.flake8
+    entry: flake8 --config .flake8
     language: python
     types: [file, python]
     additional_dependencies:
2 changes: 1 addition & 1 deletion src/py2sfn_task_tools/__init__.py
@@ -1,3 +1,3 @@
"""Tools for tasks embedded in an AWS Step Functions state machine."""

__version__ = "0.1.0"
__version__ = "0.2.1"
15 changes: 11 additions & 4 deletions src/py2sfn_task_tools/state_data_client.py
@@ -10,8 +10,10 @@

 from .exceptions import NoItemFound
 
-dynamodb = boto3.client("dynamodb")
-s3 = boto3.client("s3")
+dynamodb = boto3.client(
+    "dynamodb", endpoint_url=os.environ.get("DYNAMODB_ENDPOINT_URL")
+)
+s3 = boto3.client("s3", endpoint_url=os.environ.get("S3_ENDPOINT_URL"))
 
 #: If an item's data is larger than this threshold it will be stored in S3 instead of
 #: DynamoDB. The item limit is 400KB but we'll leave room for other attributes.
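
Note on the hunk above: os.environ.get returns None when a variable is unset, and boto3 treats endpoint_url=None as "use the default AWS endpoint", so deployed behavior is unchanged while local tests can point the clients elsewhere. A minimal sketch of how a test might use the override, assuming a DynamoDB Local / LocalStack style setup (the URLs and test framing are illustrative, not part of this repository); because the clients are created at module scope, the variables must be set before the module is imported:

import os

# Illustrative local endpoints (DynamoDB Local / LocalStack defaults); set them
# before importing the module, since the module-level clients read them at import time.
os.environ["DYNAMODB_ENDPOINT_URL"] = "http://localhost:8000"
os.environ["S3_ENDPOINT_URL"] = "http://localhost:4566"

from py2sfn_task_tools import state_data_client  # noqa: E402  (imported after env setup)

# With the variables unset, the clients fall back to the real AWS endpoints.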
@@ -42,6 +44,7 @@ class StateDataClient:
    tasks to use to store data remotely instead of passing it directly to downstream
    states in the state machine input data object. This is handy when the state data is
    larger than 32K characters (the AWS limit).
    """

    def __init__(
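
As a hedged illustration of the client the docstring above describes: the keyword names below mirror the attributes assigned in __init__ in this diff (default_table_name, namespace, ttl_days, s3_bucket), but the exact signature and defaults are not shown here, and the table, namespace, and bucket names are invented for the example:

from py2sfn_task_tools.state_data_client import StateDataClient

client = StateDataClient(
    default_table_name="sfn-state-data",  # hypothetical DynamoDB table name
    namespace="my-state-machine",         # hypothetical key namespace
    ttl_days=7,                           # expire stored items after a week
    s3_bucket="sfn-state-overflow",       # bucket for items over the size threshold
)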
@@ -74,7 +77,9 @@ def __init__(
         self.default_table_name = default_table_name
         self.namespace = namespace
         self.ttl_days = ttl_days
-        self.s3_bucket = boto3.resource("s3").Bucket(s3_bucket)
+        self.s3_bucket = boto3.resource(
+            "s3", endpoint_url=os.environ.get("S3_ENDPOINT_URL")
+        ).Bucket(s3_bucket)

     def table(self, table_name: str) -> "dynamodb.Table":
         """Helper method to create a DynamoDB table object.
@@ -86,7 +91,9 @@ def table(self, table_name: str) -> "dynamodb.Table":
             DynamoDB table object
         """
-        return boto3.resource("dynamodb").Table(table_name)
+        return boto3.resource(
+            "dynamodb", endpoint_url=os.environ.get("DYNAMODB_ENDPOINT_URL")
+        ).Table(table_name)
 
     def _load_item_data(self, item: Dict) -> Any:
         """Load item data for a given item metadata dict.
