Merge pull request catalogueglobal#16 from catalogueglobal/dev
Merge back changes from catalogueglobal repo
Landon Reed authored Dec 7, 2018
2 parents 6a27c88 + d751cd3 commit 93f3e95
Showing 152 changed files with 10,864 additions and 9,101 deletions.
6 changes: 5 additions & 1 deletion .gitignore
@@ -1,6 +1,8 @@
node_modules/
data/
.DS_Store
dist
*.iml
*.log
coverage
.idea
@@ -16,5 +18,7 @@ configurations/*
!configurations/default

# Secret config files
.env
env.yml
env.yml-original
server.yml
29 changes: 21 additions & 8 deletions .travis.yml
@@ -3,29 +3,42 @@ jdk:
- oraclejdk8
install: true
sudo: false
# Install mongoDB to perform persistence tests
services: mongodb
cache:
directories:
- "$HOME/.m2/repository"
- "$HOME/.m2"
before_install:
- sed -i.bak -e 's|https://nexus.codehaus.org/snapshots/|https://oss.sonatype.org/content/repositories/codehaus-snapshots/|g'
~/.m2/settings.xml
#- sed -i.bak -e 's|https://nexus.codehaus.org/snapshots/|https://oss.sonatype.org/content/repositories/codehaus-snapshots/|g' ~/.m2/settings.xml
# set region in AWS config for S3 setup
- mkdir ~/.aws && printf '%s\n' '[default]' 'aws_access_key_id=foo' 'aws_secret_access_key=bar' 'region=us-east-1' > ~/.aws/config
- cp configurations/default/server.yml.tmp configurations/default/server.yml
script:
- mvn package -DskipTests
# package jar
- mvn package
after_success:
# Upload coverage reports to codecov.io
- bash <(curl -s https://codecov.io/bash)
# notify slack channel of build status
notifications:
slack: conveyal:WQxmWiu8PdmujwLw4ziW72Gc
before_deploy:
# get branch name of current branch for use in jar name: https://graysonkoonce.com/getting-the-current-branch-name-during-a-pull-request-in-travis-ci/
- export BRANCH=$(if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then echo $TRAVIS_BRANCH; else echo $TRAVIS_PULL_REQUEST_BRANCH; fi)
# copy packaged jars over to deploy dir
- mkdir deploy
- cp target/dt-*.jar deploy/
- cp "target/dt-$(git describe --always).jar" "deploy/dt-latest-$(git rev-parse --abbrev-ref HEAD).jar"
- cp "target/dt-$(git describe --always).jar" "deploy/dt-latest-${BRANCH}.jar"
deploy:
skip_cleanup: true
provider: s3
skip_cleanup: true
access_key_id: AKIAJISY76KTZBNHS4SA
secret_access_key:
secure: txyT+nwgYM+JKM9m+yj+VmOzu3B8PZle12bucZN7lD/uy25y7qVSAO+hqdj5mHR2j3EZ57BYzPbrma0DnFe7pKJKYPss3C7LeU/NEsVRsJp0cpsMwZnnJGdxHYLa5Q8/RsW0LPhO6LWk2XRswJfv4/oCjPBKJOGip/IYr03rFu7+QWNmQ8CAlAdIHrgoGayfX99x/6GZ6fZ37IweUY9/YGdjT8D7IWF0XQ5kkkyaWBw/nDZAKbqZrpfPxykwJhnKzLa1SpAuFjzAU6QkGilptod/ZmF1NdT9XZMqy1Dgwvs84lGoI2T5KtZ12UCKKZa3e5masyrfQjgmnr14nDmWNq1dZswhTw8nacq9f5FSu56fuw5dA0CggZAn+BrHuEkbL+0Kp5Bdj7eOIkB3RDERO6jW8SHSGB/XiAG27j4MSsjA1FvipMYYSo8LTsik3YfFjNHapkLHcbqwzpMqW8E6pe2towcNR1Hnn+EbdIUJabxUrYAF71+TxlewbTqOF3ZcAvW6Qwn8A06aCyNvZO0+Yk6xQItuf/PWXAYp1RI5B4w6X+ylXvoDeFtB+KJK3CiJYp7UKT32Kj6YP+r1m3KJ8PJ4Spqzfll21jTLa9CYS+7ZtXHxwJunNRzTQ3h90c810KhWlgcreIT/OX5NhyafcwlnPQ1ijKa/i7o+4JjCRKI=
secure: a2PNYiv7kzgKxfSx6IhZxSCFBZTCjrbIAK/vmCB1KcpnlV4vTt/IL13i3u6XC8wAbUxhd6iJMtVRm4diIwmy0K7nnpp0h3cQDxYqWCmf1dHZWBJXkpurDpbfxW5G6IlL14i+EsTSCpmwalov+atOBDVyJWVGqfEYaj9c6Q1E0fiYNP3QwZQcsVuD1CRw91xzckfERwqYcz70p/hmTEPOgUwDHuyHsjFafJx+krY3mnBdRdDRLcnPavjcEtprjGkdiVbNETe3CHVNQrAVfqm187OoDA2tHTPjTFmlAdUedp4rYqLmF/WWbHZLzUkQb95FJkklx30vlwC0bIutP1TwIlr3ma5aCRFc58x3SzG07AeM+vbt/nh5A52cpdRjBnhctC2kL++QvwkJhwRy2xptl/WEd5AUagoN4ngnGzyDS4kk/taQFL0IAav5C2WH668kGyH17KNeWG/bCDd55oCvwNlppAYXH+WdbtylqiVb9Fllvs1wcIYWqqyX5zdYiyFEI8LyEQsNF/D5ekuAtLXcF25uwjNtHMjdAxQxHbAbBOeaaLwJd29os9GrKFI/2C0TVXZo2zaFLZyFaIsDHqAC+MXDBDtktimC9Uuozz7bXENCrOUBfsDEQXb46tkXLGaQNXeOhe3KwVKxlGDCsLb7iHIcdDyBm19hqUWhU3uA+dU=
# upload jars in deploy dir to bucket
bucket: datatools-builds
local-dir: deploy
acl: public_read
on:
repo: conveyal/datatools-server
repo: catalogueglobal/datatools-server
all_branches: true
2 changes: 2 additions & 0 deletions README.md
@@ -5,3 +5,5 @@ The core application for Conveyal's transit data tools suite.
## Documentation

View the [latest documentation](http://conveyal-data-tools.readthedocs.org/en/latest/) at ReadTheDocs.

Note: `dev` branch docs can be found [here](http://conveyal-data-tools.readthedocs.org/en/dev/).
11 changes: 10 additions & 1 deletion configurations/default/env.yml.tmp
@@ -1,7 +1,16 @@
AUTH0_CLIENT_ID: your-auth0-client-id
AUTH0_DOMAIN: your-auth0-domain
AUTH0_SECRET: your-auth0-secret
# Note: One of AUTH0_SECRET or AUTH0_PUBLIC_KEY should be used depending on the signing algorithm set on the client.
# It seems that newer Auth0 accounts (2017 and later) might default to RS256 (public key).
AUTH0_SECRET: your-auth0-secret # uses HS256 signing algorithm
# AUTH0_PUBLIC_KEY: /path/to/auth0.pem # uses RS256 signing algorithm
AUTH0_TOKEN: your-auth0-token
DISABLE_AUTH: false
OSM_VEX: http://localhost:1000
SPARKPOST_KEY: your-sparkpost-key
SPARKPOST_EMAIL: [email protected]
GTFS_DATABASE_URL: jdbc:postgresql://localhost/catalogue
# GTFS_DATABASE_USER:
# GTFS_DATABASE_PASSWORD:
#MONGO_URI: mongodb://mongo-host:27017
MONGO_DB_NAME: catalogue
configurations/default/server.yml.tmp
@@ -1,36 +1,26 @@
application:
assets_bucket: bucket-name
gtfs_s3_bucket: bucket-name
public_url: http://localhost:9000
assets_bucket: datatools-staging # dist directory
public_url: http://localhost:9966
notifications_enabled: false
port: 4000
data:
mapdb: /tmp
gtfs: /tmp
editor_mapdb: /tmp
regions: /tmp
use_s3_storage: false
s3_region: us-east-1
gtfs_s3_bucket: bucket-name
modules:
enterprise:
enabled: false
editor:
enabled: true
alerts:
enabled: false
use_extension: xyz
sign_config:
enabled: false
user_admin:
enabled: true
validator:
enabled: true
deployment:
enabled: false
gtfsapi:
enabled: true
load_on_fetch: false
load_on_startup: false
use_extension: xyz
update_frequency: 3600 # in seconds
# update_frequency: 3600 # in seconds
extensions:
transitland:
enabled: true
6 changes: 6 additions & 0 deletions jmeter/.gitignore
@@ -0,0 +1,6 @@
# jmeter stuff
apache-jmeter*
*.log

# test output
output/*
143 changes: 143 additions & 0 deletions jmeter/README.md
@@ -0,0 +1,143 @@
# datatools-server jmeter tests

This folder contains the various items needed to run jmeter load tests on datatools-server.

## Installation

Install jmeter with this nifty script:

```sh
./install-jmeter.sh
```

## Running

The jmeter test plan can be run from the jmeter GUI or it can be run without a GUI. In each of these cases, it is assumed that a datatools-server instance can be queried at http://localhost:4000.

### Starting jmeter GUI

This script starts the jmeter gui and loads the test script.

```sh
./run-gui.sh
```

### Running test plan without GUI

The test plan can be run straight from the command line. A helper script is provided to assist in running jmeter from the command line. This script has 3 required positional arguments and 1 optional one:

| # | argument | possible values | description |
| ---- | ---- | ---- | ---- |
| 1 | test plan mode | `batch`, `fetch`, `query` or `upload` | which test plan mode to use when running the jmeter script. (see notes below for more explanation of these test plan modes) |
| 2 | number of threads | an integer greater than 0 | The number of simultaneous threads to run at a time. The threads will have staggered start times 1 second apart. |
| 3 | number of loops | an integer greater than 0 | the number of loops to run. This is combined with the number of threads, so if the number of threads is 10 and the number of loops is 8, the total number of test plans to run will be 80. |
| 4 | project name or batch csv file | string of the project name or string of file path to batch csv file | This argument is required when running the script in the `batch` test plan mode; otherwise, it is optional. The jmeter script will create new projects named with this project name plus the current iteration number. The default name is "test project #". Also, if the s3 bucket argument is provided, the output folder will be tarred up and uploaded with this name. |
| 5 | s3 bucket | string of an s3 bucket | OPTIONAL. If provided, the script will tar up the output folder and attempt to upload it to the specified s3 bucket. This assumes that aws credentials have been set up for use by the `aws` command line tool. |

Examples:

_Run the test plan in upload mode 1 total time in 1 thread running 1 loop._
```sh
./run-tests.sh upload 1 1
```

_Run the test plan in query mode 80 total times in 10 threads each completing 8 loops._
```sh
./run-tests.sh query 10 8 my-project-name my-s3-bucket
```

_Run in batch mode. Note that all feeds in the csv file will be processed in each loop. So in the following command, each feed in the batch.csv file would be processed 6 times. See the section below for documentation on the csv file and also see the fixtures folder for an example file._
```sh
./run-tests.sh batch 3 2 batch.csv my-s3-bucket
```

### Running the upload test on multiple gtfs files

As noted above, the jmeter script can be run in `batch` mode. The provided csv file must contain the following headers and data (an example file is shown after the table):

| header | description |
| ---- | ---- |
| project name | name of project to be created |
| mode | Must be either `fetch` or `upload` |
| location | The path to the file if the mode is `upload` or the http address if the mode is `fetch` |
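
As a concrete illustration, a batch csv file might look like the following (the header spelling matches the table above; the project names, file path, and URL are made up):

```
project name,mode,location
test project 1,upload,/tmp/feeds/agency-1-gtfs.zip
test project 2,fetch,http://example.com/agency-2-gtfs.zip
```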

There is also a helper python script that can be used to run the jmeter script in `batch` mode using all files stored within an s3 bucket. This script requires that aws credentials have been set up for use by the aws command line tool.

| # | argument | possible values | description |
| ---- | ---- | ---- | ---- |
| 1 | test plan mode | `fetch` or `upload` | The test plan mode to use. This will be written to each row of the csv file described above. |
| 2 | s3 bucket of gtfs feeds | the string of an s3 bucket | An s3 bucket that is accessible with the credentials set up for the aws cli. Place zip files within the bucket. Each zip file will be downloaded to the local machine and the jmeter test plan will be run in upload mode for each gtfs zip file. |
| 3 | s3 bucket for output reports | the string of an s3 bucket | OPTIONAL. After each test run, the script will tar up the output folder and attempt to upload to the specified s3 bucket. |

Example:

```sh
python run-upload-tests.py fetch gtfs-test-feeds datatools-jmeter-results
```


## Test Plan

A single test plan file is used for maintainability. By default, the test plan runs 1 thread in 1 loop and will upload a feed and then perform various checks on the uploaded feed version. As noted in the above section, it is possible to run different variations of the test plan. There are 4 types of test plans that can be initiated: `batch`, `fetch`, `query` or `upload`.

### Batch Test Plan Mode Script Steps

When the test plan is run in batch mode, a csv file must be provided that contains rows of test plans of either `fetch` or `upload` types. Each row is then run with the specified number of threads and loops.

1. For Each Row: Run either the `fetch` or `upload` test plan according to the configuration in the row.

### Upload Test Plan Mode Script Steps

This section is run under the `upload` test plan mode or for a feed marked for uploading in the batch csv file. A rough curl sketch of these steps follows the list.

1. Create Project
1. Create Feedsource
1. Upload zip to create new Feed Version
1. Loop until job to upload feed is complete (making http requests to job status)
1. Save a record of the amount of time it took from the completion of the feed upload until receiving a status update that the feed version processing has completed
1. Continue to API Integrity Script Steps
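
For orientation, the following is a minimal curl sketch of those steps, reusing the JSON payloads from the `fixtures` folder. The endpoint paths, query parameter, and job-status field names are assumptions about the datatools-server API, not values taken from the jmeter test plan itself:

```sh
# Hypothetical walk-through of the upload steps above; endpoint paths and
# response field names are assumptions, not confirmed against the test plan.
BASE=http://localhost:4000/api/manager/secure

# 1. Create a project (payload from jmeter/fixtures/create-project.json).
PROJECT_ID=$(curl -s -X POST "$BASE/project" -H 'Content-Type: application/json' \
  -d @fixtures/create-project.json | jq -r '.id')

# 2. Create a feed source attached to that project.
FEED_SOURCE_ID=$(curl -s -X POST "$BASE/feedsource" -H 'Content-Type: application/json' \
  -d "{\"name\": \"test-feedsource\", \"projectId\": \"$PROJECT_ID\"}" | jq -r '.id')

# 3. Upload a gtfs zip to create a new feed version; the response names a job.
JOB_ID=$(curl -s -X POST "$BASE/feedversion?feedSourceId=$FEED_SOURCE_ID" \
  --data-binary @my-feed.zip | jq -r '.jobId')

# 4. Poll job status until the feed version has finished processing.
until curl -s "$BASE/status/jobs/$JOB_ID" | jq -e '.completed == true' > /dev/null; do
  sleep 2
done
```

The fetch test plan mode below differs only at step 3, where the feed version is created from a URL rather than an uploaded file.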

### Fetch Test Plan Mode Script Steps

This section is run under the `fetch` test plan mode or for a feed marked for fetching in the batch csv file.

1. Create Project
1. Create Feedsource
1. Create new Feed Version (which initiates a download of a feed from datatools-server)
1. Loop until job to fetch and process the feed is complete (making http requests to job status)
1. Save a record of the amount of time it took from the completion of the feed version creation request until receiving a status update that the feed version processing has completed
1. Continue to API Integrity Script Steps

### Query Test Plan Mode Script Steps

This section is run under the `query` test plan mode. This script assumes that each project has a feed source that has a valid feed version. A short sketch of the random-selection steps follows the list.

1. Fetch all projects
1. Pick a random project
1. Fetch all feed sources from the selected project
1. Pick a random feed source
1. Fetch all feed versions from the selected feed source
1. Pick a random feed version
1. Continue to API Integrity Script Steps
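
A minimal sketch of the random-selection steps, assuming list endpoints at the paths below (the paths and query parameters are guesses, and `shuf` is used for the random picks):

```sh
# Hypothetical version of the query steps above; endpoint paths and query
# parameters are assumptions about the datatools-server API.
BASE=http://localhost:4000/api/manager/secure

# Fetch all projects and pick a random one.
PROJECT_ID=$(curl -s "$BASE/project" | jq -r '.[].id' | shuf -n 1)
# Fetch that project's feed sources and pick a random one.
FEED_SOURCE_ID=$(curl -s "$BASE/feedsource?projectId=$PROJECT_ID" | jq -r '.[].id' | shuf -n 1)
# Fetch that feed source's versions and pick a random one.
VERSION_ID=$(curl -s "$BASE/feedversion?feedSourceId=$FEED_SOURCE_ID" | jq -r '.[].id' | shuf -n 1)
```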

### API Integrity Script Steps

This section is run in all test plan modes. One of the checks is sketched after the list.

1. Fetch all routes
1. Pick a random route
1. Fetch all trips on selected route
1. Check that all trips have same route_id as route
1. Fetch all patterns on selected route
1. Check that all patterns have same route_id
1. Fetch embedded stop_times from trips from a random pattern
1. Check that all stop_times have proper trip_id
1. Check that all stop_times in trips on pattern have same stop sequence as pattern
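
As one example, the sketch below performs check 6 (all patterns on a route share its route_id), using the same GraphQL query shape as the fixture files in this folder. The GraphQL endpoint path and the sample namespace/route_id values are assumptions:

```sh
# Hypothetical version of check 6 above; the endpoint path and the sample
# namespace/route_id values are assumptions.
NAMESPACE="abcd_1234"
ROUTE_ID="route-1"
QUERY='query ($namespace: String, $route_id: String) {
  feed(namespace: $namespace) {
    routes (route_id: [$route_id]) { route_id patterns { pattern_id route_id } }
  }
}'
jq -n --arg q "$QUERY" --arg ns "$NAMESPACE" --arg rid "$ROUTE_ID" \
  '{query: $q, variables: {namespace: $ns, route_id: $rid}}' \
  | curl -s -X POST http://localhost:4000/api/manager/graphql \
      -H 'Content-Type: application/json' --data-binary @- \
  | jq --arg rid "$ROUTE_ID" '[.data.feed.routes[].patterns[].route_id] | all(. == $rid)'
```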

## Reporting

If running this script in GUI mode, it is possible to see all results in real-time by viewing the various listeners at the end of the thread group.

When running the test plan from the command line in non-gui mode, reports will be saved to the `output` folder. The outputs will contain a csv file of all requests made and an html report summarizing the results. If the test plan mode was `batch`, `fetch` or `upload`, then another csv file will be written that contains a list of the elapsed times for processing the creation of each new gtfs feed version.

The csv files can be loaded into a jmeter GUI listener to view more details.
14 changes: 14 additions & 0 deletions jmeter/amazon-linux-startup-script.sh
@@ -0,0 +1,14 @@
#!/bin/bash

# install java 8
yum install java-1.8.0 -y
yum remove java-1.7.0-openjdk -y

# install jmeter
./install-jmeter.sh

# TODO: update jmeter.properties file
# http://www.testingdiaries.com/jmeter-on-aws/

# start up jmeter server
apache-jmeter-3.3/bin/jmeter-server
4 changes: 4 additions & 0 deletions jmeter/fixtures/create-feedsource.json
@@ -0,0 +1,4 @@
{
"name": "test-feedsource",
"projectId": "${projectId}"
}
3 changes: 3 additions & 0 deletions jmeter/fixtures/create-project.json
@@ -0,0 +1,3 @@
{
"name": "tester 1"
}
32 changes: 32 additions & 0 deletions jmeter/fixtures/feed_route_pattern_stops_and_trips_graphql.js
@@ -0,0 +1,32 @@
console.log(
JSON.stringify({
query: `
query ($namespace: String, $pattern_id: String) {
feed(namespace: $namespace) {
feed_id
feed_version
filename
patterns (pattern_id: [$pattern_id]) {
pattern_id
route_id
stops {
stop_id
}
trips {
trip_id
pattern_id
stop_times {
stop_id
trip_id
}
}
}
}
}
`,
variables: JSON.stringify({
namespace: "${namespace}",
pattern_id: "${randomPatternId}"
})
})
)
29 changes: 29 additions & 0 deletions jmeter/fixtures/feed_route_pattern_trips_graphql.js
@@ -0,0 +1,29 @@
console.log(
JSON.stringify({
query: `
query ($namespace: String, $route_id: String) {
feed(namespace: $namespace) {
feed_id
feed_version
filename
routes (route_id: [$route_id]) {
route_id
route_type
patterns {
pattern_id
route_id
trips {
trip_id
pattern_id
}
}
}
}
}
`,
variables: JSON.stringify({
namespace: "${namespace}",
route_id: "${randomRouteId}"
})
})
)