diff --git a/.dockerignore b/.dockerignore index 3e75f5950..4fe073583 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,15 +1,8 @@ .git +**/.DS_Store +**/.*sw[op] +**/*.py[co] +**/*.egg-info -# --- Below Should mirror .gitignore, without leading '/' --- # -.DS_Store -.*sw[op] -*.py[co] -*.egg-info -persistent -runtime -bootstrap.json -.cache -.coverage* -coverage.xml -htmlcov -node_modules +/.coverage* +/htmlcov diff --git a/CONTRIBUTING.md b/.github/CONTRIBUTING.md similarity index 100% rename from CONTRIBUTING.md rename to .github/CONTRIBUTING.md diff --git a/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md similarity index 100% rename from PULL_REQUEST_TEMPLATE.md rename to .github/PULL_REQUEST_TEMPLATE.md diff --git a/.gitignore b/.gitignore index 14982fa1d..9dd251cf4 100644 --- a/.gitignore +++ b/.gitignore @@ -2,12 +2,6 @@ .*sw[op] *.py[co] *.egg-info -/persistent -/runtime -bootstrap.json -.cache + /.coverage* -coverage.xml /htmlcov -node_modules/ -/bin/accesslog.csv diff --git a/.travis.yml b/.travis.yml index b80b05ec1..97b48309d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,26 +1,40 @@ -sudo: required dist: trusty +sudo: required services: - - mongodb + - docker + env: - global: - secure: HELJx6WPr+W2S0FV47KkRdlS9NCqlMcdRMK8xWgRqqrEPv24KEvNnHxCy0tRbzITqadYtyvI1MtqtmpG04uty8Gpkc7w6L6LMJ/OuLG0gVX7AnaUovYTlY04m1/L9oyzOrTDXk5J/BKbcyiz7uJtkTc/A8MMZAFfZh7hmhLID78= # BUILD_TRIGGER_URL + global: + - DOCKER_DIR="$HOME/.cache/docker" + - secure: "HhT1TdJcpqys8juVMw/DIZeK7oD4595TEKH5KlowH7MvwwFAUyQFb5W63F8dgk7elvRG+3fmga/m1JfXO+Iu7PVD912eiNDagW9aB3CEl3Z8zg+JUL8IjpMCkyKQDyJMnfOkrzdxdaqfOK+WmF+13f2qBu9Kc7wdXuzgHQrg4+0=" # CI_REGISTRY_USER + - secure: "hh7VDZnkxgl/vqHtS4IpXfIAckKpVQvoCzNW7fstr5Mcu8KNiCWIPgObBRm+m13aqpcFTMWQ6lT2kzORz2wWRbDeVhI1eGWOJswGNHPHZLO0Jaei6yfY2nY2mpxZbl+vdg00jkN64mi1ab3e++QgeLFruW0gyNefXX7E5L/mHTs=" # CI_REGISTRY_PASS + +cache: + directories: + - $DOCKER_DIR + +before_install: + - sudo apt-get update + - sudo apt-get -y install docker-ce realpath + - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASS -install: - - bin/install-ubuntu.sh - - tests/bin/setup-integration-tests-ubuntu.sh +install: true script: - - SCITRAN_PERSISTENT_DB_PORT=27017 tests/bin/run-tests-ubuntu.sh + - test -f "$DOCKER_DIR/image.tar" && docker load -i "$DOCKER_DIR/image.tar" || true + - docker build -t core:build --target build . + - docker build -t core:dist --target dist --build-arg VCS_BRANCH="$TRAVIS_BRANCH" --build-arg VCS_COMMIT="$TRAVIS_COMMIT" . + - docker build -t core:dev --target dev . 
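+  # The build, dist and dev targets share base layers; saving every layer id
+  # that `docker history -q` reports for build and dist into $DOCKER_DIR lets
+  # the `docker load -i "$DOCKER_DIR/image.tar"` step above warm the layer
+  # cache on the next CI run instead of rebuilding from scratch.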
+  - docker save -o "$DOCKER_DIR/image.tar" $(docker history -q core:build | grep -v '<missing>') $(docker history -q core:dist | grep -v '<missing>')
+  - ./tests/bin/docker-tests.sh --image core:dev

after_success:
-  - if [ "$TRAVIS_BRANCH" == "master" -o "$TRAVIS_EVENT_TYPE" == "pull_request" ]; then
-      bash <(curl -s https://codecov.io/bash) -cF python;
-    fi
-  - if [ "$TRAVIS_TAG" ]; then
-      ./docker/build-trigger.sh Tag "$TRAVIS_TAG" "$BUILD_TRIGGER_URL";
-    fi
-  - if [ "$TRAVIS_EVENT_TYPE" == "push" -a "$TRAVIS_BRANCH" == "master" ]; then
-      ./docker/build-trigger.sh Branch "$TRAVIS_BRANCH" "$BUILD_TRIGGER_URL";
-    fi
+  - if [ "$TRAVIS_TAG" ]; then
+      docker tag core:dist scitran/core:$TRAVIS_TAG;
+      docker push scitran/core:$TRAVIS_TAG;
+    fi
+  - if [ "$TRAVIS_EVENT_TYPE" == "push" -a "$TRAVIS_BRANCH" == "nginx-unit" ]; then
+      docker tag core:dev scitran/core:nginx-unit;
+      docker push scitran/core:nginx-unit;
+    fi
diff --git a/Dockerfile b/Dockerfile
index 3813f9e9a..3a9f9e64f 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,103 +1,51 @@
-#
-# Image used for hosting scitran core with uwsgi.
-#
-# Example usage is in README.md
-#
-
-FROM ubuntu:14.04
-
-
-# Install pre-requisites
-RUN apt-get update \
-    && apt-get install -y \
-    build-essential \
-    ca-certificates curl \
-    libatlas3-base \
-    numactl \
-    python-dev \
-    python-pip \
-    libffi-dev \
-    libssl-dev \
-    libpcre3 \
-    libpcre3-dev \
-    git \
-    && rm -rf /var/lib/apt/lists/* \
-    && pip install -U pip
-
-
-# Grab gosu for easy step-down from root in a docker-friendly manner
-# https://github.com/tianon/gosu
-#
-# Alternate key servers are due to reliability issues with ha.pool.sks-keyservers.net
-RUN curl -o /usr/local/bin/gosu -SL "https://github.com/tianon/gosu/releases/download/1.6/gosu-$(dpkg --print-architecture)" \
-    && curl -o /tmp/gosu.asc -SL "https://github.com/tianon/gosu/releases/download/1.6/gosu-$(dpkg --print-architecture).asc" \
-    && export GNUPGHOME="$(mktemp -d)" \
-    && for server in $(shuf -e ha.pool.sks-keyservers.net \
-                               hkp://p80.pool.sks-keyservers.net:80 \
-                               keyserver.ubuntu.com \
-                               hkp://keyserver.ubuntu.com:80 \
-                               pgp.mit.edu) ; do \
-           gpg --keyserver "$server" --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4 && break || : ; \
-       done \
-    && gpg --batch --verify /tmp/gosu.asc /usr/local/bin/gosu \
-    && rm -r "$GNUPGHOME" /tmp/gosu.asc \
-    && chmod +x /usr/local/bin/gosu
-
-
-# Setup environment
-WORKDIR /var/scitran
-
-RUN mkdir -p \
-    /var/scitran/config \
-    /var/scitran/data \
-    /var/scitran/code/api \
-    /var/scitran/logs \
-    /var/scitran/keys
-
-# Declaring a volume makes the intent to map externally explicit. This enables
-# the contents to survive/persist across container versions, and easy access
-# to the contents outside the container.
-#
-# Declaring the VOLUME in the Dockerfile guarantees the contents are empty
-# for any new container that doesn't specify a volume map via 'docker run -v '
-# or similar option.
-#
-VOLUME /var/scitran/keys
-VOLUME /var/scitran/data
-VOLUME /var/scitran/logs
-
-
-# Install pip modules
-#
-# Split this out for better cache re-use.
-#
-COPY requirements.txt docker/requirements-docker.txt /var/scitran/code/api/
-
-RUN pip install --upgrade pip wheel setuptools \
-    && pip install -r /var/scitran/code/api/requirements-docker.txt \
-    && pip install -r /var/scitran/code/api/requirements.txt
-
-COPY tests /var/scitran/code/api/tests/
-RUN bash -e -x /var/scitran/code/api/tests/bin/setup-integration-tests-ubuntu.sh
-
-
-# Copy full repo
-#
-COPY . 
/var/scitran/code/api/ - -COPY docker/uwsgi-entrypoint.sh /var/scitran/ -COPY docker/uwsgi-config.ini /var/scitran/config/ - - - -# Inject build information into image so the source of the container can be -# determined from within it. -ARG BRANCH_LABEL=NULL -ARG COMMIT_HASH=0 -COPY docker/inject_build_info.sh / -RUN /inject_build_info.sh ${BRANCH_LABEL} ${COMMIT_HASH} \ - && rm /inject_build_info.sh - - -ENTRYPOINT ["/var/scitran/uwsgi-entrypoint.sh"] -CMD ["uwsgi", "--ini", "/var/scitran/config/uwsgi-config.ini", "--http", "0.0.0.0:8080", "--http-keepalive", "--so-keepalive", "--add-header", "Connection: Keep-Alive" ] +FROM python:2.7-alpine3.7 as build + +RUN apk --no-cache add build-base curl + +WORKDIR /src/nginx-unit + +RUN curl -L https://github.com/nginx/unit/archive/0.4.tar.gz | tar xz --strip-components 1 +RUN ./configure --prefix=/usr/local --modules=lib --state=/var/local/unit --pid=/var/unit.pid --log=/var/log/unit.log \ + && ./configure python \ + && make install + + +FROM python:2.7-alpine3.7 as dist + +RUN apk --no-cache add git + +COPY --from=build /usr/local/sbin/unitd /usr/local/sbin/unitd +COPY --from=build /usr/local/lib/python.unit.so /usr/local/lib/python.unit.so + +EXPOSE 80 8088 +VOLUME /data/db /data/persistent + +WORKDIR /src/core +ENV SCITRAN_PERSISTENT_DATA_PATH=/data/persistent + +COPY nginx-unit.json /var/local/unit/conf.json +COPY requirements.txt requirements.txt +RUN pip install -r requirements.txt + +COPY . . +RUN pip install -e . + +ARG VCS_BRANCH=NULL +ARG VCS_COMMIT=NULL +RUN ./bin/build_info.sh $VCS_BRANCH $VCS_COMMIT | tee /version.json + +CMD ["unitd", "--control", "*:8088", "--no-daemon", "--log", "/dev/stdout"] + + +FROM dist as dev + +EXPOSE 27017 + +RUN apk --no-cache add mongodb nginx +RUN mkdir /run/nginx + +COPY nginx.conf /etc/nginx/nginx.conf + +RUN pip install -r tests/requirements.txt + +CMD ["./bin/dev+mongo.sh"] diff --git a/README.md b/README.md index d19522466..6b1db6bac 100644 --- a/README.md +++ b/README.md @@ -19,23 +19,6 @@ SciTran Core is a RESTful HTTP API, written in Python and backed by MongoDB. It ### Usage -**Currently Python 2 Only** - -#### OSX -``` -$ ./bin/run-dev-osx.sh --help -``` - -For the best experience, please upgrade to a recent version of bash. -``` -brew install bash bash-completion -sudo dscl . -create /Users/$(whoami) UserShell /usr/local/bin/bash -``` - -#### Ubuntu ``` -mkvirtualenv scitran-core -./bin/install-ubuntu.sh -uwsgi --http :8080 --master --wsgi-file bin/api.wsgi -H $VIRTUAL_ENV \ - --env SCITRAN_PERSISTENT_DB_URI="mongodb://localhost:27017/scitran-core" +docker run -p 80:80 -e SCITRAN_CORE_DRONE_SECRET=secret scitran/core ``` diff --git a/TESTING.md b/TESTING.md deleted file mode 100644 index a6c168171..000000000 --- a/TESTING.md +++ /dev/null @@ -1,44 +0,0 @@ -## Run the tests - -### Ubuntu -Run automated tests: -``` -# Follow installation instructions in README first -. /runtime/bin/activate # Or wherever your scitran virtualenv is -./test/bin/setup-integration-tests-ubuntu.sh -./test/bin/run-tests-ubuntu.sh -``` -All tests are executed by default. Subsets can be run using the filtering options: - -* To run linting, use `--lint` (`-l`) -* To run unit tests, use `--unit` (`-u`) -* To run integration tests, use `--integ` (`-i`) -* To run abao tests, use `--abao` (`-a`) -* To pass any arguments to `py.test`, use `-- PYTEST_ARGS` - -See [py.test usage](https://docs.pytest.org/en/latest/usage.html) for more. 
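A note on the three-stage Dockerfile introduced above: `build` compiles nginx-unit 0.4 and its Python module from source, `dist` copies only the resulting `unitd` binary and `python.unit.so` onto a clean `python:2.7-alpine3.7` base and installs the API, and `dev` adds mongodb, nginx and the test requirements on top of `dist`. A minimal sketch of exercising the stages locally, using the same tags as the CI config; the `git rev-parse` calls stand in for Travis's `$TRAVIS_BRANCH`/`$TRAVIS_COMMIT`, and the `docker run` line is the README's own example:
```sh
docker build -t core:build --target build .
docker build -t core:dist --target dist --build-arg VCS_BRANCH="$(git rev-parse --abbrev-ref HEAD)" --build-arg VCS_COMMIT="$(git rev-parse HEAD)" .
docker build -t core:dev --target dev .

# Serve the API; in the dev image nginx proxies /api to nginx-unit on 8080
docker run -p 80:80 -e SCITRAN_CORE_DRONE_SECRET=secret scitran/core
```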
-
-### Docker
-Build scitran-core image and run automated tests in a docker container:
-```
-./tests/bin/run-tests-docker.sh
-```
-* To skip building the image, use `--no-build` (`-B`)
-* To pass any arguments to `run-tests-ubuntu.sh`, use `-- TEST_ARGS`
-
-
-#### Example
-Without rebuilding the image, run only integration tests matching `foo`, use the highest verbosity level for test output and jump into a python debugger session in case an assertion fails:
-```
-./tests/bin/run-tests-docker.sh -B -- -i -- -k foo -vvv --pdb
-```
-
-**NOTE:** The mongodb version is pinned via the `MONGO_VERSION` variable in `tests/bin/run-tests-docker.sh`.
-
-### Tools
-- [abao](https://github.com/cybertk/abao/)
-
-### Testing API against RAML with Abao
-Abao is one of the testing tools run during our TravisCI build. It tests the API implementation against what’s defined in the RAML spec. Adding a new resource / url to the RAML spec will cause Abao to verify that resource during integration tests. Sometimes abao cannot properly test a resource (file field uploads) or a test may require chaining variable. Abao has before and after hooks for tests, written in javascript. These can be used to skip a test, inject variables into the request, or make extra assertions about the response. See tests/integration/abao in the repo for the hooks file. See [abao github readme](https://github.com/cybertk/abao/blob/master/README.md) for more information on how to use hooks.
-
-Abao tests can depend on specific resources (eg. group, project, session, etc.) pre-existing in the DB. That resource loading should be maintained within `tests/integration_tests/abao/load_fixture.py` and is executed automatically via the integration test scripts at `test/bin`.
diff --git a/api/app.py b/api/app.py
new file mode 100644
index 000000000..9951ff12f
--- /dev/null
+++ b/api/app.py
@@ -0,0 +1,3 @@
+from .web import start
+
+application = start.app_factory()
diff --git a/api/web/start.py b/api/web/start.py
index 61c407a0a..1f0436e24 100644
--- a/api/web/start.py
+++ b/api/web/start.py
@@ -16,7 +16,7 @@ def save_coverage(cov):
 def start_coverage():
     import coverage
     print("Enabling code coverage")
-    cov = coverage.coverage(source=["api"], data_suffix="integration-tests")
+    cov = coverage.coverage(source=["api"], data_file="/tmp/.coverage.integration-tests")
     cov.start()
     atexit.register(save_coverage, cov)

diff --git a/bin/api.wsgi b/bin/api.wsgi
deleted file mode 100644
index 5c2168a53..000000000
--- a/bin/api.wsgi
+++ /dev/null
@@ -1,4 +0,0 @@
-# vim: filetype=python
-from api.web import start
-
-application = start.app_factory()
diff --git a/bin/build_info.sh b/bin/build_info.sh
new file mode 100755
index 000000000..a8c2d6245
--- /dev/null
+++ b/bin/build_info.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/env sh
+
+set -eu
+
+cat <<EOF
+{
+  "commit": "$2",
+  "timestamp": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")",
+  "branch": "$1"
+}
+EOF
-    if len(dcm_time) > 7:
-        fraction_str = dcm_time[7:]
-        fraction = float(dcm_time[7:])/10^(len(fraction_str))
-        fraction = int(fraction*1000)
-    else:
-        fraction = 0
-    return '%s:%s:%s.%03d00' % (hours, minutes, seconds, fraction)
-
-def cast_datetime(dcm_datetime):
-    """
-    Cast DICOM datetime string (YYYYMMDDHHMMSS.FFFFFF)
-    into ElasticSearch pre-defined basic_date_time format (yyyyMMdd'T'HHmmss.SSSZ)
-    """
-    # TODO: this fxn needs to be tested on real data
-    year = dcm_datetime[:4]
-    month = dcm_datetime[4:6]
-    day = dcm_datetime[6:8]
-    if len(dcm_datetime) > 8:
-        hours = dcm_datetime[8:10]
-        minutes = dcm_datetime[10:12]
-        seconds = dcm_datetime[12:14]
-    else:
-        hours = '00'
-        minutes = '00'
-        seconds = '00'
-    if len(dcm_datetime) > 15:
-
fraction_str = dcm_datetime[15:] - fraction = float(dcm_datetime[15:])/10^(len(fraction_str)) - fraction = int(fraction*1000) - else: - fraction = 0 - return '%s%s%sT%s%s%s.%03d0' % (year, month, day, hours, minutes, seconds, fraction) - -def cast_age(dcm_age): - """ Cast DICOM age string into seconds""" - # TODO: this fxn needs to be tested on real data - unit = dcm_age[-1] - if unit not in ['D', 'W', 'M', 'Y']: - return None - multipliers = dict(D=60*60*24, - W=60*60*24*7, - M=60*60*24*30, - Y=60*60*24*365) - value = int(dcm_age[:-1]) - seconds = multipliers[unit]*value - return seconds - -def value_is_array(value): - if type(value) != unicode: - return False - if len(value) < 2: - return False - if value[0] == '[' and value[-1] == ']': - return True - return False - -def cast_array_from_string(string): - array = None - try: - array = ast.literal_eval(string) - except: - config.log.warn('Tried to cast string {} as array, failed.'.format(string)) - - if array: - new_array = [] - for element in array: - try: - element = int(element) - except: - try: - element = float(element) - except: - pass - new_array.append(element) - return new_array - else: - return string - -def remove_blacklisted_keys(obj): - for key in BLACKLIST_KEYS: - obj.pop(key, None) - -def handle_files(parent, parent_type, files, dicom_mappings, permissions, doc): - doc['container_type'] = 'file' - for f in files: - # f.pop('info', None) - doc['file'] = f - # doc = { - # 'file': f, - # 'permissions': permissions - # } - # if f.get('type', '') == 'dicom' and f.get('info'): - # dicom_data = f.pop('info') - # term_fields = {} - # modified_data = {} - # for skipped in SKIPPED: - # dicom_data.pop(skipped, None) - # for k,v in dicom_data.iteritems(): - - # try: - - # # Arrays are saved as strings in - # if value_is_array(v): - # config.log.debug('calling array for {} and value {}'.format(k, v)) - # v = cast_array_from_string(v) - # if 'datetime' in k.lower(): - # config.log.debug('called datetime for {} and value {}'.format(k, v)) - # v = cast_datetime(str(v)) - # elif 'date' in k.lower(): - # config.log.debug('called date for {} and value {}'.format(k, v)) - # v = cast_date(str(v)) - # # elif 'time' in k.lower(): - # # # config.log.debug('called time for {} and value {}'.format(k, v)) - # # # v = cast_time(str(v)) - # elif 'Age' in k: - # config.log.debug('called age for {} and value {}'.format(k, v)) - # v = cast_age(str(v)) - # except: - # pass - - # term_field_name = k+'_term' - # if term_field_name in dicom_mappings and type(v) in [unicode, str]: - # term_fields[k+'_term'] = str(v) - # modified_data[k] = v - - # modified_data.update(term_fields) - # doc['dicom_header'] = modified_data - - generated_id = str(parent['_id']) + '_' + f['name'] - - doc['parent'] = { - '_id': parent['_id'], - 'type': parent_type - } - - doc_s = json.dumps(doc, default=encoder.custom_json_serializer) - try: - # es.index(index=DE_INDEX, id=generated_id, parent=str(parent['_id']), doc_type='file', body=doc) - es.index(index=DE_INDEX, id=generated_id, doc_type='flywheel', body=doc_s) - except: - return - - -if __name__ == '__main__': - - if es.indices.exists(DE_INDEX): - print 'Removing existing data explorer index...' 
- res = es.indices.delete(index=DE_INDEX) - print 'response: {}'.format(res) - - # mappings = create_mappings() - - request = { - 'settings': { - "index.mapping.total_fields.limit": 4000, - 'number_of_shards': 1, - 'number_of_replicas': 0, - 'analysis' : ANALYSIS - }, - 'mappings': { - '_default_' : { - '_all' : {'enabled' : True}, - 'dynamic_templates': DYNAMIC_TEMPLATES - }, - 'flywheel': {} - } - } - - print 'creating {} index ...'.format(DE_INDEX) - res = es.indices.create(index=DE_INDEX, body=request) - print 'response: {}'.format(res) - - # mappings = es.indices.get_mapping(index=DE_INDEX, doc_type='flywheel') - - # dicom_mappings = mappings[DE_INDEX]['mappings']['file']['properties']['dicom_header']['properties'] - dicom_mappings = None - - permissions = [] - - groups = db.groups.find({}) - print 'STARTING THE GROUPS' - print '' - print '' - print '' - count = 1 - group_count_total = groups.count() - for g in groups: - print 'Loading group {} ({} of {})'.format(g['name'], count, group_count_total) - count += 1 - - remove_blacklisted_keys(g) - - projects = db.projects.find({'group': g['_id']}) - for p in projects: - - files = p.pop('files', []) - # Set permissions for documents - permissions = p.pop('permissions', []) - remove_blacklisted_keys(p) - - doc = { - 'project': p, - 'group': g, - 'permissions': permissions, - 'container_type': 'project' - - } - - doc_s = json.dumps(doc, default=encoder.custom_json_serializer) - es.index(index=DE_INDEX, id=str(p['_id']), doc_type='flywheel', body=doc_s) - - handle_files(p, 'project', files, dicom_mappings, permissions, doc) - - - sessions = db.sessions.find({'project': p['_id']}) - for s in sessions: - subject = s.pop('subject', {}) - - analyses = s.pop('analyses', []) - files = s.pop('files', []) - remove_blacklisted_keys(s) - - doc = { - 'project': p, - 'group': g, - 'session': s, - 'subject': subject, - 'permissions': permissions, - 'container_type': 'session' - - } - - doc_s = json.dumps(doc, default=encoder.custom_json_serializer) - es.index(index=DE_INDEX, id=str(s['_id']), doc_type='flywheel', body=doc_s) - - handle_files(s, 'session', files, dicom_mappings, permissions, doc) - - for an in analyses: - files = an.pop('files', []) - doc = { - 'analysis': an, - 'session': s, - 'subject': subject, - 'project': p, - 'group': g, - 'permissions': permissions, - 'container_type': 'analysis' - - } - - doc_s = json.dumps(doc, default=encoder.custom_json_serializer) - es.index(index=DE_INDEX, id=str(an['_id']), doc_type='flywheel', body=doc_s) - - files = [f for f in files if f.get('output')] - - handle_files(an, 'analysis', files, dicom_mappings, permissions, doc) - - - - acquisitions = db.acquisitions.find({'session': s['_id']}) - for a in acquisitions: - a.pop('info', None) - files = a.pop('files', []) - remove_blacklisted_keys(a) - - doc = { - 'acquisition': a, - 'session': s, - 'subject': subject, - 'project': p, - 'group': g, - 'permissions': permissions, - 'container_type': 'acquisition' - - } - - doc_s = json.dumps(doc, default=encoder.custom_json_serializer) - es.index(index=DE_INDEX, id=str(a['_id']), doc_type='flywheel', body=doc_s) - - - handle_files(a, 'acquisition', files, dicom_mappings, permissions, doc) - - - diff --git a/bin/install-dev-osx.sh b/bin/install-dev-osx.sh deleted file mode 100755 index 641e648c9..000000000 --- a/bin/install-dev-osx.sh +++ /dev/null @@ -1,76 +0,0 @@ -#!/usr/bin/env bash - -set -e - -unset CDPATH -cd "$( dirname "${BASH_SOURCE[0]}" )/.." 
- -SCITRAN_RUNTIME_PATH=${SCITRAN_RUNTIME_PATH:-"./runtime"} - -if [ $(echo $BASH_VERSION | cut -c 1) -ge 4 ]; then - echo() { builtin echo -e "\e[1;7mSCITRAN\e[0;7m $@\e[27m"; } -fi - -if hash brew 2>/dev/null; then - echo "Homebrew is installed" -else - echo "Installing Homebrew" - ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" - echo "Installed Homebrew" -fi - -if brew list | grep -q openssl; then - echo "OpenSSL is installed" -else - echo "Installing OpenSSL" - brew install openssl - echo "Installed OpenSSL" -fi - -if brew list | grep -q "^python$"; then - echo "Python is installed" -else - echo "Installing Python" - brew install python - echo "Installed Python" -fi - -if hash virtualenv 2>/dev/null; then - echo "Virtualenv is installed" -else - echo "Installing Virtualenv" - pip install virtualenv - echo "Installed Virtualenv" -fi - -if [ -d "$SCITRAN_RUNTIME_PATH" ]; then - echo "Virtualenv exists at $SCITRAN_RUNTIME_PATH" -else - echo "Creating 'scitran' Virtualenv at $SCITRAN_RUNTIME_PATH" - virtualenv -p `brew --prefix`/bin/python --prompt="(scitran) " $SCITRAN_RUNTIME_PATH - echo "Created 'scitran' Virtualenv at $SCITRAN_RUNTIME_PATH" -fi - - -echo "Activating Virtualenv" -source $SCITRAN_RUNTIME_PATH/bin/activate # will fail with `set -u` - - -echo "Installing Python requirements" -pip install "pip>=8" -CFLAGS="-I/usr/local/opt/openssl/include" LDFLAGS="-L/usr/local/opt/openssl/lib" UWSGI_PROFILE_OVERRIDE="ssl=true" \ - pip install --no-cache-dir -r requirements.txt - - -# Install MongoDB -MONGODB_URL="http://downloads.mongodb.org/osx/mongodb-osx-x86_64-v3.2-latest.tgz" -if [ -x "$SCITRAN_RUNTIME_PATH/bin/mongod" ]; then - MONGODB_VERSION=$($SCITRAN_RUNTIME_PATH/bin/mongod --version | grep "db version" | cut -d "v" -f 3) - echo "MongoDB version $MONGODB_VERSION is installed" - echo "Remove $SCITRAN_RUNTIME_PATH/bin/mongod to install latest version" -else - echo "Installing MongoDB" - curl $MONGODB_URL | tar xz -C $SCITRAN_RUNTIME_PATH/bin --strip-components 2 - MONGODB_VERSION=$($SCITRAN_RUNTIME_PATH/bin/mongod --version | grep "db version" | cut -d "v" -f 3) - echo "MongoDB version $MONGODB_VERSION installed" -fi diff --git a/bin/install-ubuntu.sh b/bin/install-ubuntu.sh deleted file mode 100755 index 4165328eb..000000000 --- a/bin/install-ubuntu.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env bash - -set -eu - -unset CDPATH -cd "$( dirname "${BASH_SOURCE[0]}" )/.." 
- -sudo apt-get update -sudo apt-get install -y \ - build-essential \ - ca-certificates \ - curl \ - libatlas3-base \ - numactl \ - python-dev \ - libffi-dev \ - libssl-dev \ - libpcre3 \ - libpcre3-dev \ - git - -sudo pip install -U pip - -sudo pip install -r requirements.txt diff --git a/bin/integrity_check.py b/bin/integrity_check.py index c8ee53a2b..6a4c3c19c 100755 --- a/bin/integrity_check.py +++ b/bin/integrity_check.py @@ -92,8 +92,3 @@ def session_length(): except Exception as e: logging.exception('Main method failed...') sys.exit(1) - - - - - diff --git a/bin/load_users_drone_secret.py b/bin/load_users_drone_secret.py deleted file mode 100755 index 471a73669..000000000 --- a/bin/load_users_drone_secret.py +++ /dev/null @@ -1,145 +0,0 @@ -#!/usr/bin/env python - -"""This script helps bootstrap users and data""" - -import os -import sys -import json -import logging -import argparse -import datetime -import requests - -logging.basicConfig( - format='%(asctime)s %(levelname)8.8s %(message)s', - datefmt='%Y-%m-%d %H:%M:%S', - level=logging.DEBUG, -) -log = logging.getLogger('scitran.bootstrap') - -logging.getLogger('requests').setLevel(logging.WARNING) # silence Requests library - - -def _upsert_user(request_session, api_url, user_doc): - """ - Insert user, or update if insert fails due to user already existing. - - Returns: - requests.Response: API response. - - Args: - request_session (requests.Session): Session to use for the request. - api_url (str): Base url for the API eg. 'https://localhost:8443/api' - user_doc (dict): Valid user doc defined in user input schema. - """ - new_user_resp = request_session.post(api_url + '/users', json=user_doc) - if new_user_resp.status_code != 409: - return new_user_resp - - # Already exists, update instead - return request_session.put(api_url + '/users/' + user_doc['_id'], json=user_doc) - - -def _upsert_permission(request_session, api_url, permission_doc, group_id): - """ - Insert group permission, or update if insert fails due to group permission already existing. - - Returns: - requests.Response: API response. - - Args: - request_session (requests.Session): Session to use for the request. - api_url -- (str): Base url for the API eg. 'https://localhost:8443/api' - permission_doc -- (dict) Valid permission doc defined in permission input schema. - """ - base_permission_url = "{0}/groups/{1}/permissions".format(api_url, group_id) - new_permission_resp = request_session.post(base_permission_url , json=permission_doc) - if new_permission_resp.status_code != 409: - return new_permission_resp - - # Already exists, update instead - - full_permission_url = "{0}/{1}".format(base_permission_url, permission_doc['_id']) - return request_session.put(full_permission_url, json=permission_doc) - -def users(filepath, api_url, http_headers, insecure): - """ - Upserts the users/groups/permissions defined in filepath parameter. - - Raises: - requests.HTTPError: Upsert failed. 
- """ - now = datetime.datetime.utcnow() - with open(filepath) as fd: - input_data = json.load(fd) - with requests.Session() as rs: - log.info('bootstrapping users...') - rs.verify = not insecure - rs.headers = http_headers - for u in input_data.get('users', []): - log.info(' {0}'.format(u['_id'])) - r = _upsert_user(request_session=rs, api_url=api_url, user_doc=u) - r.raise_for_status() - - log.info('bootstrapping groups...') - r = rs.get(api_url + '/config') - r.raise_for_status() - for g in input_data.get('groups', []): - permissions = g.pop('permissions') - log.info(' {0}'.format(g['_id'])) - r = rs.post(api_url + '/groups' , json=g) - r.raise_for_status() - for permission in permissions: - r = _upsert_permission(request_session=rs, api_url=api_url, permission_doc=permission, group_id=g['_id']) - r.raise_for_status() - - log.info('bootstrapping projects...') - for p in input_data.get('projects', []): - r = rs.post(api_url + '/projects?inherit=true' , json=p) - r.raise_for_status() - - project_id = r.json()['_id'] - project_name = p['label'] - - for stanza in input_data.get('gear_rules', []): - - desired_projects = stanza.get('projects', []) - rule = stanza.get('rule', None) - - if project_name in desired_projects and rule: - log.info('Adding rule...') - r = rs.post(api_url + '/projects/' + project_id + '/rules', json=rule) - r.raise_for_status() - - log.info('bootstrapping complete') - - -ap = argparse.ArgumentParser() -ap.description = 'Bootstrap SciTran users and groups' -ap.add_argument('url', help='API URL') -ap.add_argument('json', help='JSON file containing users and groups') -ap.add_argument('--insecure', action='store_true', help='do not verify SSL connections') -ap.add_argument('--secret', help='shared API secret') -args = ap.parse_args() - -if args.insecure: - requests.packages.urllib3.disable_warnings() - -http_headers = { - 'X-SciTran-Method': 'bootstrapper', - 'X-SciTran-Name': 'Bootstrapper', -} -if args.secret: - http_headers['X-SciTran-Auth'] = args.secret -# TODO: extend this to support oauth tokens - -try: - users(args.json, args.url, http_headers, args.insecure) -except requests.HTTPError as ex: - log.error(ex) - log.error("request_body={0}".format(ex.response.request.body)) - sys.exit(1) -except Exception as ex: - log.error('Unexpected error:') - log.error(ex) - sys.exit(1) diff --git a/bin/log_csv.py b/bin/log_export.py old mode 100644 new mode 100755 similarity index 99% rename from bin/log_csv.py rename to bin/log_export.py index 8ee287666..56217a608 --- a/bin/log_csv.py +++ b/bin/log_export.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python + # This implementation as of July 19 2017 has these resource utilizations of the mongodb container: # - 2 million entries: 1.50 Gb # - 3 million entries: 2.05 Gb @@ -57,7 +59,7 @@ def download_large_csv(params): csv_file.flush() params['end_date'] = end_date - + print "Encountered unicode errors and skipped {} entries".format(unicode_err_count) csv_file.close() diff --git a/bin/oneoffs/load_external_data.py b/bin/oneoffs/load_external_data.py deleted file mode 100755 index 50635d991..000000000 --- a/bin/oneoffs/load_external_data.py +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/env python - -import bson -import copy -import datetime -import dateutil.parser -import json - -from api import config - -## DEFAULTS ## - -USER_ID = "meganhenning@flywheel.io" -SAFE_FILE_HASH = "v0-sha384-a8d0d1bd9368e5385f31d3582db07f9bc257537d5e1f207d36a91fdd3d2f188fff56616c0874bb3535c37fdf761a446c" -PROJECT_ID = "5a26e049c6fa4a00161e4a1a" -GROUP_ID = 
'scitran' - -# Some day maybe this can use the SDK/API calls to get the proper test data -# For now, paste it in - -SESSIONS = [] - -ACQUISITIONS = [] - -def handle_permissions(obj): - obj['permissions'] = [{ - "access": "admin", - "_id": USER_ID - }] - -def handle_dates(obj): - if obj.get('timestamp'): - obj['timestamp'] = dateutil.parser.parse(obj['timestamp']) - if obj.get('created'): - obj['created'] = dateutil.parser.parse(obj['created']) - if obj.get('modified'): - obj['modified'] = dateutil.parser.parse(obj['modified']) - -def handle_file(f): - handle_dates(f) - f.pop('info_exists', None) - f.pop('join_origin', None) - f['hash'] = SAFE_FILE_HASH - - -for i, s in enumerate(SESSIONS): - print "Processing session {} of {} sessions".format(i+1, len(SESSIONS)) - - s.pop('join-origin', None) - - s['_id'] = bson.ObjectId(s['_id']) - s['project'] = bson.ObjectId(str(PROJECT_ID)) - s['group'] = GROUP_ID - handle_dates(s) - handle_permissions(s) - - for f in s.get('files', []): - handle_file(f) - - - config.db.sessions.delete_many({'_id': s['_id']}) - config.db.sessions.insert(s) - -for i, a in enumerate(ACQUISITIONS): - print "Processing acquisition {} of {} acquisitions".format(i+1, len(ACQUISITIONS)) - - a['_id'] = bson.ObjectId(a['_id']) - a['session'] = bson.ObjectId(a['session']) - - a.pop('join-origin', None) - - handle_dates(a) - handle_permissions(a) - - for f in a.get('files', []): - handle_file(f) - - config.db.acquisitions.delete_many({'_id': a['_id']}) - config.db.acquisitions.insert(a) diff --git a/bin/run-dev-osx.sh b/bin/run-dev-osx.sh deleted file mode 100755 index 3e413388b..000000000 --- a/bin/run-dev-osx.sh +++ /dev/null @@ -1,194 +0,0 @@ -#!/usr/bin/env bash - -set -e - -unset CDPATH -cd "$( dirname "${BASH_SOURCE[0]}" )/.." - -if [ $(echo $BASH_VERSION | cut -c 1) -ge 4 ]; then - echo() { builtin echo -e "\e[1;7mSCITRAN\e[0;7m $@\e[27m"; } -fi - -USAGE=" - Run a development instance of scitran/core\n - Starts mongod on port 9001, by default\n -\n - Usage:\n - \n - -c, --config-file : Source the specified config file to set environemnt variables\n - -I, --no-install: Do not attempt install the application first\n - -r, --reload : Enable live reload, specifying interval in seconds\n - -T, --no-testdata: Do not bootstrap testdata\n - -U, --no-user: Do not bootstrap users and groups\n -" - -CONFIG_FILE="" -BOOTSTRAP_USERS=1 -BOOTSTRAP_TESTDATA=1 -AUTO_RELOAD_INTERVAL=0 -INSTALL_APP=1 - -while [ "$#" -gt 0 ]; do - key="$1" - case $key in - -c|--config-file) - CONFIG_FILE="$2" - shift - ;; - --help) - echo $USAGE >&2 - exit 1 - ;; - -I|--no-install) - INSTALL_APP=0 - ;; - -r|--reload) - AUTO_RELOAD_INTERVAL=$2 - shift - ;; - -T|--no-testdata) - BOOTSTRAP_TESTDATA=0 - ;; - -U|--no-users) - BOOTSTRAP_USERS=0 - ;; - *) - echo "Invalid option: $key" >&2 - echo $USAGE >&2 - exit 1 - ;; - esac - shift -done - - -set -o allexport - -if [ "$CONFIG_FILE" != "" ]; then - EXISTING_ENV=$(env | grep "SCITRAN_" | cat) - source "$CONFIG_FILE" - eval "$EXISTING_ENV" -fi - -SCITRAN_RUNTIME_PATH=${SCITRAN_RUNTIME_PATH:-"./runtime"} -SCITRAN_RUNTIME_HOST=${SCITRAN_RUNTIME_HOST:-"127.0.0.1"} -SCITRAN_RUNTIME_PORT=${SCITRAN_RUNTIME_PORT:-"8080"} -SCITRAN_RUNTIME_UWSGI_INI=${SCITRAN_RUNTIME_UWSGI_INI:-""} -SCITRAN_RUNTIME_BOOTSTRAP=${SCITRAN_RUNTIME_BOOTSTRAP:-"./bootstrap.json"} -SCITRAN_RUNTIME_SSL_PEM=${SCITRAN_RUNTIME_SSL_PEM:-""} -SCITRAN_RUNTIME_COVERAGE=${SCITRAN_RUNTIME_COVERAGE:-"false"} - -SCITRAN_CORE_DRONE_SECRET=${SCITRAN_CORE_DRONE_SECRET:-$( openssl rand -base64 32 )} - 
-SCITRAN_PERSISTENT_PATH=${SCITRAN_PERSISTENT_PATH:-"./persistent"} -SCITRAN_PERSISTENT_DATA_PATH="$SCITRAN_PERSISTENT_PATH/data" -SCITRAN_PERSISTENT_DB_PATH=${SCITRAN_PERSISTENT_DB_PATH:-"$SCITRAN_PERSISTENT_PATH/db"} -SCITRAN_PERSISTENT_DB_PORT=${SCITRAN_PERSISTENT_DB_PORT:-"9001"} -SCITRAN_PERSISTENT_DB_URI=${SCITRAN_PERSISTENT_DB_URI:-"mongodb://localhost:$SCITRAN_PERSISTENT_DB_PORT/scitran"} - -if [ "$SCITRAN_RUNTIME_SSL_PEM" == "" ]; then - SCITRAN_SITE_API_URL="http://$SCITRAN_RUNTIME_HOST:$SCITRAN_RUNTIME_PORT/api" - UWSGI_SOCKET_ARG="--http" - UWSGI_SOCKET_VALUE="$SCITRAN_RUNTIME_HOST:$SCITRAN_RUNTIME_PORT" -else - SCITRAN_SITE_API_URL="https://$SCITRAN_RUNTIME_HOST:$SCITRAN_RUNTIME_PORT/api" - UWSGI_SOCKET_ARG="--https" UWSGI_SOCKET_VALUE="$SCITRAN_RUNTIME_HOST:$SCITRAN_RUNTIME_PORT,$SCITRAN_RUNTIME_SSL_PEM,$SCITRAN_RUNTIME_SSL_PEM" -fi - -set +o allexport - - -if [ $INSTALL_APP -eq 1 ]; then - ./bin/install-dev-osx.sh -fi - - -clean_up () { - kill $MONGOD_PID || true - kill $UWSGI_PID || true - deactivate || true - wait 2> /dev/null -} -trap clean_up EXIT - - -source "$SCITRAN_RUNTIME_PATH/bin/activate" - - -# Launch MongoDB -ulimit -n 1024 -mkdir -p "$SCITRAN_PERSISTENT_DB_PATH" -mongod \ - --port $SCITRAN_PERSISTENT_DB_PORT \ - --dbpath "$SCITRAN_PERSISTENT_DB_PATH" \ - --smallfiles \ - & -MONGOD_PID=$! - - -# Launch uWSGI -if [ "$SCITRAN_RUNTIME_UWSGI_INI" == "" ]; then - uwsgi \ - "$UWSGI_SOCKET_ARG" "$UWSGI_SOCKET_VALUE" \ - --master --die-on-term \ - --home "$SCITRAN_RUNTIME_PATH" \ - --wsgi-file "bin/api.wsgi" \ - --py-autoreload $AUTO_RELOAD_INTERVAL \ - --logformat '%(addr) - %(user) [%(ltime)] "%(method) %(uri) %(proto)" %(status) %(size) "%(referer)" "%(uagent)" request_id=%(request_id)' & - UWSGI_PID=$! -else - uwsgi --ini "$SCITRAN_RUNTIME_UWSGI_INI" & - UWSGI_PID=$! -fi - - -echo "Waiting for API to become available" -until $(curl --output /dev/null --silent --head --fail --insecure "$SCITRAN_SITE_API_URL"); do - sleep 1 -done - - -# Bootstrap users -if [ $BOOTSTRAP_USERS -eq 1 ]; then - if [ -f "$SCITRAN_PERSISTENT_DB_PATH/.bootstrapped" ]; then - echo "Users previously bootstrapped. Remove $SCITRAN_PERSISTENT_DB_PATH to re-bootstrap." - else - echo "Bootstrapping users" - PYTHONPATH=. bin/load_users_drone_secret.py \ - --insecure --secret "$SCITRAN_CORE_DRONE_SECRET" \ - "$SCITRAN_SITE_API_URL" \ - "$SCITRAN_RUNTIME_BOOTSTRAP" - - echo "Bootstrapped users" - touch "$SCITRAN_PERSISTENT_DB_PATH/.bootstrapped" - fi -else - echo "NOT bootstrapping users" -fi - - -# Boostrap test data -TESTDATA_REPO="https://github.com/scitran/testdata.git" -if [ $BOOTSTRAP_TESTDATA -eq 1 ]; then - if [ -f "$SCITRAN_PERSISTENT_DATA_PATH/.bootstrapped" ]; then - echo "Data previously bootstrapped. Remove $SCITRAN_PERSISTENT_DATA_PATH to re-bootstrap." - else - if [ ! 
-d "$SCITRAN_PERSISTENT_PATH/testdata" ]; then - echo "Cloning testdata to $SCITRAN_PERSISTENT_PATH/testdata" - git clone --single-branch $TESTDATA_REPO $SCITRAN_PERSISTENT_PATH/testdata - else - echo "Updating testdata in $SCITRAN_PERSISTENT_PATH/testdata" - git -C $SCITRAN_PERSISTENT_PATH/testdata pull - fi - echo "Ensuring reaper is up to date with master branch" - pip install --upgrade --upgrade-strategy only-if-needed git+https://github.com/scitran/reaper.git - echo "Bootstrapping testdata" - folder_sniper --yes --insecure --secret "$SCITRAN_CORE_DRONE_SECRET" "$SCITRAN_PERSISTENT_PATH/testdata" $SCITRAN_SITE_API_URL - echo "Bootstrapped testdata" - touch "$SCITRAN_PERSISTENT_DATA_PATH/.bootstrapped" - fi -else - echo "NOT bootstrapping testdata" -fi - -wait diff --git a/bin/oneoffs/timezone_shift.py b/bin/timezone_shift.py similarity index 100% rename from bin/oneoffs/timezone_shift.py rename to bin/timezone_shift.py diff --git a/bootstrap.sample.json b/bootstrap.sample.json deleted file mode 100644 index e85943b67..000000000 --- a/bootstrap.sample.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "groups": [ - { - "_id": "unknown", - "name": "Unknown", - "roles": [ - { - "access": "admin", - "_id": "user1@example.com" - } - ] - } - ], - "users": [ - { - "_id": "user1@example.com", - "email": "user1@example.com", - "firstname": "First", - "lastname": "User", - "root": true - } - ], - "drones": [ - { - "_id": "local", - "type": "engine" - } - ] -} diff --git a/docker/README.md b/docker/README.md deleted file mode 100644 index 725f65ddd..000000000 --- a/docker/README.md +++ /dev/null @@ -1,61 +0,0 @@ - - -## Examples -Following the examples below will server up scitran/core with uwsgi on port 8080 -with auto-reload enabled. This will not utilize HTTPS, thus is meant only for -development. - -The below examples do not account for complexities of docker volumes, and -preserving their contents across container instances. - - -``` -# Build Example: - docker build -t scitran-core . - -# Run Example: - # First start mongodb - docker run --name some-mongo -d mongo - - # Then startup scitran-core, attaching to linked mongo. - docker run \ - --name scitran-core \ - -e "SCITRAN_PERSISTENT_DB_URI=mongodb://some-mongo:27017/scitran" \ - -e "SCITRAN_CORE_INSECURE=true" \ - -e "SCITRAN_CORE_DRONE_SECRET=change-me" \ - -e "SCITRAN_SITE_API_URL=http://localhost:8080/api" \ - -v $(pwd)/persistent/data:/var/scitran/data \ - -v $(pwd):/var/scitran/code/api \ - --link some-mongo \ - -p 0.0.0.0:8080:8080 \ - scitran-core \ - uwsgi \ - --ini /var/scitran/config/uwsgi-config.ini \ - --http 0.0.0.0:8080 \ - --http-keepalive \ - --python-autoreload 1 - - -# Bootstrap Account Example: - docker run \ - -e "SCITRAN_SITE_API_URL=http://scitran-core:8080/api" \ - -e "SCITRAN_CORE_DRONE_SECRET=change-me" \ - --link scitran-core \ - --rm \ - -v /dev/bali.prod/docker/uwsgi/bootstrap-dev.json:/accounts.json \ - scitran-core \ - /var/scitran/code/api/docker/bootstrap-accounts.sh \ - /accounts.json - - -# Bootstrap Data Example: - docker run \ - -e "SCITRAN_SITE_API_URL=http://scitran-core:8080/api" \ - -e "SCITRAN_CORE_DRONE_SECRET=change-me" \ - -e "PRE_RUNAS_CMD=/var/scitran/code/api/docker/bootstrap-data.sh" \ - --link scitran-core \ - --volumes-from scitran-core \ - --rm \ - scitran-core \ - echo "Data bootstrap complete." 
-``` diff --git a/docker/bootstrap-accounts.sh b/docker/bootstrap-accounts.sh deleted file mode 100755 index e8aab4c1b..000000000 --- a/docker/bootstrap-accounts.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -set -e -set -x - -echo "IN BOOTSTRAP ACCOUNTS" - -( - -# Parse input parameters... -# -# bootstrap account file -bootstrap_user_file=${1:-'/var/scitran/code/api/bootstrap.json.sample'} - - -# Move to API folder for relative path assumptions later on -# -cd /var/scitran/code/api - -# Export PYTHONPATH for python script later on. -# -export PYTHONPATH=. - - -# Bootstrap Users -./bin/load_users_drone_secret.py --insecure --secret "${SCITRAN_CORE_DRONE_SECRET}" "${SCITRAN_SITE_API_URL}" "${bootstrap_user_file}" - - -) diff --git a/docker/bootstrap-data.sh b/docker/bootstrap-data.sh deleted file mode 100755 index 82ced6bc3..000000000 --- a/docker/bootstrap-data.sh +++ /dev/null @@ -1,77 +0,0 @@ -#!/bin/bash -set -e -set -x - -echo "IN BOOTSTRAP DATA" - -( - -# Parse input parameters... -# -# See if we're pulling latest data no matter what, ignoring cache. -# Default = N -GET_LATEST_DATA=${1:-N} - - -# Hard code some other vars important for bootstrapping -# - -# Set the commit hash or tag or branch desired for scitran/testdata. -# Branch name should only be used for testing convenience. -# -# When changing scitran/testdata, merge that change to master first, -# then reference that resulting commit hash here. -bootstrap_data_label=7d5c3608ff360d6ae28aab0ef262e6781c4ae8d6 - - -# Same as bootstrap_data_label above, except for scitran/reaper. -bootstrap_reaper_label=2.0.0-beta.0 - - -# Move to API folder for relative path assumptions later on -# -cd /var/scitran/code/api - -# Export PYTHONPATH for python script later on. -# -export PYTHONPATH=. - - -# Bootstrap data - -# Compare hash of source test data to most recent download. Remove local copy to force re-download if they are different. -TESTDATA_URL="https://github.com/scitran/testdata/archive/${bootstrap_data_label}.tar.gz" -TESTDATA_VERSION=$(curl -sLI ${TESTDATA_URL} | grep ETag | tail -n 1 | cut -f 2 -d '"') - -# use hidden -TESTDATA_DIR=$SCITRAN_PERSISTENT_PATH/testdata - -if [ ! -d "$TESTDATA_DIR" ] || [ ! -d "$TESTDATA_DIR/download" ] || [ ! -f "$TESTDATA_DIR/.testdata_version" ]; then - echo "Downloading testdata to $TESTDATA_DIR" - - # Remove old contents, as they may not be forward compatible. - rm -rf "$TESTDATA_DIR" - mkdir -p "$TESTDATA_DIR/download" - curl -L $TESTDATA_URL | tar xz -C "$TESTDATA_DIR/download" --strip-components 1 -else - if [ "$TESTDATA_VERSION" != "$(cat $TESTDATA_DIR/.testdata_version)" ]; then - echo "Testdata out of date; downloading" - - # Remove old contents, as they may not be forward compatible. - rm -rf "$TESTDATA_DIR" - mkdir -p "$TESTDATA_DIR/download" - curl -L $TESTDATA_URL | tar xz -C "$TESTDATA_DIR/download" --strip-components 1 - else - echo "Testdata up to date" - fi -fi -builtin echo "$TESTDATA_VERSION" > "$TESTDATA_DIR/.testdata_version" - -# pull reaper module -pip install "git+https://github.com/scitran/reaper.git@${bootstrap_reaper_label}" - - -## load the test data in -folder_sniper --yes --insecure --secret "$SCITRAN_CORE_DRONE_SECRET" "$TESTDATA_DIR/download" $SCITRAN_SITE_API_URL - -) diff --git a/docker/build-trigger.sh b/docker/build-trigger.sh deleted file mode 100755 index 8c012e8e8..000000000 --- a/docker/build-trigger.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env bash - -# Triggers an auto-build on Docker Hub for the given source control reference. 
-# -# Example usage: ./build-trigger Tag 1.0.0 https://registry.hub.docker.com/u/scitran/reaper/trigger/11111111-2222-3333-4444-abcdefabcdef/ - -set -e - -if [ $# -ne 3 ] ; then - >&2 echo "Usage: $( basename $0 ) " - exit 1 -fi - -SOURCE_CONTROL_REF_TYPE="${1}" -SOURCE_CONTROL_REF_NAME="${2}" -TRIGGER_URL="${3}" - -if [ -z "${SOURCE_CONTROL_REF_TYPE}" ] ; then - >&2 echo "Source control reference type not provided. Skipping build trigger." - exit 1 -fi - -if [ -z "${SOURCE_CONTROL_REF_NAME}" ] ; then - >&2 echo "Source control tag name not provided. Skipping build trigger." - exit 1 -fi - -TRIGGER_PAYLOAD="{\"source_type\": \"${SOURCE_CONTROL_REF_TYPE}\", \"source_name\": \"${SOURCE_CONTROL_REF_NAME}\"}" -curl -H "Content-Type: application/json" --data "${TRIGGER_PAYLOAD}" -X POST "${TRIGGER_URL}" ->&2 echo ->&2 echo "Docker Hub build for ${SOURCE_CONTROL_REF_TYPE} '${SOURCE_CONTROL_REF_NAME}' triggered." diff --git a/docker/inject_build_info.sh b/docker/inject_build_info.sh deleted file mode 100755 index cd68e40be..000000000 --- a/docker/inject_build_info.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash -( - -set -e - -# Set cwd -unset CDPATH -cd "$( dirname "${BASH_SOURCE[0]}" )" - -# Dump the build info into version.json so it can be displayed in the footer -# of the site pages. - -# { -# "commit": "5683785e8cd6efdfd794a79828b2cccd2424ed21", -# "timestamp": "January 12, 2016 at 2:46:23 PM CST", -# "branch": "ng-constant" -# } - - - BRANCH_NAME=${1} - COMMIT_HASH=${2} - BUILD_TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%SZ") - - echo "{ - \"commit\": \"${COMMIT_HASH}\", - \"timestamp\": \"${BUILD_TIMESTAMP}\", - \"branch\": \"${BRANCH_NAME}\" -}" > version.json - -cat version.json - -) diff --git a/docker/pymongo-cli.py b/docker/pymongo-cli.py deleted file mode 100644 index 4dd9937ef..000000000 --- a/docker/pymongo-cli.py +++ /dev/null @@ -1,5 +0,0 @@ -import os -import pymongo -from bson import ObjectId -db_uri = os.getenv('SCITRAN_PERSISTENT_DB_URI') -db = pymongo.MongoClient(db_uri).get_default_database() diff --git a/docker/requirements-docker.txt b/docker/requirements-docker.txt deleted file mode 100644 index ac39e009c..000000000 --- a/docker/requirements-docker.txt +++ /dev/null @@ -1,3 +0,0 @@ -# Service and support dependencies -uWSGI==2.0.11 -ipython==4.0.2 diff --git a/docker/uwsgi-config.ini b/docker/uwsgi-config.ini deleted file mode 100644 index 53467c7ec..000000000 --- a/docker/uwsgi-config.ini +++ /dev/null @@ -1,8 +0,0 @@ -[uwsgi] -wsgi-file = bin/api.wsgi -chdir=code/api -pythonpath=code/data -master = True -die-on-term = True -processes = 4 -threads = 2 diff --git a/docker/uwsgi-entrypoint.sh b/docker/uwsgi-entrypoint.sh deleted file mode 100755 index 4e071a07e..000000000 --- a/docker/uwsgi-entrypoint.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/bin/bash - -# FDM-831 workaround -# https://github.com/docker/compose/issues/2454 -# remove after docker 1.10.0 becomes minim supported version. -# -# If /etc/hosts has lines starting with tab, it is corrupted, -# exit to allow docker to restart. -grep -P "^\t" /etc/hosts -if [ "$?" == 0 ] ; then - echo "Host mapping in /etc/hosts is buggy, fail contain start." - exit 1 -fi - - -set -e -set -x - -export PYTHONPATH=/var/scitran/code/api - -export SCITRAN_PERSISTENT_PATH=/var/scitran/data -export SCITRAN_PERSISTENT_DATA_PATH=/var/scitran/data - -# Get the RunAs user from the owner of the mapped folder. 
-# This is a compromise to get permissions to work well with -# host mapped volumes with docker-machine on OSX and production -# without the vbox driver layer. -RUNAS_USER=$(ls -ld "${SCITRAN_PERSISTENT_DATA_PATH}" | awk '{print $3}') - - -if [ "${1:0:1}" = '-' ]; then - set -- uwsgi "$@" -fi - -# run $PRE_RUNAS_CMD as root if provided. Useful for things like JIT pip insalls. -if [ ! -z "${PRE_RUNAS_CMD}" ]; then - ${PRE_RUNAS_CMD} -fi - -if [ "$1" = 'uwsgi' ]; then - - exec gosu ${RUNAS_USER} "$@" -fi - -gosu ${RUNAS_USER} "$@" - -result=$? -echo "Exit code was $result" diff --git a/logs/.gitignore b/logs/.gitignore deleted file mode 100644 index 17cbde9d6..000000000 --- a/logs/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -.DS_Store -user_access.log diff --git a/nginx-unit.json b/nginx-unit.json new file mode 100644 index 000000000..a266f9ce3 --- /dev/null +++ b/nginx-unit.json @@ -0,0 +1,17 @@ +{ + "listeners": { + "*:8080": { + "application": "scitran-core" + } + }, + + "applications": { + "scitran-core": { + "type": "python", + "workers": 2, + "user": "nobody", + "path": "/src/core", + "module": "api.app" + } + } +} diff --git a/nginx.conf b/nginx.conf new file mode 100644 index 000000000..6157ca9ed --- /dev/null +++ b/nginx.conf @@ -0,0 +1,15 @@ +events { +} + +http { + upstream unit_backend { + server 127.0.0.1:8080; + } + + server { + location /api { + proxy_pass http://unit_backend; + proxy_set_header Host $host; + } + } +} diff --git a/requirements.txt b/requirements.txt index cdb601fee..b46d7021f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,17 +1,15 @@ django>=1.11.0,<1.12.0 elasticsearch==5.3.0 -enum==0.4.6 +enum34==1.1.6 +git+https://github.com/flywheel-io/gears.git@v0.1.4#egg=gears jsonschema==2.6.0 -Markdown==2.6.5 +markdown==2.6.5 pymongo==3.2 -pyOpenSSL >=17.1.0,<18.0 python-dateutil==2.4.2 pytz==2015.7 requests==2.9.1 rfc3987==1.3.4 strict-rfc3339==0.7 unicodecsv==0.9.0 -uwsgi==2.0.13.1 webapp2==2.5.2 -WebOb==1.5.1 -git+https://github.com/flywheel-io/gears.git@v0.1.4#egg=gears +webob==1.5.1 diff --git a/setup.py b/setup.py new file mode 100644 index 000000000..2652f83d3 --- /dev/null +++ b/setup.py @@ -0,0 +1,22 @@ +from setuptools import setup, find_packages + +requirements = open('requirements.txt').readlines() +install_requires = [r for r in requirements if not r.startswith('git+')] +dependency_links = [r for r in requirements if r.startswith('git+')] +tests_require = open('tests/requirements.txt').readlines() + +setup( + name = 'core', + version = '1.0.0', + description = 'Scitran API Core', + author = 'Gunnar Schaefer', + author_email = 'gsfr@flywheel.io', + maintainer = 'Megan Henning, Ambrus Simon', + maintainer_email = 'meganhenning@flywheel.io, ambrussimon@invenshure.com', + url = 'https://github.com/scitran/core', + license = 'MIT', + packages = find_packages(), + dependency_links = dependency_links, + install_requires = install_requires, + tests_require = tests_require, +) diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 000000000..f43ba9aa9 --- /dev/null +++ b/tests/README.md @@ -0,0 +1,24 @@ +## Testing +Build scitran-core and run automated tests in a docker container: +``` +./tests/bin/docker-tests.sh +``` + +* To skip building the image, use `--no-build` (`-B`) +* All tests (unit, integration and linting) are executed by default +* To pass any arguments to `py.test`, use `-- PYTEST_ARGS` + * To run only a subset of test, use the [keyword expression 
filter](https://docs.pytest.org/en/latest/usage.html#specifying-tests-selecting-tests) `-k` + * To see `print` output during tests, increase verbosity with `-vvv` + * To get a debugger session on failures, use [`--pdb`](https://docs.pytest.org/en/latest/usage.html#dropping-to-pdb-python-debugger-on-failures) + +See [py.test usage](https://docs.pytest.org/en/latest/usage.html) for more. + +### Example +The most common use case is adding a new (still failing) test, and wanting to +* (re-)run it as fast as possible (`-B` and `-k foo`) +* see output from quick and dirty `print` statements in the test (`-vvv`) +* get into an interactive pdb session to inspect what went wrong (`--pdb`) + +``` +./tests/bin/docker-tests.sh -B -- -k foo -vvv --pdb +``` diff --git a/tests/bin/docker-tests.sh b/tests/bin/docker-tests.sh new file mode 100755 index 000000000..cd4125b75 --- /dev/null +++ b/tests/bin/docker-tests.sh @@ -0,0 +1,149 @@ +#!/usr/bin/env sh + +set -eu +unset CDPATH +cd "$( dirname "$0" )/../.." + + +USAGE=" +Usage: + $0 [OPTION...] [-- PYTEST_ARGS...] + +Build scitran/core image and run tests in a Docker container. +Also displays coverage report and saves HTML in htmlcov dir. + +Options: + -h, --help Print this help and exit + + -B, --no-build Skip rebuilding default Docker image + --image IMAGE Use custom Docker image + -- PYTEST_ARGS Arguments passed to py.test + +" + + +main() { + local DOCKER_IMAGE= + local PYTEST_ARGS= + + while [ $# -gt 0 ]; do + case "$1" in + -h|--help) + log "$USAGE" + exit 0 + ;; + -B|--no-build) + DOCKER_IMAGE="scitran/core:testing" + ;; + --image) + DOCKER_IMAGE="$2" + shift + ;; + --) + shift + PYTEST_ARGS="$@" + break + ;; + *) + log "Invalid argument: $1" + log "$USAGE" + exit 1 + ;; + esac + shift + done + + # Docker build + if [ -z "${DOCKER_IMAGE}" ]; then + log "Building scitran/core:testing ..." + docker build -t scitran/core:testing . + else + docker tag "$DOCKER_IMAGE" "scitran/core:testing" + fi + + log "Cleaning pyc and previous coverage results ..." + # Run within container to avoid permission problems + docker run --rm \ + --name core-test-cleanup \ + --volume $(pwd):/src/core \ + scitran/core:testing \ + sh -c " + find . -type d -name __pycache__ -exec rm -rf {} \;; + find . -type f -name '*.pyc' -delete; + rm -rf .coverage htmlcov; + " + + trap clean_up EXIT + docker network create core-test + + # Launch core + mongo + docker run -d \ + --name core-test-service \ + --network core-test \ + --volume $(pwd)/api:/src/core/api \ + --volume $(pwd)/tests:/src/core/tests \ + --env SCITRAN_CORE_DRONE_SECRET=secret \ + --env SCITRAN_RUNTIME_COVERAGE=true \ + --env SCITRAN_CORE_ACCESS_LOG_ENABLED=true \ + scitran/core:testing + + # Execute tests + docker run -it \ + --name core-test-runner \ + --network core-test \ + --volume $(pwd)/api:/src/core/api \ + --volume $(pwd)/tests:/src/core/tests \ + --env SCITRAN_SITE_API_URL=http://core-test-service/api \ + --env SCITRAN_CORE_DRONE_SECRET=secret \ + --env SCITRAN_PERSISTENT_DB_URI=mongodb://core-test-service:27017/scitran \ + --env SCITRAN_PERSISTENT_DB_LOG_URI=mongodb://core-test-service:27017/logs \ + scitran/core:testing \ + tests/bin/tests.sh -- $PYTEST_ARGS +} + + +clean_up() { + local TEST_RESULT_CODE=$? + set +e + + log "INFO: Test return code = $TEST_RESULT_CODE" + if [ "${TEST_RESULT_CODE}" = "0" ]; then + log "INFO: Collecting coverage..." 
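+        # Two coverage sources meet here: the unit-test run leaves .coverage in
+        # the core-test-runner container, and the API process in
+        # core-test-service writes /tmp/.coverage.integration-tests (the
+        # data_file set in api/web/start.py). Stopping the service first lets
+        # its atexit handler flush that file before it is copied out.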
+ + # Copy unit test coverage + docker cp core-test-runner:/src/core/.coverage .coverage.unit-tests 2>/dev/null + + # Save integration test coverage + docker wait $(docker stop core-test-service) + docker cp core-test-service:/tmp/.coverage.integration-tests . + + # Combine unit/integ coverage and report/grenerate html + docker run --rm \ + --name core-test-coverage \ + --volume $(pwd):/src/core \ + scitran/core:testing \ + sh -c ' + coverage combine; + coverage report --skip-covered --show-missing; + coverage html; + ' + else + log "INFO: Printing container logs..." + docker logs core-test-service + log "ERROR: Test return code = $TEST_RESULT_CODE. Container logs printed above." + fi + + # Spin down dependencies + docker rm --force --volumes core-test-runner + docker rm --force --volumes core-test-service + docker network rm core-test + exit $TEST_RESULT_CODE +} + + +log() { + printf "\n%s\n" "$@" >&2 +} + + +main "$@" diff --git a/tests/bin/run-tests-docker.sh b/tests/bin/run-tests-docker.sh deleted file mode 100755 index 7cb55be26..000000000 --- a/tests/bin/run-tests-docker.sh +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env bash -set -eu -unset CDPATH -cd "$( dirname "${BASH_SOURCE[0]}" )/../.." - - -function usage() { -cat >&2 < 0 ]]; do - case "$1" in - -B|--no-build) DOCKER_BUILD=false; ;; - -h|--help) usage; exit 0 ;; - --) TEST_ARGS="${@:2}"; break ;; - *) echo "Invalid argument: $1" >&2; usage; exit 1 ;; - esac - shift - done - - if ${DOCKER_BUILD}; then - echo "Building scitran-core:run-tests ..." - docker build -t scitran-core:run-tests . - fi - - trap clean_up EXIT - - docker network create scitran-core-test-network - - # Launch Mongo instance - docker run -d \ - --name scitran-core-test-mongo \ - --network scitran-core-test-network \ - mongo:${MONGO_VERSION} - - # Execute tests - docker run -it \ - --name scitran-core-test-uwsgi \ - --network scitran-core-test-network \ - -e SCITRAN_PERSISTENT_DB_URI=mongodb://scitran-core-test-mongo:27017/scitran \ - -e SCITRAN_PERSISTENT_DB_LOG_URI=mongodb://scitran-core-test-mongo:27017/logs \ - -v $(pwd):/var/scitran/code/api \ - --entrypoint bash \ - scitran-core:run-tests \ - /var/scitran/code/api/tests/bin/run-tests-ubuntu.sh \ - $TEST_ARGS -} - - -function clean_up() { - local TEST_RESULT_CODE=$? - set +e - - # Copy coverage file to host for possible further reporting - docker cp scitran-core-test-uwsgi:/var/scitran/code/api/.coverage .coverage - - # Spin down dependencies - docker rm -f -v scitran-core-test-uwsgi - docker rm -f -v scitran-core-test-mongo - docker network rm scitran-core-test-network - exit $TEST_RESULT_CODE -} - - -main "$@" diff --git a/tests/bin/run-tests-ubuntu.sh b/tests/bin/run-tests-ubuntu.sh deleted file mode 100755 index 8c7d94fa5..000000000 --- a/tests/bin/run-tests-ubuntu.sh +++ /dev/null @@ -1,191 +0,0 @@ -#!/usr/bin/env bash -set -eu -unset CDPATH -cd "$( dirname "${BASH_SOURCE[0]}" )/../.." 
- - -function usage() { -cat >&2 < 0 ]]; do - case "$1" in - -l|--lint) RUN_ALL=false; RUN_LINT=true ;; - -u|--unit) RUN_ALL=false; RUN_UNIT=true ;; - -i|--integ) RUN_ALL=false; RUN_INTEG=true ;; - -a|--abao) RUN_ALL=false; RUN_ABAO=true ;; - -h|--help) usage; exit 0 ;; - --) PYTEST_ARGS="${@:2}"; break ;; - *) echo "Invalid argument: $1" >&2; usage; exit 1 ;; - esac - shift - done - - if ${RUN_ALL}; then - # No filtering options used, run everything by default - RUN_LINT=true - RUN_UNIT=true - RUN_INTEG=true - RUN_ABAO=true - elif ${RUN_LINT} && ${RUN_UNIT} && ${RUN_INTEG} && ${RUN_ABAO}; then - # All filtering options were used, the same as none - RUN_ALL=true - fi - - trap clean_up EXIT - - # Remove __pycache__ directories for issue with __file__ attribute due to - # running the tests on the host creating bytecode files hich have a - # mismatched __file__ attribute when loaded in docker container - rm -rf tests/unit_tests/python/__pycache__ - rm -rf tests/integration_tests/python/__pycache__ - - export PYTHONPATH="$(pwd)" - export SCITRAN_SITE_API_URL="http://localhost:8081/api" - export SCITRAN_PERSISTENT_DB_PORT=${SCITRAN_PERSISTENT_DB_PORT:-"9001"} - export SCITRAN_PERSISTENT_DB_URI=${SCITRAN_PERSISTENT_DB_URI:-"mongodb://localhost:$SCITRAN_PERSISTENT_DB_PORT/scitran"} - export SCITRAN_PERSISTENT_DB_LOG_URI=${SCITRAN_PERSISTENT_DB_LOG_URI:-"mongodb://localhost:$SCITRAN_PERSISTENT_DB_PORT/logs"} - export SCITRAN_PERSISTENT_PATH=`mktemp -d` - export SCITRAN_PERSISTENT_DATA_PATH="$SCITRAN_PERSISTENT_PATH/data" - export SCITRAN_CORE_DRONE_SECRET=${SCITRAN_CORE_DRONE_SECRET:-$( openssl rand -base64 32 )} - - if ${RUN_LINT}; then - echo "Running pylint ..." - # TODO Enable Refactor and Convention reports - # TODO Move --disable into rc - pylint --reports=no --disable=C,R,W0312,W0141,W0110 api - - # echo "Running pep8 ..." - # pep8 --max-line-length=150 --ignore=E402 api - fi - - if ${RUN_UNIT}; then - echo "Running unit tests ..." - rm -f .coverage - py.test --cov=api --cov-report= tests/unit_tests/python $PYTEST_ARGS - fi - - if ${RUN_INTEG} || ${RUN_ABAO}; then - echo "Spinning up dependencies ..." - uwsgi --http "localhost:8081" --master --http-keepalive \ - --so-keepalive --add-header "Connection: Keep-Alive" \ - --processes 1 --threads 1 \ - --enable-threads \ - --wsgi-file bin/api.wsgi \ - --die-on-term \ - --logformat '%(addr) - %(user) [%(ltime)] "%(method) %(uri) %(proto)" %(status) %(size) "%(referer)" "%(uagent)" request_id=%(request_id)' \ - --env "SCITRAN_PERSISTENT_DB_URI=$SCITRAN_PERSISTENT_DB_URI" \ - --env "SCITRAN_PERSISTENT_DB_LOG_URI=$SCITRAN_PERSISTENT_DB_LOG_URI" \ - --env "SCITRAN_PERSISTENT_PATH=$SCITRAN_PERSISTENT_PATH" \ - --env "SCITRAN_PERSISTENT_DATA_PATH=$SCITRAN_PERSISTENT_DATA_PATH" \ - --env "SCITRAN_CORE_DRONE_SECRET=$SCITRAN_CORE_DRONE_SECRET" \ - --env "SCITRAN_RUNTIME_COVERAGE=true" \ - --env "SCITRAN_CORE_ACCESS_LOG_ENABLED=true" & - export API_PID=$! - - echo "Connecting to API" - until $(curl --output /dev/null --silent --head --fail "$SCITRAN_SITE_API_URL"); do - printf '.' - sleep 1 - done - fi - - if ${RUN_INTEG}; then - echo "Running integration tests ..." - py.test tests/integration_tests/python $PYTEST_ARGS - fi - - if ${RUN_ABAO}; then - echo "Running abao tests ..." 
- # Create resources that Abao relies on - python tests/integration_tests/abao/load_fixture.py - - # If no VIRTUAL_ENV, make sure /usr/local/bin is in the path - if [[ -z "${VIRTUAL_ENV:-}" ]]; then - PATH="/usr/local/bin:$PATH" - npm install tests/integration_tests - else - npm install --global tests/integration_tests - fi - - PATH="$(npm bin):$PATH" - - # Allow us to require modules from package.json, - # since abao_test_hooks.js is not being called from the package directory - integration_test_node_modules="$(pwd)/node_modules/scitran-core-integration-tests/node_modules" - - # Have to change into definitions directory to resolve - # relative $ref's in the jsonschema's - pushd raml/schemas/definitions - NODE_PATH="$integration_test_node_modules" abao ../../api.raml "--server=$SCITRAN_SITE_API_URL" "--hookfiles=../../../tests/integration_tests/abao/abao_test_hooks.js" - popd - fi -} - - -function clean_up () { - local TEST_RESULT_CODE=$? - set +e - - echo - echo "Test return code = $TEST_RESULT_CODE" - - if [[ -n "${API_PID:-}" ]]; then - # Killing uwsgi - kill $API_PID - wait 2> /dev/null - fi - - if ${RUN_ALL} && [[ "${TEST_RESULT_CODE}" == "0" ]]; then - echo - echo "UNIT TEST COVERAGE:" - coverage report --skip-covered - echo - echo "OVERALL COVERAGE:" - coverage combine - coverage report --show-missing - coverage html - else - echo "Some tests were skipped or failed, skipping coverage report" - fi - - exit $TEST_RESULT_CODE -} - - -main "$@" diff --git a/tests/bin/setup-integration-tests-ubuntu.sh b/tests/bin/setup-integration-tests-ubuntu.sh deleted file mode 100755 index 9a700f37d..000000000 --- a/tests/bin/setup-integration-tests-ubuntu.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env bash -set -eu -unset CDPATH -cd "$( dirname "${BASH_SOURCE[0]}" )/../.." - -sudo pip install -U -r "tests/integration_tests/requirements-integration-test.txt" - -NODE_URL="https://nodejs.org/dist/v6.4.0/node-v6.4.0-linux-x64.tar.gz" - -if [[ -z "${VIRTUAL_ENV:-}" ]]; then - curl $NODE_URL | sudo tar xz -C /usr/local --strip-components 1 -else - curl $NODE_URL | tar xz -C $VIRTUAL_ENV --strip-components 1 -fi diff --git a/tests/bin/tests.sh b/tests/bin/tests.sh new file mode 100755 index 000000000..5592c688b --- /dev/null +++ b/tests/bin/tests.sh @@ -0,0 +1,75 @@ +#!/usr/bin/env sh + +set -eu +unset CDPATH +cd "$( dirname "$0" )/../.." + + +USAGE=" +Usage: + $0 [-- PYTEST_ARGS...] + +Runs all tests (unit, integ and linting) if no options are provided. + +Assumes running in a scitran/core:testing container or that core and all +of its dependencies are installed the same way as in the Dockerfile. + +Options: + -h, --help Print this help and exit + -- PYTEST_ARGS Arguments passed to py.test + +Envvars (required for integration tests): + SCITRAN_SITE_API_URL URI to a running core instance (including /api) + SCITRAN_CORE_DRONE_SECRET API shared secret + SCITRAN_PERSISTENT_DB_URI Mongo URI to the scitran DB + SCITRAN_PERSISTENT_DB_LOG_URI Mongo URI to the scitran log DB + +" + + +main() { + export PYTHONDONTWRITEBYTECODE=1 + local PYTEST_ARGS= + + while [ $# -gt 0 ]; do + case "$1" in + -h|--help) + log "$USAGE" + exit 0 + ;; + --) + shift + PYTEST_ARGS="$@" + break + ;; + *) + log "Invalid argument: $1" + log "$USAGE" >&2 + exit 1 + ;; + esac + shift + done + + log "Running unit tests ..." + py.test --cov=api --cov-report= tests/unit_tests/python $PYTEST_ARGS + + log "Running integration tests ..." + py.test tests/integration_tests/python $PYTEST_ARGS + + log "Running pylint ..." 
+ # TODO Enable Refactor and Convention reports + # TODO Move --disable into rc + pylint --jobs=4 --reports=no --disable=C,R,W0312,W0141,W0110 api + + # log "Running pep8 ..." + # pep8 --max-line-length=150 --ignore=E402 api +} + + +log() { + printf "\n%s\n" "$@" >&2 +} + + +main "$@" diff --git a/tests/integration_tests/.npmignore b/tests/integration_tests/.npmignore deleted file mode 100644 index 72e8ffc0d..000000000 --- a/tests/integration_tests/.npmignore +++ /dev/null @@ -1 +0,0 @@ -* diff --git a/tests/integration_tests/abao/abao_test_hooks.js b/tests/integration_tests/abao/abao_test_hooks.js deleted file mode 100644 index 270c43aae..000000000 --- a/tests/integration_tests/abao/abao_test_hooks.js +++ /dev/null @@ -1,1463 +0,0 @@ -var hooks = require('hooks'); -var chai = require('chai'); -var assert = chai.assert; - -// Variables for passing results as input to subsequent tests -var gear_name = 'test-case-gear'; -var group_id = 'test-group'; -var delete_group_id = 'example_group'; -var test_group_tag = 'test-group-tag'; -var collection_id = 'test-collection-1'; -var delete_collection_id = ''; -var test_collection_1 = null; -var test_collection_tag = 'test-collection-tag'; -var test_session_1 = null; -var test_session_2_id = null; -var test_session_tag = 'test-session-tag'; -var test_session_1_analysis_2_id = null; -var test_acquisition_1 = null; -var test_acquisition_tag = 'test-acq-tag'; -var example_acquisition_id = ''; -var test_project_1 = null; -var test_project_tag = 'test-project-tag'; -var delete_project_id = ''; -var device_id = 'bootstrapper_Bootstrapper' -var injected_api_key = 'XZpXI40Uk85eozjQkU1zHJ6yZHpix+j0mo1TMeGZ4dPzIqVPVGPmyfeK' - -// Tests we're skipping, fix these - -// Fails only in travis -hooks.skip("GET /version -> 200"); - -// Should 400 to say invalid json -hooks.skip("GET /download -> 400"); - -// Should 422 for missing metadata field -hooks.skip("POST /upload/label -> 402"); -hooks.skip("POST /upload/uid -> 402"); -hooks.skip("POST /upload/uid-match -> 402"); - -// Should 404 -hooks.skip("GET /jobs/{JobId} -> 404"); - -// Can only retry a failed job -hooks.skip("POST /jobs/{JobId}/retry -> 200"); - -// Cannot get JobId without GET /jobs endpoint -hooks.skip("GET /jobs/{JobId} -> 200"); -hooks.skip("GET /jobs/{JobId}/config.json -> 200"); -hooks.skip("POST /jobs/{JobId}/retry -> 200"); -hooks.skip("GET /jobs/{JobId} -> 404"); - -// https://github.com/cybertk/abao/issues/160 -hooks.skip("GET /users/self/avatar -> 307"); -hooks.skip("GET /users/{UserId}/avatar -> 307"); - -// drones currently use shared secret, allow when using API keys -hooks.skip("POST /devices -> 200") -hooks.skip("GET /devices/self -> 200") - -// Tests that are skipped because we do them in python - -// Skipping because abao doesn't support file fields -hooks.skip("POST /download -> 200"); -hooks.skip("GET /download -> 200"); -hooks.skip("POST /upload/label -> 200"); -hooks.skip("POST /upload/uid -> 200"); -hooks.skip("POST /upload/uid-match -> 200"); -hooks.skip("POST /upload/uid-match -> 404"); -hooks.skip("POST /engine -> 200"); -hooks.skip("POST /collections/{CollectionId}/packfile-start -> 200"); -hooks.skip("POST /collections/{CollectionId}/packfile -> 200"); -hooks.skip("GET /collections/{CollectionId}/packfile-end -> 200"); -hooks.skip("POST /sessions/{SessionId}/packfile-start -> 200"); -hooks.skip("POST /sessions/{SessionId}/packfile -> 200"); -hooks.skip("GET /sessions/{SessionId}/packfile-end -> 200"); -hooks.skip("POST /acquisitions/{AcquisitionId}/packfile-start -> 200"); 
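Nearly every request in the hook file being removed here is authenticated the same way: a hooks.beforeEach (further down) sets root=true and a scitran-user Authorization header using a fixture API key. A hypothetical Python equivalent of that setup, for context:

    import os
    import requests

    # Fixture API key hard-coded by the abao suite; test data, not a secret.
    API_KEY = 'XZpXI40Uk85eozjQkU1zHJ6yZHpix+j0mo1TMeGZ4dPzIqVPVGPmyfeK'

    base = os.environ['SCITRAN_SITE_API_URL']
    session = requests.Session()
    session.params = {'root': 'true'}
    session.headers['Authorization'] = 'scitran-user ' + API_KEY
    # e.g. session.get(base + '/groups/test-group')
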
-hooks.skip("POST /acquisitions/{AcquisitionId}/packfile -> 200"); -hooks.skip("GET /acquisitions/{AcquisitionId}/packfile-end -> 200"); -hooks.skip("POST /projects/{ProjectId}/packfile-start -> 200"); -hooks.skip("POST /projects/{ProjectId}/packfile -> 200"); -hooks.skip("GET /projects/{ProjectId}/packfile-end -> 200"); - - -// Skipping until merge with rest of project raml (So we have a ProjectId) -hooks.skip("POST /projects/{ProjectId}/template -> 200") -hooks.skip("DELETE /projects/{ProjectId}/template -> 200") -hooks.skip("POST /projects/{ProjectId}/recalc -> 200") -hooks.skip("GET /projects/{ProjectId}/rules -> 200") - -// Porting to python as per #600 -hooks.skip("POST /jobs/add -> 200") -hooks.skip("PUT /jobs/{JobId} -> 200") -hooks.skip("GET /gears/{GearId} -> 200") -hooks.skip("GET /sessions/{SessionId}/jobs -> 200") - -// Cannot be ran due to gear IDs being used as per # -hooks.skip("POST /sessions/{SessionId}/analyses -> 200") -hooks.skip("GET /sessions/{SessionId}/analyses/{AnalysisId} -> 200") -hooks.skip("DELETE /sessions/{SessionId}/analyses/{AnalysisId} -> 200") -// Related, ref #696 -hooks.skip("DELETE /gears/{GearId} -> 200") - - -hooks.before("POST /login -> 200", function(test, done) { - test.request.body = { - 'code': injected_api_key, - 'auth_type': 'api-key' - }; - done(); -}); - -hooks.beforeEach(function (test, done) { - test.request.query.root = "true" - test.request.headers.Authorization = "scitran-user XZpXI40Uk85eozjQkU1zHJ6yZHpix+j0mo1TMeGZ4dPzIqVPVGPmyfeK"; - done(); -}); - -hooks.before("GET /download -> 404", function(test, done) { - test.request.query = { - ticket: '1234' - }; - done(); -}); - -hooks.before("POST /users -> 400", function(test, done) { - test.request.body = {api_key:{key:"test"}}; - done(); -}); - -hooks.before("GET /users/{UserId} -> 200", function(test, done) { - test.request.params = { - UserId: "jane.doe@gmail.com" - }; - done(); -}); - -hooks.before("PUT /users/{UserId} -> 200", function(test, done) { - test.request.params = { - UserId: "jane.doe@gmail.com" - }; - done(); -}); - -hooks.before("PUT /users/{UserId} -> 400", function(test, done) { - test.request.params = { - UserId: "jane.doe@gmail.com" - }; - test.request.body = {"not_a_valid_property":"foo"}; - done(); -}); - -hooks.before("DELETE /users/{UserId} -> 200", function(test, done) { - test.request.params = { - UserId: "jane.doe@gmail.com" - }; - done(); -}); - -hooks.before("GET /users/{UserId}/acquisitions -> 200", function(test, done) { - test.request.params = { - UserId: "admin@user.com" - }; - done(); -}); - -hooks.before("GET /users/{UserId}/collections -> 200", function(test, done) { - test.request.params = { - UserId: "admin@user.com" - }; - done(); -}); - -hooks.before("GET /users/{UserId}/projects -> 200", function(test, done) { - test.request.params = { - UserId: "admin@user.com" - }; - done(); -}); - -hooks.before("GET /users/{UserId}/sessions -> 200", function(test, done) { - test.request.params = { - UserId: "admin@user.com" - }; - done(); -}); - -hooks.before("PUT /groups/{GroupId} -> 400", function(test, done) { - test.request.params = { - GroupId: group_id - }; - test.request.body = {"not_a_real_property":"foo"}; - done(); -}); - -hooks.before("POST /groups -> 400", function(test, done) { - test.request.body = {"not_a_real_property":"foo"}; - done(); -}); - - -hooks.before("GET /groups/{GroupId} -> 200", function(test, done) { - test.request.params = { - GroupId: group_id - }; - done(); -}); - -hooks.before("DELETE /groups/{GroupId} -> 200", function(test, 
done) { - test.request.params = { - GroupId: delete_group_id - }; - done(); -}); - -hooks.before("POST /groups/{GroupId}/permissions -> 200", function(test, done) { - test.request.params = { - GroupId: group_id - }; - test.request.body = { - _id: "test@user.com", - access: "ro" - } - done(); -}); - -hooks.before("POST /groups/{GroupId}/permissions -> 400", function(test, done) { - test.request.params = { - GroupId: group_id - }; - test.request.body.foo = "bar"; - done(); -}); - -hooks.before("GET /groups/{GroupId}/permissions/{UserId} -> 200", function(test, done) { - test.request.params = { - GroupId: group_id, - UserId: "test@user.com" - }; - done(); -}); - -hooks.before("PUT /groups/{GroupId}/permissions/{UserId} -> 200", function(test, done) { - test.request.params = { - GroupId: group_id, - UserId: "test@user.com" - }; - test.request.body = { - _id: "test@user.com", - access: "admin" - }; - done(); -}); - -hooks.before("PUT /groups/{GroupId}/permissions/{UserId} -> 400", function(test, done) { - test.request.params = { - GroupId: group_id, - UserId:"test@user.com" - }; - test.request.body = { - _id: "test@user.com", - access: "rw", - not_a_real_property: "foo" - }; - done(); -}); - -hooks.before("DELETE /groups/{GroupId}/permissions/{UserId} -> 200", function(test, done) { - test.request.params = { - GroupId: group_id, - UserId: "test@user.com" - }; - done(); -}); - -hooks.before("POST /groups/{GroupId}/tags -> 200", function(test, done) { - test.request.params = { - GroupId: group_id - }; - test.request.body = { - "value":test_group_tag - }; - done(); -}); - -hooks.before("POST /groups/{GroupId}/tags -> 400", function(test, done) { - test.request.params = { - GroupId: group_id - }; - test.request.body = { - "value":test_group_tag, - "bad property": "foo" - }; - done(); -}); - -hooks.before("GET /groups/{GroupId}/tags/{TagValue} -> 200", function(test, done) { - test.request.params = { - GroupId: group_id, - TagValue: test_group_tag - }; - done(); -}); - -hooks.before("PUT /groups/{GroupId}/tags/{TagValue} -> 200", function(test, done) { - test.request.params = { - GroupId: group_id, - TagValue: test_group_tag - }; - test_group_tag = "a-new-tag"; - test.request.body = { - "value":test_group_tag - }; - done(); -}); - -hooks.before("PUT /groups/{GroupId}/tags/{TagValue} -> 400", function(test, done) { - test.request.params = { - GroupId: group_id, - TagValue: test_group_tag - }; - test.request.body = { - "value":test_group_tag, - "bad proeprty":"blah" - }; - done(); -}); - -hooks.before("DELETE /groups/{GroupId}/tags/{TagValue} -> 200", function(test, done) { - test.request.params = { - GroupId: group_id, - TagValue: test_group_tag - }; - done(); -}); - -hooks.before("GET /groups/{GroupId}/projects -> 200", function(test, done) { - test.request.params = { - GroupId: group_id - }; - done(); -}); - - -// set initial test_collection_1 -hooks.after("GET /collections -> 200", function(test, done) { - test_collection_1 = test.response.body[0]; - collection_id = test.response.body[0]._id; - delete_collection_id = test.response.body[1]._id; - done(); -}); - -hooks.before("GET /collections/{CollectionId} -> 200", function(test, done) { - test.request.params.CollectionId = collection_id; - done(); -}); - -// set detailed test_collection_1 (including analyses, that are omitted during listing) -hooks.after("GET /collections/{CollectionId} -> 200", function(test, done) { - test_collection_1 = test.response.body; - done(); -}); - -hooks.before("GET /collections/{CollectionId}/sessions -> 200", 
function(test, done) { - test.request.params.CollectionId = collection_id; - done(); -}); - -hooks.before("GET /collections/{CollectionId}/acquisitions -> 200", function(test, done) { - test.request.params.CollectionId = collection_id; - done(); -}); - -hooks.before("POST /collections -> 400", function(test, done) { - test.request.params.CollectionId = collection_id; - test.request.body.foo = "not an allowed property"; - done(); -}); - -hooks.before("PUT /collections/{CollectionId} -> 400", function(test, done) { - test.request.params.CollectionId = collection_id; - test.request.body.foo = "not an allowed property"; - done(); -}); - -hooks.before("DELETE /collections/{CollectionId} -> 200", function(test, done) { - test.request.params.CollectionId = delete_collection_id; - done(); -}); - -hooks.before("POST /collections/{CollectionId}/tags -> 200", function(test, done) { - test.request.params.CollectionId = collection_id; - test.request.body = { - "value":test_collection_tag - }; - done(); -}); - -hooks.before("POST /collections/{CollectionId}/tags -> 400", function(test, done) { - test.request.params.CollectionId = collection_id; - test.request.body = { - "value":"" - }; - done(); -}); - -hooks.before("GET /collections/{CollectionId}/tags/{TagValue} -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - TagValue : test_collection_tag - }; - done(); -}); - -hooks.before("PUT /collections/{CollectionId}/tags/{TagValue} -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - TagValue : test_collection_tag - }; - test_collection_tag = "new-tag-value"; - test.request.body = { - "value":test_collection_tag - }; - done(); -}); - -hooks.before("PUT /collections/{CollectionId}/tags/{TagValue} -> 400", function(test, done) { - test.request.params = { - CollectionId : collection_id, - TagValue : test_collection_tag - }; - test.request.body = { - "value":"" - }; - done(); -}); - -hooks.before("DELETE /collections/{CollectionId}/tags/{TagValue} -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - TagValue : test_collection_tag - }; - done(); -}); - -hooks.before("GET /collections/{CollectionId}/files/{FileName} -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - FileName : "notes.txt" - }; - test.request.query = { - "ticket":"" - }; - done(); -}); - -hooks.before("POST /collections/{CollectionId}/permissions -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id - }; - test.request.body = { - "_id":"test@user.com", - "access":"ro" - }; - done(); -}); - -hooks.before("POST /collections/{CollectionId}/permissions -> 400", function(test, done) { - test.request.params = { - CollectionId : collection_id - }; - test.request.body = { - "not a valid":"permissions entry" - }; - done(); -}); - -hooks.before("GET /collections/{CollectionId}/permissions/{UserId} -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - UserId: "test@user.com" - }; - done(); -}); - -hooks.before("PUT /collections/{CollectionId}/permissions/{UserId} -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - UserId: "test@user.com" - }; - test.request.body = { - "access":"rw", - "_id":"test@user.com" - }; - done(); -}); - -hooks.before("PUT /collections/{CollectionId}/permissions/{UserId} -> 400", function(test, done) { - test.request.params = { - CollectionId : collection_id, - 
UserId: "test@user.com" - }; - test.request.body = { - "not a valid":"permissions entry" - }; - done(); -}); - -hooks.before("DELETE /collections/{CollectionId}/permissions/{UserId} -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - UserId: "test@user.com" - }; - done(); -}); - -hooks.before("POST /collections/{CollectionId}/notes -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id - }; - done(); -}); - -hooks.before("POST /collections/{CollectionId}/notes -> 400", function(test, done) { - test.request.params = { - CollectionId : collection_id - }; - test.request.body = { - "not real":"property" - }; - done(); -}); - -hooks.before("GET /collections/{CollectionId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - NoteId: test_collection_1.notes[0]._id - }; - done(); -}); - -hooks.before("PUT /collections/{CollectionId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - NoteId: test_collection_1.notes[0]._id - }; - test.request.body = { - "text":"new note" - }; - done(); -}); - -hooks.before("PUT /collections/{CollectionId}/notes/{NoteId} -> 400", function(test, done) { - test.request.params = { - CollectionId : collection_id, - NoteId: test_collection_1.notes[0]._id - }; - test.request.body = { - "note a":"real property" - }; - done(); -}); - -hooks.before("DELETE /collections/{CollectionId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - NoteId: test_collection_1.notes[0]._id - }; - done(); -}); - -hooks.before("GET /collections/{CollectionId}/analyses/{AnalysisId} -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - AnalysisId: test_collection_1.analyses[0]._id - }; - done(); -}); - -hooks.before("DELETE /collections/{CollectionId}/analyses/{AnalysisId} -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - AnalysisId: test_collection_1.analyses[1]._id - }; - done(); -}); - -hooks.before("GET /collections/{CollectionId}/analyses/{AnalysisId}/files -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - AnalysisId: test_collection_1.analyses[0]._id - }; - test.request.query.ticket = ""; - done(); -}); - -hooks.before("GET /collections/{CollectionId}/analyses/{AnalysisId}/files/{Filename} -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - AnalysisId: test_collection_1.analyses[0]._id, - Filename: "test-1.dcm" - }; - test.request.query.ticket = ""; - done(); -}); - -hooks.before("POST /collections/{CollectionId}/analyses/{AnalysisId}/notes -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - AnalysisId: test_collection_1.analyses[0]._id - }; - done(); -}); - -hooks.before("POST /collections/{CollectionId}/analyses/{AnalysisId}/notes -> 400", function(test, done) { - test.request.params = { - CollectionId : collection_id, - AnalysisId: test_collection_1.analyses[0]._id - }; - test.request.body = { - "not a":"real property" - }; - done(); -}); - -hooks.before("DELETE /collections/{CollectionId}/analyses/{AnalysisId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - CollectionId : collection_id, - AnalysisId: test_collection_1.analyses[0]._id, - NoteId: test_collection_1.analyses[0].notes[0]._id - }; - done(); -}); - - -// set initial 
test_session_1 -hooks.after("GET /sessions -> 200", function(test, done) { - test_session_1 = test.response.body[0]; - assert.equal(test_session_1.label, "test-session-1"); - done(); -}); - -hooks.before("GET /sessions/{SessionId} -> 200", function(test, done) { - test.request.params.SessionId = test_session_1._id; - done(); -}); - -// set detailed test_session_1 (including analyses, that are omitted during listing) -hooks.after("GET /sessions/{SessionId} -> 200", function(test, done) { - test_session_1 = test.response.body; - done(); -}); - -hooks.after("GET /sessions/{SessionId} -> 200", function(test, done) { - test_session_1 = test.response.body; - assert.equal(test_session_1.label, "test-session-1"); - done(); -}); - -hooks.before("POST /sessions -> 200", function(test, done) { - test.request.body.project = test_session_1.project; - done(); -}); - -hooks.after("POST /sessions -> 200", function(test, done) { - test_session_2_id = test.response.body._id - done(); -}); - -hooks.before("POST /sessions -> 400", function(test, done) { - test.request.body.foo = "not an allowed property"; - test.request.body.project = test_session_1.project; - done(); -}); - -hooks.before("PUT /sessions/{SessionId} -> 200", function(test, done) { - test.request.params.SessionId = test_session_1._id; - test.request.body = { - project: test_session_1.project, - label: "new-label-test-session-1" - }; - done(); -}); - -hooks.before("PUT /sessions/{SessionId} -> 400", function(test, done) { - test.request.params.SessionId = test_session_1._id; - test.request.body = { - project: test_session_1.project, - "not_a_real_property": "new-label-test-session-1" - }; - done(); -}); - -hooks.before("DELETE /sessions/{SessionId} -> 200", function(test, done) { - test.request.params.SessionId = test_session_2_id; - done(); -}); - - -hooks.before("GET /sessions/{SessionId}/jobs -> 200", function(test, done) { - test.request.params.SessionId = test_session_1._id; - done(); -}); - -hooks.before("POST /sessions/{SessionId}/tags -> 200", function(test, done) { - test.request.params.SessionId = test_session_1._id; - test.request.body = { - value: test_session_tag - }; - done(); -}); - -hooks.before("POST /sessions/{SessionId}/tags -> 400", function(test, done) { - test.request.params.SessionId = test_session_1._id; - test.request.body = { - value: "" - }; - done(); -}); - -hooks.before("GET /sessions/{SessionId}/tags/{TagValue} -> 200", function(test, done) { - test.request.params = { - SessionId : test_session_1._id, - TagValue: test_session_tag - }; - done(); -}); - -hooks.before("PUT /sessions/{SessionId}/tags/{TagValue} -> 200", function(test, done) { - test.request.params = { - SessionId : test_session_1._id, - TagValue: test_session_tag - }; - test_session_tag = 'new-tag-value'; - test.request.body = { - value: test_session_tag - }; - done(); -}); - -hooks.before("PUT /sessions/{SessionId}/tags/{TagValue} -> 400", function(test, done) { - test.request.params = { - SessionId : test_session_1._id, - TagValue: test_session_tag - }; - test.request.body = { - value: "" - }; - done(); -}); - -hooks.before("DELETE /sessions/{SessionId}/tags/{TagValue} -> 200", function(test, done) { - test.request.params = { - SessionId : test_session_1._id, - TagValue: test_session_tag - }; - done(); -}); - -hooks.before("GET /sessions/{SessionId}/files/{FileName} -> 200", function(test, done) { - test.request.params = { - SessionId : test_session_1._id, - FileName : "notes.txt" - }; - test.request.query = { - "ticket":"" - }; - done(); -}); - 
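The after-hooks above stash response bodies (test_session_1, test_collection_1, and friends) so that later tests can fill in path parameters; since listings omit analyses, a second detail GET refreshes the stashed object. The same chaining in Python, assuming an authenticated `session` and `base` URL as in the earlier sketch:

    # List once, stash the first result, then refetch for the full document.
    sessions = session.get(base + '/sessions').json()
    test_session_1 = sessions[0]                  # what the list hook stores
    test_session_1 = session.get(base + '/sessions/' + test_session_1['_id']).json()
    # the detail response now includes analyses, which the listing omitted
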
-hooks.before("POST /sessions/{SessionId}/notes -> 200", function(test, done) { - test.request.params = { - SessionId : test_session_1._id - }; - test.request.body = { - "text":"test note" - }; - done(); -}); - -hooks.before("POST /sessions/{SessionId}/notes -> 400", function(test, done) { - test.request.params = { - SessionId : test_session_1._id - }; - test.request.body = { - "note a real":"property" - }; - done(); -}); - -hooks.before("GET /sessions/{SessionId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - SessionId : test_session_1._id, - NoteId: test_session_1.notes[0]._id - }; - done(); -}); - -hooks.before("PUT /sessions/{SessionId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - SessionId : test_session_1._id, - NoteId: test_session_1.notes[0]._id - }; - test.request.body = { - "text":"new note" - }; - done(); -}); - -hooks.before("PUT /sessions/{SessionId}/notes/{NoteId} -> 400", function(test, done) { - test.request.params = { - SessionId : test_session_1._id, - NoteId: test_session_1.notes[0]._id - }; - test.request.body = { - "not a real":"property" - }; - done(); -}); - -hooks.before("DELETE /sessions/{SessionId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - SessionId : test_session_1._id, - NoteId: test_session_1.notes[0]._id - }; - done(); -}); - -hooks.before("GET /sessions/{SessionId}/acquisitions -> 200", function(test, done) { - test.request.params.SessionId = test_session_1._id; - done(); -}); - -hooks.before("POST /sessions/{SessionId}/analyses -> 200", function(test, done) { - test.request.params.SessionId = test_session_1._id; - test.request.query = {"job":"true"}; - test.request.body = { - "analysis": { - "label": "Test Analysis 1" - }, - "job" : { - "gear": "test-case-gear", - "inputs": {}, - "tags": ["example"] - } - } - done(); -}); - -hooks.after("POST /sessions/{SessionId}/analyses -> 200", function(test, done) { - test_session_1_analysis_2_id = test.response.body._id; - done(); -}); - -hooks.before("GET /sessions/{SessionId}/analyses/{AnalysisId} -> 200", function(test, done) { - test.request.params.SessionId = test_session_1._id; - test.request.params.AnalysisId = test_session_1_analysis_2_id; - done(); -}); - -hooks.before("DELETE /sessions/{SessionId}/analyses/{AnalysisId} -> 200", function(test, done) { - test.request.params.SessionId = test_session_1._id; - test.request.params.AnalysisId = test_session_1_analysis_2_id; - done(); -}); - -hooks.before("GET /sessions/{SessionId}/analyses/{AnalysisId}/files -> 200", function(test, done) { - test.request.params.SessionId = test_session_1._id; - test.request.params.AnalysisId = test_session_1.analyses[0]._id; - test.request.query.ticket = ""; - done(); -}); - -hooks.before("GET /sessions/{SessionId}/analyses/{AnalysisId}/files/{Filename} -> 200", function(test, done) { - test.request.params.SessionId = test_session_1._id; - test.request.params.AnalysisId = test_session_1.analyses[0]._id; - test.request.params.Filename = "test-1.dcm"; - test.request.query.ticket = ""; - done(); -}); - -hooks.before("POST /sessions/{SessionId}/analyses/{AnalysisId}/notes -> 200", function(test, done) { - test.request.params.SessionId = test_session_1._id; - test.request.params.AnalysisId = test_session_1.analyses[0]._id; - done(); -}); - -hooks.before("POST /sessions/{SessionId}/analyses/{AnalysisId}/notes -> 400", function(test, done) { - test.request.params.SessionId = test_session_1._id; - test.request.params.AnalysisId = 
test_session_1.analyses[0]._id; - test.request.body = { - "not a":"real property" - }; - done(); -}); - -hooks.before("DELETE /sessions/{SessionId}/analyses/{AnalysisId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params.SessionId = test_session_1._id; - test.request.params.AnalysisId = test_session_1.analyses[0]._id; - test.request.params.NoteId = test_session_1.analyses[0].notes[0]._id; - done(); -}); - - - -// set initial test_acquisition_1 -hooks.after("GET /acquisitions -> 200", function(test, done) { - test_acquisition_1 = test.response.body[0]; - assert.equal(test_acquisition_1.label, "test-acquisition-1"); - done(); -}); - -hooks.before("GET /acquisitions/{AcquisitionId} -> 200", function(test, done) { - test.request.params.AcquisitionId = test_acquisition_1._id; - done(); -}); - -// set detailed test_acquisition_1 (including analyses, that are omitted during listing) -hooks.after("GET /acquisitions/{AcquisitionId} -> 200", function(test, done) { - test_acquisition_1 = test.response.body; - done(); -}); - -hooks.before("POST /acquisitions -> 200", function(test, done) { - test.request.body.session = test_session_1._id; - done(); -}); - -hooks.after("POST /acquisitions -> 200", function(test, done) { - example_acquisition_id = test.response.body._id; - done(); -}); - -hooks.before("POST /acquisitions -> 400", function(test, done) { - test.request.body.session = test_session_1._id; - test.request.body.foo = "bar"; - done(); -}); - -hooks.before("PUT /acquisitions/{AcquisitionId} -> 200", function(test, done) { - test.request.params.AcquisitionId = test_acquisition_1._id; - test.request.body = {"label":"test-acquisition-1-new-label"}; - done(); -}); - - -hooks.before("PUT /acquisitions/{AcquisitionId} -> 400", function(test, done) { - test.request.params.AcquisitionId = test_acquisition_1._id; - test.request.body = {"not-real":"an invalid property"}; - done(); -}); - -hooks.before("DELETE /acquisitions/{AcquisitionId} -> 200", function(test, done) { - test.request.params.AcquisitionId = example_acquisition_id; - done(); -}); - -hooks.before("POST /acquisitions/{AcquisitionId}/tags -> 200", function(test, done) { - test.request.params.AcquisitionId = test_acquisition_1._id; - test.request.body = { - "value": test_acquisition_tag - }; - done(); -}); - -hooks.before("POST /acquisitions/{AcquisitionId}/tags -> 400", function(test, done) { - test.request.params.AcquisitionId = test_acquisition_1._id; - test.request.body = { - "value": test_acquisition_tag, - "bad property": "not a real property" - }; - done(); -}); - -hooks.before("GET /acquisitions/{AcquisitionId}/tags/{TagValue} -> 200", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - TagValue : test_acquisition_tag - }; - done(); -}); - -hooks.before("PUT /acquisitions/{AcquisitionId}/tags/{TagValue} -> 200", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - TagValue : test_acquisition_tag - }; - test_acquisition_tag = "new-tag-value"; - test.request.body = { - "value": test_acquisition_tag - }; - done(); -}); - -hooks.before("PUT /acquisitions/{AcquisitionId}/tags/{TagValue} -> 400", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - TagValue : test_acquisition_tag - }; - test.request.body = { - "value": test_acquisition_tag, - "bad property": "not a real property" - }; - done(); -}); - -hooks.before("DELETE /acquisitions/{AcquisitionId}/tags/{TagValue} -> 200", function(test, done) { - 
test.request.params = { - AcquisitionId : test_acquisition_1._id, - TagValue : test_acquisition_tag - }; - done(); -}); - -hooks.before("GET /acquisitions/{AcquisitionId}/files/{FileName} -> 200", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - FileName : "test-1.dcm" - }; - test.request.query = { - "ticket":"" - }; - done(); -}); - -hooks.before("POST /acquisitions/{AcquisitionId}/notes -> 200", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id - }; - done(); -}); - -hooks.before("POST /acquisitions/{AcquisitionId}/notes -> 400", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id - }; - test.request.body.not_real = "invalid property"; - done(); -}); - -hooks.before("GET /acquisitions/{AcquisitionId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - NoteId: test_acquisition_1.notes[0]._id - }; - done(); -}); - -hooks.before("PUT /acquisitions/{AcquisitionId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - NoteId: test_acquisition_1.notes[0]._id - }; - test.request.body = { - "text":"updated note text" - }; - done(); -}); - -hooks.before("PUT /acquisitions/{AcquisitionId}/notes/{NoteId} -> 400", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - NoteId: test_acquisition_1.notes[0]._id - }; - test.request.body = { - "invalid property":"specified" - }; - done(); -}); - -hooks.before("DELETE /acquisitions/{AcquisitionId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - NoteId: test_acquisition_1.notes[0]._id - }; - done(); -}); - -hooks.before("GET /acquisitions/{AcquisitionId}/analyses/{AnalysisId} -> 200", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - AnalysisId: test_acquisition_1.analyses[0]._id - }; - done(); -}); - -hooks.before("DELETE /acquisitions/{AcquisitionId}/analyses/{AnalysisId} -> 200", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - AnalysisId: test_acquisition_1.analyses[1]._id - }; - done(); -}); - -hooks.before("GET /acquisitions/{AcquisitionId}/analyses/{AnalysisId}/files -> 200", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - AnalysisId: test_acquisition_1.analyses[0]._id - }; - test.request.query.ticket = ""; - done(); -}); - -hooks.before("GET /acquisitions/{AcquisitionId}/analyses/{AnalysisId}/files/{Filename} -> 200", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - AnalysisId: test_acquisition_1.analyses[0]._id, - Filename: "test-1.dcm" - }; - test.request.query.ticket = ""; - done(); -}); - -hooks.before("POST /acquisitions/{AcquisitionId}/analyses/{AnalysisId}/notes -> 200", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - AnalysisId: test_acquisition_1.analyses[0]._id - }; - done(); -}); - -hooks.before("POST /acquisitions/{AcquisitionId}/analyses/{AnalysisId}/notes -> 400", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - AnalysisId: test_acquisition_1.analyses[0]._id - }; - test.request.body = { - "not a":"real property" - }; - done(); -}); - -hooks.before("GET /acquisitions/{AcquisitionId}/analyses/{AnalysisId}/notes/{NoteId} -> 
200", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - AnalysisId: test_acquisition_1.analyses[0]._id, - NoteId: test_acquisition_1.analyses[0].notes[0]._id - }; - done(); -}); - -hooks.before("DELETE /acquisitions/{AcquisitionId}/analyses/{AnalysisId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - AcquisitionId : test_acquisition_1._id, - AnalysisId: test_acquisition_1.analyses[0]._id, - NoteId: test_acquisition_1.analyses[0].notes[0]._id - }; - done(); -}); - - -// set initial test_project_1 -hooks.after("GET /projects -> 200", function(test, done) { - test_project_1 = test.response.body[0]; - assert.equal(test_project_1.label, "test-project-1"); - done(); -}); - -hooks.after("POST /projects -> 200", function(test, done) { - delete_project_id = test.response.body._id; - done(); -}); - -hooks.before("POST /projects -> 400", function(test, done) { - test.request.body.not_real = "an invalid property"; - done(); -}); - -hooks.before("GET /projects/{ProjectId} -> 200", function(test, done) { - test.request.params.ProjectId = test_project_1._id; - done(); -}); - -// set detailed test_project_1 (including analyses, that are omitted during listing) -hooks.after("GET /projects/{ProjectId} -> 200", function(test, done) { - test_project_1 = test.response.body; - done(); -}); - -hooks.before("PUT /projects/{ProjectId} -> 400", function(test, done) { - test.request.params.ProjectId = test_project_1._id; - test.request.body = {"not_real":"fake property"}; - done(); -}); - -hooks.before("DELETE /projects/{ProjectId} -> 200", function(test, done) { - test.request.params.ProjectId = delete_project_id; - done(); -}); - -hooks.before("POST /projects/{ProjectId}/tags -> 200", function(test, done) { - test.request.params.ProjectId = test_project_1._id; - test.request.body = { - "value":test_project_tag - }; - done(); -}); - -hooks.before("POST /projects/{ProjectId}/tags -> 400", function(test, done) { - test.request.params.ProjectId = test_project_1._id; - test.request.body = { - "value":"" - }; - done(); -}); - -hooks.before("GET /projects/{ProjectId}/tags/{TagValue} -> 200", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id, - TagValue : test_project_tag - }; - done(); -}); - -hooks.before("PUT /projects/{ProjectId}/tags/{TagValue} -> 200", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id, - TagValue : test_project_tag - }; - test_project_tag = "new-tag-value"; - test.request.body = { - "value":test_project_tag - }; - done(); -}); - -hooks.before("PUT /projects/{ProjectId}/tags/{TagValue} -> 400", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id, - TagValue : test_project_tag - }; - test.request.body = { - "value":"" - }; - done(); -}); - -hooks.before("DELETE /projects/{ProjectId}/tags/{TagValue} -> 200", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id, - TagValue : test_project_tag - }; - done(); -}); - -hooks.before("GET /projects/{ProjectId}/files/{FileName} -> 200", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id, - FileName : "notes.txt" - }; - test.request.query = { - "ticket":"" - }; - done(); -}); - -hooks.before("POST /projects/{ProjectId}/permissions -> 200", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id - }; - test.request.body = { - "_id":"test@user.com", - "access":"ro" - }; - done(); -}); - -hooks.before("POST 
/projects/{ProjectId}/permissions -> 400", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id - }; - test.request.body = { - "not a valid":"permissions entry" - }; - done(); -}); - -hooks.before("GET /projects/{ProjectId}/permissions/{UserId} -> 200", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id, - UserId: "test@user.com" - }; - done(); -}); - -hooks.before("PUT /projects/{ProjectId}/permissions/{UserId} -> 200", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id, - UserId: "test@user.com" - }; - test.request.body = { - "access":"rw", - "_id":"test@user.com" - }; - done(); -}); - -hooks.before("PUT /projects/{ProjectId}/permissions/{UserId} -> 400", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id, - UserId: "test@user.com" - }; - test.request.body = { - "not a valid":"permissions entry" - }; - done(); -}); - -hooks.before("DELETE /projects/{ProjectId}/permissions/{UserId} -> 200", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id, - UserId: "test@user.com" - }; - done(); -}); - -hooks.before("POST /projects/{ProjectId}/notes -> 200", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id - }; - test.request.body = { - "text":"test note" - }; - done(); -}); - -hooks.before("POST /projects/{ProjectId}/notes -> 400", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id - }; - test.request.body = { - "not a real":"property" - }; - done(); -}); - -hooks.before("GET /projects/{ProjectId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id, - NoteId: test_project_1.notes[0]._id - }; - done(); -}); - -hooks.before("PUT /projects/{ProjectId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id, - NoteId: test_project_1.notes[0]._id - }; - test.request.body = { - "text":"updated note" - }; - done(); -}); - -hooks.before("PUT /projects/{ProjectId}/notes/{NoteId} -> 400", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id, - NoteId: test_project_1.notes[0]._id - }; - test.request.body = { - "not a real":"property" - }; - done(); -}); - -hooks.before("DELETE /projects/{ProjectId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - ProjectId : test_project_1._id, - NoteId: test_project_1.notes[0]._id - }; - done(); -}); - -hooks.before("GET /projects/{ProjectId}/sessions -> 200", function(test, done) { - test.request.params.ProjectId = test_project_1._id; - done(); -}); - -hooks.before("GET /projects/{ProjectId}/acquisitions -> 200", function(test, done) { - test.request.params.ProjectId = test_project_1._id; - done(); -}); - -hooks.before("GET /report/project -> 200", function(test, done) { - test.request.query = { - "projects":test_project_1._id - }; - done(); -}); - -hooks.before("GET /projects/{ProjectId}/analyses/{AnalysisId} -> 200", function(test, done) { - test.request.params = { - ProjectId: test_project_1._id, - AnalysisId: test_project_1.analyses[0]._id - }; - done(); -}); - -hooks.before("DELETE /projects/{ProjectId}/analyses/{AnalysisId} -> 200", function(test, done) { - test.request.params = { - ProjectId: test_project_1._id, - AnalysisId: test_project_1.analyses[1]._id - }; - done(); -}); - -hooks.before("GET /projects/{ProjectId}/analyses/{AnalysisId}/files -> 200", function(test, done) { - test.request.params = 
{ - ProjectId: test_project_1._id, - AnalysisId: test_project_1.analyses[0]._id - }; - test.request.query.ticket = ""; - done(); -}); - -hooks.before("GET /projects/{ProjectId}/analyses/{AnalysisId}/files/{Filename} -> 200", function(test, done) { - test.request.params = { - ProjectId: test_project_1._id, - AnalysisId: test_project_1.analyses[0]._id, - Filename: "test-1.dcm" - }; - test.request.query.ticket = ""; - done(); -}); - -hooks.before("POST /projects/{ProjectId}/analyses/{AnalysisId}/notes -> 200", function(test, done) { - test.request.params = { - ProjectId: test_project_1._id, - AnalysisId: test_project_1.analyses[0]._id - }; - done(); -}); - - -hooks.before("POST /projects/{ProjectId}/analyses/{AnalysisId}/notes -> 400", function(test, done) { - test.request.params = { - ProjectId: test_project_1._id, - AnalysisId: test_project_1.analyses[0]._id - }; - test.request.body = { - "not a":"real property" - } - done(); -}); - -hooks.before("DELETE /projects/{ProjectId}/analyses/{AnalysisId}/notes/{NoteId} -> 200", function(test, done) { - test.request.params = { - ProjectId: test_project_1._id, - AnalysisId: test_project_1.analyses[0]._id, - NoteId: test_project_1.analyses[0].notes[0]._id - }; - done(); -}); - -hooks.before("GET /devices/{DeviceId} -> 200", function(test, done) { - test.request.params.DeviceId = device_id; - done(); -}); - -hooks.before("GET /devices/{DeviceId} -> 404", function(test, done) { - test.request.params.DeviceId = 'bad_device_id'; - done(); -}); diff --git a/tests/integration_tests/abao/load_fixture.py b/tests/integration_tests/abao/load_fixture.py deleted file mode 100644 index 32c036bdc..000000000 --- a/tests/integration_tests/abao/load_fixture.py +++ /dev/null @@ -1,433 +0,0 @@ -import datetime -import json -import os - -import pymongo -import requests - - -# load required envvars w/ the same name -SCITRAN_CORE_DRONE_SECRET = os.environ['SCITRAN_CORE_DRONE_SECRET'] -SCITRAN_PERSISTENT_DB_URI = os.environ['SCITRAN_PERSISTENT_DB_URI'] -SCITRAN_SITE_API_URL = os.environ['SCITRAN_SITE_API_URL'] - - -class BaseUrlSession(requests.Session): - """Requests session subclass using core api's base url""" - def request(self, method, url, **kwargs): - return super(BaseUrlSession, self).request(method, SCITRAN_SITE_API_URL + url, **kwargs) - - -def main(): - abao_user = 'abao@user.com' - abao_api_key = 'XZpXI40Uk85eozjQkU1zHJ6yZHpix+j0mo1TMeGZ4dPzIqVPVGPmyfeK' - - as_drone = BaseUrlSession() - as_drone.headers.update({ - 'X-SciTran-Method': 'bootstrapper', - 'X-SciTran-Name': 'Bootstrapper', - 'X-SciTran-Auth': SCITRAN_CORE_DRONE_SECRET, - }) - - as_drone.post('/users', json={ - '_id': abao_user, - 'firstname': 'Abao', - 'lastname': 'User', - 'root': True, - }) - api_db = pymongo.MongoClient(SCITRAN_PERSISTENT_DB_URI).get_default_database() - api_db.apikeys.insert_one({ - '_id': abao_api_key, - 'created': datetime.datetime.utcnow(), - 'last_seen': None, - 'type': 'user', - 'uid': abao_user - }) - - as_root = BaseUrlSession() - as_root.headers.update({'Authorization': 'scitran-user {}'.format(abao_api_key)}) - as_root.params.update({'root': 'true'}) - - # create scitran group - r = as_root.post('/groups', json={'_id': 'scitran'}) - assert r.ok - - # create test-group - r = as_root.post('/groups', json={'_id': 'test-group'}) - assert r.ok - - # upload file to test-project-1/test-session-1/test-acquisition-1 - # depends on 'create test-group' - r = as_root.post('/upload/label', files={ - 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 
'rb')), - 'metadata': ('', json.dumps({ - 'group': { '_id': 'test-group' }, - 'project': { - 'label': 'test-project-1' - }, - 'session': { - 'label': 'test-session-1', - 'subject': { - 'age': 25, - 'sex': 'male', - 'firstname': 'xyz' - } - }, - 'acquisition': { - 'label': 'test-acquisition-1', - 'files': [{ 'name': 'test-1.dcm' }] - } - })) - }) - assert r.ok - - # list projects - # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' - r = as_root.get('/projects') - assert r.ok - assert r.json()[0]['label'] == 'test-project-1' - test_project = r.json()[0] - - # list sessions - # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' - r = as_root.get('/sessions') - assert r.ok - assert r.json()[0]['label'] == 'test-session-1' - test_session = r.json()[0] - - # list acquisitions for test-session-1 - # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' - r = as_root.get('/sessions/' + test_session['_id'] + '/acquisitions') - assert r.ok - assert r.json()[0]['label'] == 'test-acquisition-1' - test_acquisition = r.json()[0] - - # add test-case-gear - r = as_root.post('/gears/test-case-gear', json={ - 'category': 'converter', - 'gear': { - 'inputs': { - 'dicom': { - 'base': 'file', - 'type': { 'enum': [ 'wat' ] } - } - }, - 'maintainer': 'Example', - 'description': 'Example', - 'license': 'BSD-2-Clause', - 'author': 'Example', - 'url': 'https://example.example', - 'label': 'wat', - 'flywheel': '0', - 'source': 'https://example.example', - 'version': '0.0.1', - 'config': {}, - 'name': 'test-case-gear' - }, - 'exchange': { - 'git-commit': 'aex', - 'rootfs-hash': 'sha384:oy', - 'rootfs-url': 'https://example.example' - } - }) - assert r.ok - test_gear = r.json() - - # create test-collection-1 - r = as_root.post('/collections', json={ - 'label': 'test-collection-1' - }) - assert r.ok - test_collection = r.json() - - # add test-session-1 to test-collection-1 - # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' - # depends on 'create test-collection-1' - r = as_root.put('/collections/' + test_collection['_id'], json={ - 'contents':{ - 'operation': 'add', - 'nodes': [{ - 'level': 'session', - '_id': test_session['_id'] - }] - } - }) - assert r.ok - - # upload file to test-collection-1 - # depends on 'create test-collection-1' - r = as_root.post('/collections/' + test_collection['_id'] + '/files', files={ - 'file': ('notes.txt', open('tests/integration_tests/abao/test_files/notes.txt', 'rb')) - }) - assert r.ok - - # create test-collection-2 - r = as_root.post('/collections', json={ - 'label': 'test-collection-2' - }) - assert r.ok - - # upload file to test-project-1 - # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' - r = as_root.post('/projects/' + test_project['_id'] + '/files', files={ - 'file': ('notes.txt', open('tests/integration_tests/abao/test_files/notes.txt', 'rb')) - }) - assert r.ok - - # upload file to test-session-1 - # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' - r = as_root.post('/sessions/' + test_session['_id'] + '/files', files={ - 'file': ('notes.txt', open('tests/integration_tests/abao/test_files/notes.txt', 'rb')) - }) - assert r.ok - - # add a note to test-project-1 - # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' - r = as_root.post('/projects/' + test_project['_id'] + '/notes', json={ - 'text': 'test note' - }) - assert r.ok - - # add a note to test-session-1 - # depends on 'upload 
file to test-project-1/test-session-1/test-acquisition-1' - r = as_root.post('/sessions/' + test_session['_id'] + '/notes', json={ - 'text': 'test note' - }) - assert r.ok - - # add a note to test-acquisition-1 - # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' - r = as_root.post('/acquisitions/' + test_acquisition['_id'] + '/notes', json={ - 'text': 'test note' - }) - assert r.ok - - # add a note to test-collection-1 - # depends on 'create test-collection-1' - r = as_root.post('/collections/' + test_collection['_id'] + '/notes', json={ - 'text': 'test note' - }) - assert r.ok - - # create session 1 test-analysis-1 (job) - # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' - # depends on 'add test-case-gear' - r = as_root.post('/sessions/' + test_session['_id'] + '/analyses?job=true', json={ - 'analysis': { 'label': 'Test Analysis 1' }, - 'job': { - 'gear_id': test_gear['_id'], - 'inputs': { - 'dicom': { - 'type': 'acquisition', - 'id': test_acquisition['_id'], - 'name': 'test-1.dcm' - } - }, - 'tags': ['example'] - } - }) - assert r.ok - test_session_analysis = r.json() - - # create session 1 test-analysis (file upload) - # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' - r = as_root.post('/sessions/' + test_session['_id'] + '/analyses', files={ - 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), - 'metadata': ('', json.dumps({ - 'label': 'test analysis', - 'inputs': [ { 'name': 'test-1.dcm' } ] - })) - }) - assert r.ok - test_session_analysis_upload = r.json() - - # delete session 1 test analysis (file upload) - # depends on 'create session 1 test-analysis (file upload)' - r = as_root.delete('/sessions/' + test_session['_id'] + '/analyses/' + test_session_analysis_upload['_id']) - assert r.ok - - # create acquisition 1 test-analysis (file upload) - # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' - r = as_root.post('/acquisitions/' + test_acquisition['_id'] + '/analyses', files={ - 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), - 'metadata': ('', json.dumps({ - 'label': 'test analysis', - 'inputs': [ { 'name': 'test-1.dcm' } ] - })) - }) - assert r.ok - test_acquisition_analysis_upload = r.json() - - # create acquisition 1 test-analysis 2 (file upload) - # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' - r = as_root.post('/acquisitions/' + test_acquisition['_id'] + '/analyses', files={ - 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), - 'metadata': ('', json.dumps({ - 'label': 'test analysis', - 'inputs': [ { 'name': 'test-1.dcm' } ] - })) - }) - assert r.ok - - # create collection 1 test-analysis (file upload) - # depends on 'create test-collection-1' - r = as_root.post('/collections/' + test_collection['_id'] + '/analyses', files={ - 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), - 'metadata': ('', json.dumps({ - 'label': 'test analysis', - 'inputs': [ { 'name': 'test-1.dcm' } ] - })) - }) - assert r.ok - test_collection_analysis_upload = r.json() - - # create collection 1 test-analysis 2 (file upload) - # depends on 'create test-collection-1' - r = as_root.post('/collections/' + test_collection['_id'] + '/analyses', files={ - 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), - 'metadata': ('', json.dumps({ - 'label': 'test analysis 2', 
- 'inputs': [ { 'name': 'test-1.dcm' } ] - })) - }) - assert r.ok - - # create project 1 test-analysis (file upload) - # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' - r = as_root.post('/projects/' + test_project['_id'] + '/analyses', files={ - 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), - 'metadata': ('', json.dumps({ - 'label': 'test analysis', - 'inputs': [ { 'name': 'test-1.dcm' } ] - })) - }) - assert r.ok - test_project_analysis_upload = r.json() - - # create project 1 test-analysis 2 (file upload) - # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' - r = as_root.post('/projects/' + test_project['_id'] + '/analyses', files={ - 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), - 'metadata': ('', json.dumps({ - 'label': 'test analysis', - 'inputs': [ { 'name': 'test-1.dcm' } ] - })) - }) - assert r.ok - - # add a note to test-acquisition-1 test-analysis-1 - # depends on 'create acquisition 1 test-analysis (file upload)' - r = as_root.post('/acquisitions/' + test_acquisition['_id'] + '/analyses/' + test_acquisition_analysis_upload['_id'] + '/notes', json={ - 'text': 'test note' - }) - assert r.ok - - # add a note to test-collection-1 test-analysis-1 - # depends on 'create test-collection-1' - r = as_root.post('/collections/' + test_collection['_id'] + '/analyses/' + test_collection_analysis_upload['_id'] + '/notes', json={ - 'text': 'test note' - }) - assert r.ok - - # add a note to test-session-1 test-analysis-1 - # depends on 'create session 1 test-analysis (file upload)' - r = as_root.post('/sessions/' + test_session['_id'] + '/analyses/' + test_session_analysis['_id'] + '/notes', json={ - 'text': 'test note' - }) - assert r.ok - - # add a note to test-project-1 test-analysis-1 - # depends on 'create project 1 test-analysis (file upload)' - r = as_root.post('/projects/' + test_project['_id'] + '/analyses/' + test_project_analysis_upload['_id'] + '/notes', json={ - 'text': 'test note' - }) - assert r.ok - - # create project - r = as_root.post('/projects', json={ - 'group': 'test-group', - 'label': 'Project with template', - 'public': False - }) - assert r.ok - st_project = r.json() - - # create compliant session - # depends on 'create project' - r = as_root.post('/sessions', json={ - 'subject': { 'code': 'ex8945' }, - 'label': 'Compliant Session', - 'project': st_project['_id'], - 'public': False - }) - assert r.ok - st_compliant_session = r.json() - - # create non-compliant session - # depends on 'create project' - r = as_root.post('/sessions', json={ - 'subject': { 'code': 'ex9849' }, - 'label': 'Non-compliant Session', - 'project': st_project['_id'], - 'public': False - }) - assert r.ok - st_noncompliant_session = r.json() - - # create acquisition-1 for compliant session - # depends on 'create compliant session' - r = as_root.post('/acquisitions', json={ - 'label': 'c-acquisition-1-t1', - 'session': st_compliant_session['_id'], - 'public': False - }) - assert r.ok - - # create acquisition-2 for compliant session - # depends on 'create compliant session' - r = as_root.post('/acquisitions', json={ - 'label': 'c-acquisition-2-t1', - 'session': st_compliant_session['_id'], - 'public': False - }) - assert r.ok - - # create acquisition-1 for noncompliant session - # depends on 'create non-compliant session' - r = as_root.post('/acquisitions', json={ - 'label': 'nc-acquisition-1-t1', - 'session': st_noncompliant_session['_id'], - 'public': 
False - }) - assert r.ok - - # add project template - r = as_root.post('/projects/' + st_project['_id'] + '/template', json={ - 'session': { 'subject': { 'code' : '^ex' } }, - 'acquisitions': [{ - 'label': 't1', - 'minimum': 2 - }] - }) - assert r.ok - assert r.json()['modified'] == 1 - - # create acquisition-2 for noncompliant session - # depends on 'create non-compliant session' - r = as_root.post('/acquisitions', json={ - 'label': 'nc-acquisition-2-t1', - 'session': st_noncompliant_session['_id'], - 'public': False - }) - assert r.ok - - # update session 2 to be non-compliant - # depends on 'create non-compliant session' - r = as_root.put('/sessions/' + st_noncompliant_session['_id'], json={ - 'subject': { 'code': 'bad-subject-code' } - }) - assert r.ok - - -if __name__ == '__main__': - main() diff --git a/tests/integration_tests/abao/test_files/engine-analyses-1.txt b/tests/integration_tests/abao/test_files/engine-analyses-1.txt deleted file mode 100644 index 6686dd9ae..000000000 --- a/tests/integration_tests/abao/test_files/engine-analyses-1.txt +++ /dev/null @@ -1 +0,0 @@ -Replace this with something more realistic diff --git a/tests/integration_tests/abao/test_files/notes.txt b/tests/integration_tests/abao/test_files/notes.txt deleted file mode 100644 index 907b30816..000000000 --- a/tests/integration_tests/abao/test_files/notes.txt +++ /dev/null @@ -1 +0,0 @@ -blah diff --git a/tests/integration_tests/abao/test_files/test-1.dcm b/tests/integration_tests/abao/test_files/test-1.dcm deleted file mode 100644 index f088aa8f9..000000000 Binary files a/tests/integration_tests/abao/test_files/test-1.dcm and /dev/null differ diff --git a/tests/integration_tests/package.json b/tests/integration_tests/package.json deleted file mode 100644 index ea666c398..000000000 --- a/tests/integration_tests/package.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "name": "scitran-core-integration-tests", - "version": "1.0.0", - "description": "SciTran Core integration test node dependencies", - "dependencies": { - "abao":"git+https://github.com/flywheel-io/abao.git#better-jsonschema-ref", - "chai": "~3.5.0" - }, - "bin":{ - "abao":"node_modules/.bin/abao" - } -} diff --git a/tests/integration_tests/python/test_upgrades.py b/tests/integration_tests/python/test_upgrades.py index 748986e58..212170250 100644 --- a/tests/integration_tests/python/test_upgrades.py +++ b/tests/integration_tests/python/test_upgrades.py @@ -9,8 +9,8 @@ def database(mocker): bin_path = os.path.join(os.getcwd(), 'bin') mocker.patch('sys.path', [bin_path] + sys.path) - import database - return database + import db_upgrade + return db_upgrade def test_42(data_builder, api_db, as_admin, database): diff --git a/tests/integration_tests/requirements-integration-test.txt b/tests/requirements.txt similarity index 100% rename from tests/integration_tests/requirements-integration-test.txt rename to tests/requirements.txt diff --git a/tests/unit_tests/python/test_db_upgrade.py b/tests/unit_tests/python/test_db_upgrade.py index dd9988480..9cb793559 100644 --- a/tests/unit_tests/python/test_db_upgrade.py +++ b/tests/unit_tests/python/test_db_upgrade.py @@ -6,51 +6,51 @@ bin_path = os.path.join(os.getcwd(), "bin") sys.path.insert(0, bin_path) -import database +import db_upgrade from api import config -CDV = database.CURRENT_DATABASE_VERSION +CDV = db_upgrade.CURRENT_DATABASE_VERSION def test_all_upgrade_scripts_exist(): for i in range(1, CDV): script_name = 'upgrade_to_{}'.format(i) - assert hasattr(database, script_name) + assert hasattr(db_upgrade, 
script_name) def test_CDV_was_bumped(): script_name = 'upgrade_to_{}'.format(CDV+1) - assert hasattr(database, script_name) is False + assert hasattr(db_upgrade, script_name) is False @patch('api.config.get_version', Mock(return_value={'database': 5})) def test_get_db_version_from_config(): - assert database.get_db_version() == 5 + assert db_upgrade.get_db_version() == 5 @pytest.fixture(scope='function') -def database_mock_setup(): +def db_upgrade_mock_setup(): setattr(config.db.singletons, 'update_one', Mock()) for i in range(1, CDV): script_name = 'upgrade_to_{}'.format(i) - setattr(database, script_name, Mock()) + setattr(db_upgrade, script_name, Mock()) -@patch('database.get_db_version', Mock(return_value=0)) -def test_all_upgrade_scripts_ran(database_mock_setup): +@patch('db_upgrade.get_db_version', Mock(return_value=0)) +def test_all_upgrade_scripts_ran(db_upgrade_mock_setup): with pytest.raises(SystemExit): - database.upgrade_schema() + db_upgrade.upgrade_schema() for i in range(1, CDV): script_name = 'upgrade_to_{}'.format(i) - assert getattr(database, script_name).called + assert getattr(db_upgrade, script_name).called -@patch('database.get_db_version', Mock(return_value=CDV-4)) -def test_necessary_upgrade_scripts_ran(database_mock_setup): +@patch('db_upgrade.get_db_version', Mock(return_value=CDV-4)) +def test_necessary_upgrade_scripts_ran(db_upgrade_mock_setup): with pytest.raises(SystemExit): - database.upgrade_schema() + db_upgrade.upgrade_schema() # Assert the necessary scripts were called for i in range(CDV-3, CDV): script_name = 'upgrade_to_{}'.format(i) - assert getattr(database, script_name).called + assert getattr(db_upgrade, script_name).called # But not the scripts before it for i in range(1, CDV-4): script_name = 'upgrade_to_{}'.format(i) - assert getattr(database, script_name).called is False + assert getattr(db_upgrade, script_name).called is False
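Both test modules touched above import the upgrade script by path rather than as a package: bin/db_upgrade.py (formerly bin/database.py) is a plain script, so the tests prepend bin/ to sys.path before importing it. A minimal sketch of that pattern, assuming it runs from the repository root:

    import os
    import sys

    # bin/ holds db_upgrade.py; it is a script, not an installed package
    sys.path.insert(0, os.path.join(os.getcwd(), 'bin'))

    import db_upgrade  # was `import database` before this rename

    print(db_upgrade.CURRENT_DATABASE_VERSION)
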