Skip to content

Commit

Permalink
Merge branch 'master' into issue-289
Browse files Browse the repository at this point in the history
  • Loading branch information
mattwelborn committed Aug 15, 2019
2 parents a2b4c8a + 63366db commit 40f7293
Show file tree
Hide file tree
Showing 70 changed files with 1,038 additions and 1,061 deletions.
2 changes: 1 addition & 1 deletion .codecov.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ coverage:
ignore:
- */tests/*
- qcfractal/dashboard/* # early state
- qcfractal/migrations/* # difficult to test
- qcfractal/alembic/* # difficult to test
- qcfractal/_version.py
- setup.py
status:
Expand Down
1 change: 1 addition & 0 deletions .lgtm.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ path_classifiers:
- versioneer.py # Set Versioneer.py to an external "library" (3rd party code)
- devtools/*
- qcfractal/dashboard/* # Very early state, some conditions forcing LGTM issues
- qcfractal/alembic/* # One-shot, from templates
generated:
- qcfractal/_version.py
queries:
Expand Down
6 changes: 4 additions & 2 deletions devtools/conda-envs/adapters.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ channels:
dependencies:
- python
- numpy
- msgpack-python>=0.6.1
- pandas
- tornado
- requests
Expand All @@ -17,6 +18,7 @@ dependencies:
- psycopg2>=2.7
- postgresql
- alembic
- tqdm

# Test depends
- pytest
Expand All @@ -33,8 +35,8 @@ dependencies:
- parsl>=0.8.0

# QCArchive includes
- qcengine>=0.8.2
- qcelemental>=0.5.0
- qcengine>=0.9.0
- qcelemental>=0.6.0

# Pip includes
- pip:
Expand Down
6 changes: 4 additions & 2 deletions devtools/conda-envs/base.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ channels:
dependencies:
- python
- numpy
- msgpack-python>=0.6.1
- pandas
- tornado
- requests
Expand All @@ -17,12 +18,13 @@ dependencies:
- psycopg2>=2.7
- postgresql
- alembic
- tqdm

# Test depends
- pytest
- pytest-cov
- codecov

# QCArchive includes
- qcengine>=0.8.2
- qcelemental>=0.5.0
- qcengine>=0.9.0
- qcelemental>=0.6.0
2 changes: 2 additions & 0 deletions devtools/conda-envs/dev_head.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ channels:
dependencies:
- python
- numpy
- msgpack-python>=0.6.1
- pandas
- tornado
- requests
Expand All @@ -17,6 +18,7 @@ dependencies:
- psycopg2>=2.7
- postgresql
- alembic
- tqdm

# Test depends
- pytest
Expand Down
4 changes: 3 additions & 1 deletion devtools/conda-envs/generate_envs.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
dependencies:
- python
- numpy
- msgpack-python>=0.6.1
- pandas
- tornado
- requests
Expand All @@ -27,13 +28,14 @@
- psycopg2>=2.7
- postgresql
- alembic
- tqdm
# Test depends
- pytest
- pytest-cov
- codecov
"""
qca_ecosystem_template = ["qcengine>=0.8.2", "qcelemental>=0.5.0"]
qca_ecosystem_template = ["qcengine>=0.9.0", "qcelemental>=0.6.0"]

pip_depends_template = []

Expand Down
6 changes: 4 additions & 2 deletions devtools/conda-envs/openff.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ channels:
dependencies:
- python
- numpy
- msgpack-python>=0.6.1
- pandas
- tornado
- requests
Expand All @@ -18,6 +19,7 @@ dependencies:
- psycopg2>=2.7
- postgresql
- alembic
- tqdm

# Test depends
- pytest
Expand All @@ -31,5 +33,5 @@ dependencies:
- torsiondrive

# QCArchive includes
- qcengine>=0.8.2
- qcelemental>=0.5.0
- qcengine>=0.9.0
- qcelemental>=0.6.0
60 changes: 41 additions & 19 deletions devtools/scripts/create_staging.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,18 @@
"""

from qcfractal.storage_sockets import storage_socket_factory
from qcfractal.storage_sockets.models import (BaseResultORM, ResultORM, CollectionORM,
from qcfractal.storage_sockets.sql_models import (BaseResultORM, ResultORM, CollectionORM,
OptimizationProcedureORM, GridOptimizationProcedureORM,
TorsionDriveProcedureORM, TaskQueueORM)
from qcfractal.interface.models import (ResultRecord, OptimizationRecord,
TorsionDriveRecord, GridOptimizationRecord)

# production_uri = "postgresql+psycopg2://qcarchive:mypass@localhost:5432/test_qcarchivedb"
production_uri = "postgresql+psycopg2://postgres:@localhost:11711/qcarchivedb"
staging_uri = "postgresql+psycopg2://qcarchive:mypass@localhost:5432/staging_qcarchivedb"
staging_uri = "postgresql+psycopg2://localhost:5432/staging_qcarchivedb"
SAMPLE_SIZE = 0.0001 # 0.1 is 10%
MAX_LIMIT = 10000
VERBOSE = False


def connect_to_DBs(staging_uri, production_uri, max_limit):
Expand All @@ -34,7 +35,7 @@ def connect_to_DBs(staging_uri, production_uri, max_limit):
def get_number_to_copy(total_size, sample_size):
to_copy = int(total_size*sample_size)
if to_copy:
return to_copy
return max(to_copy, 10)
else:
return 1 # avoid zero because zero means no limit in storage

Expand All @@ -48,12 +49,15 @@ def copy_molecules(staging_storage, prod_storage, prod_ids):
print('----Total # of Molecules to copy: ', len(prod_ids))

ret = prod_storage.get_molecules(id=prod_ids)
print('Get from prod:', ret)
if VERBOSE:
print('Get from prod:', ret)
staging_ids = staging_storage.add_molecules(ret['data'])
print('Add to staging:', staging_ids)
if VERBOSE:
print('Add to staging:', staging_ids)

map = {m1: m2 for m1, m2 in zip(prod_ids, staging_ids['data'])}
print('MAP: ', map)
if VERBOSE:
print('MAP: ', map)

print('---- Done copying molecules\n\n')

Expand All @@ -71,12 +75,14 @@ def copy_keywords(staging_storage, prod_storage, prod_ids):


ret = prod_storage.get_keywords(id=prod_ids)
print('Get from prod:', ret)
if VERBOSE:
print('Get from prod:', ret)
staging_ids = staging_storage.add_keywords(ret['data'])
print('Add to staging:', staging_ids)

map = {m1: m2 for m1, m2 in zip(prod_ids, staging_ids['data'])}
print('MAP: ', map)
if VERBOSE:
print('MAP: ', map)

print('---- Done copying keywords\n\n')

Expand All @@ -94,12 +100,15 @@ def copy_kv_store(staging_storage, prod_storage, prod_ids):


ret = prod_storage.get_kvstore(id=prod_ids)
print('Get from prod:', ret)
if VERBOSE:
print('Get from prod:', ret)
staging_ids = staging_storage.add_kvstore(ret['data'].values())
print('Add to staging:', staging_ids)
if VERBOSE:
print('Add to staging:', staging_ids)

map = {m1: m2 for m1, m2 in zip(prod_ids, staging_ids['data'])}
print('MAP: ', map)
if VERBOSE:
print('MAP: ', map)

print('---- Done copying KV_store \n\n')

Expand All @@ -113,7 +122,8 @@ def copy_users(staging_storage, prod_storage):
print('-----Total # of Users in the DB is: ', len(prod_users))

sql_insered = staging_storage._copy_users(prod_users)['data']
print('Inserted in SQL:', len(sql_insered))
if VERBOSE:
print('Inserted in SQL:', len(sql_insered))


print('---- Done copying Users\n\n')
Expand All @@ -130,7 +140,8 @@ def copy_managers(staging_storage, prod_storage, mang_list):


sql_insered = staging_storage._copy_managers(prod_mangers)['data']
print('Inserted in SQL:', len(sql_insered))
if VERBOSE:
print('Inserted in SQL:', len(sql_insered))

print('---- Done copying Queue Manager\n\n')

Expand All @@ -149,7 +160,8 @@ def copy_collections(staging_storage, production_storage, SAMPLE_SIZE=0):
for col in prod_results:
ret = staging_storage.add_collection(col)['data']
sql_insered += 1
print('Inserted in SQL:', sql_insered)
if VERBOSE:
print('Inserted in SQL:', sql_insered)

print('---- Done copying Collections\n\n')

Expand Down Expand Up @@ -204,7 +216,8 @@ def copy_results(staging_storage, production_storage, SAMPLE_SIZE=0, results_ids

results_py = [ResultRecord(**res) for res in prod_results]
staging_ids = staging_storage.add_results(results_py)['data']
print('Inserted in SQL:', len(staging_ids))
if VERBOSE:
print('Inserted in SQL:', len(staging_ids))

print('---- Done copying Results\n\n')

Expand Down Expand Up @@ -265,7 +278,8 @@ def copy_optimization_procedure(staging_storage, production_storage, SAMPLE_SIZE

procedures_py = [OptimizationRecord(**proc) for proc in prod_proc]
staging_ids = staging_storage.add_procedures(procedures_py)['data']
print('Inserted in SQL:', len(staging_ids))
if VERBOSE:
print('Inserted in SQL:', len(staging_ids))

print('---- Done copying Optimization procedures\n\n')

Expand Down Expand Up @@ -325,7 +339,8 @@ def copy_torsiondrive_procedure(staging_storage, production_storage, SAMPLE_SIZE

procedures_py = [TorsionDriveRecord(**proc) for proc in prod_proc]
staging_ids = staging_storage.add_procedures(procedures_py)['data']
print('Inserted in SQL:', len(staging_ids))
if VERBOSE:
print('Inserted in SQL:', len(staging_ids))

print('---- Done copying Torsiondrive procedures\n\n')

Expand Down Expand Up @@ -450,7 +465,8 @@ def copy_task_queue(staging_storage, production_storage, SAMPLE_SIZE=None):
raise Exception('Result not found!', rec.base_result.id)

staging_ids = staging_storage._copy_task_to_queue(prod_tasks)['data']
print('Inserted in SQL:', len(staging_ids))
if VERBOSE:
print('Inserted in SQL:', len(staging_ids))

print('---- Done copying Task Queue\n\n')

Expand Down Expand Up @@ -488,6 +504,11 @@ def main():
print('Exit without creating the DB.')
return

# Copy metadata
#with production_storage.session_scope() as session:
# alembic = session.execute("select * from alembic_version")
# version = alembic.first()[0]

# copy all users, small tables, no need for sampling
copy_users(staging_storage, production_storage)

Expand All @@ -514,5 +535,6 @@ def main():
copy_alembic(staging_storage, production_storage)



if __name__ == "__main__":
main()
main()
2 changes: 1 addition & 1 deletion docs/qcfractal/source/setup_quickstart.rst
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ You may optionally provide a TLS certificate to enable host verification for the
using the ``--tls-cert`` and ``--tls-key`` options.
If a TLS certificate is not provided, communications with the server will still be encrypted,
but host verification will be unavailable
(and :term:`Managers <Manager>` and clients will need to specify ``--verify False``).
(and :term:`Managers <Manager>` and clients will need to specify ``verify=False``).

Next, add users for admin, the :term:`Manager`, and a user
(you may choose whatever usernames you like)::
Expand Down
1 change: 1 addition & 0 deletions qcfractal/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from .storage_sockets import storage_socket_factory

# Handle top level object imports
from .postgres_harness import PostgresHarness, TemporaryPostgres
from .server import FractalServer
from .snowflake import FractalSnowflake, FractalSnowflakeHandler
from .queue import QueueManager
Expand Down
34 changes: 34 additions & 0 deletions qcfractal/alembic/versions/05ceea11b78a_base_records_msgpack_2.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
"""Msgpack Base Results Phase 2
Revision ID: 05ceea11b78a
Revises: 8b0cd9accaf2
Create Date: 2019-08-11 22:30:51.453746
"""
from alembic import op
import sqlalchemy as sa

import os
import sys
sys.path.insert(1, os.path.dirname(os.path.abspath(__file__)))
from migration_helpers import msgpack_migrations
from qcelemental.util import msgpackext_dumps, msgpackext_loads

# revision identifiers, used by Alembic.
revision = '05ceea11b78a'
down_revision = '8b0cd9accaf2'
branch_labels = None
depends_on = None

table_name = "base_result"
update_columns = {"extras"}

nullable = {"extras"}


def upgrade():
    """Apply phase 2 of the JSON -> msgpack conversion for ``base_result``.

    Delegates to the shared migration helper, which alters the columns
    listed in ``update_columns`` (keeping those in ``nullable`` nullable).
    """
    msgpack_migrations.json_to_msgpack_table_altercolumns(
        table_name,
        update_columns,
        nullable_true=nullable,
    )


def downgrade():
    """Refuse to downgrade.

    The JSON -> msgpack conversion is one-way: the original JSON columns
    are gone after the upgrade, so a reverse migration is impossible.
    """
    raise ValueError("Cannot downgrade json to msgpack conversions")
33 changes: 33 additions & 0 deletions qcfractal/alembic/versions/1134312ad4a3_results_msgpack_2.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
"""Msgpack Results Phase 2
Revision ID: 1134312ad4a3
Revises: 84c94a48e491
Create Date: 2019-08-11 17:21:43.328492
"""
from alembic import op
import sqlalchemy as sa

import os
import sys
sys.path.insert(1, os.path.dirname(os.path.abspath(__file__)))
from migration_helpers import msgpack_migrations

# revision identifiers, used by Alembic.
revision = '1134312ad4a3'
down_revision = '84c94a48e491'
branch_labels = None
depends_on = None

table_name = "result"
update_columns = {"return_result"}

nullable = {"return_result"}


def upgrade():
    """Apply phase 2 of the JSON -> msgpack conversion for ``result``.

    Delegates to the shared migration helper, which alters the columns
    listed in ``update_columns`` (keeping those in ``nullable`` nullable).
    """
    msgpack_migrations.json_to_msgpack_table_altercolumns(
        table_name,
        update_columns,
        nullable_true=nullable,
    )


def downgrade():
    """Refuse to downgrade.

    The JSON -> msgpack conversion is one-way: the original JSON columns
    are gone after the upgrade, so a reverse migration is impossible.
    """
    raise ValueError("Cannot downgrade json to msgpack conversions")
Loading

0 comments on commit 40f7293

Please sign in to comment.