Commit ff9cea8

V2.1.0 add vscode cloud editor (#69)

* ADD working first pass at kube theia and proxy (through kube)

* WIP

* fix stats response urls

* add full submission stats endpoint

* stability improvement for pipeline initialization

* THEIA POC

* ADD minimum full flow for theia components

* ADD theia instructions, and stronger theia resource reaping

* FINAL polish for 2.1.0
wabscale authored Dec 15, 2020
1 parent 838b66f commit ff9cea8
Showing 105 changed files with 5,233 additions and 1,876 deletions.
1 change: 0 additions & 1 deletion Makefile
@@ -42,7 +42,6 @@ check:
.PHONY: build # Build all docker images
build:
docker-compose build --parallel $(BUILD_ALLWAYS)
./pipeline/build.sh

.PHONY: push # Push images to registry.osiris.services (requires vpn)
push: build
2 changes: 1 addition & 1 deletion api/Dockerfile
@@ -6,7 +6,7 @@ WORKDIR /opt/app

COPY requirements.txt requirements.txt

RUN apk add --update --no-cache mariadb-client git curl tzdata gcc py3-gevent \
RUN apk add --update --no-cache mariadb-client git curl tzdata gcc musl-dev \
&& pip3 install -r ./requirements.txt \
&& adduser -D anubis \
&& chown anubis:anubis -R /opt/app \
2 changes: 1 addition & 1 deletion api/Makefile
@@ -2,7 +2,7 @@
all: venv

venv:
virtualenv -p `which python3` venv
virtualenv -p `which python3.8` venv
./venv/bin/pip install -r ./requirements.txt


11 changes: 3 additions & 8 deletions api/anubis/app.py
@@ -4,10 +4,6 @@
from flask import Flask
from anubis.utils.logger import logger

logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())


def init_services(app):
"""
Initialize app with redis cache, mariadb database, and ELK services
@@ -37,12 +33,11 @@ def init_services(app):
def index():
return 'Hello there...!'

# Make app logger anubis logger
app.logger = logger

# Add ELK stuff
if not config.DISABLE_ELK:
# Add logstash handler
logger.addHandler(logstash.LogstashHandler('logstash', 5000))
app.logger = logger

# Add elastic global error handler
add_global_error_handler(app)
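
This hunk moves the logger wiring out of module import time and into init_services, attaching the Logstash handler only when the ELK stack is enabled. A minimal sketch of the same pattern, reusing python-logstash's logstash.LogstashHandler and the DISABLE_ELK flag shown above (attach_logging is a hypothetical name):

    import logstash

    from anubis.utils.logger import logger

    def attach_logging(app, config):
        # Reuse the shared anubis logger as the Flask app logger
        app.logger = logger

        # Ship logs to logstash only when the ELK stack is enabled
        if not config.DISABLE_ELK:
            logger.addHandler(logstash.LogstashHandler('logstash', 5000))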

15 changes: 13 additions & 2 deletions api/anubis/config.py
@@ -17,10 +17,15 @@ class Config:
OAUTH_CONSUMER_KEY = ''
OAUTH_CONSUMER_SECRET = ''

ADMINS = os.environ.get('ADMINS', '[email protected]')

# Cache config
CACHE_REDIS_HOST = 'redis'

# Logger config
LOGGER_NAME = os.environ.get('LOGGER_NAME', default='anubis-api')

# Theia config
THEIA_DOMAIN = os.environ.get('THEIA_DOMAIN', default='ide.anubis.osiris.services')

def __init__(self):
self.DEBUG = os.environ.get('DEBUG', default='0') == '1'
self.SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URI',
@@ -35,6 +40,12 @@ def __init__(self):
# Redis
self.CACHE_REDIS_HOST = os.environ.get('CACHE_REDIS_HOST', default='redis')

# Logger
self.LOGGER_NAME = os.environ.get('LOGGER_NAME', default='anubis-api')

# Theia
self.THEIA_DOMAIN = os.environ.get('THEIA_DOMAIN', default='ide.anubis.osiris.services')

logging.info('Starting with DATABASE_URI: {}'.format(
self.SQLALCHEMY_DATABASE_URI))
logging.info('Starting with SECRET_KEY: {}'.format(self.SECRET_KEY))
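
The new LOGGER_NAME and THEIA_DOMAIN settings follow the existing pattern: a class-level default plus an environment override read again in __init__. A small sketch of exercising the overrides, using hypothetical values:

    import os

    # Hypothetical overrides, set before the app builds its config
    os.environ['LOGGER_NAME'] = 'anubis-api-dev'
    os.environ['THEIA_DOMAIN'] = 'ide.example.com'

    config = Config()
    assert config.LOGGER_NAME == 'anubis-api-dev'
    assert config.THEIA_DOMAIN == 'ide.example.com'
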
70 changes: 63 additions & 7 deletions api/anubis/models/__init__.py
@@ -62,15 +62,14 @@ class Class_(db.Model):

@property
def total_assignments(self):
return len(list(self.assignments))
return self.open_assignments

@property
def open_assignments(self):
now = datetime.now()
return Assignment.query.filter(
Assignment.class_id == self.id,
Assignment.release_date >= now,
Assignment.due_date <= now
Assignment.release_date <= now,
).count()

@property
@@ -113,6 +112,7 @@ class Assignment(db.Model):
github_classroom_url = db.Column(db.String(256), nullable=True)
pipeline_image = db.Column(db.String(256), unique=True, nullable=True)
unique_code = db.Column(db.String(8), unique=True, default=lambda: base64.b16encode(os.urandom(4)).decode())
ide_enabled = db.Column(db.Boolean, default=True)

# Dates
release_date = db.Column(db.DateTime, nullable=False)
@@ -132,7 +132,6 @@ def data(self):
'course': self.class_.data,
'description': self.description,
'github_classroom_link': self.github_classroom_url,

'tests': [t.data for t in self.tests]
}

@@ -172,6 +171,15 @@ class AssignmentRepo(db.Model):
assignment = db.relationship(Assignment, cascade='all,delete')
submissions = db.relationship('Submission', cascade='all,delete')

@property
def data(self):
return {
'github_username': self.github_username,
'assignment_name': self.assignment.name,
'class_name': self.assignment.class_.class_code,
'repo_url': self.repo_url,
}


class AssignmentTest(db.Model):
__tablename__ = 'assignment_test'
@@ -313,6 +321,8 @@ def init_submission_models(self):
:return:
"""
logging.info('initializing submission {}'.format(self.id))

# If the models already exist, yeet
if len(self.test_results) != 0:
SubmissionTestResult.query.filter_by(submission_id=self.id).delete()
@@ -325,7 +335,7 @@ def init_submission_models(self):
# Find tests for the current assignment
tests = AssignmentTest.query.filter_by(assignment_id=self.assignment_id).all()

logging.error('found tests: {}'.format(list(map(lambda x: x.data, tests))))
logging.debug('found tests: {}'.format(list(map(lambda x: x.data, tests))))

for test in tests:
tr = SubmissionTestResult(submission=self, assignment_test=test)
@@ -334,7 +344,7 @@ def init_submission_models(self):
db.session.add(sb)

self.processed = False
self.state = 'Reset'
self.state = 'Waiting for resources...'
db.session.add(self)

# Commit new models
@@ -379,7 +389,6 @@ def data(self):
'assignment_name': self.assignment.name,
'assignment_due': str(self.assignment.due_date),
'class_code': self.assignment.class_.class_code,
'url': self.url,
'commit': self.commit,
'processed': self.processed,
'state': self.state,
@@ -480,3 +489,50 @@ def stat_data(self):
data = self.data
del data['stdout']
return data


class TheiaSession(db.Model):
__tablename__ = 'theia_session'

# id
id = db.Column(db.Integer, primary_key=True, autoincrement=True)

owner_id = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False)
assignment_id = db.Column(db.Integer, db.ForeignKey(Assignment.id), nullable=False)
repo_id = db.Column(db.Integer, db.ForeignKey(AssignmentRepo.id), nullable=False)

active = db.Column(db.Boolean, default=True)
state = db.Column(db.String(128))
cluster_address = db.Column(db.String(256), nullable=True, default=None)

# Timestamps
created = db.Column(db.DateTime, default=datetime.now)
ended = db.Column(db.DateTime, nullable=True, default=None)
last_heartbeat = db.Column(db.DateTime, default=datetime.now)
last_proxy = db.Column(db.DateTime, default=datetime.now)
last_updated = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now)

repo = db.relationship(AssignmentRepo)
owner = db.relationship(User)
assignment = db.relationship(Assignment)

@property
def data(self):
from anubis.utils.data import theia_redirect_url

return {
'id': self.id,
'assignment_id': self.assignment_id,
'assignment_name': self.assignment.name,
'class_name': self.assignment.class_.class_code,
'repo_id': self.repo_id,
'repo_url': self.repo.repo_url,
'redirect_url': theia_redirect_url(self.id, self.owner.netid),
'active': self.active,
'state': self.state,
'created': str(self.created),
'ended': str(self.ended),
'last_heartbeat': str(self.last_heartbeat),
'last_proxy': str(self.last_proxy),
'last_updated': str(self.last_updated),
}
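
TheiaSession ties a cloud IDE session to a user, an assignment, and that user's assignment repo, and tracks heartbeat/proxy timestamps so stale sessions can be reaped. A minimal usage sketch, with hypothetical user, assignment, and repo objects and the db session and theia_redirect_url helper referenced above:

    # Hypothetical: open an IDE session for a student's assignment repo
    theia_session = TheiaSession(
        owner_id=user.id,
        assignment_id=assignment.id,
        repo_id=repo.id,
    )
    db.session.add(theia_session)
    db.session.commit()

    # The data property is the payload handed back to the frontend
    print(theia_session.data['redirect_url'])
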
2 changes: 1 addition & 1 deletion api/anubis/routes/pipeline.py
@@ -34,7 +34,7 @@ def pipeline_report_panic(submission: Submission):
'owner_id': submission.owner_id, 'data': json.dumps(request.json)})

submission.processed = True
submission.state = 'Whoops! There was an error on our end. The Anubis admins have been notified.'
submission.state = 'Whoops! There was an error on our end. The error has been logged.'
submission.errors = {'panic': request.json}

db.session.add(submission)
89 changes: 72 additions & 17 deletions api/anubis/routes/private.py
@@ -7,16 +7,21 @@
from flask import request, Blueprint, Response
from sqlalchemy import or_, and_

from anubis.models import db, User, Submission, Assignment, AssignmentQuestion, AssignedStudentQuestion
from anubis.models import db, User, Submission
from anubis.models import Assignment, AssignmentQuestion, AssignedStudentQuestion
from anubis.models import TheiaSession
from anubis.utils.auth import get_token
from anubis.utils.cache import cache
from anubis.utils.data import regrade_submission, is_debug
from anubis.utils.data import regrade_submission, bulk_regrade_submission, is_debug
from anubis.utils.data import success_response, error_response
from anubis.utils.data import bulk_stats, get_students, get_assigned_questions
from anubis.utils.data import fix_dangling, _verify_data_shape, split_chunks
from anubis.utils.decorators import json_response, json_endpoint, load_from_id
from anubis.utils.elastic import log_endpoint
from anubis.utils.redis_queue import enqueue_webhook_rpc
from anubis.utils.redis_queue import enqueue_webhook, rpc_enqueue
from anubis.utils.logger import logger
from anubis.utils.data import fix_dangling, bulk_stats, get_students, _verify_data_shape
from anubis.rpc.batch import rpc_bulk_regrade
from anubis.rpc.theia import reap_all_theia_sessions

private = Blueprint('private', __name__, url_prefix='/private')

@@ -37,6 +42,28 @@ def private_token_netid(netid):
return res


@private.route('/assignment/<int:id>/questions/get/<string:netid>')
@log_endpoint('cli', lambda: 'question get')
@load_from_id(Assignment, verify_owner=False)
@json_response
def private_assignment_id_questions_get_netid(assignment: Assignment, netid: str):
"""
Get questions assigned to a given student
:param assignment:
:param netid:
:return:
"""
user = User.query.filter_by(netid=netid).first()
if user is None:
return error_response('user not found')

return success_response({
'netid': user.netid,
'questions': get_assigned_questions(assignment.id, user.id)
})


@private.route('/assignment/<id>/questions/assign')
@log_endpoint('cli', lambda: 'question assign')
@load_from_id(Assignment, verify_owner=False)
@@ -267,7 +294,7 @@ def private_regrade_submission(commit):
return error_response('not found')

s.init_submission_models()
enqueue_webhook_rpc(s.id)
enqueue_webhook(s.id)

return success_response({
'submission': s.data,
@@ -300,23 +327,18 @@ def private_regrade_assignment(assignment_name):
if assignment is None:
return error_response('cant find assignment')

submission = Submission.query.filter(
submissions = Submission.query.filter(
Submission.assignment_id == assignment.id,
Submission.owner_id != None
).all()

response = []
submission_ids = [s.id for s in submissions]
submission_chunks = split_chunks(submission_ids, 100)

for s in submission:
res = regrade_submission(s)
response.append({
'submission': s.id,
'commit': s.commit,
'netid': s.netid,
'success': res['success'],
})
for chunk in submission_chunks:
rpc_enqueue(rpc_bulk_regrade, chunk)

return success_response({'submissions': response})
return success_response({'status': 'chunks enqueued'})
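
Rather than regrading every submission synchronously inside the request, the endpoint now splits the submission ids into chunks of 100 and enqueues one rpc_bulk_regrade job per chunk. A sketch of the chunking behavior assumed of split_chunks (the real helper lives in anubis.utils.data):

    def split_chunks(items, chunk_size):
        # Break a flat list into consecutive chunks of at most chunk_size items
        return [items[i:i + chunk_size] for i in range(0, len(items), chunk_size)]

    split_chunks([1, 2, 3, 4, 5], 2)  # -> [[1, 2], [3, 4], [5]]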


@private.route('/fix-dangling')
@@ -332,6 +354,10 @@ def private_fix_dangling():
@json_response
def private_stats_assignment(assignment_id, netid=None):
netids = request.args.get('netids', None)
force = request.args.get('force', False)

if force is not False:
cache.clear()

if netids is not None:
netids = json.loads(netids)
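
The assignment stats endpoint now takes a force query parameter; if force appears in the query string at all, the cache is cleared before the stats are recomputed. A hedged example of calling it, where the host, port, and exact route path are assumptions:

    import requests

    # Hypothetical internal call against a locally running API
    resp = requests.get(
        'http://localhost:5000/private/stats/1',
        params={'force': '1'},  # presence of the parameter triggers cache.clear()
    )
    print(resp.json())
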
@@ -344,6 +370,35 @@ def private_stats_assignment(assignment_id, netid=None):
return success_response({'stats': bests})


@private.route('/submission/<int:id>')
@log_endpoint('cli', lambda: 'submission-stats')
@load_from_id(Submission, verify_owner=False)
@json_response
def private_submission_stats_id(submission: Submission):
"""
Get full stats for a specific submission.
:param submission:
:return:
"""

return success_response({
'student': submission.owner.data,
'submission': submission.full_data,
'assignment': submission.assignment.data,
'class': submission.assignment.class_.data,
})


@private.route('/ide/clear')
@log_endpoint('cli', lambda: 'clear-ide')
@json_response
def private_ide_clear():
rpc_enqueue(reap_all_theia_sessions, tuple())

return success_response({'state': 'enqueued'})
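
The new /private/ide/clear route hands reap_all_theia_sessions to the RPC queue instead of reaping inline, so the request returns as soon as the job is enqueued. The reaper itself is not part of this hunk; given the TheiaSession columns added above, a hypothetical sketch of that kind of job might look like:

    from datetime import datetime, timedelta

    def reap_idle_sessions(max_idle_minutes=60):
        # Hypothetical reaper: end active sessions whose heartbeat has gone stale
        cutoff = datetime.now() - timedelta(minutes=max_idle_minutes)
        stale = TheiaSession.query.filter(
            TheiaSession.active == True,
            TheiaSession.last_heartbeat <= cutoff,
        ).all()
        for theia_session in stale:
            theia_session.active = False
            theia_session.ended = datetime.now()
            db.session.add(theia_session)
        db.session.commit()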


from anubis.models import SubmissionTestResult, SubmissionBuild
from anubis.models import AssignmentTest, AssignmentRepo, InClass, Class_

@@ -407,6 +462,6 @@ def private_seed():
a1s2.init_submission_models()
a2s1.init_submission_models()

enqueue_webhook_rpc(a2s1.id)
enqueue_webhook(a2s1.id)

return success_response('seeded')