author    Aleš Smodiš <aless@guru.si>    2015-08-24 19:09:41 +0200
committer Aleš Smodiš <aless@guru.si>    2015-08-24 19:09:41 +0200
commit    a4f639242f9f6221a486e0e91adeb75ba6096f45 (patch)
tree      a6d1c1207ccd9829031d527f322ad84d89a2a550 /server
parent    5b4f1e25980ee18a323eba1415ed453b3a910ca3 (diff)
Split the server into a daemon and a wsgi_server.
Implemented the basic infrastructure for the daemon (Prolog) and partial support for services in the wsgi_server.
Diffstat (limited to 'server')
-rw-r--r--  server/__init__.py         23
-rw-r--r--  server/problems.py         83
-rw-r--r--  server/prolog_session.py  117
-rw-r--r--  server/user_session.py    122
4 files changed, 345 insertions, 0 deletions
diff --git a/server/__init__.py b/server/__init__.py
new file mode 100644
index 0000000..f2c73b7
--- /dev/null
+++ b/server/__init__.py
@@ -0,0 +1,23 @@
+# coding=utf-8
+
+import multiprocessing.managers
+from . import user_session
+from . import prolog_session
+
+__all__ = ['user_session']
+
+class CodeqManager(multiprocessing.managers.BaseManager):
+    pass
+
+class UserSessionProxy(multiprocessing.managers.BaseProxy):
+    _method_to_typeid_ = {'get_prolog': 'PrologSession'}
+
+CodeqManager.register('PrologSession')
+CodeqManager.register('UserSession', proxytype=UserSessionProxy)
+CodeqManager.register('get_session_by_id', callable=user_session.get_session_by_id, proxytype=UserSessionProxy)
+CodeqManager.register('get_or_create_session', callable=user_session.get_or_create_session, proxytype=UserSessionProxy)
+CodeqManager.register('authenticate_and_create_session', callable=user_session.authenticate_and_create_session, proxytype=UserSessionProxy)
+
+def start():
+    m = CodeqManager(address=('localhost', 16231), authkey=b'c0d3q3y')
+    m.get_server().serve_forever()
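
The CodeqManager above is the daemon-side endpoint that the wsgi_server half of the split is expected to talk to. The following is only a sketch of a possible client, assuming the daemon started by start() is listening on localhost:16231; the class name CodeqClient, the credentials and the Prolog snippet are illustrative and not part of this commit:

    # hypothetical wsgi_server-side client; mirrors the daemon's registrations, but without callables
    import multiprocessing.managers

    class UserSessionProxy(multiprocessing.managers.BaseProxy):
        # must match the server-side proxy so get_prolog results come back as PrologSession proxies
        _method_to_typeid_ = {'get_prolog': 'PrologSession'}

    class CodeqClient(multiprocessing.managers.BaseManager):
        pass

    CodeqClient.register('PrologSession')
    CodeqClient.register('UserSession', proxytype=UserSessionProxy)
    CodeqClient.register('authenticate_and_create_session', proxytype=UserSessionProxy)

    client = CodeqClient(address=('localhost', 16231), authkey=b'c0d3q3y')
    client.connect()
    session = client.authenticate_and_create_session('someuser', 'somepassword')  # returns a UserSessionProxy
    # BaseProxy does not generate named methods on the client, so remote calls go through _callmethod:
    prolog = session._callmethod('get_prolog')            # comes back as a PrologSession proxy
    print(prolog.run('likes(alice, prolog).'))            # hypothetical program; returns (messages, status, more)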
diff --git a/server/problems.py b/server/problems.py
new file mode 100644
index 0000000..7fb2606
--- /dev/null
+++ b/server/problems.py
@@ -0,0 +1,83 @@
+# coding=utf-8
+
+import sys
+import importlib.machinery
+import threading
+
+#sys.path.append('/home/aless/job/codeq/source/codeq-problems/')
+_path_prefix = '/home/aless/job/codeq/source/codeq-problems/'
+_module_loading_lock = threading.RLock()  # TODO: use more fine-grained locking in the import machinery
+
+def load_module(fullname):
+# return importlib.import_module(fullname)
+    with _module_loading_lock:
+        mod = sys.modules.get(fullname, None)
+        if mod is None:
+            parts = fullname.split('.')
+            d = _path_prefix + '/'.join(parts[0:-1])
+            ff = importlib.machinery.FileFinder(d, (importlib.machinery.SourceFileLoader, ['.py']))
+            spec = ff.find_spec(fullname)
+            mod = type(sys)(fullname)  # type(sys) is the module type: create a fresh, empty module object
+            mod.__loader__ = spec.loader
+            mod.__package__ = spec.parent
+            mod.__spec__ = spec
+            if spec.has_location:
+                mod.__file__ = spec.origin
+                mod.__cached__ = spec.cached
+            sys.modules[fullname] = mod
+            try:
+                spec.loader.exec_module(mod)
+            except:
+                try:
+                    del sys.modules[fullname]
+                except KeyError:
+                    pass
+                raise
+        return mod
+
+def load_problem(language, problem_group, problem, tail_module):
+    return load_module('{0}.problems.{1}.{2}.{3}'.format(language, problem_group, problem, tail_module))
+
+def load_facts(language, fact_module):
+    return load_module('{0}.facts.{1}'.format(language, fact_module))
+
+def load_problems(language, tuples, tail_module):
+    modules = []
+    for problem_group, problem in tuples:
+        mod = '{0}.problems.{1}.{2}.{3}'.format(language, problem_group, problem, tail_module)
+        print('importing {}'.format(mod))
+        modules.append(load_module(mod))
+    return modules
+
+def get_facts(language, problem_module):
+    try:
+        facts = problem_module.facts
+    except AttributeError:
+        return None
+    if facts is None:
+        return None
+    module = load_facts(language, facts)
+    if module:
+        try:
+            return module.facts
+        except AttributeError:
+            return None
+    return None
+
+def solutions_for_problems(language, tuples):
+    if not tuples:
+        return ''
+    modules = load_problems(language, tuples, 'common')
+    solutions = set()
+    facts = set()
+    for module in modules:
+        try:
+            solution = module.solution
+        except AttributeError:
+            pass
+        else:
+            solutions.add(solution)
+        f = get_facts(language, module)
+        if f:
+            facts.add(f)
+    return '\n'.join(facts) + '\n' + '\n'.join(solutions)
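
For orientation, load_module resolves a dotted name directly against the on-disk problem repository, and solutions_for_problems joins the facts and solution attributes of the resolved modules into one string. A small sketch with hypothetical problem identifiers (the actual problem tree is not part of this commit):

    # 'prolog.problems.lists.member_2.common' would be loaded from
    # /home/aless/job/codeq/source/codeq-problems/prolog/problems/lists/member_2/common.py
    mod = load_problem('prolog', 'lists', 'member_2', 'common')    # hypothetical identifiers
    code = solutions_for_problems('prolog', [('lists', 'member_2'), ('lists', 'conc_3')])
    # 'code' is a newline-joined string of the modules' facts and solution strings,
    # ready to be consulted by PrologSession.run()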
diff --git a/server/prolog_session.py b/server/prolog_session.py
new file mode 100644
index 0000000..e00afd8
--- /dev/null
+++ b/server/prolog_session.py
@@ -0,0 +1,117 @@
+# coding=utf-8
+
+import operator
+import threading
+import prolog.engine
+import db
+from . import problems
+
+__all__ = ['PrologSession']
+
+def format_prolog_output(reply, output):
+    messages = [text for text in map(operator.itemgetter(1), output)]
+    # When an engine is destroyed, a nested data object has the actual query result.
+    event = reply['event']
+    if event == 'destroy':
+        reply = reply['data']
+        event = reply['event']
+
+    if event == 'success':
+        messages.append(prolog.engine.pretty_vars(reply['data'][0]))
+        return messages, 'ok', reply['more']
+    if event == 'failure':
+        messages.append('false')
+        return messages, 'ok', False
+    if event == 'error':
+        # Remove potential module name (engine ID) from the error message.
+        messages.append('error: ' + reply['data'].replace("'{}':".format(reply['id']), ''))
+        return messages, 'error', False
+
+    return messages, 'ok', False  # TODO: is it possible to reach this return statement?
+
+
+class PrologSession(object):
+ """Abstracts a Prolog session.
+ Only public methods are available to the outside world due to the use of multiprocessing managers.
+ Therefore prefix any private methods with an underscore (_).
+ No properties are accessible; use getters and setters instead.
+ Values are passed by value instead of by reference (deep copy!).
+ """
+ def __init__(self):
+ self._access_lock = threading.Lock()
+ self._engine_id = None
+
+ def run(self, code):
+ with self._access_lock:
+ if self._engine_id is not None:
+ prolog.engine.stop(self._engine_id)
+ self._engine_id = None
+ reply, output = prolog.engine.create(code=code)
+ if reply.get('event') != 'create':
+ raise Exception('System error: could not create a prolog engine')
+ self._engine_id = reply['id']
+ messages = [text for text in map(operator.itemgetter(1), output)]
+ status = 'error' if 'error' in map(operator.itemgetter(0), output) else 'ok'
+ return messages, status, False
+
+ def query(self, query):
+ with self._access_lock:
+ if self._engine_id is None:
+ return ['Prolog is not running'], 'error', False
+ try:
+ return format_prolog_output(*prolog.engine.ask(self._engine_id, query))
+ except Exception as e:
+ return [str(e)], 'error', False
+
+ def step(self):
+ with self._access_lock:
+ if self._engine_id is None:
+ return ['Prolog is not running'], 'error', False
+ try:
+ return format_prolog_output(*prolog.engine.next(self._engine_id))
+ except Exception as e:
+ return [str(e)], 'error', False
+
+ def end(self):
+ """Stops the Prolog engine."""
+ with self._access_lock:
+ if self._engine_id is not None:
+ prolog.engine.stop(self._engine_id)
+ self._engine_id = None
+ return [], 'ok', False
+
+ def __del__(self):
+ # no locking needed if GC is removing us, as there cannot be any concurrent access by definition
+ if hasattr(self, '_engine_id') and (self._engine_id is not None):
+ prolog.engine.stop(self._engine_id)
+ self._engine_id = None
+
+    def run_for_user(self, user_id, language, problem_group, problem, program, query):
+        """A "shorthand" method to start a Prolog session, load the correct solutions of all
+        the user's solved problems together with the given program, and ask a query.
+        """
+        conn = db.get_connection()
+        try:
+            cur = conn.cursor()
+            try:
+                cur.execute('select l.id, p.id from problem p inner join problem_group g on g.id = p.problem_group_id inner join language l on l.id = p.language_id where l.identifier = %s and g.identifier = %s and p.identifier = %s', (language, problem_group, problem))
+                row = cur.fetchone()
+                language_id = row[0]
+                problem_id = row[1]
+                cur.execute('select g.identifier, p.identifier from solution s inner join problem p on p.id = s.problem_id inner join problem_group g on g.id = p.problem_group_id where s.codeq_user_id = %s and s.done = True and s.problem_id != %s and p.language_id = %s', (user_id, problem_id, language_id))
+                solved_problems = cur.fetchall()
+            finally:
+                cur.close()
+        finally:
+            conn.commit()
+            db.return_connection(conn)
+
+        other_solutions = problems.solutions_for_problems(language, solved_problems)
+        problem_module = problems.load_problem(language, problem_group, problem, 'common')
+        problem_facts = problems.get_facts(language, problem_module) or ''
+        code = other_solutions + '\n' + problem_facts + '\n' + program  # separate with newlines so clauses from different sources do not run together
+        messages, status, have_more = self.run(code)
+        if status == 'ok':
+            more_messages, status, have_more = self.query(query)
+            messages.extend(more_messages)
+        return messages, status, have_more
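
Taken together, run/query/step/end form the session life cycle. A minimal local sketch (bypassing the manager, with a hypothetical program and query):

    session = PrologSession()
    messages, status, more = session.run('parent(tom, bob).\nparent(bob, ann).')  # consult a program
    if status == 'ok':
        messages, status, more = session.query('parent(tom, X)')   # ask for the first solution
        while more:
            messages, status, more = session.step()                # backtrack for further solutions
    session.end()                                                  # stop the underlying Prolog engine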
diff --git a/server/user_session.py b/server/user_session.py
new file mode 100644
index 0000000..e418f8d
--- /dev/null
+++ b/server/user_session.py
@@ -0,0 +1,122 @@
+# coding=utf-8
+
+import uuid
+import threading # multiprocessing.managers.BaseManager uses threading to serve incoming requests
+import hashlib
+import base64
+import random
+from . import prolog_session
+import db
+from errors.session import NoSuchSession, AuthenticationFailed
+
+__all__ = ['get_session_by_id', 'get_or_create_session', 'UserSession']
+
+sessions = {} # maps session IDs to session objects
+
+module_access_lock = threading.Lock() # use this lock to access the sessions dictionary
+
+class UserSession(object):
+ """Abstracts a user session.
+ Only public methods are available to the outside world due to the use of multiprocessing managers.
+ Therefore prefix any private methods with an underscore (_).
+ No properties are accessible; use getters and setters instead.
+ Values are passed by value instead of by reference (deep copy!).
+ """
+ def __init__(self, uid, username):
+ self._access_lock = threading.Lock()
+ self.sid = uuid.uuid4().hex
+ self.uid = uid
+ self.username = username
+ self.prolog_session = None
+
+ def destroy(self):
+ """Destroys the session."""
+ with self._access_lock:
+ with module_access_lock:
+ del sessions[self.sid]
+ if self.prolog_session is not None:
+ self.prolog_session.end()
+ self.prolog_session = None
+ # TODO: add any cleanups as features are added!
+
+ def get_sid(self):
+ return self.sid
+
+ def get_uid(self):
+ return self.uid
+
+ def get_prolog(self):
+ with self._access_lock:
+ if self.prolog_session is None:
+ self.prolog_session = prolog_session.PrologSession() # lazy init
+ return self.prolog_session
+
+ def __del__(self):
+ # no locking needed if GC is removing us, as there cannot be any concurrent access by definition
+ if hasattr(self, 'prolog_session') and (self.prolog_session is not None):
+ self.prolog_session.end()
+ self.prolog_session = None
+ # TODO: add any cleanups as features are added!
+
+def get_session_by_id(sid):
+    with module_access_lock:
+        s = sessions.get(sid, None)
+        if s is None:
+            raise NoSuchSession('There is no session with SID {}'.format(sid))
+        return s
+
+def get_or_create_session(uid, username, sid=None):
+    with module_access_lock:
+        if sid is not None:
+            s = sessions.get(sid)
+            if s is not None:
+                return s
+        s = UserSession(uid, username)
+        sessions[s.sid] = s
+        return s
+
+def authenticate_and_create_session(username, password):
+    conn = db.get_connection()
+    try:
+        cur = conn.cursor()
+        try:
+            cur.execute('select id, password from codeq_user where username = %s', (username,))
+            row = cur.fetchone()
+            if row is None:
+                raise AuthenticationFailed('No such user: {}'.format(username))
+            if verify_password(password, row[1]):
+                return get_or_create_session(row[0], username)
+            raise AuthenticationFailed('Password mismatch')
+        finally:
+            cur.close()
+    finally:
+        conn.commit()
+        db.return_connection(conn)
+
+def verify_password(plain_password, encrypted_password):
+    elts = encrypted_password.split('$')
+    if len(elts) != 4:
+        return False
+    if elts[0] != 'pbkdf2_sha256':
+        return False
+    try:
+        rounds = int(elts[1])
+    except ValueError:
+        return False
+    enc = hashlib.pbkdf2_hmac('sha256', plain_password.encode('utf-8'), elts[2].encode('utf-8'), rounds)
+    return base64.b64encode(enc).decode('utf-8') == elts[3]
+
+_salt_chars = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
+_salt_chars_len = len(_salt_chars)
+def encrypt_password(plain_password):
+    rounds = 20000
+    chosen_chars = []
+    for i in range(0, 12):  # 12-character salt; note that the random module is not a cryptographically secure source
+        chosen_chars.append(_salt_chars[random.randrange(0, _salt_chars_len)])
+    salt = ''.join(chosen_chars)
+    enc = hashlib.pbkdf2_hmac('sha256', plain_password.encode('utf-8'), salt.encode('utf-8'), rounds)
+    return '{0}${1}${2}${3}'.format('pbkdf2_sha256', rounds, salt, base64.b64encode(enc).decode('utf-8'))
+
+random.seed()
+
+# TODO: add a session timeout timer
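
The password strings produced by encrypt_password and checked by verify_password follow the algorithm$rounds$salt$base64-digest layout. A short round-trip sketch (the plaintext is of course made up):

    encrypted = encrypt_password('s3cret')
    # e.g. 'pbkdf2_sha256$20000$<12-char salt>$<base64 of the PBKDF2 digest>'
    assert verify_password('s3cret', encrypted)
    assert not verify_password('wrong guess', encrypted)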