path: root/server/problems.py
# coding=utf-8
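"""Dynamic loading of CodeQ problem and fact modules from the on-disk
problem repository, plus a database helper that lists the available
problems for a language grouped by problem group."""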

import sys
import importlib.machinery
import threading
from db import get_connection, return_connection

#sys.path.append('/home/aless/job/codeq/source/codeq-problems/')
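# Root directory of the on-disk problem repository; problem and fact modules
# are resolved against this path instead of sys.path.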
_path_prefix = '/home/aless/job/codeq/source/codeq-problems/'
_module_loading_lock = threading.RLock()  # TODO: make a more fine-grained locking import machinery

def load_module(fullname):
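    """Load and return the module *fullname* from the problem repository.

    Modules are resolved relative to _path_prefix rather than sys.path and
    are cached in sys.modules, so repeated calls return the same instance.
    """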
    # Alternative via the standard import machinery: return importlib.import_module(fullname)
    with _module_loading_lock:
        mod = sys.modules.get(fullname, None)
        if mod is None:
            parts = fullname.split('.')
            d = _path_prefix + '/'.join(parts[0:-1])
            ff = importlib.machinery.FileFinder(d, (importlib.machinery.SourceFileLoader, ['.py']))
            spec = ff.find_spec(fullname)
            if spec is None:
                raise ImportError('cannot find module {0} under {1}'.format(fullname, d))
            # Create and register the module before executing it (roughly what
            # importlib.util.module_from_spec does), so imports that run during
            # execution already see it in sys.modules.
            mod = type(sys)(fullname)
            mod.__loader__ = spec.loader
            mod.__package__ = spec.parent
            mod.__spec__ = spec
            if spec.has_location:
                mod.__file__ = spec.origin
                mod.__cached__ = spec.cached
            sys.modules[fullname] = mod
            try:
                spec.loader.exec_module(mod)
            except BaseException:
                # Execution failed: drop the partially initialized module so a
                # later import can retry, then re-raise.
                try:
                    del sys.modules[fullname]
                except KeyError:
                    pass
                raise
        return mod

def load_problem(language, problem_group, problem, tail_module):
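    """Load one tail module (e.g. 'common') of a single problem."""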
    return load_module('{0}.problems.{1}.{2}.{3}'.format(language, problem_group, problem, tail_module))

def load_facts(language, fact_module):
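    """Load the named fact module for the given language."""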
    return load_module('{0}.facts.{1}'.format(language, fact_module))

def load_problems(language, tuples, tail_module):
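    """Load the given tail module for every (problem_group, problem) pair in tuples."""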
    modules = []
    for problem_group, problem in tuples:
        mod = '{0}.problems.{1}.{2}.{3}'.format(language, problem_group, problem, tail_module)
        modules.append(load_module(mod))
    return modules

def get_facts(language, problem_module):
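    """Return the facts referenced by a problem module, or None.

    The problem module names its fact module in a 'facts' attribute; the
    loaded fact module's own 'facts' attribute holds the actual data.
    """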
    try:
        facts = problem_module.facts
    except AttributeError:
        return None
    if facts is None:
        return None
    module = load_facts(language, facts)
    if module:
        try:
            return module.facts
        except AttributeError:
            return None
    return None

def solutions_for_problems(language, tuples):
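    """Join the 'solution' attributes of the 'common' modules of the given
    (problem_group, problem) pairs into a single newline-separated string."""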
    if not tuples:
        return ''
    modules = load_problems(language, tuples, 'common')
    solutions = []
    for module in modules:
        try:
            solutions.append(module.solution)
        except AttributeError:
            pass
    return '\n'.join(solutions)

def list_problems_in_groups(language):
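    """List the problems of a language, grouped by problem group.

    Returns a list of {'identifier', 'name', 'problems'} dicts in which
    'problems' is a list of {'identifier', 'name'} dicts, ordered by group
    and problem identifier.
    """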
    conn = get_connection()
    try:
        cur = conn.cursor()
        try:
            cur.arraysize = 1000
            cur.execute("select g.identifier, g.name, p.identifier, p.name from problem p inner join problem_group g on g.id = p.problem_group_id where p.language_id = (select id from language where identifier = %s) order by g.identifier, p.identifier", (language,))
            result = []
            previous_group_name = ''
            current_sublist = None
            row = cur.fetchone()
            while row:
                current_group_name = row[0]
                if previous_group_name != current_group_name:
                    current_sublist = []
                    result.append({'identifier': current_group_name, 'name': row[1], 'problems': current_sublist})
                    previous_group_name = current_group_name
                current_sublist.append({'identifier': row[2], 'name': row[3]})
                row = cur.fetchone()
            return result
        finally:
            cur.close()
    finally:
        conn.commit()  # end the read-only transaction before returning the connection to the pool
        return_connection(conn)