-rw-r--r--  scripts/build_web_resources.py  186
1 file changed, 186 insertions, 0 deletions
diff --git a/scripts/build_web_resources.py b/scripts/build_web_resources.py
new file mode 100644
index 0000000..e3e6eff
--- /dev/null
+++ b/scripts/build_web_resources.py
@@ -0,0 +1,186 @@
+#!/usr/bin/python3
+# coding=utf-8
+
+"""This tool processes all problem files and database data, and outputs JSON
+files to describe problems, to be used as static web resources.
+Before running the script define the following environment variables, if defaults are not okay:
+CODEQ_WEB_OUTPUT - directory where to write the output, defaults to /var/www/html/data
+CODEQ_PROBLEMS - directory where you have codeq-problems checked out, defaults to /var/local/codeq-problems
+CODEQ_DB_HOST - hostname or IP of the database server, defaults to localhost
+CODEQ_DB_PORT - TCP port number of the database server, defaults to 5432
+CODEQ_DB_DATABASE - name of the database, defaults to codeq
+CODEQ_DB_USER - database username, defaults to codeq
+CODEQ_DB_PASS - database password, defaults to c0d3q
+"""
+
+import os
+import traceback
+import sys
+import json
+
+# insert the parent directory, so the problem scripts find their modules
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+import server.problems
+import db
+
+problems_path = os.environ.get('CODEQ_PROBLEMS') or '/var/local/codeq-problems' # where to find problems, the same as server.problems._path_prefix
+output_path = os.environ.get('CODEQ_WEB_OUTPUT') or '/var/www/html/data' # the base directory where to create subdirectories and output the files for web
+
+translations = {'sl', 'en'} # translations to look for (sl.py, en.py, ...)
+
+# default values (properties) for the various types of items; these dicts also serve as the list of properties to copy from the modules
+language_props = { # for translation files inside the language subdirectory
+ 'name': 'Name not set',
+ 'description': 'Description not set',
+ 'hint': {}
+}
+group_props = { # for translation files inside the problem group subdirectory
+ 'name': 'Name not set',
+ 'description': 'Description not set'
+}
+problem_props = { # for translation files inside the problem subdirectory
+ 'name': 'Name not set',
+ 'slug': 'Slug not set',
+ 'description': 'Description not set',
+ 'plan': [],
+ 'hint': {}
+}
+problem_common_props = { # for common.py inside the problem subdirectory
+ 'number': 0, # display index of problems inside their groups
+ 'visible': True
+}
+
+languages = {} # programming languages, info from database
+groups = {} # problem groups, info from database
+
+conn = db.get_connection()
+cur = conn.cursor()
+
+def load_translation_data(package, defaults):
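+    """Load the <lang>.py translation modules from the package's directory and return
+    a dict mapping each language code that has a translation file to the properties
+    listed in `defaults` (a default is used when a property is missing or the module fails to load)."""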
+ result = {}
+ path = os.sep.join(package.split('.'))
+ for lang in translations:
+ mod_path = os.path.join(problems_path, path, lang + '.py')
+ if os.path.exists(mod_path) and os.path.isfile(mod_path):
+ mod = None
+ try:
+ mod = server.problems.load_module(package + '.' + lang)
+            except Exception:
+ traceback.print_exc()
+ lang_data = {}
+ result[lang] = lang_data
+ if mod is None:
+ print('Could not load module {}'.format(package + '.' + lang))
+ for prop, default in defaults.items():
+ lang_data[prop] = default
+ else:
+ for prop, default in defaults.items():
+ lang_data[prop] = getattr(mod, prop, default)
+ return result
+
+def load_common_data(package, defaults):
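+    """Return the properties listed in `defaults` as read from the package's common.py;
+    the defaults are used if the module fails to load, and an empty dict is returned
+    when common.py does not exist."""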
+ result = {}
+ path = os.sep.join(package.split('.'))
+ mod_path = os.path.join(problems_path, path, 'common.py')
+ if os.path.exists(mod_path) and os.path.isfile(mod_path):
+ mod = None
+ try:
+ mod = server.problems.load_module(package + '.common')
+        except Exception:
+ traceback.print_exc()
+ if mod is None:
+ print('Could not load module {}'.format(package + '.common'))
+ for prop, default in defaults.items():
+ result[prop] = default
+ else:
+ for prop, default in defaults.items():
+ result[prop] = getattr(mod, prop, default)
+ return result
+
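+# The rest of the script walks all visible problems, grouped by language and
+# problem group, and writes two kinds of files under the output directory:
+#   <output>/<language>/language.json                  - language, group and per-problem common data
+#   <output>/<language>/<group>/<problem>/problem.json - the problem's translations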
+cur.execute('select id, name, identifier from language')
+for row in cur:
+    languages[row[0]] = {'id': row[0], 'name': row[1], 'identifier': row[2]}
+
+cur.execute('select id, name, identifier from problem_group')
+for row in cur:
+    groups[row[0]] = {'id': row[0], 'name': row[1], 'identifier': row[2]}
+
+cur.execute('select id, language_id, problem_group_id, identifier from problem where is_visible = true order by language_id, problem_group_id')
+previous_language_id = None
+previous_group_id = None
+lang_output_path = None
+lang_index = None
+
+def dump_language_defs():
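+    """Write the language index collected so far (if any) to language.json in the
+    current language's output directory."""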
+ if lang_index:
+ with open(os.path.join(lang_output_path, 'language.json'), 'w') as f:
+ json.dump(lang_index, f, indent=2)
+
+for row in cur:
+    # process language data; it all goes into language.json in the language directory
+ language_id = row[1]
+ if previous_language_id != language_id:
+ language = languages[language_id]
+ lang_identifier = language['identifier']
+ language_path = os.path.join(problems_path, lang_identifier)
+ if not (os.path.exists(language_path) and os.path.isdir(language_path)):
+ print('ERROR: the directory for language {0} does not exist: {1}'.format(lang_identifier, language_path))
+ continue
+ dump_language_defs() # dump the previous language index
+ lang_output_path = os.path.join(output_path, lang_identifier)
+ if not os.path.exists(lang_output_path):
+ os.mkdir(lang_output_path)
+ previous_language_id = language_id
+ problem_groups_map = {}
+ lang_index = {'id': language_id, 'identifier': lang_identifier, 'groups': problem_groups_map, 'translations': load_translation_data(lang_identifier, language_props)}
+ previous_group_id = None
+
+    # process problem group data; it also goes into language.json in the language directory
+ group_id = row[2]
+ if previous_group_id != group_id:
+ group = groups[group_id]
+ group_identifier = group['identifier']
+ group_path = os.path.join(language_path, 'problems', group_identifier)
+ if not (os.path.exists(group_path) and os.path.isdir(group_path)):
+ print('ERROR: the directory for group {0}/{1} does not exist: {2}'.format(lang_identifier, group_identifier, group_path))
+ continue
+ group_output_path = os.path.join(lang_output_path, group_identifier)
+ if not os.path.exists(group_output_path):
+ os.mkdir(group_output_path)
+ group_package = lang_identifier + '.problems.' + group_identifier
+ previous_group_id = group_id
+ problems_map = {}
+ problem_groups_map[group_identifier] = {'id': group_id, 'identifier': group_identifier, 'problems': problems_map, 'translations': load_translation_data(group_package, group_props)}
+
+    # process problem data: fields from common.py go into the language directory's language.json, the rest into the problem subdirectory's problem.json
+ problem_id = row[0]
+ problem_identifier = row[3]
+ problem_path = os.path.join(group_path, problem_identifier)
+ if not (os.path.exists(problem_path) and os.path.isdir(problem_path)):
+        print('ERROR: the directory for problem {0}/{1}/{2} does not exist: {3}'.format(lang_identifier, group_identifier, problem_identifier, problem_path))
+ continue
+ problem_package = group_package + '.' + problem_identifier
+ # load common data, for the language directory
+ common_data = load_common_data(problem_package, problem_common_props)
+ if not common_data['visible']:
+ continue # problem is not visible, do not generate anything
+ del common_data['visible'] # we don't need this field in the GUI
+ common_data['id'] = problem_id
+ common_data['identifier'] = problem_identifier
+ problems_map[problem_identifier] = common_data
+ # load and dump translations, for the problem subdirectory's problem.json
+ problem_data = {'id': problem_id, 'identifier': problem_identifier, 'translations': load_translation_data(problem_package, problem_props)}
+ problem_output_path = os.path.join(group_output_path, problem_identifier)
+ if not os.path.exists(problem_output_path):
+ os.mkdir(problem_output_path)
+ with open(os.path.join(problem_output_path, 'problem.json'), 'w') as f:
+ json.dump(problem_data, f, indent=2)
+
+dump_language_defs() # dump the last language index