author    Aleš Smodiš <aless@guru.si>    2015-09-29 14:11:35 +0200
committer Aleš Smodiš <aless@guru.si>    2015-09-29 14:11:35 +0200
commit    e69f28df568407c5ca00563a13d267ae0860d12c (patch)
tree      c6cdf02da0360e31829701469ceea2e12259e66e
parent    11c84622f9a3943824c0133c2ab96aeb7ba46038 (diff)
* Bugfix: always do cur.fetchone() on new iteration.
* Implemented ordering of problem groups and problems.
* Added hint types based on hint definitions from common.py files.
-rw-r--r--  scripts/build_web_resources.py  |  167
1 file changed, 102 insertions, 65 deletions
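
Before the diff itself, a small self-contained sketch (not code from this repository) of the two behaviours the commit message describes: fetching the next row in a finally block, so that cur.fetchone() runs on every iteration even when the loop body hits continue, and sorting problem groups and problems by their optional 'number' key. The in-memory SQLite table and the sample lang_index dict are stand-ins for the script's real database and data.

import sqlite3

# Illustrative only: an in-memory table replaces the real problem database.
conn = sqlite3.connect(':memory:')
cur = conn.cursor()
cur.execute('create table problem (id integer, language_id integer)')
cur.executemany('insert into problem values (?, ?)', [(1, 1), (2, 2), (3, 1)])
cur.execute('select id, language_id from problem order by id')

row = cur.fetchone()
while row:
    try:
        if row[1] != 1:
            continue  # before the fix, a continue skipped the fetch at the end of the body and looped on the same row forever
        print('processing problem', row[0])
    finally:
        row = cur.fetchone()  # always advance to the next row, whatever the body did

# Ordering: sort each group's problem list, then the groups themselves, by 'number'.
lang_index = {'groups': [
    {'number': 2, 'problems': [{'number': 2}, {'number': 1}]},
    {'number': 1, 'problems': []},
]}
for group in lang_index['groups']:
    group['problems'].sort(key=lambda p: p.get('number', 0))
lang_index['groups'].sort(key=lambda g: g.get('number', 0))
print(lang_index['groups'])

Wrapping the body in try/finally is a minimal way to guarantee the cursor advances; repeating cur.fetchone() before every continue would be easy to miss when new branches are added.
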
diff --git a/scripts/build_web_resources.py b/scripts/build_web_resources.py
index 44a2073..604d9d9 100644
--- a/scripts/build_web_resources.py
+++ b/scripts/build_web_resources.py
@@ -35,10 +35,16 @@ language_props = { # for translation files inside the language subdirectory
     'description': 'Description not set',
     'hint': {}
 }
+language_common_props = { # for common.py inside the language subdirectory
+    'hint_type': {} # type definitions of common hints
+}
 group_props = { # for translation files inside the problem group subdirectory
     'name': 'Name not set',
     'description': 'Description not set'
 }
+group_common_props = { # for common.py inside the problem group subdirectory
+    'number': 1 # display index of the problem group
+}
 problem_props = { # for translation files inside the problem subdirectory
     'name': 'Name not set',
     'slug': 'Slug not set',
@@ -47,8 +53,9 @@ problem_props = { # for translation files inside the problem subdirectory
     'hint': {}
 }
 problem_common_props = { # for common.py inside the problem subdirectory
-    'number': 0, # display index of problems inside their groups
-    'visible': True
+    'number': 1, # display index of problems inside their groups
+    'visible': True, # whether the problem is enabled (disabled problems are excluded from the application)
+    'hint_type': {} # type definitions of problem hints
 }
 
 languages = {} # programming languages, info from database
@@ -99,6 +106,16 @@ def load_common_data(package, defaults):
         else:
             for prop, default in defaults.items():
                 result[prop] = getattr(mod, prop, default)
+    else:
+        print('Module {} does not exist'.format(package + '.common'))
+        for prop, default in defaults.items():
+            result[prop] = default
+    return result
+
+def process_hint_type(hint_type):
+    result = {}
+    for identifier, hint in hint_type.items():
+        result[identifier] = hint.hint_type
     return result
 
 cur.execute('select id, name, identifier from language')
@@ -122,74 +139,94 @@ row = cur.fetchone()
 
 def dump_language_defs():
     if lang_index:
+        # sort groups and problems
+        groups = lang_index['groups']
+        for group in groups:
+            group['problems'].sort(key=lambda p: p.get('number', 0))
+        groups.sort(key=lambda p: p.get('number', 0))
+        # write out the JSON file
         with open(os.path.join(lang_output_path, 'language.json'), 'w') as f:
             json.dump(lang_index, f, indent=2)
 
 while row:
     # process language data, it all goes into the language directory language.json
-    language_id = row[1]
-    if previous_language_id != language_id:
-        language = languages[language_id]
-        lang_identifier = language['identifier']
-        language_path = os.path.join(problems_path, lang_identifier)
-        if not (os.path.exists(language_path) and os.path.isdir(language_path)):
-            print('ERROR: the directory for language {0} does not exist: {1}'.format(lang_identifier, language_path))
-            continue
-        dump_language_defs() # dump the previous language index
-        lang_output_path = os.path.join(output_path, lang_identifier)
-        if not os.path.exists(lang_output_path):
-            os.mkdir(lang_output_path)
-        previous_language_id = language_id
-        problem_groups_map = {}
-        lang_index = {'id': language_id, 'identifier': lang_identifier, 'groups': problem_groups_map, 'translations': load_translation_data(lang_identifier, language_props)}
-        previous_group_id = None
-
-    # process problem group data, it all goes into the language directory language.json
-    group_id = row[2]
-    if previous_group_id != group_id:
-        group = groups[group_id]
-        group_identifier = group['identifier']
-        group_path = os.path.join(language_path, 'problems', group_identifier)
-        if not (os.path.exists(group_path) and os.path.isdir(group_path)):
-            print('ERROR: the directory for group {0}/{1} does not exist: {2}'.format(lang_identifier, group_identifier, group_path))
+    try:
+        language_id = row[1]
+        if previous_language_id != language_id:
+            language = languages[language_id]
+            lang_identifier = language['identifier']
+            language_path = os.path.join(problems_path, lang_identifier)
+            if not (os.path.exists(language_path) and os.path.isdir(language_path)):
+                print('ERROR: the directory for language {0} does not exist: {1}'.format(lang_identifier, language_path))
+                continue
+            dump_language_defs() # dump the previous language index
+            lang_output_path = os.path.join(output_path, lang_identifier)
+            if not os.path.exists(lang_output_path):
+                os.mkdir(lang_output_path)
+            previous_language_id = language_id
+            problem_groups_list = []
+            lang_index = load_common_data(lang_identifier, language_common_props)
+            lang_index['hint_type'] = process_hint_type(lang_index.get('hint_type', {})) # process type definitions for common hints
+            lang_index['id'] = language_id
+            lang_index['identifier'] = lang_identifier
+            lang_index['groups'] = problem_groups_list
+            lang_index['translations'] = load_translation_data(lang_identifier, language_props)
+            previous_group_id = None
+
+        # process problem group data, it all goes into the language directory language.json
+        group_id = row[2]
+        if previous_group_id != group_id:
+            group = groups[group_id]
+            group_identifier = group['identifier']
+            group_path = os.path.join(language_path, 'problems', group_identifier)
+            if not (os.path.exists(group_path) and os.path.isdir(group_path)):
+                print('ERROR: the directory for group {0}/{1} does not exist: {2}'.format(lang_identifier, group_identifier, group_path))
+                continue
+            group_output_path = os.path.join(lang_output_path, group_identifier)
+            if not os.path.exists(group_output_path):
+                os.mkdir(group_output_path)
+            group_package = lang_identifier + '.problems.' + group_identifier
+            previous_group_id = group_id
+            problems_list = []
+            group_data = load_common_data(group_package, group_common_props)
+            group_data['id'] = group_id
+            group_data['identifier'] = group_identifier
+            group_data['problems'] = problems_list
+            group_data['translations'] = load_translation_data(group_package, group_props)
+            problem_groups_list.append(group_data)
+
+        # process problem data, from common.py goes into the language directory language.json, others go into problem subdirectory's problem.json
+        problem_id = row[0]
+        problem_identifier = row[3]
+        problem_path = os.path.join(group_path, problem_identifier)
+        if not (os.path.exists(problem_path) and os.path.isdir(problem_path)):
+            print('ERROR: the directory for problem {0}/{1}/{2} does not exist: {3}'.format(lang_identifier, group_identifier, problem_identifier, group_path))
             continue
-        group_output_path = os.path.join(lang_output_path, group_identifier)
-        if not os.path.exists(group_output_path):
-            os.mkdir(group_output_path)
-        group_package = lang_identifier + '.problems.' + group_identifier
-        previous_group_id = group_id
-        problems_map = {}
-        problem_groups_map[group_identifier] = {'id': group_id, 'identifier': group_identifier, 'problems': problems_map, 'translations': load_translation_data(group_package, group_props)}
-
-    # process problem data, from common.py goes into the language directory language.json, others go into problem subdirectory's problem.json
-    problem_id = row[0]
-    problem_identifier = row[3]
-    problem_path = os.path.join(group_path, problem_identifier)
-    if not (os.path.exists(problem_path) and os.path.isdir(problem_path)):
-        print('ERROR: the directory for problem {0}/{1}/{2} does not exist: {3}'.format(lang_identifier, group_identifier, problem_identifier, group_path))
-        continue
-    problem_package = group_package + '.' + problem_identifier
-    # load common data, for the language directory
-    common_data = load_common_data(problem_package, problem_common_props)
-    if not common_data['visible']:
-        continue # problem is not visible, do not generate anything
-    del common_data['visible'] # we don't need this field in the GUI
-    common_data['id'] = problem_id
-    common_data['identifier'] = problem_identifier
-    problems_map[problem_identifier] = common_data
-    # load translations, and copy only problem names to the common data
-    problem_translations = load_translation_data(problem_package, problem_props)
-    name_translations = {}
-    for key, value in problem_translations.items():
-        name_translations[key] = {'name': value.get('name')}
-    common_data['translations'] = name_translations
-    # dump translations, for the problem subdirectory's problem.json
-    problem_data = {'id': problem_id, 'identifier': problem_identifier, 'translations': problem_translations}
-    problem_output_path = os.path.join(group_output_path, problem_identifier)
-    if not os.path.exists(problem_output_path):
-        os.mkdir(problem_output_path)
-    with open(os.path.join(problem_output_path, 'problem.json'), 'w') as f:
-        json.dump(problem_data, f, indent=2)
-    row = cur.fetchone()
+        problem_package = group_package + '.' + problem_identifier
+        # load common data, for the language directory
+        common_data = load_common_data(problem_package, problem_common_props)
+        if not common_data['visible']:
+            continue # problem is not visible, do not generate anything
+        del common_data['visible'] # we don't need this field in the GUI
+        hint_type = process_hint_type(common_data['hint_type']) # save for later, to be used in problem_data below
+        del common_data['hint_type'] # we don't need this field in the language index
+        common_data['id'] = problem_id
+        common_data['identifier'] = problem_identifier
+        problems_list.append(common_data)
+        # load translations, and copy only problem names to the common data
+        problem_translations = load_translation_data(problem_package, problem_props)
+        name_translations = {}
+        for key, value in problem_translations.items():
+            name_translations[key] = {'name': value.get('name')}
+        common_data['translations'] = name_translations
+        # dump translations, for the problem subdirectory's problem.json
+        problem_data = {'id': problem_id, 'identifier': problem_identifier, 'translations': problem_translations, 'hint_type': hint_type}
+        problem_output_path = os.path.join(group_output_path, problem_identifier)
+        if not os.path.exists(problem_output_path):
+            os.mkdir(problem_output_path)
+        with open(os.path.join(problem_output_path, 'problem.json'), 'w') as f:
+            json.dump(problem_data, f, indent=2)
+    finally:
+        row = cur.fetchone()
 
 dump_language_defs() # dump the last language index
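
For orientation, here is a rough sketch of the JSON shapes the script writes after this change, as far as they can be read off the code above. Every concrete value ('prolog', 'lists', 'member', the 'sl' locale key, the ids) is an invented placeholder, the translation entries show only the default fields visible in the *_props dicts, and the exact structure returned by load_translation_data is an assumption.

import json

# Approximate output shapes only; all concrete values below are made-up placeholders.
language_json = {  # written to <output>/<language>/language.json
    'id': 1,
    'identifier': 'prolog',
    'hint_type': {},  # processed language-level hint type definitions from common.py
    'translations': {'sl': {'description': 'Description not set', 'hint': {}}},
    'groups': [  # sorted by 'number'
        {'id': 10, 'identifier': 'lists', 'number': 1,
         'translations': {'sl': {'name': 'Name not set', 'description': 'Description not set'}},
         'problems': [  # sorted by 'number'; only the problem names are copied here
             {'id': 100, 'identifier': 'member', 'number': 1,
              'translations': {'sl': {'name': 'Name not set'}}},
         ]},
    ],
}

problem_json = {  # written to <output>/<language>/<group>/<problem>/problem.json
    'id': 100,
    'identifier': 'member',
    'hint_type': {},  # per-problem hint type definitions from common.py
    'translations': {'sl': {'name': 'Name not set', 'slug': 'Slug not set', 'hint': {}}},
}

print(json.dumps(language_json, indent=2))
print(json.dumps(problem_json, indent=2))
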