author     Timotej Lazar <timotej.lazar@fri.uni-lj.si>  2016-03-17 15:11:19 +0100
committer  Timotej Lazar <timotej.lazar@fri.uni-lj.si>  2016-03-17 15:11:19 +0100
commit     880c601c90a229340892b680dbce0e09c6aaeedb (patch)
tree       da2c5e96b8b711d7e40cd302279ad7e073ab699f
parent     7267e65d4257402000e6b163291028e191907580 (diff)

Improve build_web_resources.py

-rw-r--r--  scripts/build_web_resources.py | 23
1 file changed, 14 insertions(+), 9 deletions(-)
diff --git a/scripts/build_web_resources.py b/scripts/build_web_resources.py
index 1e465e0..f0dc6b1 100644
--- a/scripts/build_web_resources.py
+++ b/scripts/build_web_resources.py
@@ -169,18 +169,23 @@ def dump_language_defs(data, output_path):
 conn = db.get_connection()
 cur = conn.cursor()
 
-def db_add(table, identifier, data):
-    cur.execute('select id from ' + table + ' where identifier = %s', (identifier,))
+def db_add(table, id, data):
+    data = sorted(data.items())
+    cols = tuple([d[0] for d in data])
+    vals = tuple([d[1] for d in data])
+
+    cur.execute('select ' + ','.join(cols) + ' from ' + table + ' where id = %s', (id,))
     row = cur.fetchone()
     if row is None:
-        data = sorted(data.items())
-        cols = [d[0] for d in data]
-        vals = [d[1] for d in data]
         print('Inserting new {} in database: cols={} vals={}'.format(table, cols, vals))
-
         args = ','.join(['%s'] * len(cols))
         sql = 'insert into ' + table + ' (' + ','.join(cols) + ') values (' + args + ')'
         cur.execute(sql, vals)
+    elif row != vals:
+        print('Updating {} {} in database: cols={} vals={}'.format(table, id, cols, vals))
+        args = ','.join([col + ' = %s' for col in cols])
+        sql = 'update ' + table + ' set ' + args + ' where id = %s'
+        cur.execute(sql, vals + (id,))
 
 try:
     # get problem descriptors
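
After this hunk, db_add behaves as an insert-or-update keyed on the numeric primary key: it selects the current column values for the given id, inserts the row if it is missing, and updates it only when the stored values differ. Below is a minimal standalone sketch of the same pattern, assuming a psycopg2-style DB-API cursor with %s placeholders and trusted (non-user-supplied) table and column names; the function name, connection settings and sample values are illustrative, not taken from the script.

import psycopg2  # assumed driver; the script itself gets its connection from its own db module

def upsert_by_id(cur, table, id, data):
    # Sort the columns so the SELECT result can be compared against vals directly.
    items = sorted(data.items())
    cols = tuple(k for k, _ in items)
    vals = tuple(v for _, v in items)

    cur.execute('select ' + ','.join(cols) + ' from ' + table + ' where id = %s', (id,))
    row = cur.fetchone()
    if row is None:
        placeholders = ','.join(['%s'] * len(cols))
        cur.execute('insert into ' + table + ' (' + ','.join(cols) + ') values (' + placeholders + ')', vals)
    elif tuple(row) != vals:
        assignments = ','.join(col + ' = %s' for col in cols)
        cur.execute('update ' + table + ' set ' + assignments + ' where id = %s', vals + (id,))

conn = psycopg2.connect(dbname='codeq')  # hypothetical connection settings
cur = conn.cursor()
upsert_by_id(cur, 'language', 1, {'id': 1, 'identifier': 'prolog'})
conn.commit()

Compared to the previous version, which looked rows up by identifier and never touched existing ones, re-running the build now refreshes rows whose ids stay stable while other columns change.
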
@@ -201,7 +206,7 @@ try:
         lang_data['groups'] = []
         lang_data['translations'] = load_translation_data(lang_identifier, language_props)
         copy_web_resources(lang_identifier, [lang_identifier])
-        db_add('language', lang_identifier,
+        db_add('language', lang_data['id'],
                {'id': lang_data['id'], 'identifier': lang_identifier})
 
         groups_path = os.path.join(lang_path, 'problems')
@@ -220,7 +225,7 @@ try:
             group_data['identifier'] = group_identifier
             group_data['problems'] = []
             group_data['translations'] = load_translation_data(group_package, group_props)
-            db_add('problem_group', group_identifier,
+            db_add('problem_group', group_data['id'],
                    {'id': group_data['id'], 'identifier': group_identifier})
 
             for problem_identifier in os.listdir(group_path):
@@ -262,7 +267,7 @@ try:
                 with open(os.path.join(problem_output_path, 'problem.json'), 'w') as f:
                     json.dump(problem_data, f, indent=2)
                 copy_web_resources(problem_package, [lang_identifier, group_identifier, problem_identifier])
-                db_add('problem', problem_identifier, {
+                db_add('problem', problem_data['id'], {
                     'id': problem_data['id'],
                     'language_id': lang_data['id'],
                     'problem_group_id': group_data['id'],
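
All three call sites now follow the same convention: the second argument is the row's numeric id taken from the descriptor data, while the data dictionary still carries both id and identifier (and, for problems, the foreign keys shown above). A hedged usage sketch with made-up ids, following the call pattern in these hunks:

# Illustrative values only; the real ones come from the language/group/problem descriptors.
db_add('language', 1, {'id': 1, 'identifier': 'prolog'})
db_add('problem_group', 10, {'id': 10, 'identifier': 'introduction'})
db_add('problem', 100, {
    'id': 100,
    'language_id': 1,
    'problem_group_id': 10,
    'identifier': 'hello_world',  # hypothetical; the actual call may pass more columns than the hunk shows
})
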