Optimize sync_missing_games()

This commit is contained in:
Xodetaetl 2015-08-29 02:27:57 +02:00
parent f9d9fc902d
commit 2b520cc72a
3 changed files with 48 additions and 16 deletions

View file

@@ -176,6 +176,16 @@ def add_game(name, **game_data):
sql.db_insert(PGA_DB, "games", game_data)
def add_games_bulk(games):
    """Insert a batch of games into the PGA database in one call.

    Every dict in *games* must share an identical set of keys, since
    they are inserted with a single bulk statement.

    :param games: list of dicts describing the games to add
    """
    sql.db_insert_bulk(PGA_DB, "games", games)
def add_or_update(name, runner, slug=None, **kwargs):
if not slug:
slug = slugify(name)

View file

@@ -54,19 +54,23 @@ class Sync(object):
if not not_in_local:
return set()
missing_slugs = set()
missing = []
for game in remote_library:
slug = game['slug']
# Sync
if slug in not_in_local:
logger.debug("Adding to local library: %s", slug)
pga.add_game(
game['name'], slug=slug, year=game['year'],
updated=game['updated'], steamid=game['steamid']
missing_slugs.add(slug)
missing.append(
{'name': game['name'],
'slug': slug,
'year': game['year'],
'updated': game['updated'],
'steamid': game['steamid']}
)
else:
not_in_local.discard(slug)
logger.debug("%d games added", len(not_in_local))
return not_in_local
pga.add_games_bulk(missing)
logger.debug("%d games added", len(missing))
return missing_slugs
@staticmethod
def sync_game_details(remote_library):

View file

@@ -16,28 +16,46 @@ class db_cursor(object):
def db_insert(db_path, table, fields):
    """Insert a single row into `table`.

    :param db_path: path to the SQLite database file
    :param table: name of the target table
    :param fields: dict mapping column names to the values of the new row
    """
    columns = ", ".join(fields.keys())
    # One "?" placeholder per column; slice strips the trailing ", "
    placeholders = ("?, " * len(fields))[:-2]
    field_values = _decode_utf8_values(fields.values())
    with db_cursor(db_path) as cursor:
        cursor.execute(
            "insert into {0}({1}) values ({2})".format(table,
                                                       columns,
                                                       placeholders),
            field_values
        )
def db_insert_bulk(db_path, table, fields_bulk):
    """Insert several rows with a single executemany call.

    The dicts must have an identical set of keys, since the column list
    and placeholders are derived from the first dict only.

    :param db_path: path to the SQLite database file
    :param table: name of the target table
    :type fields_bulk: list of dicts
    """
    if not fields_bulk:
        # Nothing to insert; also avoids IndexError on fields_bulk[0]
        return
    columns = list(fields_bulk[0].keys())
    # One "?" placeholder per column; slice strips the trailing ", "
    placeholders = ("?, " * len(columns))[:-2]
    # Build a fresh list of value rows instead of mutating the
    # caller's list in place (the original replaced each dict with
    # its decoded values, a surprising side effect for the caller).
    rows = [_decode_utf8_values(fields.values()) for fields in fields_bulk]
    with db_cursor(db_path) as cursor:
        cursor.executemany(
            "insert into {0}({1}) values ({2})".format(table,
                                                       ", ".join(columns),
                                                       placeholders),
            rows
        )
def db_update(db_path, table, updated_fields, row):
    """Update `table` with the values given in the dict `updated_fields`
    on the condition given with the tuple `row`.

    :param db_path: path to the SQLite database file
    :param table: name of the target table
    :param updated_fields: dict mapping column names to their new values
    :param row: (column_name, value) tuple forming the WHERE condition
    """
    # "a=?, b=?, c=?" — one placeholder per updated column
    columns = "=?, ".join(updated_fields.keys()) + "=?"
    field_values = _decode_utf8_values(updated_fields.values())
    condition_field = "{0}=?".format(row[0])
    condition_value = (row[1], )
    with db_cursor(db_path) as cursor:
        query = "UPDATE {0} SET {1} WHERE {2}".format(table, columns,
                                                      condition_field)
        cursor.execute(query, field_values + condition_value)
@@ -50,19 +68,19 @@ def db_delete(db_path, table, field, value):
def db_select(db_path, table, fields=None, condition=None):
if fields:
field_names = ", ".join(fields)
columns = ", ".join(fields)
else:
field_names = "*"
columns = "*"
with db_cursor(db_path) as cursor:
if condition:
assert len(condition) == 2
cursor.execute(
"SELECT {0} FROM {1} where {2}=?".format(
field_names, table, condition[0]
columns, table, condition[0]
), (condition[1], )
)
else:
cursor.execute("SELECT {0} FROM {1}".format(field_names, table))
cursor.execute("SELECT {0} FROM {1}".format(columns, table))
rows = cursor.fetchall()
column_names = [column[0] for column in cursor.description]
results = []