Skip to content

Commit

Permalink
mv user data to sqlite3
Browse files Browse the repository at this point in the history
delete old user_data.py. add database.py
add cache table and methods for e.g. remembering user settings.
clean up code.
  • Loading branch information
diversen7 committed Oct 3, 2024
1 parent dc8e87d commit 5fe7b71
Show file tree
Hide file tree
Showing 8 changed files with 237 additions and 199 deletions.
16 changes: 16 additions & 0 deletions bin/create_db_tables.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,10 +46,26 @@
CREATE INDEX idx_searches_user_id ON searches (user_id);
"""

create_cache_query = """
CREATE TABLE IF NOT EXISTS cache (
id INTEGER PRIMARY KEY,
key VARCHAR(128),
value TEXT,
unix_timestamp INTEGER DEFAULT 0
);
"""

create_cache_index_query = """
-- Generate index on key
CREATE INDEX idx_cache_key ON cache (key);
"""

# Register all DDL statements for execution, cache tables included.
sql_statements.extend(
    [
        create_booksmarks_query,
        create_booksmarks_index_query,
        create_searches_query,
        create_searches_index_query,
        create_cache_query,
        create_cache_index_query,
    ]
)


# Create the table
Expand Down
17 changes: 8 additions & 9 deletions example-config-aarhus/hooks.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,10 @@
from stadsarkiv_client.records import record_utils
from stadsarkiv_client.records import record_alter
from stadsarkiv_client.core import api
from stadsarkiv_client.core.user_data import UserData
import json
import csv
import os
from stadsarkiv_client.core import database


log = get_log()
Expand Down Expand Up @@ -54,20 +54,19 @@ async def after_login_success(self, response: dict) -> dict:
After a successful login.
"""
me = await api.me_get(self.request)
id = me["id"]
user_id = me["id"]
email = me["email"]

custom_data = UserData(me)
if not custom_data.get_custom_data("bookmarks_imported"):
cache_key = f"bookmarks_imported_{user_id}"
result = await database.cache_get(cache_key)

if not result:
bookmarks = _get_bookmarks_by_email(email)

for bookmark in bookmarks:
custom_data.append_bookmark(bookmark)
await database.bookmarks_insert(user_id, bookmark)

# This needs to be fixed in the webservice
custom_data.set_custom_value("bookmarks_imported", True)
data = custom_data.get_data()
response = await api.users_data_post(self.request, id=id, data=data)
await database.cache_set(cache_key, True)

return response

Expand Down
25 changes: 12 additions & 13 deletions example-config-demo/hooks.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from stadsarkiv_client.core.hooks_spec import HooksSpec
from stadsarkiv_client.records import record_utils
from stadsarkiv_client.records import record_alter
from stadsarkiv_client.core.user_data import UserData
from stadsarkiv_client.core import database
from stadsarkiv_client.core import api
from stadsarkiv_client.core.context import get_context
import json
Expand All @@ -21,13 +21,13 @@
bookmarks_file = os.path.join(base_dir, "..", "data", "bookmarks_with_emails.csv")


def get_bookmarks_by_email(email):
def _get_bookmarks_by_email(email):
""" "
Get bookmarks by email from csv file
"""
# file = bookmarks_file
file = bookmarks_file
resource_ids = []
with open(bookmarks_file, "r") as file:
with open(file, "r") as file:
reader = csv.DictReader(file)
for row in reader:
if row["email"] == email:
Expand Down Expand Up @@ -83,20 +83,19 @@ async def after_login_success(self, response: dict) -> dict:
After a successful login.
"""
me = await api.me_get(self.request)
id = me["id"]
user_id = me["id"]
email = me["email"]

custom_data = UserData(me)
if not custom_data.get_custom_data("bookmarks_imported"):
bookmarks = get_bookmarks_by_email(email)
cache_key = f"bookmarks_imported_{user_id}"
result = await database.cache_get(cache_key)

if not result:
bookmarks = _get_bookmarks_by_email(email)

for bookmark in bookmarks:
custom_data.append_bookmark(bookmark)
await database.bookmarks_insert(user_id, bookmark)

# This needs to be fixed in the webservice
# custom_data.set_key_value("bookmarks_imported", True)
data = custom_data.get_data()
response = await api.users_data_post(self.request, id=id, data=data)
await database.cache_set(cache_key, True)

return response

Expand Down
165 changes: 165 additions & 0 deletions stadsarkiv_client/core/database.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,165 @@
from stadsarkiv_client.core.logging import get_log
import sqlite3
import os
import typing
import json
import time
from typing import Any


DATABASE_URL = str(os.getenv("DATABASE_URL"))


log = get_log()


async def _get_db_connection() -> sqlite3.Connection:
    """
    Open a connection to the sqlite database at DATABASE_URL.

    Rows are returned as sqlite3.Row so columns can be accessed by name,
    and WAL journal mode is enabled for better concurrent read behavior.
    """
    conn = sqlite3.connect(DATABASE_URL)
    conn.row_factory = sqlite3.Row
    conn.execute("PRAGMA journal_mode=WAL;")
    return conn


async def bookmarks_insert(user_id, bookmark) -> bool:
    """
    Insert a bookmark row for a user.

    Returns True on success; on a sqlite error the transaction is rolled
    back, the error is logged and False is returned.
    """
    connection = await _get_db_connection()
    cursor = connection.cursor()

    try:
        values = {"user_id": user_id, "bookmark": bookmark}
        query = "INSERT INTO bookmarks (user_id, bookmark) VALUES (:user_id, :bookmark)"
        cursor.execute(query, values)
        connection.commit()
        return True

    except sqlite3.Error as e:
        # Fixed: message previously said "note", but this helper inserts bookmarks.
        log.error(f"Failed to insert bookmark: {e}")
        connection.rollback()
        return False
    finally:
        cursor.close()
        connection.close()


async def bookmarks_get(user_id) -> typing.Any:
    """
    Return all bookmark rows for a user as a list of sqlite3.Row.

    Returns None if the query failed (the error is logged). NOTE(review):
    callers should treat None and an empty list the same way.
    """
    connection = await _get_db_connection()
    cursor = connection.cursor()

    try:
        values = {"user_id": user_id}
        query = "SELECT * FROM bookmarks WHERE user_id = :user_id"
        cursor.execute(query, values)
        result = cursor.fetchall()
        return result

    except sqlite3.Error as e:
        # Fixed: message previously said "notes", but this helper reads bookmarks.
        log.error(f"Failed to get bookmarks: {e}")
        # Made the error-path return explicit (was an implicit None fall-through).
        return None
    finally:
        cursor.close()
        connection.close()


async def bookmarks_delete(user_id, bookmark_id) -> typing.Any:
    """
    Delete a single bookmark owned by a user.

    The user_id condition ensures one user cannot delete another user's
    bookmark. Errors are logged and the transaction rolled back.
    """
    connection = await _get_db_connection()
    cursor = connection.cursor()

    try:
        values = {"bookmark": bookmark_id, "user_id": user_id}
        query = "DELETE FROM bookmarks WHERE bookmark = :bookmark AND user_id = :user_id"
        cursor.execute(query, values)
        connection.commit()

    except sqlite3.Error as e:
        # Fixed: message previously said "note", but this helper deletes bookmarks.
        log.error(f"Failed to delete bookmark: {e}")
        connection.rollback()
    finally:
        cursor.close()
        connection.close()

"""
# Simple sqlite3 cache implementation
# Example usage:
```py
insert_value = None
# Get a result that is max 10 seconds old
cache_expire = 10
has_result = False
result = await database.cache_get("test", cache_expire)
if not result:
# Set a new cache value
insert_value = "".join(random.choices("abcdefghijklmnopqrstuvwxyz", k=10))
await database.cache_set("test", {"random": insert_value})
else:
has_result = True
return JSONResponse(
{
"message": "Test note inserted",
"result": result,
"has_result": has_result,
"expire": cache_expire,
"inserted_value": insert_value,
}
)
```
"""


async def cache_set(key: str, data: Any) -> bool:
    """
    Insert or replace a cache value.

    Any existing row for ``key`` is removed first, then ``data`` is stored
    JSON-serialized together with the current unix timestamp (used by
    cache_get for expiry). Returns True on success, False on failure.
    """
    json_data = json.dumps(data)

    connection = await _get_db_connection()
    try:
        connection.execute("DELETE FROM cache WHERE key = ?", (key,))
        connection.execute(
            "INSERT INTO cache (key, value, unix_timestamp) VALUES (?, ?, ?)",
            (key, json_data, int(time.time())),
        )
        connection.commit()
        return True

    except sqlite3.Error as e:
        # Fixed: message previously said "Failed to delete note" — a copy-paste
        # leftover from the bookmarks helpers; this function sets a cache value.
        log.error(f"Failed to set cache value: {e}")
        connection.rollback()
        return False
    finally:
        connection.close()


async def cache_get(key: str, expire_in: int = 0) -> Any:
"""
Will return the value if the key exists and is not expired
Will return None if the key does not exist or if the key is expired
"""
connection = await _get_db_connection()
try:
result = connection.execute("SELECT * FROM cache WHERE key = ?", (key,)).fetchone()

if result:
if expire_in == 0:
return json.loads(result["value"])

current_time = int(time.time())
if current_time - result["unix_timestamp"] < expire_in:
return json.loads(result["value"])
else:
await cache_delete(result["id"])
return None
finally:
connection.close()


async def cache_delete(id: int):
    """
    Remove the cache row with the given primary key ``id``.
    """
    conn = await _get_db_connection()
    try:
        conn.execute("DELETE FROM cache WHERE id = ?", (id,))
        conn.commit()
    finally:
        conn.close()
Loading

0 comments on commit 5fe7b71

Please sign in to comment.