# modules/db.py
import os
import re
import time, datetime
import sqlite3
import uuid

import globals

try:
    import mariadb
except ImportError:
    mariadb = None  # We handle gracefully if 'mariadb' isn't installed.


def checkenable_db_fk(db_conn):
    """
    Attempt to enable foreign key checks where it is relevant
    (i.e. in SQLite). For MariaDB/MySQL, nothing special is needed.
    """
    is_sqlite = "sqlite3" in str(type(db_conn)).lower()
    if is_sqlite:
        try:
            cursor = db_conn.cursor()
            # Try enabling foreign key checks
            cursor.execute("PRAGMA foreign_keys = ON;")
            cursor.close()
            db_conn.commit()
            globals.log("Enabled foreign key support in SQLite (PRAGMA foreign_keys=ON).", "DEBUG")
        except Exception as e:
            globals.log(f"Failed to enable foreign key support in SQLite: {e}", "WARNING")
    else:
        # For MariaDB/MySQL, foreign keys are typically enabled already (InnoDB)
        globals.log("Assuming DB is MariaDB/MySQL with FKs enabled", "DEBUG")


def init_db_connection(config):
    """
    Initializes a database connection based on config.json contents:
      - If config says 'use_mariadb', tries connecting to MariaDB.
      - If that fails (or not configured), falls back to SQLite.
      - Logs FATAL if neither can be established (the bot likely depends on DB).

    :param config: (dict) The loaded config.json data
    :return: a connection object (MariaDB or sqlite3.Connection), or None on failure
    """
    db_settings = config.get("database", {})
    use_mariadb = db_settings.get("use_mariadb", False)

    if use_mariadb and mariadb is not None:
        # Attempt MariaDB
        host = db_settings.get("mariadb_host", "localhost")
        user = db_settings.get("mariadb_user", "")
        password = db_settings.get("mariadb_password", "")
        dbname = db_settings.get("mariadb_dbname", "")
        port = int(db_settings.get("mariadb_port", 3306))

        if user and password and dbname:
            try:
                conn = mariadb.connect(
                    host=host,
                    user=user,
                    password=password,
                    database=dbname,
                    port=port
                )
                conn.autocommit = False  # We'll manage commits manually
                globals.log(f"Database connection established using MariaDB (host={host}, db={dbname}).")
                return conn
            except mariadb.Error as e:
                globals.log(f"Error connecting to MariaDB: {e}", "WARNING")
        else:
            globals.log("MariaDB config incomplete. Falling back to SQLite...", "WARNING")
    else:
        if use_mariadb and mariadb is None:
            globals.log("mariadb module not installed but use_mariadb=True. Falling back to SQLite...", "WARNING")

    # Fallback to local SQLite
    sqlite_path = db_settings.get("sqlite_path", "local_database.sqlite")
    try:
        conn = sqlite3.connect(sqlite_path)
        globals.log(f"Database connection established using local SQLite: {sqlite_path}", "DEBUG")
        return conn
    except sqlite3.Error as e:
        globals.log(f"Could not open local SQLite database '{sqlite_path}': {e}", "WARNING")

    # If neither MariaDB nor SQLite connected, that's fatal for the bot
    globals.log("No valid database connection could be established! Exiting...", "FATAL")
    return None
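
# Usage sketch (illustrative only; assumes `config` is the already-loaded
# config.json dict described above, e.g. {"database": {"use_mariadb": False,
# "sqlite_path": "local_database.sqlite"}}):
#
#   db_conn = init_db_connection(config)
#   if db_conn is None:
#       raise SystemExit("No database available")
#   checkenable_db_fk(db_conn)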


def run_db_operation(conn, operation, query, params=None):
    """
    Executes a parameterized query with basic screening for injection attempts:
      - 'operation' can be "read", "write", "update", "delete", "lookup", etc.
      - 'query' is the SQL statement, with placeholders ('?' for SQLite, '%s' for MariaDB).
      - 'params' is a tuple/list of parameters for the query (preferred for security).

    1) We do a minimal check for suspicious patterns, e.g. multiple statements or known bad keywords.
    2) We execute the query with parameters, and commit on write/update/delete.
    3) On read/lookup, we fetch and return rows. Otherwise, return rowcount.

    NOTE:
      - This is still not a replacement for well-structured queries and security best practices.
      - Always use parameterized queries wherever possible to avoid injection.
    """
    if conn is None:
        if globals.log:
            globals.log("run_db_operation called but no valid DB connection!", "FATAL")
        return None

    if params is None:
        params = ()

    # Basic screening for malicious usage (multiple statements, forced semicolons, suspicious SQL keywords, etc.)
    # This is minimal and can be expanded if needed.
    lowered = query.strip().lower()

    # Check for multiple statements separated by semicolons (beyond the last one)
    if lowered.count(";") > 1:
        if globals.log:
            globals.log("Query blocked: multiple SQL statements detected.", "WARNING")
            globals.log(f"Offending query: {query}", "WARNING")
        return None

    # Potentially dangerous SQL keywords
    forbidden_keywords = ["drop table", "union select", "exec ", "benchmark(", "sleep("]
    for kw in forbidden_keywords:
        if kw in lowered:
            if globals.log:
                globals.log(f"Query blocked due to forbidden keyword: '{kw}'", "WARNING")
                globals.log(f"Offending query: {query}", "WARNING")
            return None

    cursor = conn.cursor()
    try:
        cursor.execute(query, params)

        # If it's a write/update/delete, commit the changes
        write_ops = ("write", "insert", "update", "delete", "change")
        if operation.lower() in write_ops:
            conn.commit()
            if globals.log:
                globals.log(f"DB operation '{operation}' committed.", "DEBUG")
            # If the query is an INSERT, return the last inserted row ID
            if query.strip().lower().startswith("insert"):
                try:
                    return cursor.lastrowid
                except Exception as e:
                    if globals.log:
                        globals.log(f"Error retrieving lastrowid: {e}", "ERROR")
                    return cursor.rowcount
            else:
                return cursor.rowcount

        # If it's read/lookup, fetch results
        read_ops = ("read", "lookup", "select")
        if operation.lower() in read_ops:
            rows = cursor.fetchall()
            return rows
        else:
            return cursor.rowcount  # for insert/update/delete, rowcount can be helpful
    except Exception as e:
        # Rollback on any error
        conn.rollback()
        if globals.log:
            globals.log(f"Error during '{operation}' query execution: {e}", "ERROR")
        return None
    finally:
        cursor.close()
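
# Usage sketch (illustrative only; assumes the 'quotes' table created by
# ensure_quotes_table() below, and `some_uuid` is a placeholder value):
#
#   new_id = run_db_operation(db_conn, "write",
#                             "INSERT INTO quotes (QUOTE_TEXT, QUOTEE) VALUES (?, ?)",
#                             ("hello", some_uuid))
#   rows = run_db_operation(db_conn, "read",
#                           "SELECT QUOTE_TEXT FROM quotes WHERE QUOTEE = ?",
#                           (some_uuid,))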


#######################
# Ensure quotes table exists
#######################

def ensure_quotes_table(db_conn):
    """
    Checks if 'quotes' table exists. If not, attempts to create it.
    Raises an Exception or logs errors if creation fails.
    """

    # 1) Determine if DB is sqlite or mariadb for the system table check
    is_sqlite = "sqlite3" in str(type(db_conn)).lower()

    # 2) Check existence
    if is_sqlite:
        # For SQLite: check the sqlite_master table
        check_sql = """
            SELECT name
            FROM sqlite_master
            WHERE type='table'
              AND name='quotes'
        """
    else:
        # For MariaDB/MySQL: check information_schema
        check_sql = """
            SELECT table_name
            FROM information_schema.tables
            WHERE table_name = 'quotes'
              AND table_schema = DATABASE()
        """

    rows = run_db_operation(db_conn, "read", check_sql)
    if rows and rows[0] and rows[0][0]:
        # The table 'quotes' already exists
        globals.log("Table 'quotes' already exists, skipping creation.", "DEBUG")
        return

    # 3) Table does NOT exist => create it
    globals.log("Table 'quotes' does not exist; creating now...")

    if is_sqlite:
        create_table_sql = """
            CREATE TABLE quotes (
                ID INTEGER PRIMARY KEY AUTOINCREMENT,
                QUOTE_TEXT TEXT,
                QUOTEE TEXT,
                QUOTE_CHANNEL TEXT,
                QUOTE_DATETIME TEXT,
                QUOTE_GAME TEXT,
                QUOTE_REMOVED BOOLEAN DEFAULT 0,
                QUOTE_REMOVED_BY TEXT,
                QUOTE_REMOVED_DATETIME TEXT DEFAULT NULL,
                FOREIGN KEY (QUOTEE) REFERENCES users(UUID),
                FOREIGN KEY (QUOTE_REMOVED_BY) REFERENCES users(UUID)
            )
        """
    else:
        create_table_sql = """
            CREATE TABLE quotes (
                ID INT PRIMARY KEY AUTO_INCREMENT,
                QUOTE_TEXT TEXT,
                QUOTEE VARCHAR(100),
                QUOTE_CHANNEL VARCHAR(100),
                QUOTE_DATETIME DATETIME DEFAULT CURRENT_TIMESTAMP,
                QUOTE_GAME VARCHAR(200),
                QUOTE_REMOVED BOOLEAN DEFAULT FALSE,
                QUOTE_REMOVED_BY VARCHAR(100),
                QUOTE_REMOVED_DATETIME DATETIME DEFAULT NULL,
                FOREIGN KEY (QUOTEE) REFERENCES users(UUID) ON DELETE SET NULL,
                FOREIGN KEY (QUOTE_REMOVED_BY) REFERENCES users(UUID) ON DELETE SET NULL
            )
        """

    result = run_db_operation(db_conn, "write", create_table_sql)
    if result is None:
        # If run_db_operation returns None on error, handle or raise:
        error_msg = "Failed to create 'quotes' table!"
        globals.log(error_msg, "CRITICAL")
        raise RuntimeError(error_msg)

    globals.log("Successfully created table 'quotes'.")
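
# Typical startup wiring (illustrative only; the real call site lives outside
# this module, e.g. in the bot's boot sequence). ensure_users_table() runs
# first because 'quotes', 'chat_log', etc. declare foreign keys against
# Users(UUID):
#
#   db_conn = init_db_connection(config)
#   checkenable_db_fk(db_conn)
#   ensure_users_table(db_conn)
#   ensure_platform_mapping_table(db_conn)
#   ensure_quotes_table(db_conn)
#   ensure_chatlog_table(db_conn)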


#######################
# Ensure 'users' table
#######################

def ensure_users_table(db_conn):
    """
    Ensures the 'Users' table exists and has the necessary columns.
    """
    is_sqlite = "sqlite3" in str(type(db_conn)).lower()

    if is_sqlite:
        check_sql = "SELECT name FROM sqlite_master WHERE type='table' AND name='Users'"
    else:
        check_sql = """
            SELECT table_name FROM information_schema.tables
            WHERE table_name = 'Users' AND table_schema = DATABASE()
        """

    rows = run_db_operation(db_conn, "read", check_sql)
    if rows and rows[0]:
        globals.log("Table 'Users' already exists, checking for column updates.", "DEBUG")

        # Ensure 'last_seen' column exists
        column_check_sql = "PRAGMA table_info(Users)" if is_sqlite else """
            SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS
            WHERE TABLE_NAME = 'Users' AND COLUMN_NAME = 'last_seen'
        """
        columns = run_db_operation(db_conn, "read", column_check_sql)
        if not any("last_seen" in col for col in (columns or [])):
            globals.log("Adding 'last_seen' column to 'Users'...", "INFO")
            # SQLite cannot ADD COLUMN with a non-constant default such as
            # CURRENT_TIMESTAMP, so the column is added without a default there.
            alter_sql = "ALTER TABLE Users ADD COLUMN last_seen TEXT" if is_sqlite else """
                ALTER TABLE Users ADD COLUMN last_seen DATETIME DEFAULT CURRENT_TIMESTAMP
            """
            run_db_operation(db_conn, "write", alter_sql)

        return

    globals.log("Table 'Users' does not exist; creating now...", "INFO")

    create_sql = """
        CREATE TABLE Users (
            UUID TEXT PRIMARY KEY,
            Unified_Username TEXT,
            datetime_linked TEXT,
            user_is_banned BOOLEAN DEFAULT 0,
            user_is_bot BOOLEAN DEFAULT 0,
            last_seen TEXT DEFAULT CURRENT_TIMESTAMP
        )
    """ if is_sqlite else """
        CREATE TABLE Users (
            UUID VARCHAR(36) PRIMARY KEY,
            Unified_Username VARCHAR(100),
            datetime_linked DATETIME,
            user_is_banned BOOLEAN DEFAULT FALSE,
            user_is_bot BOOLEAN DEFAULT FALSE,
            last_seen DATETIME DEFAULT CURRENT_TIMESTAMP
        )
    """

    result = run_db_operation(db_conn, "write", create_sql)
    if result is None:
        error_msg = "Failed to create 'Users' table!"
        globals.log(error_msg, "CRITICAL")
        raise RuntimeError(error_msg)

    globals.log("Successfully created table 'Users'.", "INFO")


#######################
# Ensure 'platform_mapping' table
#######################

def ensure_platform_mapping_table(db_conn):
    """
    Ensures the 'Platform_Mapping' table exists and has the necessary columns.
    """
    is_sqlite = "sqlite3" in str(type(db_conn)).lower()

    if is_sqlite:
        check_sql = """
            SELECT name
            FROM sqlite_master
            WHERE type='table'
              AND name='Platform_Mapping'
        """
    else:
        check_sql = """
            SELECT table_name
            FROM information_schema.tables
            WHERE table_name = 'Platform_Mapping'
              AND table_schema = DATABASE()
        """

    rows = run_db_operation(db_conn, "read", check_sql)
    if rows and rows[0] and rows[0][0]:
        globals.log("Table 'Platform_Mapping' already exists, checking for column updates.", "DEBUG")

        # Check if last_seen column exists
        column_check_sql = """
            PRAGMA table_info(Platform_Mapping)
        """ if is_sqlite else """
            SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS
            WHERE TABLE_NAME = 'Platform_Mapping' AND COLUMN_NAME = 'last_seen'
        """
        columns = run_db_operation(db_conn, "read", column_check_sql)

        # If column doesn't exist, add it
        if not any("last_seen" in col for col in (columns or [])):
            globals.log("Adding 'last_seen' column to 'Platform_Mapping'...", "INFO")
            # SQLite cannot ADD COLUMN with a non-constant default such as
            # CURRENT_TIMESTAMP, so the column is added without a default there.
            alter_sql = """
                ALTER TABLE Platform_Mapping ADD COLUMN last_seen TEXT
            """ if is_sqlite else """
                ALTER TABLE Platform_Mapping ADD COLUMN last_seen DATETIME DEFAULT CURRENT_TIMESTAMP
            """
            run_db_operation(db_conn, "write", alter_sql)

        return

    globals.log("Table 'Platform_Mapping' does not exist; creating now...", "INFO")

    if is_sqlite:
        create_sql = """
            CREATE TABLE Platform_Mapping (
                Platform_User_ID TEXT,
                Platform_Type TEXT,
                UUID TEXT,
                Display_Name TEXT,
                Username TEXT,
                last_seen TEXT DEFAULT CURRENT_TIMESTAMP,
                PRIMARY KEY (Platform_User_ID, Platform_Type),
                FOREIGN KEY (UUID) REFERENCES Users(UUID) ON DELETE CASCADE
            )
        """
    else:
        create_sql = """
            CREATE TABLE Platform_Mapping (
                Platform_User_ID VARCHAR(100),
                Platform_Type ENUM('Discord', 'Twitch'),
                UUID VARCHAR(36),
                Display_Name VARCHAR(100),
                Username VARCHAR(100),
                last_seen DATETIME DEFAULT CURRENT_TIMESTAMP,
                PRIMARY KEY (Platform_User_ID, Platform_Type),
                FOREIGN KEY (UUID) REFERENCES Users(UUID) ON DELETE CASCADE
            )
        """

    result = run_db_operation(db_conn, "write", create_sql)
    if result is None:
        error_msg = "Failed to create 'Platform_Mapping' table!"
        globals.log(error_msg, "CRITICAL")
        raise RuntimeError(error_msg)

    globals.log("Successfully created table 'Platform_Mapping'.", "INFO")


########################
# Lookup user function
########################

def lookup_user(db_conn, identifier: str, identifier_type: str, target_identifier: str = None):
    """
    Looks up a user in the 'Users' table using 'Platform_Mapping' for platform-specific IDs, UUIDs, usernames, and display names.

    identifier_type can be:
      - "uuid" (to look up by UUID directly)
      - "unified_username"
      - "discord_user_id" / "twitch_user_id" (platform-specific user ID)
      - "discord_display_name" / "twitch_display_name" (platform-specific display name)
      - "discord_username" / "twitch_username" (platform-specific raw username)

    Returns:
      If target_identifier is None: a dictionary with the following keys:
        {
          "uuid": str,
          "unified_username": str,
          "datetime_linked": str,
          "user_is_banned": bool or int,
          "user_is_bot": bool or int,
          "platform_user_id": str,
          "platform_display_name": str,
          "platform_username": str,
          "platform_type": str
        }
      If target_identifier is provided: the value from the record corresponding to that key.
      If the lookup fails or the parameters are invalid: None.
    """

    PRINT_QUERY_DEBUG = False

    # Debug: Log the inputs
    if PRINT_QUERY_DEBUG: globals.log(f"lookup_user() called with: identifier='{identifier}', identifier_type='{identifier_type}', target_identifier='{target_identifier}'", "DEBUG")

    # Normalize identifier_type to lowercase
    identifier_type = identifier_type.lower()

    # Define valid identifier types with SQL column mappings
    valid_identifier_types = {
        "uuid": "u.UUID",
        "unified_username": "u.Unified_Username",
        "discord_user_id": "pm.Platform_User_ID",
        "twitch_user_id": "pm.Platform_User_ID",
        "discord_display_name": "pm.Display_Name",
        "twitch_display_name": "pm.Display_Name",
        "discord_username": "pm.Username",
        "twitch_username": "pm.Username",
    }

    # Extract platform from identifier type (if applicable)
    platform_map = {
        "discord_user_id": "Discord",
        "twitch_user_id": "Twitch",
        "discord_display_name": "Discord",
        "twitch_display_name": "Twitch",
        "discord_username": "Discord",
        "twitch_username": "Twitch",
    }

    if identifier_type not in valid_identifier_types:
        globals.log(f"lookup_user error: invalid identifier_type '{identifier_type}'", "WARNING")
        return None

    column_to_lookup = valid_identifier_types[identifier_type]
    platform_filter = platform_map.get(identifier_type, None)

    # Construct query
    query = f"""
        SELECT
            u.UUID,
            u.Unified_Username,
            u.datetime_linked,
            u.user_is_banned,
            u.user_is_bot,
            pm.Platform_User_ID,
            pm.Display_Name,
            pm.Username,
            pm.Platform_Type
        FROM Users u
        LEFT JOIN Platform_Mapping pm ON u.UUID = pm.UUID
        WHERE {column_to_lookup} = ?
    """

    params = [identifier]

    # Apply platform filter if applicable
    if platform_filter:
        query += " AND pm.Platform_Type = ?"
        params.append(platform_filter)

    query += " LIMIT 1"

    # Debug: Log the query and parameters
    if PRINT_QUERY_DEBUG: globals.log(f"lookup_user() executing query: {query} with params={params}", "DEBUG")

    # Run the query
    rows = run_db_operation(db_conn, "read", query, tuple(params))

    # Debug: Log the result of the query
    if PRINT_QUERY_DEBUG: globals.log(f"lookup_user() query result: {rows}", "DEBUG")

    # Handle no result case
    if not rows:
        globals.log(f"lookup_user: No user found for {identifier_type}='{identifier}'", "INFO")
        return None

    # Convert the row to a dictionary
    row = rows[0]
    user_data = {
        "uuid": row[0],
        "unified_username": row[1],
        "datetime_linked": row[2],
        "user_is_banned": row[3],
        "user_is_bot": row[4],
        "platform_user_id": row[5],
        "platform_display_name": row[6],
        "platform_username": row[7],
        "platform_type": row[8]
    }

    # Debug: Log the constructed user data
    if PRINT_QUERY_DEBUG: globals.log(f"lookup_user() constructed user_data: {user_data}", "DEBUG")

    # If target_identifier is provided, return just that value
    if target_identifier:
        target_identifier = target_identifier.lower()
        if target_identifier in user_data:
            if PRINT_QUERY_DEBUG: globals.log(f"lookup_user() returning target_identifier='{target_identifier}' with value='{user_data[target_identifier]}'", "DEBUG")
            return user_data[target_identifier]
        else:
            globals.log(f"lookup_user error: target_identifier '{target_identifier}' not found in user_data. Available keys: {list(user_data.keys())}", "WARNING")
            return None

    if PRINT_QUERY_DEBUG: globals.log(f"lookup_user() returning full user_data: {user_data}", "DEBUG")
    return user_data
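
# Usage sketch (illustrative only; the IDs/names below are made-up placeholders):
#
#   user = lookup_user(db_conn, "123456789012345678", "discord_user_id")
#   if user:
#       print(user["unified_username"], user["platform_type"])
#
#   # Or fetch a single field directly:
#   uuid_only = lookup_user(db_conn, "SomeTwitchName", "twitch_username",
#                           target_identifier="uuid")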


def user_lastseen(db_conn, UUID: str, platform_name: str = None, platform_user_id: str | int = None, lookup: bool = False, update: bool = False):
    """
    Handles user last-seen updates and lookups.

    - `lookup=True`: Fetches the last-seen timestamp.
    - `update=True`: Updates the last-seen timestamp.
    - If platform_name and platform_user_id are provided, the query will be scoped to that account.
    - Otherwise, it applies to all accounts unified under that UUID.
    """
    if not UUID:
        globals.log("UUID is required for user_lastseen()", "ERROR")
        return None

    if lookup:
        query = """
            SELECT last_seen FROM Platform_Mapping WHERE UUID = ?
        """ if not platform_name or not platform_user_id else """
            SELECT last_seen FROM Platform_Mapping WHERE UUID = ? AND Platform_Type = ? AND Platform_User_ID = ?
        """

        params = (UUID,) if not platform_name or not platform_user_id else (UUID, platform_name, str(platform_user_id))
        result = run_db_operation(db_conn, "read", query, params)

    if update:
        update_sql = """
            UPDATE Platform_Mapping SET last_seen = CURRENT_TIMESTAMP WHERE UUID = ?
        """ if not platform_name or not platform_user_id else """
            UPDATE Platform_Mapping SET last_seen = CURRENT_TIMESTAMP WHERE UUID = ? AND Platform_Type = ? AND Platform_User_ID = ?
        """

        params = (UUID,) if not platform_name or not platform_user_id else (UUID, platform_name, str(platform_user_id))
        run_db_operation(db_conn, "write", update_sql, params)
        globals.log(f"Updated last_seen timestamp for UUID={UUID}", "DEBUG")

    if lookup:
        if result and result[0]:
            return result[0][0]  # Return last seen datetime
        return None  # No data found
    elif update and not lookup:
        return True

    return False  # No action taken
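
# Usage sketch (illustrative only; `user_uuid` is assumed to come from
# lookup_user() above, and "12345" is a placeholder platform user ID):
#
#   user_lastseen(db_conn, user_uuid, update=True)            # touch all linked accounts
#   last = user_lastseen(db_conn, user_uuid, "Twitch", "12345",
#                        lookup=True)                          # read one account's timestamp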


def ensure_chatlog_table(db_conn):
    """
    Ensures the 'chat_log' table exists, updating the schema to use a UUID primary key
    and an additional column for platform-specific message IDs.
    """

    is_sqlite = "sqlite3" in str(type(db_conn)).lower()

    # Check if table exists
    check_sql = """
        SELECT name FROM sqlite_master WHERE type='table' AND name='chat_log'
    """ if is_sqlite else """
        SELECT table_name FROM information_schema.tables
        WHERE table_name = 'chat_log' AND table_schema = DATABASE()
    """

    rows = run_db_operation(db_conn, "read", check_sql)
    if rows and rows[0] and rows[0][0]:
        globals.log("Table 'chat_log' already exists, skipping creation.", "DEBUG")
        return

    # Table does not exist, create it
    globals.log("Table 'chat_log' does not exist; creating now...", "INFO")

    create_sql = """
        CREATE TABLE chat_log (
            UUID TEXT PRIMARY KEY,
            PLATFORM_MESSAGE_ID TEXT DEFAULT NULL,
            USER_UUID TEXT,
            MESSAGE_CONTENT TEXT,
            PLATFORM TEXT,
            CHANNEL TEXT,
            DATETIME TEXT DEFAULT CURRENT_TIMESTAMP,
            ATTACHMENTS TEXT,
            FOREIGN KEY (USER_UUID) REFERENCES users(UUID)
        )
    """ if is_sqlite else """
        CREATE TABLE chat_log (
            UUID VARCHAR(36) PRIMARY KEY,
            PLATFORM_MESSAGE_ID VARCHAR(100) DEFAULT NULL,
            USER_UUID VARCHAR(36),
            MESSAGE_CONTENT TEXT,
            PLATFORM VARCHAR(100),
            CHANNEL VARCHAR(100),
            DATETIME DATETIME DEFAULT CURRENT_TIMESTAMP,
            ATTACHMENTS TEXT,
            FOREIGN KEY (USER_UUID) REFERENCES users(UUID) ON DELETE SET NULL
        )
    """

    result = run_db_operation(db_conn, "write", create_sql)
    if result is None:
        error_msg = "Failed to create 'chat_log' table!"
        globals.log(error_msg, "CRITICAL")
        raise RuntimeError(error_msg)

    globals.log("Successfully created table 'chat_log'.", "INFO")


def log_message(db_conn, identifier, identifier_type, message_content, platform, channel, attachments=None, platform_message_id=None):
    """
    Logs a message in 'chat_log' with UUID fetched using the Platform_Mapping structure.

    - Uses a UUID as the primary key for uniqueness across platforms.
    - Stores platform-specific message IDs when provided.
    - Logs a warning if a message ID is expected but not provided.
    """

    # Get UUID using lookup_user
    user_data = lookup_user(db_conn, identifier, identifier_type)
    if not user_data:
        globals.log(f"User not found for {identifier_type}='{identifier}'", "WARNING")
        return

    user_uuid = user_data["uuid"]
    message_uuid = str(uuid.uuid4())  # Generate a new UUID for the entry

    # Determine if a message ID is required for this platform
    requires_message_id = platform.startswith("discord") or platform == "twitch"

    if requires_message_id and not platform_message_id:
        globals.log(f"Warning: Platform '{platform}' usually requires a message ID, but none was provided.", "WARNING")

    if attachments is None or "https://" not in attachments:
        attachments = ""

    insert_sql = """
        INSERT INTO chat_log (
            UUID,
            PLATFORM_MESSAGE_ID,
            USER_UUID,
            MESSAGE_CONTENT,
            PLATFORM,
            CHANNEL,
            ATTACHMENTS
        ) VALUES (?, ?, ?, ?, ?, ?, ?)
    """
    params = (message_uuid, platform_message_id, user_uuid, message_content, platform, channel, attachments)
    rowcount = run_db_operation(db_conn, "write", insert_sql, params)

    if rowcount and rowcount > 0:
        globals.log(f"Logged message for UUID={user_uuid} with Message UUID={message_uuid}.", "DEBUG")
    else:
        globals.log("Failed to log message in 'chat_log'.", "ERROR")
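
# Usage sketch (illustrative only; IDs and channel names are placeholders):
#
#   log_message(db_conn,
#               identifier="123456789012345678",
#               identifier_type="discord_user_id",
#               message_content="hello world",
#               platform="discord",
#               channel="#general",
#               platform_message_id="987654321098765432")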


def ensure_userhowls_table(db_conn):
    """
    Checks if 'user_howls' table exists; if not, creates it:
      ID (PK) | UUID (FK -> users.UUID) | HOWL (int) | DATETIME (auto timestamp)
    """
    is_sqlite = "sqlite3" in str(type(db_conn)).lower()

    # Existence check
    if is_sqlite:
        check_sql = """
            SELECT name
            FROM sqlite_master
            WHERE type='table'
              AND name='user_howls'
        """
    else:
        check_sql = """
            SELECT table_name
            FROM information_schema.tables
            WHERE table_name = 'user_howls'
              AND table_schema = DATABASE()
        """

    rows = run_db_operation(db_conn, "read", check_sql)
    if rows and rows[0] and rows[0][0]:
        globals.log("Table 'user_howls' already exists, skipping creation.", "DEBUG")
        return

    globals.log("Table 'user_howls' does not exist; creating now...", "INFO")

    if is_sqlite:
        create_sql = """
            CREATE TABLE user_howls (
                ID INTEGER PRIMARY KEY AUTOINCREMENT,
                UUID TEXT,
                HOWL INT,
                DATETIME TEXT DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (UUID) REFERENCES users(UUID)
            )
        """
    else:
        create_sql = """
            CREATE TABLE user_howls (
                ID INT PRIMARY KEY AUTO_INCREMENT,
                UUID VARCHAR(36),
                HOWL INT,
                DATETIME DATETIME DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (UUID) REFERENCES users(UUID) ON DELETE SET NULL
            )
        """

    result = run_db_operation(db_conn, "write", create_sql)
    if result is None:
        err_msg = "Failed to create 'user_howls' table!"
        globals.log(err_msg, "ERROR")
        raise RuntimeError(err_msg)

    globals.log("Successfully created table 'user_howls'.", "INFO")


def insert_howl(db_conn, user_uuid, howl_value):
    """
    Insert a row into user_howls with the user's UUID, the integer 0-100,
    and DATETIME defaulting to now.
    """
    sql = """
        INSERT INTO user_howls (UUID, HOWL)
        VALUES (?, ?)
    """
    params = (user_uuid, howl_value)
    rowcount = run_db_operation(db_conn, "write", sql, params)
    if rowcount and rowcount > 0:
        globals.log(f"Recorded a {howl_value}% howl for UUID={user_uuid}.", "DEBUG")
    else:
        globals.log(f"Failed to record {howl_value}% howl for UUID={user_uuid}.", "ERROR")


def get_howl_stats(db_conn, user_uuid):
    """
    Returns a dict with { 'count': int, 'average': float, 'count_zero': int, 'count_hundred': int }
    or None if there are no rows at all for that UUID.
    """
    sql = """
        SELECT
            COUNT(*),
            AVG(HOWL),
            SUM(HOWL=0),
            SUM(HOWL=100)
        FROM user_howls
        WHERE UUID = ?
    """
    rows = run_db_operation(db_conn, "read", sql, (user_uuid,))
    if not rows:
        return None

    row = rows[0]  # (count, avg, zero_count, hundred_count)
    count = row[0] if row[0] else 0
    avg = float(row[1]) if row[1] else 0.0
    zero_count = row[2] if row[2] else 0
    hundred_count = row[3] if row[3] else 0

    if count < 1:
        return None  # user has no howls
    return {
        "count": count,
        "average": avg,
        "count_zero": zero_count,
        "count_hundred": hundred_count
    }
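
# Usage sketch (illustrative only; `user_uuid` comes from lookup_user() above):
#
#   insert_howl(db_conn, user_uuid, 87)
#   stats = get_howl_stats(db_conn, user_uuid)
#   if stats:
#       print(f"{stats['count']} howls, averaging {stats['average']:.1f}%")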


def get_global_howl_stats(db_conn):
    """
    Returns a dictionary with total howls, average howl percentage, unique users,
    and counts of extreme (0% and 100%) howls.
    """
    sql = """
        SELECT COUNT(*) AS total_howls,
               AVG(HOWL) AS average_howl,
               COUNT(DISTINCT UUID) AS unique_users,
               SUM(HOWL = 0) AS count_zero,
               SUM(HOWL = 100) AS count_hundred
        FROM user_howls
    """
    rows = run_db_operation(db_conn, "read", sql)

    if not rows or not rows[0] or rows[0][0] is None:
        return None  # No howl data exists

    return {
        "total_howls": rows[0][0],
        "average_howl": float(rows[0][1]) if rows[0][1] is not None else 0.0,
        "unique_users": rows[0][2],
        "count_zero": rows[0][3],
        "count_hundred": rows[0][4],
    }


def ensure_discord_activity_table(db_conn):
    """
    Ensures the 'discord_activity' table exists.
    Logs voice events, cameras, streaming, gaming, and Discord activities.
    """
    is_sqlite = "sqlite3" in str(type(db_conn)).lower()

    if is_sqlite:
        check_sql = "SELECT name FROM sqlite_master WHERE type='table' AND name='discord_activity'"
    else:
        check_sql = """
            SELECT table_name FROM information_schema.tables
            WHERE table_name = 'discord_activity' AND table_schema = DATABASE()
        """

    rows = run_db_operation(db_conn, "read", check_sql)
    if rows and rows[0]:
        globals.log("Table 'discord_activity' already exists, skipping creation.", "DEBUG")
        return

    globals.log("Creating 'discord_activity' table...", "INFO")

    if is_sqlite:
        create_sql = """
            CREATE TABLE discord_activity (
                ID INTEGER PRIMARY KEY AUTOINCREMENT,
                UUID TEXT,
                ACTION TEXT CHECK(ACTION IN (
                    'JOIN', 'LEAVE', 'MUTE', 'UNMUTE', 'DEAFEN', 'UNDEAFEN',
                    'STREAM_START', 'STREAM_STOP', 'CAMERA_ON', 'CAMERA_OFF',
                    'GAME_START', 'GAME_STOP', 'LISTENING_SPOTIFY', 'DISCORD_ACTIVITY', 'VC_MOVE'
                )),
                GUILD_ID TEXT,
                VOICE_CHANNEL TEXT,
                ACTION_DETAIL TEXT DEFAULT NULL,
                DATETIME TEXT DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (UUID) REFERENCES users(UUID)
            )
        """
    else:
        create_sql = """
            CREATE TABLE discord_activity (
                ID INT PRIMARY KEY AUTO_INCREMENT,
                UUID VARCHAR(36),
                ACTION ENUM(
                    'JOIN', 'LEAVE', 'MUTE', 'UNMUTE', 'DEAFEN', 'UNDEAFEN',
                    'STREAM_START', 'STREAM_STOP', 'CAMERA_ON', 'CAMERA_OFF',
                    'GAME_START', 'GAME_STOP', 'LISTENING_SPOTIFY', 'DISCORD_ACTIVITY', 'VC_MOVE'
                ),
                GUILD_ID VARCHAR(36),
                VOICE_CHANNEL VARCHAR(100),
                ACTION_DETAIL TEXT DEFAULT NULL,
                DATETIME DATETIME DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (UUID) REFERENCES users(UUID) ON DELETE SET NULL
            )
        """

    result = None
    try:
        result = run_db_operation(db_conn, "write", create_sql)
    except Exception as e:
        globals.log(f"Unable to create the table: discord_activity: {e}", "ERROR")
    if result is None:
        globals.log("Failed to create 'discord_activity' table!", "CRITICAL")
        raise RuntimeError("Database table creation failed.")

    globals.log("Successfully created table 'discord_activity'.", "INFO")


def log_discord_activity(db_conn, guild_id, user_identifier, action, voice_channel, action_detail=None):
    """
    Logs Discord activities with duplicate detection to prevent redundant logs.
    """

    # Resolve UUID using the new Platform_Mapping
    user_data = lookup_user(db_conn, user_identifier, identifier_type="discord_user_id")
    if not user_data:
        globals.log(f"User not found for Discord ID: {user_identifier}", "WARNING")
        return

    user_uuid = user_data["uuid"]

    # Prepare the voice_channel value (if it's an object with a name, use that).
    channel_val = voice_channel.name if (voice_channel and hasattr(voice_channel, "name")) else voice_channel

    # Duplicate Detection Logic
    def normalize_detail(detail):
        """Normalize detail for comparison (lowercase, stripped of whitespace)."""
        return detail.strip().lower() if detail else None

    DUPLICATE_THRESHOLD = datetime.timedelta(minutes=5)
    NUM_RECENT_ENTRIES = 5
    now = datetime.datetime.now()
    normalized_new = normalize_detail(action_detail)

    # Query the last NUM_RECENT_ENTRIES events for this user and action.
    query = """
        SELECT DATETIME, ACTION_DETAIL
        FROM discord_activity
        WHERE UUID = ? AND ACTION = ?
        ORDER BY DATETIME DESC
        LIMIT ?
    """
    rows = run_db_operation(db_conn, "read", query, params=(user_uuid, action, NUM_RECENT_ENTRIES))

    last_same, last_different = None, None
    for row in rows or []:
        dt_str, detail = row
        try:
            dt = datetime.datetime.strptime(dt_str, "%Y-%m-%d %H:%M:%S")
        except Exception as e:
            globals.log(f"Error parsing datetime '{dt_str}': {e}", "ERROR")
            continue
        normalized_existing = normalize_detail(detail)
        if normalized_existing == normalized_new:
            if last_same is None or dt > last_same:
                last_same = dt
        else:
            if last_different is None or dt > last_different:
                last_different = dt

    # Check duplicate conditions
    if last_same is not None:
        if (last_different is None) or (last_same > last_different):
            if now - last_same < DUPLICATE_THRESHOLD:
                globals.log(f"Duplicate {action} event for {user_uuid} within threshold; skipping log.", "DEBUG")
                return

    # Insert the new event
    insert_sql = """
        INSERT INTO discord_activity (UUID, ACTION, GUILD_ID, VOICE_CHANNEL, ACTION_DETAIL)
        VALUES (?, ?, ?, ?, ?)
    """ if "sqlite3" in str(type(db_conn)).lower() else """
        INSERT INTO discord_activity (UUID, ACTION, GUILD_ID, VOICE_CHANNEL, ACTION_DETAIL)
        VALUES (%s, %s, %s, %s, %s)
    """
    params = (user_uuid, action, guild_id, channel_val, action_detail)
    rowcount = run_db_operation(db_conn, "write", insert_sql, params)

    if rowcount and rowcount > 0:
        detail_str = f" ({action_detail})" if action_detail else ""
        globals.log(f"Logged Discord activity for UUID={user_uuid} in Guild {guild_id}: {action}{detail_str}", "DEBUG")
    else:
        globals.log("Failed to log Discord activity.", "ERROR")
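
# Usage sketch (illustrative only; in the real bot this would be called from a
# discord.py voice-state event handler, where `member` and `after` are the
# handler's discord.py objects -- they are not defined in this module):
#
#   log_discord_activity(db_conn,
#                        guild_id=str(member.guild.id),
#                        user_identifier=str(member.id),
#                        action="JOIN",
#                        voice_channel=after.channel,
#                        action_detail=None)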


def ensure_bot_events_table(db_conn):
    """
    Ensures the 'bot_events' table exists, which logs major bot-related events.
    """
    is_sqlite = "sqlite3" in str(type(db_conn)).lower()

    # Check if table exists
    check_sql = "SELECT name FROM sqlite_master WHERE type='table' AND name='bot_events'" if is_sqlite else """
        SELECT table_name FROM information_schema.tables
        WHERE table_name = 'bot_events' AND table_schema = DATABASE()
    """

    rows = run_db_operation(db_conn, "read", check_sql)
    if rows and rows[0]:
        globals.log("Table 'bot_events' already exists, skipping creation.", "DEBUG")
        return

    globals.log("Creating 'bot_events' table...", "INFO")

    # Define SQL schema
    create_sql = """
        CREATE TABLE bot_events (
            EVENT_ID INTEGER PRIMARY KEY AUTOINCREMENT,
            EVENT_TYPE TEXT,
            EVENT_DETAILS TEXT,
            DATETIME TEXT DEFAULT CURRENT_TIMESTAMP
        )
    """ if is_sqlite else """
        CREATE TABLE bot_events (
            EVENT_ID INT PRIMARY KEY AUTO_INCREMENT,
            EVENT_TYPE VARCHAR(50),
            EVENT_DETAILS TEXT,
            DATETIME DATETIME DEFAULT CURRENT_TIMESTAMP
        )
    """

    # Create the table
    result = run_db_operation(db_conn, "write", create_sql)
    if result is None:
        globals.log("Failed to create 'bot_events' table!", "CRITICAL")
        raise RuntimeError("Database table creation failed.")

    globals.log("Successfully created table 'bot_events'.", "INFO")


def log_bot_event(db_conn, event_type, event_details):
    """
    Logs a bot event (e.g., startup, shutdown, disconnection).
    """
    sql = """
        INSERT INTO bot_events (EVENT_TYPE, EVENT_DETAILS)
        VALUES (?, ?)
    """
    params = (event_type, event_details)
    rowcount = run_db_operation(db_conn, "write", sql, params)

    if rowcount and rowcount > 0:
        globals.log(f"Logged bot event: {event_type} - {event_details}", "DEBUG")
    else:
        globals.log("Failed to log bot event.", "ERROR")


def get_event_summary(db_conn, time_span="7d"):
    """
    Retrieves bot event statistics based on a given time span.
    Supports:
      - "7d" (7 days)
      - "1m" (1 month)
      - "24h" (last 24 hours)
    Returns:
      OrderedDict with event statistics.
    """
    from collections import OrderedDict

    # Time span mapping
    time_mappings = {
        "7d": "7 days",
        "1m": "1 month",
        "24h": "24 hours"
    }

    if time_span not in time_mappings:
        globals.log(f"Invalid time span '{time_span}', defaulting to '7d'", "WARNING")
        time_span = "7d"

    # Define SQL query (note: datetime('now', ...) is SQLite syntax)
    sql = f"""
        SELECT EVENT_TYPE, COUNT(*)
        FROM bot_events
        WHERE DATETIME >= datetime('now', '-{time_mappings[time_span]}')
        GROUP BY EVENT_TYPE
        ORDER BY COUNT(*) DESC
    """

    rows = run_db_operation(db_conn, "read", sql)

    # Organize data into OrderedDict
    summary = OrderedDict()
    summary["time_span"] = time_span
    for event_type, count in rows or []:
        summary[event_type] = count

    return summary
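
# Usage sketch (illustrative only; the event names are placeholders):
#
#   log_bot_event(db_conn, "STARTUP", "Bot came online")
#   summary = get_event_summary(db_conn, "24h")
#   # e.g. OrderedDict([('time_span', '24h'), ('STARTUP', 3), ('DISCONNECT', 1)])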


def ensure_link_codes_table(db_conn):
    """
    Ensures the 'link_codes' table exists.
    This table stores one-time-use account linking codes.
    """
    is_sqlite = "sqlite3" in str(type(db_conn)).lower()

    check_sql = "SELECT name FROM sqlite_master WHERE type='table' AND name='link_codes'" if is_sqlite else """
        SELECT table_name FROM information_schema.tables
        WHERE table_name = 'link_codes' AND table_schema = DATABASE()
    """

    rows = run_db_operation(db_conn, "read", check_sql)
    if rows and rows[0]:
        globals.log("Table 'link_codes' already exists, skipping creation.", "DEBUG")
        return

    globals.log("Creating 'link_codes' table...", "INFO")

    create_sql = """
        CREATE TABLE link_codes (
            ID INTEGER PRIMARY KEY AUTOINCREMENT,
            DISCORD_USER_ID TEXT UNIQUE,
            LINK_CODE TEXT UNIQUE,
            CREATED_AT TEXT DEFAULT CURRENT_TIMESTAMP
        )
    """ if is_sqlite else """
        CREATE TABLE link_codes (
            ID INT PRIMARY KEY AUTO_INCREMENT,
            DISCORD_USER_ID VARCHAR(50) UNIQUE,
            LINK_CODE VARCHAR(50) UNIQUE,
            CREATED_AT DATETIME DEFAULT CURRENT_TIMESTAMP
        )
    """

    result = run_db_operation(db_conn, "write", create_sql)
    if result is None:
        globals.log("Failed to create 'link_codes' table!", "CRITICAL")
        raise RuntimeError("Database table creation failed.")

    globals.log("Successfully created table 'link_codes'.", "INFO")


def merge_uuid_data(db_conn, old_uuid, new_uuid):
    """
    Merges data from the old UUID to the new UUID, updating references in Platform_Mapping.
    """
    globals.log(f"Merging UUID data: {old_uuid} -> {new_uuid}", "INFO")

    # Update references in Platform_Mapping
    update_mapping_sql = """
        UPDATE Platform_Mapping SET UUID = ? WHERE UUID = ?
    """
    run_db_operation(db_conn, "update", update_mapping_sql, (new_uuid, old_uuid))

    # Each table stores the user's UUID under a different column name
    # (see the CREATE TABLE statements above).
    tables_to_update = {
        "chat_log": "USER_UUID",
        "user_howls": "UUID",
        "discord_activity": "UUID",
        "community_events": "EVENT_USER",
    }

    for table, column in tables_to_update.items():
        sql = f"UPDATE {table} SET {column} = ? WHERE {column} = ?"
        rowcount = run_db_operation(db_conn, "update", sql, (new_uuid, old_uuid))
        globals.log(f"Updated {rowcount} rows in {table} (transferred {old_uuid} -> {new_uuid})", "DEBUG")

    # Finally, delete the old UUID from Users table
    delete_sql = "DELETE FROM Users WHERE UUID = ?"
    rowcount = run_db_operation(db_conn, "write", delete_sql, (old_uuid,))
    globals.log(f"Deleted old UUID {old_uuid} from 'Users' table ({rowcount} rows affected)", "INFO")

    globals.log(f"UUID merge complete: {old_uuid} -> {new_uuid}", "INFO")
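
# Usage sketch (illustrative only; `duplicate_uuid` and `primary_uuid` are
# placeholder variables, typically produced by an account-link flow that has
# decided two UUID records belong to the same person):
#
#   merge_uuid_data(db_conn, old_uuid=duplicate_uuid, new_uuid=primary_uuid)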


def ensure_community_events_table(db_conn):
    """
    Checks if 'community_events' table exists. If not, attempts to create it.
    Raises an Exception or logs errors if creation fails.
    """
    # 1) Determine if DB is sqlite or MariaDB/MySQL
    is_sqlite = "sqlite3" in str(type(db_conn)).lower()

    # 2) Check for existence
    if is_sqlite:
        check_sql = """
            SELECT name
            FROM sqlite_master
            WHERE type='table'
              AND name='community_events'
        """
    else:
        check_sql = """
            SELECT table_name
            FROM information_schema.tables
            WHERE table_name = 'community_events'
              AND table_schema = DATABASE()
        """
    rows = run_db_operation(db_conn, "read", check_sql)
    if rows and rows[0] and rows[0][0]:
        globals.log("Table 'community_events' already exists, skipping creation.", "DEBUG")
        return

    globals.log("Table 'community_events' does not exist; creating now...", "DEBUG")
    if is_sqlite:
        create_table_sql = """
            CREATE TABLE community_events (
                EVENT_ID INTEGER PRIMARY KEY AUTOINCREMENT,
                EVENT_PLATFORM TEXT NOT NULL,
                EVENT_TYPE TEXT NOT NULL,
                EVENT_DETAILS TEXT,
                EVENT_USER TEXT NOT NULL,
                DATETIME TEXT NOT NULL,
                EVENT_EXTRAS TEXT
            )
        """
    else:
        create_table_sql = """
            CREATE TABLE community_events (
                EVENT_ID INT PRIMARY KEY AUTO_INCREMENT,
                EVENT_PLATFORM VARCHAR(50) NOT NULL,
                EVENT_TYPE VARCHAR(100) NOT NULL,
                EVENT_DETAILS TEXT,
                EVENT_USER VARCHAR(100) NOT NULL,
                DATETIME DATETIME DEFAULT CURRENT_TIMESTAMP,
                EVENT_EXTRAS TEXT
            )
        """
    result = run_db_operation(db_conn, "write", create_table_sql)
    if result is None:
        error_msg = "Failed to create 'community_events' table!"
        globals.log(error_msg, "CRITICAL")
        raise RuntimeError(error_msg)
    globals.log("Successfully created table 'community_events'.", "DEBUG")


async def handle_community_event(db_conn, is_discord, ctx, args):
    """
    Handles community event commands including add, info, list, and search.
    """

    if len(args) == 0:
        args = ["list"]

    sub = args[0].lower()

    if sub == "add":
        if len(args) < 2:
            return "Please provide the event type after 'add'."

        event_type = args[1]
        event_details = " ".join(args[2:]).strip() if len(args) > 2 else None
        event_extras = None

        # Support extras using "||" separator
        if event_details and "||" in event_details:
            parts = event_details.split("||", 1)
            event_details = parts[0].strip()
            event_extras = parts[1].strip()

        platform = "Discord" if is_discord else "Twitch"
        user_id = str(ctx.author.id)

        # Get UUID using lookup_user()
        user_data = lookup_user(db_conn, identifier=user_id, identifier_type=f"{platform.lower()}_user_id")
        if not user_data:
            globals.log(f"User not found: {ctx.author.name} ({user_id}) on {platform}", "ERROR")
            return "Could not log event: user data missing."

        user_uuid = user_data["uuid"]

        # Insert new event. Adjust for SQLite or MariaDB.
        insert_sql = """
            INSERT INTO community_events
                (EVENT_PLATFORM, EVENT_TYPE, EVENT_DETAILS, EVENT_USER, DATETIME, EVENT_EXTRAS)
            VALUES (?, ?, ?, ?, CURRENT_TIMESTAMP, ?)
        """ if "sqlite3" in str(type(db_conn)).lower() else """
            INSERT INTO community_events
                (EVENT_PLATFORM, EVENT_TYPE, EVENT_DETAILS, EVENT_USER, DATETIME, EVENT_EXTRAS)
            VALUES (%s, %s, %s, %s, CURRENT_TIMESTAMP, %s)
        """
        params = (platform, event_type, event_details, user_uuid, event_extras)
        result = run_db_operation(db_conn, "write", insert_sql, params)

        if result is not None:
            globals.log(f"New event added: {event_type} by {ctx.author.name}", "DEBUG")
            return f"Successfully logged event: {event_type}"
        else:
            return "Failed to log event."

    elif sub == "info":
        if len(args) < 2 or not args[1].isdigit():
            return "Please specify a valid event ID."
        event_id = int(args[1])
        select_sql = "SELECT * FROM community_events WHERE EVENT_ID = ?"
        if "sqlite3" not in str(type(db_conn)).lower():
            select_sql = "SELECT * FROM community_events WHERE EVENT_ID = %s"
        rows = run_db_operation(db_conn, "read", select_sql, (event_id,))
        if not rows:
            return f"No event found with ID {event_id}."
        row = rows[0]
        return (
            f"Event #{row[0]}:\n"
            f"Platform: {row[1]}\n"
            f"Type: {row[2]}\n"
            f"Details: {row[3] or 'N/A'}\n"
            f"User UUID: {row[4]}\n"
            f"Datetime: {row[5]}\n"
            f"Extras: {row[6] or 'N/A'}"
        )
    else:
        return await handle_community_event(db_conn, is_discord, ctx, ["list"])