Compare commits

..

No commits in common. "7f84542cfb99f4486ecec6bb3982d3d3cde3d179" and "454429ba7f21a2de3823d9c52395b6932543a087" have entirely different histories.

10 changed files with 613 additions and 843 deletions

View File

@ -1,5 +1,3 @@
"""Module to cache data to MariaDB."""
import os import os
import yaml import yaml
import mysql.connector import mysql.connector
@ -7,10 +5,8 @@ from mysql.connector import Error
def get_db_config(): def get_db_config():
"""Get the database configuration for MariaDB.""" # Configure your MariaDB connection
with open( with open(f"{os.getcwd()}/configs/config.yaml", "r") as file:
os.path.join(os.getcwd(), "configs", "config.yaml"), "r", encoding="utf-8"
) as file:
config = yaml.safe_load(file) config = yaml.safe_load(file)
db_config = { db_config = {
"host": f'{config["mariadb"]["host"]}', "host": f'{config["mariadb"]["host"]}',
@ -22,7 +18,6 @@ def get_db_config():
def create_database(): def create_database():
"""Create the database for MariaDB."""
try: try:
with mysql.connector.connect(**get_db_config()) as conn: with mysql.connector.connect(**get_db_config()) as conn:
cursor = conn.cursor() cursor = conn.cursor()
@ -46,16 +41,15 @@ def create_database():
def cache_to_db( def cache_to_db(
service: str = "", service=None,
pssh: str = "", pssh=None,
kid: str = "", kid=None,
key: str = "", key=None,
license_url: str = "", license_url=None,
headers: str = "", headers=None,
cookies: str = "", cookies=None,
data: str = "", data=None,
): ):
"""Cache data to the database for MariaDB."""
try: try:
with mysql.connector.connect(**get_db_config()) as conn: with mysql.connector.connect(**get_db_config()) as conn:
cursor = conn.cursor() cursor = conn.cursor()
@ -87,7 +81,6 @@ def cache_to_db(
def search_by_pssh_or_kid(search_filter): def search_by_pssh_or_kid(search_filter):
"""Search the database by PSSH or KID for MariaDB."""
results = set() results = set()
try: try:
with mysql.connector.connect(**get_db_config()) as conn: with mysql.connector.connect(**get_db_config()) as conn:
@ -116,7 +109,6 @@ def search_by_pssh_or_kid(search_filter):
def get_key_by_kid_and_service(kid, service): def get_key_by_kid_and_service(kid, service):
"""Get the key by KID and service for MariaDB."""
try: try:
with mysql.connector.connect(**get_db_config()) as conn: with mysql.connector.connect(**get_db_config()) as conn:
cursor = conn.cursor() cursor = conn.cursor()
@ -132,7 +124,6 @@ def get_key_by_kid_and_service(kid, service):
def get_kid_key_dict(service_name): def get_kid_key_dict(service_name):
"""Get the KID and key dictionary for MariaDB."""
try: try:
with mysql.connector.connect(**get_db_config()) as conn: with mysql.connector.connect(**get_db_config()) as conn:
cursor = conn.cursor() cursor = conn.cursor()
@ -146,7 +137,6 @@ def get_kid_key_dict(service_name):
def get_unique_services(): def get_unique_services():
"""Get the unique services for MariaDB."""
try: try:
with mysql.connector.connect(**get_db_config()) as conn: with mysql.connector.connect(**get_db_config()) as conn:
cursor = conn.cursor() cursor = conn.cursor()
@ -158,7 +148,6 @@ def get_unique_services():
def key_count(): def key_count():
"""Get the key count for MariaDB."""
try: try:
with mysql.connector.connect(**get_db_config()) as conn: with mysql.connector.connect(**get_db_config()) as conn:
cursor = conn.cursor() cursor = conn.cursor()

View File

@ -1,14 +1,10 @@
"""Module to cache data to SQLite."""
import sqlite3 import sqlite3
import os import os
def create_database(): def create_database():
"""Create the database for SQLite.""" # Using with statement to manage the connection and cursor
with sqlite3.connect( with sqlite3.connect(f"{os.getcwd()}/databases/sql/key_cache.db") as conn:
os.path.join(os.getcwd(), "databases", "sql", "key_cache.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
cursor.execute( cursor.execute(
""" """
@ -27,19 +23,16 @@ def create_database():
def cache_to_db( def cache_to_db(
service: str = "", service: str = None,
pssh: str = "", pssh: str = None,
kid: str = "", kid: str = None,
key: str = "", key: str = None,
license_url: str = "", license_url: str = None,
headers: str = "", headers: str = None,
cookies: str = "", cookies: str = None,
data: str = "", data: str = None,
): ):
"""Cache data to the database for SQLite.""" with sqlite3.connect(f"{os.getcwd()}/databases/sql/key_cache.db") as conn:
with sqlite3.connect(
os.path.join(os.getcwd(), "databases", "sql", "key_cache.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
# Check if the record with the given KID already exists # Check if the record with the given KID already exists
@ -60,10 +53,8 @@ def cache_to_db(
def search_by_pssh_or_kid(search_filter): def search_by_pssh_or_kid(search_filter):
"""Search the database by PSSH or KID for SQLite.""" # Using with statement to automatically close the connection
with sqlite3.connect( with sqlite3.connect(f"{os.getcwd()}/databases/sql/key_cache.db") as conn:
os.path.join(os.getcwd(), "databases", "sql", "key_cache.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
# Initialize a set to store unique matching records # Initialize a set to store unique matching records
@ -101,10 +92,8 @@ def search_by_pssh_or_kid(search_filter):
def get_key_by_kid_and_service(kid, service): def get_key_by_kid_and_service(kid, service):
"""Get the key by KID and service for SQLite.""" # Using 'with' to automatically close the connection when done
with sqlite3.connect( with sqlite3.connect(f"{os.getcwd()}/databases/sql/key_cache.db") as conn:
os.path.join(os.getcwd(), "databases", "sql", "key_cache.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
# Query to search by KID and SERVICE # Query to search by KID and SERVICE
@ -125,10 +114,8 @@ def get_key_by_kid_and_service(kid, service):
def get_kid_key_dict(service_name): def get_kid_key_dict(service_name):
"""Get the KID and key dictionary for SQLite.""" # Using with statement to automatically manage the connection and cursor
with sqlite3.connect( with sqlite3.connect(f"{os.getcwd()}/databases/sql/key_cache.db") as conn:
os.path.join(os.getcwd(), "databases", "sql", "key_cache.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
# Query to fetch KID and Key for the selected service # Query to fetch KID and Key for the selected service
@ -146,10 +133,8 @@ def get_kid_key_dict(service_name):
def get_unique_services(): def get_unique_services():
"""Get the unique services for SQLite.""" # Using with statement to automatically manage the connection and cursor
with sqlite3.connect( with sqlite3.connect(f"{os.getcwd()}/databases/sql/key_cache.db") as conn:
os.path.join(os.getcwd(), "databases", "sql", "key_cache.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
# Query to get distinct services from the 'licenses' table # Query to get distinct services from the 'licenses' table
@ -165,10 +150,8 @@ def get_unique_services():
def key_count(): def key_count():
"""Get the key count for SQLite.""" # Using with statement to automatically manage the connection and cursor
with sqlite3.connect( with sqlite3.connect(f"{os.getcwd()}/databases/sql/key_cache.db") as conn:
os.path.join(os.getcwd(), "databases", "sql", "key_cache.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
# Count the number of KID entries in the licenses table # Count the number of KID entries in the licenses table

View File

@ -1,159 +0,0 @@
"""Unified database operations module that automatically uses the correct backend."""
import os
from typing import Optional, List, Dict, Any
import yaml
# Import both backend modules
try:
import custom_functions.database.cache_to_db_sqlite as sqlite_db
except ImportError:
sqlite_db = None
try:
import custom_functions.database.cache_to_db_mariadb as mariadb_db
except ImportError:
mariadb_db = None
class DatabaseOperations:
    """Unified database operations class that automatically selects the correct backend."""

    def __init__(self):
        # Resolve the backend name from config first, then bind the matching
        # module once so every later call goes through the same backend.
        self.backend = self._get_database_backend()
        self.db_module = self._get_db_module()

    def _get_database_backend(self) -> str:
        """Get the database backend from config, default to sqlite."""
        config_path = os.path.join(os.getcwd(), "configs", "config.yaml")
        try:
            with open(config_path, "r", encoding="utf-8") as file:
                loaded = yaml.safe_load(file)
            return loaded.get("database_type", "sqlite").lower()
        except (FileNotFoundError, KeyError, yaml.YAMLError):
            # Any config problem falls back to the sqlite default.
            return "sqlite"

    def _get_db_module(self):
        """Get the appropriate database module based on backend."""
        # Prefer MariaDB only when configured AND importable; otherwise fall
        # back to SQLite when that module imported successfully.
        selected = mariadb_db if self.backend == "mariadb" else None
        selected = selected or sqlite_db
        if not selected:
            raise ImportError(f"Database module for {self.backend} not available")
        return selected

    def get_backend_info(self) -> Dict[str, str]:
        """Get information about the current database backend being used."""
        module_name = self.db_module.__name__ if self.db_module else "None"
        return {"backend": self.backend, "module": module_name}

    def create_database(self) -> None:
        """Create the database using the configured backend."""
        return self.db_module.create_database()

    def cache_to_db(
        self,
        service: str = "",
        pssh: str = "",
        kid: str = "",
        key: str = "",
        license_url: str = "",
        headers: str = "",
        cookies: str = "",
        data: str = "",
    ) -> bool:
        """Cache data to the database using the configured backend."""
        fields = {
            "service": service,
            "pssh": pssh,
            "kid": kid,
            "key": key,
            "license_url": license_url,
            "headers": headers,
            "cookies": cookies,
            "data": data,
        }
        return self.db_module.cache_to_db(**fields)

    def search_by_pssh_or_kid(self, search_filter: str) -> List[Dict[str, str]]:
        """Search the database by PSSH or KID using the configured backend."""
        return self.db_module.search_by_pssh_or_kid(search_filter)

    def get_key_by_kid_and_service(self, kid: str, service: str) -> Optional[str]:
        """Get the key by KID and service using the configured backend."""
        return self.db_module.get_key_by_kid_and_service(kid, service)

    def get_kid_key_dict(self, service_name: str) -> Dict[str, str]:
        """Get the KID and key dictionary using the configured backend."""
        return self.db_module.get_kid_key_dict(service_name)

    def get_unique_services(self) -> List[str]:
        """Get the unique services using the configured backend."""
        return self.db_module.get_unique_services()

    def key_count(self) -> int:
        """Get the key count using the configured backend."""
        return self.db_module.key_count()
# Create a singleton instance for easy import and use
# NOTE(review): constructed at import time, so merely importing this module
# reads configs/config.yaml and resolves the backend as a side effect.
db_ops = DatabaseOperations()
# Convenience functions that use the singleton instance
def get_backend_info() -> Dict[str, str]:
    """Get information about the current database backend being used."""
    info = db_ops.get_backend_info()
    return info
def create_database() -> None:
    """Create the database using the configured backend."""
    # Delegates straight to the module-level singleton.
    return db_ops.create_database()
def cache_to_db(
    service: str = "",
    pssh: str = "",
    kid: str = "",
    key: str = "",
    license_url: str = "",
    headers: str = "",
    cookies: str = "",
    data: str = "",
) -> bool:
    """Cache data to the database using the configured backend."""
    record = {
        "service": service,
        "pssh": pssh,
        "kid": kid,
        "key": key,
        "license_url": license_url,
        "headers": headers,
        "cookies": cookies,
        "data": data,
    }
    return db_ops.cache_to_db(**record)
def search_by_pssh_or_kid(search_filter: str) -> List[Dict[str, str]]:
    """Search the database by PSSH or KID using the configured backend."""
    matches = db_ops.search_by_pssh_or_kid(search_filter)
    return matches
def get_key_by_kid_and_service(kid: str, service: str) -> Optional[str]:
    """Get the key by KID and service using the configured backend."""
    found = db_ops.get_key_by_kid_and_service(kid, service)
    return found
def get_kid_key_dict(service_name: str) -> Dict[str, str]:
    """Get the KID and key dictionary using the configured backend."""
    mapping = db_ops.get_kid_key_dict(service_name)
    return mapping
def get_unique_services() -> List[str]:
    """Get the unique services using the configured backend."""
    services = db_ops.get_unique_services()
    return services
def key_count() -> int:
"""Get the key count using the configured backend."""
return db_ops.key_count()

View File

@ -7,11 +7,9 @@ import bcrypt
def create_user_database(): def create_user_database():
"""Create the user database.""" """Create the user database."""
os.makedirs(os.path.join(os.getcwd(), "databases", "sql"), exist_ok=True) os.makedirs(f"{os.getcwd()}/databases/sql", exist_ok=True)
with sqlite3.connect( with sqlite3.connect(f"{os.getcwd()}/databases/sql/users.db") as conn:
os.path.join(os.getcwd(), "databases", "sql", "users.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
cursor.execute( cursor.execute(
""" """
@ -29,9 +27,7 @@ def add_user(username, password, api_key):
"""Add a user to the database.""" """Add a user to the database."""
hashed_pw = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()) hashed_pw = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt())
with sqlite3.connect( with sqlite3.connect(f"{os.getcwd()}/databases/sql/users.db") as conn:
os.path.join(os.getcwd(), "databases", "sql", "users.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
try: try:
cursor.execute( cursor.execute(
@ -46,9 +42,7 @@ def add_user(username, password, api_key):
def verify_user(username, password): def verify_user(username, password):
"""Verify a user.""" """Verify a user."""
with sqlite3.connect( with sqlite3.connect(f"{os.getcwd()}/databases/sql/users.db") as conn:
os.path.join(os.getcwd(), "databases", "sql", "users.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
cursor.execute( cursor.execute(
"SELECT Password FROM user_info WHERE Username = ?", (username.lower(),) "SELECT Password FROM user_info WHERE Username = ?", (username.lower(),)
@ -61,14 +55,13 @@ def verify_user(username, password):
if isinstance(stored_hash, str): if isinstance(stored_hash, str):
stored_hash = stored_hash.encode("utf-8") stored_hash = stored_hash.encode("utf-8")
return bcrypt.checkpw(password.encode("utf-8"), stored_hash) return bcrypt.checkpw(password.encode("utf-8"), stored_hash)
else:
return False return False
def fetch_api_key(username): def fetch_api_key(username):
"""Fetch the API key for a user.""" """Fetch the API key for a user."""
with sqlite3.connect( with sqlite3.connect(f"{os.getcwd()}/databases/sql/users.db") as conn:
os.path.join(os.getcwd(), "databases", "sql", "users.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
cursor.execute( cursor.execute(
"SELECT API_Key FROM user_info WHERE Username = ?", (username.lower(),) "SELECT API_Key FROM user_info WHERE Username = ?", (username.lower(),)
@ -77,6 +70,7 @@ def fetch_api_key(username):
if result: if result:
return result[0] return result[0]
else:
return None return None
@ -86,9 +80,7 @@ def change_password(username, new_password):
new_hashed_pw = bcrypt.hashpw(new_password.encode("utf-8"), bcrypt.gensalt()) new_hashed_pw = bcrypt.hashpw(new_password.encode("utf-8"), bcrypt.gensalt())
# Update the password in the database # Update the password in the database
with sqlite3.connect( with sqlite3.connect(f"{os.getcwd()}/databases/sql/users.db") as conn:
os.path.join(os.getcwd(), "databases", "sql", "users.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
cursor.execute( cursor.execute(
"UPDATE user_info SET Password = ? WHERE Username = ?", "UPDATE user_info SET Password = ? WHERE Username = ?",
@ -101,9 +93,7 @@ def change_password(username, new_password):
def change_api_key(username, new_api_key): def change_api_key(username, new_api_key):
"""Change the API key for a user.""" """Change the API key for a user."""
# Update the API key in the database # Update the API key in the database
with sqlite3.connect( with sqlite3.connect(f"{os.getcwd()}/databases/sql/users.db") as conn:
os.path.join(os.getcwd(), "databases", "sql", "users.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
cursor.execute( cursor.execute(
"UPDATE user_info SET API_Key = ? WHERE Username = ?", "UPDATE user_info SET API_Key = ? WHERE Username = ?",
@ -115,9 +105,7 @@ def change_api_key(username, new_api_key):
def fetch_styled_username(username): def fetch_styled_username(username):
"""Fetch the styled username for a user.""" """Fetch the styled username for a user."""
with sqlite3.connect( with sqlite3.connect(f"{os.getcwd()}/databases/sql/users.db") as conn:
os.path.join(os.getcwd(), "databases", "sql", "users.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
cursor.execute( cursor.execute(
"SELECT Styled_Username FROM user_info WHERE Username = ?", "SELECT Styled_Username FROM user_info WHERE Username = ?",
@ -127,18 +115,18 @@ def fetch_styled_username(username):
if result: if result:
return result[0] return result[0]
else:
return None return None
def fetch_username_by_api_key(api_key): def fetch_username_by_api_key(api_key):
"""Fetch the username for a user by API key.""" """Fetch the username for a user by API key."""
with sqlite3.connect( with sqlite3.connect(f"{os.getcwd()}/databases/sql/users.db") as conn:
os.path.join(os.getcwd(), "databases", "sql", "users.db")
) as conn:
cursor = conn.cursor() cursor = conn.cursor()
cursor.execute("SELECT Username FROM user_info WHERE API_Key = ?", (api_key,)) cursor.execute("SELECT Username FROM user_info WHERE API_Key = ?", (api_key,))
result = cursor.fetchone() result = cursor.fetchone()
if result: if result:
return result[0] # Return the username return result[0] # Return the username
else:
return None # If no user is found for the API key return None # If no user is found for the API key

View File

@ -1,29 +1,19 @@
"""Module to decrypt the license using the API."""
import base64
import ast
import glob
import json
import os
from urllib.parse import urlparse
import binascii
import requests
from requests.exceptions import Timeout, RequestException
import yaml
from pywidevine.cdm import Cdm as widevineCdm from pywidevine.cdm import Cdm as widevineCdm
from pywidevine.device import Device as widevineDevice from pywidevine.device import Device as widevineDevice
from pywidevine.pssh import PSSH as widevinePSSH from pywidevine.pssh import PSSH as widevinePSSH
from pyplayready.cdm import Cdm as playreadyCdm from pyplayready.cdm import Cdm as playreadyCdm
from pyplayready.device import Device as playreadyDevice from pyplayready.device import Device as playreadyDevice
from pyplayready.system.pssh import PSSH as playreadyPSSH from pyplayready.system.pssh import PSSH as playreadyPSSH
import requests
from custom_functions.database.unified_db_ops import cache_to_db import base64
import ast
import glob
import os
import yaml
from urllib.parse import urlparse
def find_license_key(data, keywords=None): def find_license_key(data, keywords=None):
"""Find the license key in the data."""
if keywords is None: if keywords is None:
keywords = [ keywords = [
"license", "license",
@ -57,7 +47,6 @@ def find_license_key(data, keywords=None):
def find_license_challenge(data, keywords=None, new_value=None): def find_license_challenge(data, keywords=None, new_value=None):
"""Find the license challenge in the data."""
if keywords is None: if keywords is None:
keywords = [ keywords = [
"license", "license",
@ -90,19 +79,17 @@ def find_license_challenge(data, keywords=None, new_value=None):
def is_base64(string): def is_base64(string):
"""Check if the string is base64 encoded."""
try: try:
# Try decoding the string # Try decoding the string
decoded_data = base64.b64decode(string) decoded_data = base64.b64decode(string)
# Check if the decoded data, when re-encoded, matches the original string # Check if the decoded data, when re-encoded, matches the original string
return base64.b64encode(decoded_data).decode("utf-8") == string return base64.b64encode(decoded_data).decode("utf-8") == string
except (binascii.Error, TypeError): except Exception:
# If decoding or encoding fails, it's not Base64 # If decoding or encoding fails, it's not Base64
return False return False
def is_url_and_split(input_str): def is_url_and_split(input_str):
"""Check if the string is a URL and split it into protocol and FQDN."""
parsed = urlparse(input_str) parsed = urlparse(input_str)
# Check if it's a valid URL with scheme and netloc # Check if it's a valid URL with scheme and netloc
@ -110,210 +97,134 @@ def is_url_and_split(input_str):
protocol = parsed.scheme protocol = parsed.scheme
fqdn = parsed.netloc fqdn = parsed.netloc
return True, protocol, fqdn return True, protocol, fqdn
else:
return False, None, None return False, None, None
def load_device(device_type, device, username, config):
"""Load the appropriate device file for PlayReady or Widevine."""
if device_type == "PR":
ext, config_key, class_loader = ".prd", "default_pr_cdm", playreadyDevice.load
base_dir = "PR"
else:
ext, config_key, class_loader = ".wvd", "default_wv_cdm", widevineDevice.load
base_dir = "WV"
if device == "public":
base_name = config[config_key]
if not base_name.endswith(ext):
base_name += ext
search_path = f"{os.getcwd()}/configs/CDMs/{base_dir}/{base_name}"
else:
base_name = device
if not base_name.endswith(ext):
base_name += ext
search_path = f"{os.getcwd()}/configs/CDMs/{username}/{base_dir}/{base_name}"
files = glob.glob(search_path)
if not files:
return None, f"No {ext} file found for device '{device}'"
try:
return class_loader(files[0]), None
except (IOError, OSError) as e:
return None, f"Failed to read device file: {e}"
except (ValueError, TypeError, AttributeError) as e:
return None, f"Failed to parse device file: {e}"
def prepare_request_data(headers, cookies, json_data, challenge, is_widevine):
"""Prepare headers, cookies, and json_data for the license request."""
try:
format_headers = ast.literal_eval(headers) if headers else None
except (ValueError, SyntaxError) as e:
raise ValueError(f"Invalid headers format: {e}") from e
try:
format_cookies = ast.literal_eval(cookies) if cookies else None
except (ValueError, SyntaxError) as e:
raise ValueError(f"Invalid cookies format: {e}") from e
format_json_data = None
if json_data and not is_base64(json_data):
try:
format_json_data = ast.literal_eval(json_data)
if is_widevine:
format_json_data = find_license_challenge(
data=format_json_data,
new_value=base64.b64encode(challenge).decode(),
)
except (ValueError, SyntaxError) as e:
raise ValueError(f"Invalid json_data format: {e}") from e
except (TypeError, AttributeError) as e:
raise ValueError(f"Error processing json_data: {e}") from e
return format_headers, format_cookies, format_json_data
def send_license_request(license_url, headers, cookies, json_data, challenge, proxies):
"""Send the license request and return the response."""
try:
response = requests.post(
url=license_url,
headers=headers,
proxies=proxies,
cookies=cookies,
json=json_data if json_data is not None else None,
data=challenge if json_data is None else None,
timeout=10,
)
return response, None
except ConnectionError as error:
return None, f"Connection error: {error}"
except Timeout as error:
return None, f"Request timeout: {error}"
except RequestException as error:
return None, f"Request error: {error}"
def extract_and_cache_keys(
cdm,
session_id,
cache_to_db,
pssh,
license_url,
headers,
cookies,
challenge,
json_data,
is_widevine,
):
"""Extract keys from the session and cache them."""
returned_keys = ""
try:
keys = list(cdm.get_keys(session_id))
for index, key in enumerate(keys):
# Widevine: key.type, PlayReady: key.key_type
key_type = getattr(key, "type", getattr(key, "key_type", None))
kid = getattr(key, "kid", getattr(key, "key_id", None))
if key_type != "SIGNING" and kid is not None:
cache_to_db(
pssh=pssh,
license_url=license_url,
headers=headers,
cookies=cookies,
data=challenge if json_data is None else json_data,
kid=kid.hex,
key=key.key.hex(),
)
if index != len(keys) - 1:
returned_keys += f"{kid.hex}:{key.key.hex()}\n"
else:
returned_keys += f"{kid.hex}:{key.key.hex()}"
return returned_keys, None
except AttributeError as error:
return None, f"Error accessing CDM keys: {error}"
except (TypeError, ValueError) as error:
return None, f"Error processing keys: {error}"
def api_decrypt( def api_decrypt(
pssh: str = "", pssh: str = None,
license_url: str = "", license_url: str = None,
proxy: str = "", proxy: str = None,
headers: str = "", headers: str = None,
cookies: str = "", cookies: str = None,
json_data: str = "", json_data: str = None,
device: str = "public", device: str = "public",
username: str = "", username: str = None,
): ):
"""Decrypt the license using the API."""
print(f"Using device {device} for user {username}") print(f"Using device {device} for user {username}")
with open(f"{os.getcwd()}/configs/config.yaml", "r", encoding="utf-8") as file: with open(f"{os.getcwd()}/configs/config.yaml", "r") as file:
config = yaml.safe_load(file) config = yaml.safe_load(file)
if config["database_type"].lower() == "sqlite":
if pssh == "": from custom_functions.database.cache_to_db_sqlite import cache_to_db
elif config["database_type"].lower() == "mariadb":
from custom_functions.database.cache_to_db_mariadb import cache_to_db
if pssh is None:
return {"status": "error", "message": "No PSSH provided"} return {"status": "error", "message": "No PSSH provided"}
# Detect PlayReady or Widevine
try: try:
is_pr = "</WRMHEADER>".encode("utf-16-le") in base64.b64decode(pssh) if "</WRMHEADER>".encode("utf-16-le") in base64.b64decode(pssh): # PR
except (binascii.Error, TypeError) as error: try:
pr_pssh = playreadyPSSH(pssh)
except Exception as error:
return { return {
"status": "error", "status": "error",
"message": f"An error occurred processing PSSH\n\n{error}", "message": f"An error occurred processing PSSH\n\n{error}",
} }
device_type = "PR" if is_pr else "WV"
cdm_class = playreadyCdm if is_pr else widevineCdm
pssh_class = playreadyPSSH if is_pr else widevinePSSH
# Load device
device_obj, device_err = load_device(device_type, device, username, config)
if device_obj is None:
return {"status": "error", "message": device_err}
# Create CDM
try: try:
cdm = cdm_class.from_device(device_obj) if device == "public":
except (IOError, ValueError, AttributeError) as error: base_name = config["default_pr_cdm"]
if not base_name.endswith(".prd"):
base_name += ".prd"
prd_files = glob.glob(
f"{os.getcwd()}/configs/CDMs/PR/{base_name}"
)
else:
prd_files = glob.glob(
f"{os.getcwd()}/configs/CDMs/PR/{base_name}"
)
if prd_files:
pr_device = playreadyDevice.load(prd_files[0])
else:
return { return {
"status": "error", "status": "error",
"message": f"An error occurred loading {device_type} CDM\n\n{error}", "message": "No default .prd file found",
}
else:
base_name = device
if not base_name.endswith(".prd"):
base_name += ".prd"
prd_files = glob.glob(
f"{os.getcwd()}/configs/CDMs/{username}/PR/{base_name}"
)
else:
prd_files = glob.glob(
f"{os.getcwd()}/configs/CDMs/{username}/PR/{base_name}"
)
if prd_files:
pr_device = playreadyDevice.load(prd_files[0])
else:
return {
"status": "error",
"message": f"{base_name} does not exist",
}
except Exception as error:
return {
"status": "error",
"message": f"An error occurred location PlayReady CDM file\n\n{error}",
} }
# Open session
try: try:
session_id = cdm.open() pr_cdm = playreadyCdm.from_device(pr_device)
except (IOError, ValueError, AttributeError) as error: except Exception as error:
return {
"status": "error",
"message": f"An error occurred loading PlayReady CDM\n\n{error}",
}
try:
pr_session_id = pr_cdm.open()
except Exception as error:
return { return {
"status": "error", "status": "error",
"message": f"An error occurred opening a CDM session\n\n{error}", "message": f"An error occurred opening a CDM session\n\n{error}",
} }
# Parse PSSH and get challenge
try: try:
pssh_obj = pssh_class(pssh) pr_challenge = pr_cdm.get_license_challenge(
if is_pr: pr_session_id, pr_pssh.wrm_headers[0]
challenge = cdm.get_license_challenge(session_id, pssh_obj.wrm_headers[0]) )
else: except Exception as error:
challenge = cdm.get_license_challenge(session_id, pssh_obj)
except (ValueError, AttributeError, IndexError) as error:
return { return {
"status": "error", "status": "error",
"message": f"An error occurred getting license challenge\n\n{error}", "message": f"An error occurred getting license challenge\n\n{error}",
} }
# Prepare request data
try: try:
format_headers, format_cookies, format_json_data = prepare_request_data( if headers:
headers, cookies, json_data, challenge, is_widevine=(not is_pr) format_headers = ast.literal_eval(headers)
) else:
except (ValueError, SyntaxError) as error: format_headers = None
except Exception as error:
return { return {
"status": "error", "status": "error",
"message": f"An error occurred preparing request data\n\n{error}", "message": f"An error occurred getting headers\n\n{error}",
} }
try:
# Prepare proxies if cookies:
format_cookies = ast.literal_eval(cookies)
else:
format_cookies = None
except Exception as error:
return {
"status": "error",
"message": f"An error occurred getting cookies\n\n{error}",
}
try:
if json_data and not is_base64(json_data):
format_json_data = ast.literal_eval(json_data)
else:
format_json_data = None
except Exception as error:
return {
"status": "error",
"message": f"An error occurred getting json_data\n\n{error}",
}
licence = None
proxies = None proxies = None
if proxy is not None: if proxy is not None:
is_url, protocol, fqdn = is_url_and_split(proxy) is_url, protocol, fqdn = is_url_and_split(proxy)
@ -322,74 +233,254 @@ def api_decrypt(
else: else:
return { return {
"status": "error", "status": "error",
"message": "Your proxy is invalid, please put it in the format of http(s)://fqdn.tld:port", "message": f"Your proxy is invalid, please put it in the format of http(s)://fqdn.tld:port",
} }
try:
# Send license request licence = requests.post(
licence, req_err = send_license_request( url=license_url,
license_url, headers=format_headers,
format_headers, proxies=proxies,
format_cookies, cookies=format_cookies,
format_json_data, json=format_json_data if format_json_data is not None else None,
challenge, data=pr_challenge if format_json_data is None else None,
proxies,
) )
if licence is None: except requests.exceptions.ConnectionError as error:
return {"status": "error", "message": req_err}
# Parse license
try:
if is_pr:
cdm.parse_license(session_id, licence.text)
else:
try:
cdm.parse_license(session_id, licence.content) # type: ignore[arg-type]
except (ValueError, TypeError):
# Try to extract license from JSON
try:
license_json = licence.json()
license_value = find_license_key(license_json)
if license_value is not None:
cdm.parse_license(session_id, license_value)
else:
return { return {
"status": "error", "status": "error",
"message": f"Could not extract license from JSON: {license_json}", "message": f"An error occurred sending license challenge through your proxy\n\n{error}",
} }
except (ValueError, json.JSONDecodeError, AttributeError) as error: except Exception as error:
return {
"status": "error",
"message": f"An error occurred sending license reqeust\n\n{error}\n\n{licence.content}",
}
try:
pr_cdm.parse_license(pr_session_id, licence.text)
except Exception as error:
return { return {
"status": "error", "status": "error",
"message": f"An error occurred parsing license content\n\n{error}\n\n{licence.content}", "message": f"An error occurred parsing license content\n\n{error}\n\n{licence.content}",
} }
except (ValueError, TypeError, AttributeError) as error: returned_keys = ""
try:
keys = list(pr_cdm.get_keys(pr_session_id))
except Exception as error:
return { return {
"status": "error", "status": "error",
"message": f"An error occurred parsing license content\n\n{error}\n\n{licence.content}", "message": f"An error occurred getting keys\n\n{error}",
} }
# Extract and cache keys
returned_keys, key_err = extract_and_cache_keys(
cdm,
session_id,
cache_to_db,
pssh,
license_url,
headers,
cookies,
challenge,
json_data,
is_widevine=(not is_pr),
)
if returned_keys is None:
return {"status": "error", "message": key_err}
# Close session
try: try:
cdm.close(session_id) for index, key in enumerate(keys):
except (IOError, ValueError, AttributeError) as error: if key.key_type != "SIGNING":
cache_to_db(
pssh=pssh,
license_url=license_url,
headers=headers,
cookies=cookies,
data=pr_challenge if json_data is None else json_data,
kid=key.key_id.hex,
key=key.key.hex(),
)
if index != len(keys) - 1:
returned_keys += f"{key.key_id.hex}:{key.key.hex()}\n"
else:
returned_keys += f"{key.key_id.hex}:{key.key.hex()}"
except Exception as error:
return {
"status": "error",
"message": f"An error occurred formatting keys\n\n{error}",
}
try:
pr_cdm.close(pr_session_id)
except Exception as error:
return { return {
"status": "error", "status": "error",
"message": f"An error occurred closing session\n\n{error}", "message": f"An error occurred closing session\n\n{error}",
} }
try:
return {"status": "success", "message": returned_keys} return {"status": "success", "message": returned_keys}
except Exception as error:
return {
"status": "error",
"message": f"An error occurred getting returned_keys\n\n{error}",
}
except Exception as error:
return {
"status": "error",
"message": f"An error occurred processing PSSH\n\n{error}",
}
else:
try:
wv_pssh = widevinePSSH(pssh)
except Exception as error:
return {
"status": "error",
"message": f"An error occurred processing PSSH\n\n{error}",
}
try:
if device == "public":
base_name = config["default_wv_cdm"]
if not base_name.endswith(".wvd"):
base_name += ".wvd"
wvd_files = glob.glob(f"{os.getcwd()}/configs/CDMs/WV/{base_name}")
else:
wvd_files = glob.glob(f"{os.getcwd()}/configs/CDMs/WV/{base_name}")
if wvd_files:
wv_device = widevineDevice.load(wvd_files[0])
else:
return {"status": "error", "message": "No default .wvd file found"}
else:
base_name = device
if not base_name.endswith(".wvd"):
base_name += ".wvd"
wvd_files = glob.glob(
f"{os.getcwd()}/configs/CDMs/{username}/WV/{base_name}"
)
else:
wvd_files = glob.glob(
f"{os.getcwd()}/configs/CDMs/{username}/WV/{base_name}"
)
if wvd_files:
wv_device = widevineDevice.load(wvd_files[0])
else:
return {"status": "error", "message": f"{base_name} does not exist"}
except Exception as error:
return {
"status": "error",
"message": f"An error occurred location Widevine CDM file\n\n{error}",
}
try:
wv_cdm = widevineCdm.from_device(wv_device)
except Exception as error:
return {
"status": "error",
"message": f"An error occurred loading Widevine CDM\n\n{error}",
}
try:
wv_session_id = wv_cdm.open()
except Exception as error:
return {
"status": "error",
"message": f"An error occurred opening a CDM session\n\n{error}",
}
try:
wv_challenge = wv_cdm.get_license_challenge(wv_session_id, wv_pssh)
except Exception as error:
return {
"status": "error",
"message": f"An error occurred getting license challenge\n\n{error}",
}
try:
if headers:
format_headers = ast.literal_eval(headers)
else:
format_headers = None
except Exception as error:
return {
"status": "error",
"message": f"An error occurred getting headers\n\n{error}",
}
try:
if cookies:
format_cookies = ast.literal_eval(cookies)
else:
format_cookies = None
except Exception as error:
return {
"status": "error",
"message": f"An error occurred getting cookies\n\n{error}",
}
try:
if json_data and not is_base64(json_data):
format_json_data = ast.literal_eval(json_data)
format_json_data = find_license_challenge(
data=format_json_data,
new_value=base64.b64encode(wv_challenge).decode(),
)
else:
format_json_data = None
except Exception as error:
return {
"status": "error",
"message": f"An error occurred getting json_data\n\n{error}",
}
licence = None
proxies = None
if proxy is not None:
is_url, protocol, fqdn = is_url_and_split(proxy)
if is_url:
proxies = {"http": proxy, "https": proxy}
try:
licence = requests.post(
url=license_url,
headers=format_headers,
proxies=proxies,
cookies=format_cookies,
json=format_json_data if format_json_data is not None else None,
data=wv_challenge if format_json_data is None else None,
)
except requests.exceptions.ConnectionError as error:
return {
"status": "error",
"message": f"An error occurred sending license challenge through your proxy\n\n{error}",
}
except Exception as error:
return {
"status": "error",
"message": f"An error occurred sending license reqeust\n\n{error}\n\n{licence.content}",
}
try:
wv_cdm.parse_license(wv_session_id, licence.content)
except:
try:
license_json = licence.json()
license_value = find_license_key(license_json)
wv_cdm.parse_license(wv_session_id, license_value)
except Exception as error:
return {
"status": "error",
"message": f"An error occurred parsing license content\n\n{error}\n\n{licence.content}",
}
returned_keys = ""
try:
keys = list(wv_cdm.get_keys(wv_session_id))
except Exception as error:
return {
"status": "error",
"message": f"An error occurred getting keys\n\n{error}",
}
try:
for index, key in enumerate(keys):
if key.type != "SIGNING":
cache_to_db(
pssh=pssh,
license_url=license_url,
headers=headers,
cookies=cookies,
data=wv_challenge if json_data is None else json_data,
kid=key.kid.hex,
key=key.key.hex(),
)
if index != len(keys) - 1:
returned_keys += f"{key.kid.hex}:{key.key.hex()}\n"
else:
returned_keys += f"{key.kid.hex}:{key.key.hex()}"
except Exception as error:
return {
"status": "error",
"message": f"An error occurred formatting keys\n\n{error}",
}
try:
wv_cdm.close(wv_session_id)
except Exception as error:
return {
"status": "error",
"message": f"An error occurred closing session\n\n{error}",
}
try:
return {"status": "success", "message": returned_keys}
except Exception as error:
return {
"status": "error",
"message": f"An error occurred getting returned_keys\n\n{error}",
}

View File

@ -1,159 +1,52 @@
"""Module to check for the database with unified backend support.""" """Module to check for the database."""
import os import os
from typing import Dict, Any
import yaml import yaml
from custom_functions.database.unified_db_ops import ( from custom_functions.database.cache_to_db_mariadb import (
db_ops, create_database as create_mariadb_database,
get_backend_info, )
key_count, from custom_functions.database.cache_to_db_sqlite import (
create_database as create_sqlite_database,
) )
from custom_functions.database.user_db import create_user_database from custom_functions.database.user_db import create_user_database
def get_database_config() -> Dict[str, Any]: def check_for_sqlite_database():
"""Get the database configuration from config.yaml.""" """Check for the SQLite database."""
try: with open(
config_path = os.path.join(os.getcwd(), "configs", "config.yaml") os.path.join(os.getcwd(), "configs", "config.yaml"), "r", encoding="utf-8"
with open(config_path, "r", encoding="utf-8") as file: ) as file:
config = yaml.safe_load(file) config = yaml.safe_load(file)
return config if os.path.exists(os.path.join(os.getcwd(), "databases", "key_cache.db")):
except (FileNotFoundError, KeyError, yaml.YAMLError) as e: return
print(f"Warning: Could not load config.yaml: {e}") if config["database_type"].lower() == "sqlite":
return {"database_type": "sqlite"} # Default fallback create_sqlite_database()
return
return
def check_for_sqlite_database() -> None: def check_for_user_database():
"""Check for the SQLite database file and create if needed.""" """Check for the user database."""
config = get_database_config() if os.path.exists(os.path.join(os.getcwd(), "databases", "users.db")):
database_type = config.get("database_type", "sqlite").lower() return
# Only check for SQLite file if we're using SQLite
if database_type == "sqlite":
sqlite_path = os.path.join(os.getcwd(), "databases", "sql", "key_cache.db")
if not os.path.exists(sqlite_path):
print("SQLite database not found, creating...")
# Ensure directory exists
os.makedirs(os.path.dirname(sqlite_path), exist_ok=True)
db_ops.create_database()
print(f"SQLite database created at: {sqlite_path}")
else:
print(f"SQLite database found at: {sqlite_path}")
def check_for_mariadb_database() -> None:
"""Check for the MariaDB database and create if needed."""
config = get_database_config()
database_type = config.get("database_type", "sqlite").lower()
# Only check MariaDB if we're using MariaDB
if database_type == "mariadb":
try:
print("Checking MariaDB connection and creating database if needed...")
db_ops.create_database()
print("MariaDB database check completed successfully")
except Exception as e:
print(f"Error checking/creating MariaDB database: {e}")
print("Falling back to SQLite...")
# Fallback to SQLite if MariaDB fails
fallback_config_path = os.path.join(os.getcwd(), "configs", "config.yaml")
try:
with open(fallback_config_path, "r", encoding="utf-8") as file:
config = yaml.safe_load(file)
config["database_type"] = "sqlite"
with open(fallback_config_path, "w", encoding="utf-8") as file:
yaml.safe_dump(config, file)
check_for_sqlite_database()
except Exception as fallback_error:
print(f"Error during fallback to SQLite: {fallback_error}")
def check_for_user_database() -> None:
"""Check for the user database and create if needed."""
user_db_path = os.path.join(os.getcwd(), "databases", "users.db")
if not os.path.exists(user_db_path):
print("User database not found, creating...")
# Ensure directory exists
os.makedirs(os.path.dirname(user_db_path), exist_ok=True)
create_user_database() create_user_database()
print(f"User database created at: {user_db_path}")
else:
print(f"User database found at: {user_db_path}")
def check_for_sql_database() -> None: def check_for_mariadb_database():
"""Check for the SQL database based on configuration.""" """Check for the MariaDB database."""
print("=== Database Check Starting ===") with open(
os.path.join(os.getcwd(), "configs", "config.yaml"), "r", encoding="utf-8"
) as file:
config = yaml.safe_load(file)
if config["database_type"].lower() == "mariadb":
create_mariadb_database()
return
return
# Get backend information
backend_info = get_backend_info()
print(f"Database backend: {backend_info['backend']}")
print(f"Using module: {backend_info['module']}")
config = get_database_config() def check_for_sql_database():
database_type = config.get("database_type", "sqlite").lower() """Check for the SQL database."""
# Ensure databases directory exists
os.makedirs(os.path.join(os.getcwd(), "databases"), exist_ok=True)
os.makedirs(os.path.join(os.getcwd(), "databases", "sql"), exist_ok=True)
# Check main database based on type
if database_type == "mariadb":
check_for_mariadb_database()
else: # Default to SQLite
check_for_sqlite_database() check_for_sqlite_database()
check_for_mariadb_database()
# Always check user database (always SQLite)
check_for_user_database() check_for_user_database()
print("=== Database Check Completed ===")
def get_database_status() -> Dict[str, Any]:
"""Get the current database status and configuration."""
config = get_database_config()
backend_info = get_backend_info()
status = {
"configured_backend": config.get("database_type", "sqlite").lower(),
"active_backend": backend_info["backend"],
"module_in_use": backend_info["module"],
"sqlite_file_exists": os.path.exists(
os.path.join(os.getcwd(), "databases", "sql", "key_cache.db")
),
"user_db_exists": os.path.exists(
os.path.join(os.getcwd(), "databases", "users.db")
),
}
# Try to get key count to verify database is working
try:
status["key_count"] = key_count()
status["database_operational"] = True
except Exception as e:
status["key_count"] = "Error"
status["database_operational"] = False
status["error"] = str(e)
return status
def print_database_status() -> None:
"""Print a formatted database status report."""
status = get_database_status()
print("\n=== Database Status Report ===")
print(f"Configured Backend: {status['configured_backend']}")
print(f"Active Backend: {status['active_backend']}")
print(f"Module in Use: {status['module_in_use']}")
print(f"SQLite File Exists: {status['sqlite_file_exists']}")
print(f"User DB Exists: {status['user_db_exists']}")
print(f"Database Operational: {status['database_operational']}")
print(f"Key Count: {status['key_count']}")
if not status["database_operational"]:
print(f"Error: {status.get('error', 'Unknown error')}")
print("==============================\n")

View File

@ -1,21 +1,32 @@
"""Module to handle the API routes."""
import os import os
import sqlite3 import sqlite3
from flask import Blueprint, jsonify, request, send_file, session
import json import json
from custom_functions.decrypt.api_decrypt import api_decrypt
from custom_functions.user_checks.device_allowed import user_allowed_to_use_device
import shutil import shutil
import math import math
import yaml
import mysql.connector
from io import StringIO from io import StringIO
import tempfile import tempfile
import time import time
from configs.icon_links import data as icon_data
from flask import Blueprint, jsonify, request, send_file, session, after_this_request api_bp = Blueprint("api", __name__)
import yaml with open(f"{os.getcwd()}/configs/config.yaml", "r") as file:
import mysql.connector config = yaml.safe_load(file)
if config["database_type"].lower() != "mariadb":
from custom_functions.decrypt.api_decrypt import api_decrypt from custom_functions.database.cache_to_db_sqlite import (
from custom_functions.user_checks.device_allowed import user_allowed_to_use_device search_by_pssh_or_kid,
from custom_functions.database.unified_db_ops import ( cache_to_db,
get_key_by_kid_and_service,
get_unique_services,
get_kid_key_dict,
key_count,
)
elif config["database_type"].lower() == "mariadb":
from custom_functions.database.cache_to_db_mariadb import (
search_by_pssh_or_kid, search_by_pssh_or_kid,
cache_to_db, cache_to_db,
get_key_by_kid_and_service, get_key_by_kid_and_service,
@ -23,31 +34,23 @@ from custom_functions.database.unified_db_ops import (
get_kid_key_dict, get_kid_key_dict,
key_count, key_count,
) )
from configs.icon_links import data as icon_data
api_bp = Blueprint("api", __name__)
with open(os.path.join(os.getcwd(), "configs", "config.yaml"), "r", encoding="utf-8") as file:
config = yaml.safe_load(file)
def get_db_config(): def get_db_config():
"""Get the MariaDB database configuration.""" # Configure your MariaDB connection
with open( with open(f"{os.getcwd()}/configs/config.yaml", "r") as file:
os.path.join(os.getcwd(), "configs", "config.yaml"), "r", encoding="utf-8" config = yaml.safe_load(file)
) as file_mariadb:
config_mariadb = yaml.safe_load(file_mariadb)
db_config = { db_config = {
"host": f'{config_mariadb["mariadb"]["host"]}', "host": f'{config["mariadb"]["host"]}',
"user": f'{config_mariadb["mariadb"]["user"]}', "user": f'{config["mariadb"]["user"]}',
"password": f'{config_mariadb["mariadb"]["password"]}', "password": f'{config["mariadb"]["password"]}',
"database": f'{config_mariadb["mariadb"]["database"]}', "database": f'{config["mariadb"]["database"]}',
} }
return db_config return db_config
@api_bp.route("/api/cache/search", methods=["POST"]) @api_bp.route("/api/cache/search", methods=["POST"])
def get_data(): def get_data():
"""Get the data from the database."""
search_argument = json.loads(request.data)["input"] search_argument = json.loads(request.data)["input"]
results = search_by_pssh_or_kid(search_filter=search_argument) results = search_by_pssh_or_kid(search_filter=search_argument)
return jsonify(results) return jsonify(results)
@ -55,7 +58,6 @@ def get_data():
@api_bp.route("/api/cache/<service>/<kid>", methods=["GET"]) @api_bp.route("/api/cache/<service>/<kid>", methods=["GET"])
def get_single_key_service(service, kid): def get_single_key_service(service, kid):
"""Get the single key from the database."""
result = get_key_by_kid_and_service(kid=kid, service=service) result = get_key_by_kid_and_service(kid=kid, service=service)
return jsonify( return jsonify(
{ {
@ -67,7 +69,6 @@ def get_single_key_service(service, kid):
@api_bp.route("/api/cache/<service>", methods=["GET"]) @api_bp.route("/api/cache/<service>", methods=["GET"])
def get_multiple_key_service(service): def get_multiple_key_service(service):
"""Get the multiple keys from the database."""
result = get_kid_key_dict(service_name=service) result = get_kid_key_dict(service_name=service)
pages = math.ceil(len(result) / 10) pages = math.ceil(len(result) / 10)
return jsonify({"code": 0, "content_keys": result, "pages": pages}) return jsonify({"code": 0, "content_keys": result, "pages": pages})
@ -75,7 +76,6 @@ def get_multiple_key_service(service):
@api_bp.route("/api/cache/<service>/<kid>", methods=["POST"]) @api_bp.route("/api/cache/<service>/<kid>", methods=["POST"])
def add_single_key_service(service, kid): def add_single_key_service(service, kid):
"""Add the single key to the database."""
body = request.get_json() body = request.get_json()
content_key = body["content_key"] content_key = body["content_key"]
result = cache_to_db(service=service, kid=kid, key=content_key) result = cache_to_db(service=service, kid=kid, key=content_key)
@ -86,6 +86,7 @@ def add_single_key_service(service, kid):
"updated": True, "updated": True,
} }
) )
elif result is False:
return jsonify( return jsonify(
{ {
"code": 0, "code": 0,
@ -96,7 +97,6 @@ def add_single_key_service(service, kid):
@api_bp.route("/api/cache/<service>", methods=["POST"]) @api_bp.route("/api/cache/<service>", methods=["POST"])
def add_multiple_key_service(service): def add_multiple_key_service(service):
"""Add the multiple keys to the database."""
body = request.get_json() body = request.get_json()
keys_added = 0 keys_added = 0
keys_updated = 0 keys_updated = 0
@ -104,7 +104,7 @@ def add_multiple_key_service(service):
result = cache_to_db(service=service, kid=kid, key=key) result = cache_to_db(service=service, kid=kid, key=key)
if result is True: if result is True:
keys_updated += 1 keys_updated += 1
else: elif result is False:
keys_added += 1 keys_added += 1
return jsonify( return jsonify(
{ {
@ -117,7 +117,6 @@ def add_multiple_key_service(service):
@api_bp.route("/api/cache", methods=["POST"]) @api_bp.route("/api/cache", methods=["POST"])
def unique_service(): def unique_service():
"""Get the unique services from the database."""
services = get_unique_services() services = get_unique_services()
return jsonify( return jsonify(
{ {
@ -129,12 +128,11 @@ def unique_service():
@api_bp.route("/api/cache/download", methods=["GET"]) @api_bp.route("/api/cache/download", methods=["GET"])
def download_database(): def download_database():
"""Download the database."""
if config["database_type"].lower() != "mariadb": if config["database_type"].lower() != "mariadb":
original_database_path = os.path.join(os.getcwd(), "databases", "sql", "key_cache.db") original_database_path = f"{os.getcwd()}/databases/sql/key_cache.db"
# Make a copy of the original database (without locking the original) # Make a copy of the original database (without locking the original)
modified_database_path = os.path.join(os.getcwd(), "databases", "sql", "key_cache_modified.db") modified_database_path = f"{os.getcwd()}/databases/sql/key_cache_modified.db"
# Using shutil.copy2 to preserve metadata (timestamps, etc.) # Using shutil.copy2 to preserve metadata (timestamps, etc.)
shutil.copy2(original_database_path, modified_database_path) shutil.copy2(original_database_path, modified_database_path)
@ -159,35 +157,38 @@ def download_database():
return send_file( return send_file(
modified_database_path, as_attachment=True, download_name="key_cache.db" modified_database_path, as_attachment=True, download_name="key_cache.db"
) )
if config["database_type"].lower() == "mariadb":
try: try:
# Connect to MariaDB
conn = mysql.connector.connect(**get_db_config()) conn = mysql.connector.connect(**get_db_config())
cursor = conn.cursor() cursor = conn.cursor()
# Get column names # Update sensitive data (this updates the live DB, you may want to duplicate rows instead)
cursor.execute("SHOW COLUMNS FROM licenses") cursor.execute(
columns = [row[0] for row in cursor.fetchall()] """
UPDATE licenses
SET Headers = NULL,
Cookies = NULL
"""
)
# Build SELECT with Headers and Cookies as NULL conn.commit()
select_columns = []
for col in columns: # Now export the table
if col.lower() in ("headers", "cookies"): cursor.execute("SELECT * FROM licenses")
select_columns.append("NULL AS " + col)
else:
select_columns.append(col)
select_query = f"SELECT {', '.join(select_columns)} FROM licenses"
cursor.execute(select_query)
rows = cursor.fetchall() rows = cursor.fetchall()
column_names = [desc[0] for desc in cursor.description]
# Dump to SQL-like format # Dump to SQL-like format
output = StringIO() output = StringIO()
output.write("-- Dump of `licenses` table (Headers and Cookies are NULL)\n") output.write(f"-- Dump of `licenses` table\n")
for row in rows: for row in rows:
values = ", ".join( values = ", ".join(
f"'{str(v).replace('\'', '\\\'')}'" if v is not None else "NULL" f"'{str(v).replace('\'', '\\\'')}'" if v is not None else "NULL"
for v in row for v in row
) )
output.write( output.write(
f"INSERT INTO licenses ({', '.join(columns)}) VALUES ({values});\n" f"INSERT INTO licenses ({', '.join(column_names)}) VALUES ({values});\n"
) )
# Write to a temp file for download # Write to a temp file for download
@ -196,14 +197,6 @@ def download_database():
with open(temp_path, "w", encoding="utf-8") as f: with open(temp_path, "w", encoding="utf-8") as f:
f.write(output.getvalue()) f.write(output.getvalue())
@after_this_request
def remove_file(response):
try:
os.remove(temp_path)
except Exception:
pass
return response
return send_file( return send_file(
temp_path, as_attachment=True, download_name="licenses_dump.sql" temp_path, as_attachment=True, download_name="licenses_dump.sql"
) )
@ -216,7 +209,6 @@ _keycount_cache = {"count": None, "timestamp": 0}
@api_bp.route("/api/cache/keycount", methods=["GET"]) @api_bp.route("/api/cache/keycount", methods=["GET"])
def get_count(): def get_count():
"""Get the count of the keys in the database."""
now = time.time() now = time.time()
if now - _keycount_cache["timestamp"] > 10 or _keycount_cache["count"] is None: if now - _keycount_cache["timestamp"] > 10 or _keycount_cache["count"] is None:
_keycount_cache["count"] = key_count() _keycount_cache["count"] = key_count()
@ -226,42 +218,69 @@ def get_count():
@api_bp.route("/api/decrypt", methods=["POST"]) @api_bp.route("/api/decrypt", methods=["POST"])
def decrypt_data(): def decrypt_data():
"""Decrypt the data.""" api_request_data = json.loads(request.data)
api_request_data = request.get_json(force=True) if "pssh" in api_request_data:
if api_request_data["pssh"] == "":
# Helper to get fields or None if missing/empty api_request_pssh = None
def get_field(key, default=""): else:
value = api_request_data.get(key, default) api_request_pssh = api_request_data["pssh"]
return value if value != "" else default else:
api_request_pssh = None
api_request_pssh = get_field("pssh") if "licurl" in api_request_data:
api_request_licurl = get_field("licurl") if api_request_data["licurl"] == "":
api_request_proxy = get_field("proxy") api_request_licurl = None
api_request_headers = get_field("headers") else:
api_request_cookies = get_field("cookies") api_request_licurl = api_request_data["licurl"]
api_request_data_func = get_field("data") else:
api_request_licurl = None
# Device logic if "proxy" in api_request_data:
device = get_field("device", "public") if api_request_data["proxy"] == "":
if device in [ api_request_proxy = None
"default", else:
"CDRM-Project Public Widevine CDM", api_request_proxy = api_request_data["proxy"]
"CDRM-Project Public PlayReady CDM", else:
"", api_request_proxy = None
None, if "headers" in api_request_data:
]: if api_request_data["headers"] == "":
api_request_headers = None
else:
api_request_headers = api_request_data["headers"]
else:
api_request_headers = None
if "cookies" in api_request_data:
if api_request_data["cookies"] == "":
api_request_cookies = None
else:
api_request_cookies = api_request_data["cookies"]
else:
api_request_cookies = None
if "data" in api_request_data:
if api_request_data["data"] == "":
api_request_data_func = None
else:
api_request_data_func = api_request_data["data"]
else:
api_request_data_func = None
if "device" in api_request_data:
if (
api_request_data["device"] == "default"
or api_request_data["device"] == "CDRM-Project Public Widevine CDM"
or api_request_data["device"] == "CDRM-Project Public PlayReady CDM"
):
api_request_device = "public" api_request_device = "public"
else: else:
api_request_device = device api_request_device = api_request_data["device"]
else:
username = "" api_request_device = "public"
username = None
if api_request_device != "public": if api_request_device != "public":
username = session.get("username") username = session.get("username")
if not username: if not username:
return jsonify({"message": "Not logged in, not allowed"}), 400 return jsonify({"message": "Not logged in, not allowed"}), 400
if not user_allowed_to_use_device(device=api_request_device, username=username): if user_allowed_to_use_device(device=api_request_device, username=username):
return jsonify({"message": "Not authorized / Not found"}), 403 api_request_device = api_request_device
else:
return jsonify({"message": f"Not authorized / Not found"}), 403
result = api_decrypt( result = api_decrypt(
pssh=api_request_pssh, pssh=api_request_pssh,
proxy=api_request_proxy, proxy=api_request_proxy,
@ -274,12 +293,12 @@ def decrypt_data():
) )
if result["status"] == "success": if result["status"] == "success":
return jsonify({"status": "success", "message": result["message"]}) return jsonify({"status": "success", "message": result["message"]})
else:
return jsonify({"status": "fail", "message": result["message"]}) return jsonify({"status": "fail", "message": result["message"]})
@api_bp.route("/api/links", methods=["GET"]) @api_bp.route("/api/links", methods=["GET"])
def get_links(): def get_links():
"""Get the links."""
return jsonify( return jsonify(
{ {
"discord": icon_data["discord"], "discord": icon_data["discord"],
@ -291,7 +310,6 @@ def get_links():
@api_bp.route("/api/extension", methods=["POST"]) @api_bp.route("/api/extension", methods=["POST"])
def verify_extension(): def verify_extension():
"""Verify the extension."""
return jsonify( return jsonify(
{ {
"status": True, "status": True,

View File

@ -1,36 +1,23 @@
"""Module to handle the upload process.""" from flask import Blueprint, request, jsonify, session
import os import os
import logging import logging
import re
from flask import Blueprint, request, jsonify, session
upload_bp = Blueprint("upload_bp", __name__) upload_bp = Blueprint("upload_bp", __name__)
def sanitize_username(username):
"""Sanitize the username."""
return re.sub(r"[^a-zA-Z0-9_\-]", "_", username).lower()
@upload_bp.route("/upload/<cdmtype>", methods=["POST"]) @upload_bp.route("/upload/<cdmtype>", methods=["POST"])
def upload(cdmtype): def upload(cdmtype):
"""Handle the upload process."""
try: try:
username = session.get("username") username = session.get("username")
if not username: if not username:
return jsonify({"message": "False", "error": "No username in session"}), 400 return jsonify({"message": "False", "error": "No username in session"}), 400
safe_username = sanitize_username(username)
# Validate CDM type # Validate CDM type
if cdmtype not in ["PR", "WV"]: if cdmtype not in ["PR", "WV"]:
return jsonify({"message": "False", "error": "Invalid CDM type"}), 400 return jsonify({"message": "False", "error": "Invalid CDM type"}), 400
# Set up user directory paths # Set up user directory paths
base_path = os.path.join( base_path = os.path.join(os.getcwd(), "configs", "CDMs", username)
os.getcwd(), "configs", "CDMs", "users_uploaded", safe_username
)
pr_path = os.path.join(base_path, "PR") pr_path = os.path.join(base_path, "PR")
wv_path = os.path.join(base_path, "WV") wv_path = os.path.join(base_path, "WV")
@ -45,13 +32,11 @@ def upload(cdmtype):
# Determine correct save path based on cdmtype # Determine correct save path based on cdmtype
filename = uploaded_file.filename filename = uploaded_file.filename
assert filename is not None save_path = os.path.join(pr_path if cdmtype == "PR" else wv_path, filename)
target_path = pr_path if cdmtype == "PR" else wv_path
save_path = os.path.join(target_path, filename)
uploaded_file.save(save_path) uploaded_file.save(save_path)
return jsonify({"message": "Success", "file_saved_to": save_path}) return jsonify({"message": "Success", "file_saved_to": save_path})
except (OSError, IOError, ValueError, AttributeError) as e: except Exception as e:
logging.exception("Upload failed: %s", {e}) logging.exception("Upload failed")
return jsonify({"message": "False", "error": "Server error"}), 500 return jsonify({"message": "False", "error": "Server error"}), 500

View File

@ -1,5 +1,3 @@
"""Module to handle the user changes."""
import re import re
from flask import Blueprint, request, jsonify, session from flask import Blueprint, request, jsonify, session
from custom_functions.database.user_db import change_password, change_api_key from custom_functions.database.user_db import change_password, change_api_key
@ -12,7 +10,6 @@ PASSWORD_REGEX = re.compile(r'^[A-Za-z0-9!@#$%^&*()_+\-=\[\]{};\'":\\|,.<>\/?`~]
@user_change_bp.route("/user/change_password", methods=["POST"]) @user_change_bp.route("/user/change_password", methods=["POST"])
def change_password_route(): def change_password_route():
"""Handle the change password route."""
username = session.get("username") username = session.get("username")
if not username: if not username:
return jsonify({"message": "False"}), 400 return jsonify({"message": "False"}), 400
@ -28,19 +25,18 @@ def change_password_route():
return jsonify({"message": "True"}), 200 return jsonify({"message": "True"}), 200
except Exception as e: except Exception as e:
return jsonify({"message": "False", "error": str(e)}), 400 return jsonify({"message": "False"}), 400
@user_change_bp.route("/user/change_api_key", methods=["POST"]) @user_change_bp.route("/user/change_api_key", methods=["POST"])
def change_api_key_route(): def change_api_key_route():
"""Handle the change API key route."""
# Ensure the user is logged in by checking session for 'username' # Ensure the user is logged in by checking session for 'username'
username = session.get("username") username = session.get("username")
if not username: if not username:
return jsonify({"message": "False", "error": "User not logged in"}), 400 return jsonify({"message": "False", "error": "User not logged in"}), 400
# Get the new API key from the request body # Get the new API key from the request body
new_api_key = request.get_json().get("new_api_key") new_api_key = request.json.get("new_api_key")
if not new_api_key: if not new_api_key:
return jsonify({"message": "False", "error": "New API key not provided"}), 400 return jsonify({"message": "False", "error": "New API key not provided"}), 400

View File

@ -1,10 +1,7 @@
"""Module to handle the user info request.""" from flask import Blueprint, request, jsonify, session
import os import os
import glob import glob
import logging import logging
import re
from flask import Blueprint, request, jsonify, session
from custom_functions.database.user_db import ( from custom_functions.database.user_db import (
fetch_api_key, fetch_api_key,
fetch_styled_username, fetch_styled_username,
@ -14,30 +11,19 @@ from custom_functions.database.user_db import (
user_info_bp = Blueprint("user_info_bp", __name__) user_info_bp = Blueprint("user_info_bp", __name__)
def sanitize_username(username):
    """Return a filesystem-safe, lowercased form of *username*.

    Any character outside ``[a-zA-Z0-9_-]`` is replaced with an
    underscore before the result is lowercased, so the value can be
    used safely as a path component under the CDMs directory.
    """
    # Substitute first, then lowercase: lowercasing certain Unicode
    # characters can expand them into multiple code points, so the
    # order here is deliberate and must not be swapped.
    cleaned = re.sub(r"[^a-zA-Z0-9_\-]", "_", username)
    return cleaned.lower()
@user_info_bp.route("/userinfo", methods=["POST"]) @user_info_bp.route("/userinfo", methods=["POST"])
def user_info(): def user_info():
"""Handle the user info request."""
username = session.get("username") username = session.get("username")
if not username: if not username:
try: try:
headers = request.headers headers = request.headers
api_key = headers["Api-Key"] api_key = headers["Api-Key"]
username = fetch_username_by_api_key(api_key) username = fetch_username_by_api_key(api_key)
except Exception as e: except:
logging.exception("Error retrieving username by API key, %s", {e})
return jsonify({"message": "False"}), 400 return jsonify({"message": "False"}), 400
safe_username = sanitize_username(username)
try: try:
base_path = os.path.join( base_path = os.path.join(os.getcwd(), "configs", "CDMs", username.lower())
os.getcwd(), "configs", "CDMs", "users_uploaded", safe_username
)
pr_files = [ pr_files = [
os.path.basename(f) os.path.basename(f)
for f in glob.glob(os.path.join(base_path, "PR", "*.prd")) for f in glob.glob(os.path.join(base_path, "PR", "*.prd"))
@ -57,5 +43,5 @@ def user_info():
} }
) )
except Exception as e: except Exception as e:
logging.exception("Error retrieving device files, %s", {e}) logging.exception("Error retrieving device files")
return jsonify({"message": "False"}), 500 return jsonify({"message": "False"}), 500