commit 7fd87b8aa21463e36550d9082140a6e8a15b6c88
Author: rlaphoenix <rlaphoenix@pm.me>
Date:   Mon Feb 6 02:33:09 2023 +0000

    Initial commit

diff --git a/.deepsource.toml b/.deepsource.toml
new file mode 100644
index 0000000..4524488
--- /dev/null
+++ b/.deepsource.toml
@@ -0,0 +1,13 @@
+version = 1
+
+exclude_patterns = [
+  "**_pb2.py"  # protobuf files
+]
+
+[[analyzers]]
+name = "python"
+enabled = true
+
+  [analyzers.meta]
+  runtime_version = "3.x.x"
+  max_line_length = 120
diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..bf2c5f2
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,3 @@
+[flake8]
+exclude = .venv,build,dist,*_pb2.py,*.pyi
+max-line-length = 120
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..2a49387
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,27 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**To Reproduce**
+Steps to reproduce the behavior:
+
+1. Clone repository '...'
+2. Run '....'
+3. See error
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Screenshots**
+If applicable, add screenshots to help explain your problem.
+
+**Additional context**
+Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 0000000..11fc491
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,20 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ''
+labels: enhancement
+assignees: ''
+
+---
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.
diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml
new file mode 100644
index 0000000..262921b
--- /dev/null
+++ b/.github/workflows/cd.yml
@@ -0,0 +1,41 @@
+name: cd
+
+on:
+  push:
+    tags:
+      - "v*"
+
+jobs:
+  tagged-release:
+    name: Tagged Release
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v3
+    - name: Set up Python
+      uses: actions/setup-python@v4
+      with:
+        python-version: '3.10.x'
+    - name: Install Poetry
+      uses: abatilo/actions-poetry@v2.2.0
+      with:
+        poetry-version: '1.3.2'
+    - name: Install dependencies
+      run: poetry install
+    - name: Build wheel
+      run: poetry build -f wheel
+    - name: Upload wheel
+      uses: actions/upload-artifact@v2.2.4
+      with:
+        name: Python Wheel
+        path: "dist/*.whl"
+    - name: Deploy release
+      uses: marvinpinto/action-automatic-releases@latest
+      with:
+        prerelease: false
+        repo_token: "${{ secrets.GITHUB_TOKEN }}"
+        files: |
+          dist/*.whl
+    - name: Publish to PyPI
+      env:
+        POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }}
+      run: poetry publish
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..425a943
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,38 @@
+name: ci
+
+on:
+  push:
+    branches: [ master ]
+  pull_request:
+    branches: [ master ]
+
+jobs:
+  build:
+
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ['3.8', '3.9', '3.10', '3.11']
+
+    steps:
+    - uses: actions/checkout@v3
+    - name: Set up Python ${{ matrix.python-version }}
+      uses: actions/setup-python@v4
+      with:
+        python-version: ${{ matrix.python-version }}
+    - name: Install flake8
+      run: python -m pip install flake8
+    - name: Lint with flake8
+      run: |
+        # stop the build if there are Python syntax errors or undefined names
+        flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+        # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+    - name: Install poetry
+      uses: abatilo/actions-poetry@v2.2.0
+      with:
+        poetry-version: 1.3.2
+    - name: Install project
+      run: poetry install --no-dev
+    - name: Build project
+      run: poetry build
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..43e7602
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,125 @@
+# devine
+*.mkv
+*.mp4
+*.exe
+*.dll
+*.crt
+*.wvd
+*.der
+*.pem
+*.bin
+*.db
+device_cert
+device_client_id_blob
+device_private_key
+device_vmp_blob
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# JetBrains project settings
+.idea
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.directory
+.idea/dataSources.local.xml
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..d6340d4
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,18 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+
+repos:
+  - repo: https://github.com/pycqa/isort
+    rev: 5.12.0
+    hooks:
+      - id: isort
+  - repo: https://github.com/pycqa/flake8
+    rev: 6.0.0
+    hooks:
+      - id: flake8
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.4.0
+    hooks:
+      - id: trailing-whitespace
+        args: [--markdown-linebreak-ext=md]
+      - id: end-of-file-fixer
diff --git a/CONFIG.md b/CONFIG.md
new file mode 100644
index 0000000..00c47c5
--- /dev/null
+++ b/CONFIG.md
@@ -0,0 +1,353 @@
+# Config Documentation
+
+This page documents configuration values and what they do. You begin with an empty configuration file.  
+You may alter your configuration with `devine cfg --help`, or find the direct location with `devine env info`.  
+Configuration values are listed in alphabetical order.
+
+Avoid putting comments in the config file as they may be removed. Comments are currently kept only thanks
+to the usage of `ruamel.yaml` to parse and write YAML files. In the future `yaml` may be used instead,
+which does not keep comments.
+
+## aria2c (dict)
+
+- `file_allocation`
+  Specify file allocation method. Default: `"prealloc"`
+
+  - `"none"` doesn't pre-allocate file space.
+  - `"prealloc"` pre-allocates file space before download begins. This may take some time depending on the size of the
+    file.
+  - `"falloc"` is your best choice if you are using newer file systems such as ext4 (with extents support), btrfs, xfs
+    or NTFS (MinGW build only). It allocates large (few GiB) files almost instantly. Don't use falloc with legacy file
+    systems such as ext3 and FAT32 because it takes almost the same time as prealloc, and it blocks aria2 entirely until
+    allocation finishes. falloc may not be available if your system doesn't have posix_fallocate(3) function.
+  - `"trunc"` uses ftruncate(2) system call or platform-specific counterpart to truncate a file to a specified length.
+
+## cdm (dict)
+
+Pre-define which widevine device to use for each Service by Service Tag as Key (case-sensitive).  
+The value should be a WVD filename without the file extension.
+
+For example,
+
+```yaml
+AMZN: chromecdm_903_l3
+NF: nexus_6_l1
+```
+
+You may also specify this device based on the profile used.
+
+For example,
+
+```yaml
+AMZN: chromecdm_903_l3
+NF: nexus_6_l1
+DSNP:
+  john_sd: chromecdm_903_l3
+  jane_uhd: nexus_5_l1
+```
+
+You can also specify a fallback value to predefine if a match was not made.  
+This can be done using `default` key. This can help reduce redundancy in your specifications.
+
+For example, the following has the same result as the previous example, as well as all other
+services and profiles being pre-defined to use `chromecdm_903_l3`.
+
+```yaml
+NF: nexus_6_l1
+DSNP:
+  jane_uhd: nexus_5_l1
+default: chromecdm_903_l3
+```
+
+## credentials (dict)
+
+Specify login credentials to use for each Service by Profile as Key (case-sensitive).
+
+The value should be `email:password` or `username:password` (with some exceptions).  
+The first section does not have to be an email or username. It may also be a Phone number.
+
+For example,
+
+```yaml
+AMZN:
+  james: james@gmail.com:TheFriend97
+  jane: jane@example.tld:LoremIpsum99
+  john: john@example.tld:LoremIpsum98
+NF:
+  john: john@gmail.com:TheGuyWhoPaysForTheNetflix69420
+```
+
+Credentials must be specified per-profile. You cannot specify a fallback or default credential.
+Please be aware that this information is sensitive and to keep it safe. Do not share your config.
+
+## directories (dict)
+
+Override the default directories used across devine.  
+The directories are set to common values by default.
+
+The following directories are available and may be overridden,
+
+- `commands` - CLI Command Classes.
+- `services` - Service Classes.
+- `vaults` - Vault Classes.
+- `downloads` - Downloads.
+- `temp` - Temporary files or conversions during download.
+- `cache` - Expiring data like Authorization tokens, or other misc data.
+- `cookies` - Expiring Cookie data.
+- `logs` - Logs.
+- `wvds` - Widevine Devices.
+
+For example,
+
+```yaml
+downloads: "D:/Downloads/devine"
+temp: "D:/Temp/devine"
+```
+
+There are directories not listed that cannot be modified as they are crucial to the operation of devine.
+
+## dl (dict)
+
+Pre-define default options and switches of the `dl` command.  
+The values will be ignored if explicitly set in the CLI call.
+
+The Key must be the same value Python click would resolve it to as an argument.  
+E.g., `@click.option("-r", "--range", "range_", type=...` actually resolves as `range_` variable.
+
+For example to set the default primary language to download to German,
+
+```yaml
+lang: de
+```
+
+or to set `--bitrate=CVBR` for the AMZN service,
+
+```yaml
+lang: de
+AMZN:
+  bitrate: CVBR
+```
+
+## headers (dict)
+
+Case-Insensitive dictionary of headers that all Services begin their Request Session state with.  
+All requests will use these unless changed explicitly or implicitly via a Server response.  
+These should be sane defaults and anything that would only be useful for some Services should not
+be put here.
+
+Avoid headers like 'Accept-Encoding' as that would be a compatibility header that Python-requests will
+set for you.
+
+I recommend using,
+
+```yaml
+Accept-Language: "en-US,en;q=0.8"
+User-Agent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.75 Safari/537.36"
+```
+
+## key_vaults (list\[dict])
+
+Key Vaults store your obtained Content Encryption Keys (CEKs) and Key IDs per-service.
+
+This can help reduce unnecessary License calls even during the first download. This is because a Service may
+provide the same Key ID and CEK for both Video and Audio, as well as for multiple resolutions or bitrates.
+
+You can have as many Key Vaults as you would like. It's nice to share Key Vaults or use a unified Vault on
+Teams as sharing CEKs immediately can help reduce License calls drastically.
+
+Two types of Vaults are in the Core codebase, SQLite and MySQL Vaults. Both directly connect to an SQLite or MySQL
+Server. It has to connect directly to the Host/IP. It cannot be in front of a PHP API or such. Beware that some Hosts
+do not let you access the MySQL server outside their intranet (aka Don't port forward or use permissive network
+interfaces).
+
+### Connecting to a MySQL Vault
+
+MySQL vaults can be either MySQL or MariaDB servers. I recommend MariaDB.  
+A MySQL Vault can be on a local or remote network, but I recommend SQLite for local Vaults.
+
+```yaml
+- type: MySQL
+  name: "John#0001's Vault"  # arbitrary vault name
+  host: "127.0.0.1"          # host/ip
+  # port: 3306               # port (defaults to 3306)
+  database: vault            # database used for devine
+  username: jane11
+  password: Doe123
+```
+
+I recommend giving only a trustable user (or yourself) CREATE permission and then use devine to cache at least one CEK
+per Service to have it create the tables. If you don't give any user permissions to create tables, you will need to
+make tables yourself.
+
+- Use a password on all user accounts.
+- Never use the root account with devine (even if it's you).
+- Do not give multiple users the same username and/or password.
+- Only give users access to the database used for devine.
+- You may give trusted users CREATE permission so devine can create tables if needed.
+- Other users should only be given SELECT and INSERT permissions.
+
+### Connecting to an SQLite Vault
+
+SQLite Vaults are usually only used for locally stored vaults. This vault may be stored on a mounted Cloud storage
+drive, but I recommend using SQLite exclusively as an offline-only vault. Effectively this is your backup vault in
+case something happens to your MySQL Vault.
+
+```yaml
+- type: SQLite
+  name: "My Local Vault"  # arbitrary vault name
+  path: "C:/Users/Jane11/Documents/devine/data/key_vault.db"
+```
+
+**Note**: You do not need to create the file at the specified path.  
+SQLite will create a new SQLite database at that path if one does not exist.  
+Try not to accidentally move the `db` file once created without reflecting the change in the config, or you will end
+up with multiple databases.
+
+If you work on a Team I recommend every team member having their own SQLite Vault even if you all use a MySQL vault
+together.
+
+## muxing (dict)
+
+- `set_title`
+  Set the container title to `Show SXXEXX Episode Name` or `Movie (Year)`. Default: `true`
+
+## nordvpn (dict)
+
+Set your NordVPN Service credentials with `username` and `password` keys to automate the use of NordVPN as a Proxy
+system where required.
+
+You can also specify specific servers to use per-region with the `servers` key.  
+Sometimes a specific server works better for a service than others, so hard-coding one for a day or two helps.
+
+For example,
+
+```yaml
+username: zxqsR7C5CyGwmGb6KSvk8qsZ  # example of the login format
+password: wXVHmht22hhRKUEQ32PQVjCZ
+servers:
+  - us: 12  # force US server #12 for US proxies
+```
+
+The username and password should NOT be your normal NordVPN Account Credentials.  
+They should be the `Service credentials` which can be found on your Nord Account Dashboard.
+
+Once set, you can also specifically opt in to use a NordVPN proxy by specifying `--proxy=gb` or such.
+You can even set a specific server number this way, e.g., `--proxy=gb2366`.
+
+Note that `gb` is used instead of `uk` to be more consistent across regional systems.
+
+## profiles (dict)
+
+Pre-define Profiles to use Per-Service.
+
+For example,
+
+```yaml
+AMZN: jane
+DSNP: john
+```
+
+You can also specify a fallback value to pre-define if a match was not made.  
+This can be done using `default` key. This can help reduce redundancy in your specifications.
+
+```yaml
+AMZN: jane
+DSNP: john
+default: james
+```
+
+If a Service doesn't require a profile (as it does not require Credentials or Authorization of any kind), you can
+disable the profile checks by specifying `false` as the profile for the Service.
+
+```yaml
+ALL4: false
+CTV: false
+```
+
+## proxies (dict)
+
+Define a list of proxies to use where required.  
+The keys are region Alpha 2 Country Codes. Alpha 2 Country Codes are `a-z{2}` codes, e.g., `us`, `gb`, and `jp`.  
+Don't get mixed up between language codes like `en` vs. `gb`, or `ja` vs. `jp`.
+
+For example,
+
+```yaml
+us: "http://john%40email.tld:password123@proxy-us.domain.tld:8080"
+de: "http://127.0.0.1:8888"
+```
+
+## remote_cdm (list\[dict])
+
+Use [pywidevine] Serve-compliant Remote CDMs in devine as if it was a local widevine device file.  
+The name of each defined device maps as if it was a local device and should be used like a local device.
+
+For example,
+
+```yaml
+- name: chromecdm_903_l3   # name must be unique for each remote CDM
+  # the device type, system id and security level must match the values of the device on the API
+  # if any of the information is wrong, it will raise an error, if you do not know it ask the API owner
+  device_type: CHROME
+  system_id: 1234
+  security_level: 3
+  host: "http://xxxxxxxxxxxxxxxx/the_cdm_endpoint"
+  secret: "secret/api key"
+  device_name: "remote device to use"  # the device name from the API, usually a wvd filename
+```
+
+  [pywidevine]: <https://github.com/rlaphoenix/pywidevine>
+
+## serve (dict)
+
+Configuration data for pywidevine's serve functionality run through devine.
+This effectively allows you to run `devine serve` to start serving pywidevine Serve-compliant CDMs right from your
+local widevine device files.
+
+For example,
+
+```yaml
+users:
+  secret_key_for_jane:  # 32bit hex recommended, case-sensitive
+    devices:  # list of allowed devices for this user
+      - generic_nexus_4464_l3
+    username: jane  # only for internal logging, users will not see this name
+  secret_key_for_james:
+    devices:
+      - generic_nexus_4464_l3
+    username: james
+  secret_key_for_john:
+    devices:
+      - generic_nexus_4464_l3
+    username: john
+# devices can be manually specified by path if you don't want to add it to
+# devine's WVDs directory for whatever reason
+# devices:
+#   - 'C:\Users\john\Devices\test_devices_001.wvd'
+```
+
+## services (dict)
+
+Configuration data for each Service. The Service will have the data within this section merged into the `config.yaml`
+before provided to the Service class.
+
+Think of this config to be used for more sensitive configuration data, like user or device-specific API keys, IDs,
+device attributes, and so on. A `config.yaml` file is typically shared and not meant to be modified, so use this for
+any sensitive configuration data.
+
+The Key is the Service Tag, but can take any arbitrary form for its value. It's expected to begin as either a list or
+a dictionary.
+
+For example,
+
+```yaml
+NOW:
+  client:
+    auth_scheme: MESSO
+    # ... more sensitive data
+```
+
+## tag (str)
+
+Group or Username to postfix to the end of all download filenames following a dash.  
+For example, `tag: "J0HN"` will have `-J0HN` at the end of all download filenames.
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..871ce8e
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.  We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors.  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights.  Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received.  You must make sure that they, too, receive
+or can get the source code.  And you must show them these terms so they
+know their rights.
+
+  Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+  For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software.  For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+  Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so.  This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software.  The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable.  Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products.  If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+  Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary.  To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+                       TERMS AND CONDITIONS
+
+  0. Definitions.
+
+  "This License" refers to version 3 of the GNU General Public License.
+
+  "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+  "The Program" refers to any copyrightable work licensed under this
+License.  Each licensee is addressed as "you".  "Licensees" and
+"recipients" may be individuals or organizations.
+
+  To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy.  The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+  A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+  To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy.  Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+  To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies.  Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+  An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License.  If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+  1. Source Code.
+
+  The "source code" for a work means the preferred form of the work
+for making modifications to it.  "Object code" means any non-source
+form of a work.
+
+  A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+  The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form.  A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+  The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities.  However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work.  For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+  The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+  The Corresponding Source for a work in source code form is that
+same work.
+
+  2. Basic Permissions.
+
+  All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met.  This License explicitly affirms your unlimited
+permission to run the unmodified Program.  The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work.  This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+  You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force.  You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright.  Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+  Conveying under any other circumstances is permitted solely under
+the conditions stated below.  Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+  No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+  When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+  4. Conveying Verbatim Copies.
+
+  You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+  You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+  5. Conveying Modified Source Versions.
+
+  You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+    a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+
+    b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under section
+    7.  This requirement modifies the requirement in section 4 to
+    "keep intact all notices".
+
+    c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy.  This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged.  This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+
+    d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+  A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit.  Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+  6. Conveying Non-Source Forms.
+
+  You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+    a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+
+    b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the
+    Corresponding Source from a network server at no charge.
+
+    c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source.  This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+
+    d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge.  You need not require recipients to copy the
+    Corresponding Source along with the object code.  If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source.  Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+
+    e) Convey the object code using peer-to-peer transmission, provided
+    you inform other peers where the object code and Corresponding
+    Source of the work are being offered to the general public at no
+    charge under subsection 6d.
+
+  A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+  A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling.  In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage.  For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product.  A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+  "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source.  The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+  If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information.  But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+  The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed.  Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+  Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+  7. Additional Terms.
+
+  "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law.  If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+  When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it.  (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.)  You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+  Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+    a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+
+    b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+
+    c) Prohibiting misrepresentation of the origin of that material, or
+    requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+
+    d) Limiting the use for publicity purposes of names of licensors or
+    authors of the material; or
+
+    e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+
+    f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions of
+    it) with contractual assumptions of liability to the recipient, for
+    any liability that these contractual assumptions directly impose on
+    those licensors and authors.
+
+  All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10.  If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term.  If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+  If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+  Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+  8. Termination.
+
+  You may not propagate or modify a covered work except as expressly
+provided under this License.  Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+  However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+  Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+  Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License.  If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+  9. Acceptance Not Required for Having Copies.
+
+  You are not required to accept this License in order to receive or
+run a copy of the Program.  Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance.  However,
+nothing other than this License grants you permission to propagate or
+modify any covered work.  These actions infringe copyright if you do
+not accept this License.  Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+  10. Automatic Licensing of Downstream Recipients.
+
+  Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License.  You are not responsible
+for enforcing compliance by third parties with this License.
+
+  An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations.  If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+  You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License.  For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+  11. Patents.
+
+  A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based.  The
+work thus licensed is called the contributor's "contributor version".
+
+  A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version.  For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+  Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+  In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement).  To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+  If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients.  "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+  If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+  A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License.  You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+  Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+  12. No Surrender of Others' Freedom.
+
+  If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all.  For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+  13. Use with the GNU Affero General Public License.
+
+  Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work.  The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+  14. Revised Versions of this License.
+
+  The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+  Each version is given a distinguishing version number.  If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation.  If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..30f0804
--- /dev/null
+++ b/README.md
@@ -0,0 +1,294 @@
+<p align="center">
+    <img src="https://rawcdn.githack.com/rlaphoenix/pywidevine/077a3aa6bec14777c06cbdcb47041eee9791c06e/docs/images/widevine_icon_24.png">
+    <a href="https://github.com/devine/devine">Devine</a>
+    <br/>
+    <sup><em>Open-Source Movie, TV, and Music Downloading Solution</em></sup>
+</p>
+
+<p align="center">
+    <a href="https://github.com/devine/devine/actions/workflows/ci.yml">
+        <img src="https://github.com/devine/devine/actions/workflows/ci.yml/badge.svg" alt="Build status">
+    </a>
+    <a href="https://python.org">
+        <img src="https://img.shields.io/badge/python-3.8.6%2B-informational" alt="Python version">
+    </a>
+</p>
+
+## Features
+
+- 🎥 Supports Movies, TV shows, and Music
+- 🧩 Easy installation via PIP/PyPI
+- 👥 Multi-profile authentication per-service with credentials or cookies
+- 🤖 Automatic P2P filename structure with Group Tag
+- 🛠️ Flexible Service framework system
+- 📦 Portable Installations
+- 🗃️ Local and Remote SQL-based Key Vault database
+- ⚙️ YAML for Configuration
+- 🌍 Local and Remote Widevine CDMs
+- ❤️ Fully Open-Source! Pull Requests Welcome
+
+## Installation
+
+```shell
+$ pip install devine
+```
+
+> __Note__ If you see warnings about a path not being in your PATH environment variable, add it, or `devine` won't run.
+
+Voilà 🎉! You now have the `devine` package installed and a `devine` executable is now available.  
+Check it out with `devine --help`!
+
+### Dependencies
+
+The following is a list of programs that need to be installed manually. I recommend installing these with [winget],
+[chocolatey] or such where possible as it automatically adds them to your `PATH` environment variable and will be
+easier to update in the future.
+
+- [aria2(c)] for downloading streams and large manifests.
+- [CCExtractor] for extracting Closed Caption data like EIA-608 from video streams and converting as SRT.
+- [FFmpeg] (and ffprobe) for repacking/remuxing streams on specific services, and evaluating stream data.
+- [MKVToolNix] v54+ for muxing individual streams to an `.mkv` file.
+- [shaka-packager] for decrypting CENC-CTR and CENC-CBCS video and audio streams.
+
+For portable downloads, make sure you put them in your current working directory, in the installation directory,
+or put the directory path in your `PATH` environment variable. If you do not do this then their binaries will not be
+able to be found.
+
+  [winget]: <https://winget.run>
+  [chocolatey]: <https://chocolatey.org>
+  [aria2(c)]: <https://aria2.github.io>
+  [CCExtractor]: <https://github.com/CCExtractor/ccextractor>
+  [FFmpeg]: <https://ffmpeg.org>
+  [MKVToolNix]: <https://mkvtoolnix.download/downloads.html>
+  [shaka-packager]: <https://github.com/google/shaka-packager/releases/latest>
+
+### Portable installation
+
+1. Download a Python Embeddable Package of a supported Python version (the `.zip` download).  
+   (make sure it's either x64/x86 and not ARM unless you're on an ARM device).
+2. Extract the `.zip` and rename the folder, if you wish.
+3. Open Terminal and `cd` to the extracted folder.
+4. Run the following on Windows:
+```
+(Invoke-WebRequest -Uri https://gist.githubusercontent.com/rlaphoenix/5ef250e61ceeb123c6696c05ad4dee8b/raw -UseBasicParsing).Content | .\python -
+```
+or the following on Linux/macOS:
+```
+curl -sSL https://gist.githubusercontent.com/rlaphoenix/5ef250e61ceeb123c6696c05ad4dee8b/raw | ./python -
+```
+5. Run `.\python -m pip install devine`
+
+You can now call `devine` by,
+
+- running `./python -m devine --help`, or,
+- running `./Scripts/devine.exe --help`, or,
+- symlinking the `/Scripts/devine.exe` binary to the root of the folder, for `./devine --help`, or,
+- zipping the entire folder to `devine.zip`, for `python devine.zip --help`.
+
+The last method of calling devine, by archiving to a zip file, is incredibly useful for sharing and portability!  
+I urge you to give it a try!
+
+### Services
+
+Devine does not come with any infringing Service code. You must develop your own Service code and place them in
+the `/devine/services` directory. There are different ways to add services depending on your installation type.
+In some cases you may use multiple of these methods to have separate copies.
+
+Please refrain from making or using Service code unless you have full rights to do so. I also recommend ensuring that
+you keep the Service code private and secure, i.e. a private repository or keeping it offline.
+
+No matter which method you use, make sure that you install any further dependencies needed by the services. There's
+currently no way to have these dependencies automatically install apart from within the Fork method.
+
+> __Warning__ Please be careful with who you trust and what you run. The users you collaborate with on Service
+> code could update it with malicious code that you would run via devine on the next call.
+
+#### via Copy & Paste
+
+If you have service code already and wish to just install and use it locally, then simply putting it into the Services
+directory of your local pip installation will do the job. However, this method is the worst in terms of collaboration.
+
+1. Get the installation directory by running the following in terminal,
+   `python -c 'import os,devine.__main__ as a;print(os.path.dirname(a.__file__))'`
+2. Head to the installation directory and create a `services` folder if one is not yet created.
+3. Within that `services` folder you may install or create service code.
+
+> __Warning__ Uninstalling Python or Devine may result in the Services you installed being deleted. Make sure you back
+> up the services before uninstalling.
+
+#### via a Forked Repository
+
+If you are collaborating with a team on multiple services then forking the project is the best way to go. I recommend
+forking the project then hard resetting to the latest stable update by tag. Once a new stable update comes out you can
+easily rebase your fork to that commit to update.
+
+However, please make sure you look at changes between each version before rebasing and resolve any breaking changes and
+deprecations when rebasing to a new version.
+
+1. Fork the project with `git` or GitHub [(fork)](https://github.com/devine/devine/fork).
+2. Head inside the root `devine` directory and create a `services` directory.
+3. Within that `services` folder you may install or create service code.
+
+You may now commit changes or additions within that services folder to your forked repository.  
+Once committed all your other team members can easily sync and contribute changes.
+
+> __Note__ You may add Service-specific Python dependencies using `poetry` that can install alongside the project.
+> Just do note that this will complicate rebasing when even the `poetry.lock` gets updates in the upstream project.
+
+#### via Cloud storage (symlink)
+
+This is a great option for those who wish to do something like the forking method, but without the need of constantly
+rebasing their fork to the latest version. Overall less knowledge on git would be required, but each user would need
+to do a bit of symlinking compared to the fork method.
+
+This also opens up the ways you can host or collaborate on Service code. As long as you can receive a directory that
+updates with just the services within it, then you're good to go. Options could include an FTP server, Shared Google
+Drive, a non-fork repository with just services, and more.
+
+1. Follow the steps in the [Copy & Paste method](#via-copy--paste) to create the `services` folder.
+2. Use any Cloud Source that gives you a pseudo-directory to access the Service files. E.g., rclone or google drive fs.
+3. Symlink the services directory from your Cloud Source to the new services folder you made.
+   (you may need to delete it first)
+
+Of course, you have to make sure the original folder keeps receiving and downloading/streaming those changes, or that
+you keep git pulling those changes. You must also make sure that the version of devine you have locally is supported by
+the Services code.
+
+> __Note__ If you're using a cloud source that downloads the file once it gets opened, you don't have to worry as those
+> will automatically download. Python importing the files triggers the download to begin. However, it may cause a delay
+> on startup.
+
+### Profiles (Cookies & Credentials)
+
+Just like a streaming service, devine associates both a cookie and/or credential as a Profile. You can associate up to
+one cookie and one credential per-profile, depending on which (or both) are needed by the Service. This system allows
+you to configure multiple accounts per-service and choose which to use at any time.
+
+Credentials are stored in the config, and Cookies are stored in the data directory. You can find the location of these
+by running `devine env info`. However, you can manage profiles with `devine auth --help`. E.g. to add a new John
+profile to Netflix with a Cookie and Credential, take a look at the following CLI call,
+`devine auth add John NF --cookie "C:\Users\John\Downloads\netflix.com.txt" --credential "john@gmail.com:pass123"`
+
+You can also delete a credential with `devine auth delete`. E.g., to delete the cookie for John that we just added, run
+`devine auth delete John --cookie`. Take a look at `devine auth delete --help` for more information.
+
+> __Note__ Profile names are case-sensitive and unique per-service. They also have no arbitrary character or length
+> limit, but for convenience I don't recommend using any special characters as your terminal may get confused.
+
+#### Cookie file format and Extensions
+
+Cookies must be in the standard Netscape cookies file format.  
+Recommended Cookie exporter extensions:
+
+- Firefox: "[Export Cookies]" by `Rotem Dan`
+- Chromium: "[Open Cookies.txt]" by `Ninh Pham`, ~~or "Get cookies.txt" by `Rahul Shaw`~~
+
+  [Export Cookies]: <https://addons.mozilla.org/addon/export-cookies-txt>
+  [Open Cookies.txt]: <https://chrome.google.com/webstore/detail/gdocmgbfkjnnpapoeobnolbbkoibbcif>
+
+Any other extension that exports to the standard Netscape format should theoretically work.
+
+> __Warning__ The Get cookies.txt extension by Rahul Shaw is essentially spyware. Do not use it. There are some safe
+> versions floating around (usually just older versions of the extension), but since there are safe alternatives I'd
+> just avoid it altogether. Source: https://reddit.com/r/youtubedl/comments/10ar7o7
+
+### Widevine Provisions
+
+A Widevine Provision is needed for acquiring licenses containing decryption keys for DRM-protected content.
+They are not needed if you will be using devine on DRM-free services. Please do not ask for any Widevine Device Files,
+Keys, or Provisions as they cannot be provided.
+
+Devine only supports `.WVD` files (Widevine Device Files). However, if you have the Provision RSA Private Key and
+Device Client Identification Blob as blob files (e.g., `device_private_key` and `device_client_id_blob`), then you can
+convert them to a `.WVD` file by running `pywidevine create-device --help`.
+
+Once you have `.WVD` files, place them in the WVDs directory which can be found by calling `devine env info`.
+You can then set in your config which WVD (by filename only) to use by default with `devine cfg cdm.default wvd_name`.
+From here you can then set which WVD to use for each specific service. It's best to use the lowest security-level
+provision where possible.
+
+An alternative would be using a pywidevine Serve-compliant CDM API. Of course, you would need to know someone who is
+serving one, and they would need to give you access. Take a look at the [remote_cdm](CONFIG.md#remotecdm--listdict--)
+config option for setup information. For further information on it see the pywidevine repository.
+
+## Usage
+
+First, take a look at `devine --help` for a full help document, listing all commands available and giving you more
+information on what can be done with Devine.
+
+Here's a checklist on what I recommend getting started with, in no particular order,
+
+- [ ] Add [Services](#services), these will be used in `devine dl`.
+- [ ] Add [Profiles](#profiles--cookies--credentials-), these are your cookies and credentials.
+- [ ] Add [Widevine Provisions](#widevine-provisions), also known as CDMs, these are used for DRM-protected content.
+- [ ] Set your Group Tag, the text at the end of the final filename, e.g., `devine cfg tag NOGRP` for ...-NOGRP.
+- [ ] Set Up a Local Key Vault, take a look at the [Key Vaults Config](CONFIG.md#keyvaults--listdict--).
+
+And here's some more advanced things you could take a look at,
+
+- [ ] Setting default Headers that the Request Session uses.
+- [ ] Setting default Profiles and CDM Provisions to use for services.
+- [ ] NordVPN and Hola Proxy Providers for automatic proxies.
+- [ ] Hosting and/or Using Remote Key Vaults.
+- [ ] Serving and/or Using Remote CDM Provisions.
+
+Documentation on the config is available in the [CONFIG.md](CONFIG.md) file, it has a lot of handy settings.  
+If you start to get sick of putting something in your CLI call, then I recommend taking a look at it!
+
+## Development
+
+The following steps are instructions on downloading, preparing, and running the code under a [Poetry] environment.
+You can skip steps 3-5 with a simple `pip install .` call instead, but you miss out on a wide array of benefits.
+
+1. `git clone https://github.com/devine/devine`
+2. `cd devine`
+3. (optional) `poetry config virtualenvs.in-project true`
+4. `poetry install`
+5. `poetry run devine --help`
+
+As seen in Step 5, running the `devine` executable is somewhat different from a normal PIP installation.
+See [Poetry's Docs] on various ways of making calls under the virtual-environment.
+
+  [Poetry]: <https://python-poetry.org>
+  [Poetry's Docs]: <https://python-poetry.org/docs/basic-usage/#using-your-virtual-environment>
+
+## End User License Agreement
+
+Devine and its community pages should be treated with the same kindness as other projects.
+Please refrain from spam or asking for questions that infringe upon a Service's End User License Agreement.
+
+1. Do not use Devine for any purposes of which you do not have the rights to do so.
+2. Do not share or request infringing content; this includes Widevine Provision Keys, Content Encryption Keys,
+   or Service API Calls or Code.
+3. The Core codebase is meant to stay Free and Open-Source while the Service code should be kept private.
+4. Do not sell any part of this project, neither alone nor as part of a bundle.
+   If you paid for this software or received it as part of a bundle following payment, you should demand your money
+   back immediately.
+5. Be kind to one another and do not single anyone out.
+
+## Disclaimer
+
+1. This project requires a valid Google-provisioned Private/Public Keypair and a Device-specific Client Identification
+   blob; neither of which are included with this project.
+2. Public testing provisions are available and provided by Google to use for testing projects such as this one.
+3. License Servers have the ability to block requests from any provision, and are likely already blocking test provisions
+   on production endpoints. Therefore, they have the ability to block the usage of Devine by themselves.
+4. This project does not condone piracy or any action against the terms of the Service or DRM system.
+5. All efforts in this project have been the result of Reverse-Engineering and Publicly available research.
+
+## Credit
+
+- Widevine Icon © Google.
+- The awesome community for their shared research and insight into the Widevine Protocol and Key Derivation.
+
+## Contributors
+
+<a href="https://github.com/rlaphoenix"><img src="https://images.weserv.nl/?url=avatars.githubusercontent.com/u/17136956?v=4&h=25&w=25&fit=cover&mask=circle&maxage=7d" alt=""/></a>
+<a href="https://github.com/mnmll"><img src="https://images.weserv.nl/?url=avatars.githubusercontent.com/u/22942379?v=4&h=25&w=25&fit=cover&mask=circle&maxage=7d" alt=""/></a>
+<a href="https://github.com/shirt-dev"><img src="https://images.weserv.nl/?url=avatars.githubusercontent.com/u/2660574?v=4&h=25&w=25&fit=cover&mask=circle&maxage=7d" alt=""/></a>
+<a href="https://github.com/nyuszika7h"><img src="https://images.weserv.nl/?url=avatars.githubusercontent.com/u/482367?v=4&h=25&w=25&fit=cover&mask=circle&maxage=7d" alt=""/></a>
+<a href="https://github.com/bccornfo"><img src="https://images.weserv.nl/?url=avatars.githubusercontent.com/u/98013276?v=4&h=25&w=25&fit=cover&mask=circle&maxage=7d" alt=""/></a>
+
+## License
+
+© 2019-2023 rlaphoenix — [GNU General Public License, Version 3.0](LICENSE)
diff --git a/devine/__main__.py b/devine/__main__.py
new file mode 100644
index 0000000..bfc0d7c
--- /dev/null
+++ b/devine/__main__.py
@@ -0,0 +1,3 @@
# Entry point for `python -m devine`; the import is deferred into the guard
# so the package is only initialized when actually executed as a module.
if __name__ == "__main__":
    from devine.core.__main__ import main
    main()
diff --git a/devine/commands/__init__.py b/devine/commands/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/devine/commands/auth.py b/devine/commands/auth.py
new file mode 100644
index 0000000..c2b2d54
--- /dev/null
+++ b/devine/commands/auth.py
@@ -0,0 +1,252 @@
+import logging
+import tkinter.filedialog
+from pathlib import Path
+from typing import Optional
+
+import click
+from ruamel.yaml import YAML
+
+from devine.core.config import Config, config
+from devine.core.constants import context_settings
+from devine.core.credential import Credential
+
+
@click.group(
    short_help="Manage cookies and credentials for profiles of services.",
    context_settings=context_settings)
@click.pass_context
def auth(ctx: click.Context) -> None:
    """Manage cookies and credentials for profiles of services."""
    # Share one dedicated logger with every sub-command via the click context.
    ctx.obj = logging.getLogger("auth")
+
+
@auth.command(
    name="list",
    short_help="List profiles and their state for a service or all services.",
    context_settings=context_settings)
@click.argument("service", type=str, required=False)
@click.pass_context
def list_(ctx: click.Context, service: Optional[str] = None) -> None:
    """
    List profiles and their state for a service or all services.

    \b
    Profile and Service names are case-insensitive.
    """
    log = ctx.obj
    service_f = service

    # profiles[service][profile] -> list of authorization kinds ("Cookie"/"Credential").
    profiles: dict[str, dict[str, list]] = {}

    # Gather Cookie-based profiles from the cookies data directory.
    for cookie_dir in config.directories.cookies.iterdir():
        service_profiles = profiles.setdefault(cookie_dir.name, {})
        for cookie in cookie_dir.glob("*.txt"):
            service_profiles.setdefault(cookie.stem, ["Cookie"])

    # Gather Credential-based profiles from the config.
    for service_tag, credentials in config.credentials.items():
        service_profiles = profiles.setdefault(service_tag, {})
        for profile in credentials:
            service_profiles.setdefault(profile, []).append("Credential")

    for service_tag, service_profiles in profiles.items():
        # Service tags are upper-case by convention, hence the .upper() filter
        # to keep the SERVICE argument case-insensitive.
        if service_f and service_tag != service_f.upper():
            continue
        log.info(service_tag)
        for profile, authorizations in service_profiles.items():
            log.info(f'  "{profile}": {", ".join(authorizations)}')
+
+
@auth.command(
    short_help="View profile cookies and credentials for a service.",
    context_settings=context_settings)
@click.argument("profile", type=str)
@click.argument("service", type=str)
@click.pass_context
def view(ctx: click.Context, profile: str, service: str) -> None:
    """
    View profile cookies and credentials for a service.

    \b
    Profile and Service names are case-sensitive.
    """
    log = ctx.obj
    service_f = service
    profile_f = profile
    found = False

    # Look for a matching Cookie file within the cookies data directory.
    for cookie_dir in config.directories.cookies.iterdir():
        if cookie_dir.name != service_f:
            continue
        for cookie in cookie_dir.glob("*.txt"):
            if cookie.stem == profile_f:
                log.info(f"Cookie: {cookie}")
                log.debug(cookie.read_text(encoding="utf8").strip())
                found = True
                break

    # Look for a matching Credential entry within the config.
    for service_tag, credentials in config.credentials.items():
        if service_tag != service_f:
            continue
        for profile_name, credential in credentials.items():
            if profile_name == profile_f:
                log.info(f"Credential: {':'.join(list(credential))}")
                found = True
                break

    if not found:
        raise click.ClickException(
            f"Could not find Profile '{profile_f}' for Service '{service_f}'."
            f"\nThe profile and service values are case-sensitive."
        )
+
+
@auth.command(
    short_help="Check what profile is used by services.",
    context_settings=context_settings)
@click.argument("service", type=str, required=False)
@click.pass_context
def status(ctx: click.Context, service: Optional[str] = None) -> None:
    """
    Check what profile is used by services.

    \b
    Service names are case-sensitive.
    """
    log = ctx.obj
    found_profile = False
    for configured_service, profile in config.profiles.items():
        # Show everything when no filter is given, otherwise match the
        # service tag case-insensitively.
        if service and configured_service.upper() != service.upper():
            continue
        log.info(f"{configured_service}: {profile or '--'}")
        found_profile = True

    if not found_profile:
        log.info(f"No profile has been explicitly set for {service}")

    log.info(f"The default profile is {config.profiles.get('default', 'not set')}")
+
+
@auth.command(
    short_help="Delete a profile and all of its authorization from a service.",
    context_settings=context_settings)
@click.argument("profile", type=str)
@click.argument("service", type=str)
@click.option("--cookie", is_flag=True, default=False, help="Only delete the cookie.")
@click.option("--credential", is_flag=True, default=False, help="Only delete the credential.")
@click.pass_context
def delete(ctx: click.Context, profile: str, service: str, cookie: bool, credential: bool):
    """
    Delete a profile and all of its authorization from a service.

    \b
    By default this does remove both Cookies and Credentials.
    You may remove only one of them with --cookie or --credential.

    \b
    Profile and Service names are case-sensitive.
    Comments may be removed from config!
    """
    log = ctx.obj
    service_f = service
    profile_f = profile
    found = False

    if not credential:
        # Delete the profile's Cookie file, unless --credential restricted us.
        for cookie_dir in config.directories.cookies.iterdir():
            if cookie_dir.name == service_f:
                for cookie_ in cookie_dir.glob("*.txt"):
                    if cookie_.stem == profile_f:
                        cookie_.unlink()
                        log.info(f"Deleted Cookie: {cookie_}")
                        found = True
                        break

    if not cookie:
        # Delete the profile's Credential from the config file, unless
        # --cookie restricted us.
        for key, credentials in config.credentials.items():
            if key == service_f:
                for profile_name, credential_ in credentials.items():
                    if profile_name == profile_f:
                        # Use the public config path accessors (as `cfg` does)
                        # instead of reaching into Config's private class attrs.
                        config_path = config.directories.user_configs / config.filenames.root_config
                        yaml = YAML()
                        yaml.default_flow_style = False
                        data = yaml.load(config_path)
                        del data["credentials"][key][profile_f]
                        yaml.dump(data, config_path)
                        log.info(f"Deleted Credential: {credential_}")
                        found = True
                        break

    if not found:
        raise click.ClickException(
            f"Could not find Profile '{profile_f}' for Service '{service_f}'."
            f"\nThe profile and service values are case-sensitive."
        )
+
+
@auth.command(
    short_help="Add a Credential and/or Cookies to an existing or new profile for a service.",
    context_settings=context_settings)
@click.argument("profile", type=str)
@click.argument("service", type=str)
@click.option("--cookie", type=str, default=None, help="Direct path to Cookies to add.")
@click.option("--credential", type=str, default=None, help="Direct Credential string to add.")
@click.pass_context
def add(ctx: click.Context, profile: str, service: str, cookie: Optional[str] = None, credential: Optional[str] = None):
    """
    Add a Credential and/or Cookies to an existing or new profile for a service.

    \b
    Cancel the Open File dialogue when presented if you do not wish to provide
    cookies. The Credential should be in `Username:Password` form. The username
    may be an email. If you do not wish to add a Credential, just hit enter.

    \b
    Profile and Service names are case-sensitive!
    Comments may be removed from config!
    """
    log = ctx.obj
    service = service.upper()
    profile = profile.lower()

    if cookie:
        cookie = Path(cookie)
    else:
        print("Opening File Dialogue, select a Cookie file to import.")
        cookie = tkinter.filedialog.askopenfilename(
            title="Select a Cookie file (Cancel to skip)",
            filetypes=[("Cookies", "*.txt"), ("All files", "*.*")]
        )
        if cookie:
            cookie = Path(cookie)
        else:
            log.info("Skipped adding a Cookie...")

    if credential:
        try:
            credential = Credential.loads(credential)
        except ValueError as e:
            raise click.ClickException(str(e))
    else:
        credential = input("Credential: ")
        if credential:
            try:
                credential = Credential.loads(credential)
            except ValueError as e:
                raise click.ClickException(str(e))
        else:
            log.info("Skipped adding a Credential...")

    if cookie:
        # Ensure the service's cookie directory exists before moving the file
        # into it, otherwise rename() raises FileNotFoundError on first use.
        final_path = (config.directories.cookies / service / profile).with_suffix(".txt")
        final_path.parent.mkdir(parents=True, exist_ok=True)
        cookie = cookie.rename(final_path)
        log.info(f"Moved Cookie file to: {cookie}")

    if credential:
        # Use the public config path accessors (as `cfg` does) instead of
        # reaching into Config's private class attrs.
        config_path = config.directories.user_configs / config.filenames.root_config
        yaml = YAML()
        yaml.default_flow_style = False
        data = yaml.load(config_path)
        # Create missing mapping levels instead of raising KeyError when the
        # config has no "credentials" (or no entry for this service) yet.
        data.setdefault("credentials", {}).setdefault(service, {})[profile] = credential.dumps()
        yaml.dump(data, config_path)
        log.info(f"Added Credential: {credential}")
diff --git a/devine/commands/cfg.py b/devine/commands/cfg.py
new file mode 100644
index 0000000..7ac56f6
--- /dev/null
+++ b/devine/commands/cfg.py
@@ -0,0 +1,86 @@
+import ast
+import logging
+import sys
+
+import click
+from ruamel.yaml import YAML
+
+from devine.core.config import config
+from devine.core.constants import context_settings
+
+
@click.command(
    short_help="Manage configuration values for the program and its services.",
    context_settings=context_settings)
@click.argument("key", type=str, required=False)
@click.argument("value", type=str, required=False)
@click.option("--unset", is_flag=True, default=False, help="Unset/remove the configuration value.")
@click.option("--list", "list_", is_flag=True, default=False, help="List all set configuration values.")
@click.pass_context
def cfg(ctx: click.Context, key: str, value: str, unset: bool, list_: bool) -> None:
    """
    Manage configuration values for the program and its services.

    \b
    Known Issues:
    - Config changes remove all comments of the changed files, which may hold critical data. (#14)
    """
    if not key and not value and not list_:
        raise click.UsageError("Nothing to do.", ctx)

    if value:
        try:
            value = ast.literal_eval(value)
        except (ValueError, SyntaxError):
            pass  # probably a str without quotes or similar, assume it's a string value

    log = logging.getLogger("cfg")

    config_path = config.directories.user_configs / config.filenames.root_config

    yaml, data = YAML(), None
    yaml.default_flow_style = False
    if config_path.is_file():
        data = yaml.load(config_path)

    if not data:
        log.warning(f"{config_path} has no configuration data, yet")

    if list_:
        yaml.dump(data, sys.stdout)
        return

    key_items = key.split(".")
    parent_key = key_items[:-1]
    trailing_key = key_items[-1]

    is_write = value is not None
    is_delete = unset
    if is_write and is_delete:
        raise click.ClickException("You cannot set a value and use --unset at the same time.")

    if not is_write and not is_delete:
        # Read mode: print the value found at the requested key path.
        data = data.mlget(key_items, default=KeyError)
        if data is KeyError:
            raise click.ClickException(f"Key '{key}' does not exist in the config.")
        yaml.dump(data, sys.stdout)
    else:
        try:
            parent_data = data
            if parent_key:
                parent_data = data.mlget(parent_key, default=data)
                if parent_data == data:
                    # The parent path doesn't fully exist yet; create any
                    # missing mapping levels. A distinct loop variable keeps
                    # the full `key` intact for the log messages below, and a
                    # key-membership test (not hasattr, which checks object
                    # attributes) avoids clobbering existing sub-tables.
                    for path_key in parent_key:
                        if path_key not in parent_data:
                            parent_data[path_key] = {}
                        parent_data = parent_data[path_key]
            if is_write:
                parent_data[trailing_key] = value
                log.info(f"Set {key} to {repr(value)}")
            elif is_delete:
                del parent_data[trailing_key]
                log.info(f"Unset {key}")
        except KeyError:
            raise click.ClickException(f"Key '{key}' does not exist in the config.")
        config_path.parent.mkdir(parents=True, exist_ok=True)
        yaml.dump(data, config_path)
diff --git a/devine/commands/dl.py b/devine/commands/dl.py
new file mode 100644
index 0000000..bc5daaa
--- /dev/null
+++ b/devine/commands/dl.py
@@ -0,0 +1,732 @@
+from __future__ import annotations
+
+import html
+import logging
+import random
+import re
+import sys
+import time
+from collections import defaultdict
+from concurrent import futures
+from concurrent.futures import ThreadPoolExecutor
+from copy import deepcopy
+from datetime import datetime
+from functools import partial
+from http.cookiejar import MozillaCookieJar
+from pathlib import Path
+from threading import Event
+from typing import Any, Optional, Callable
+
+import click
+import jsonpickle
+import yaml
+from pymediainfo import MediaInfo
+from pywidevine.cdm import Cdm as WidevineCdm
+from pywidevine.device import Device
+from pywidevine.remotecdm import RemoteCdm
+from tqdm import tqdm
+
+from devine.core.config import config
+from devine.core.constants import AnyTrack, context_settings, LOG_FORMATTER, DRM_SORT_MAP
+from devine.core.drm import Widevine, DRM_T
+from devine.core.proxies import Basic, NordVPN, Hola
+from devine.core.service import Service
+from devine.core.services import Services
+from devine.core.titles import Title_T, Movie, Song
+from devine.core.titles.episode import Episode
+from devine.core.tracks import Audio, Video
+from devine.core.utilities import is_close_match, get_binary_path
+from devine.core.utils.click_types import LANGUAGE_RANGE, QUALITY, SEASON_RANGE, ContextData
+from devine.core.utils.collections import merge_dict
+from devine.core.credential import Credential
+from devine.core.utils.subprocess import ffprobe
+from devine.core.vaults import Vaults
+
+
+class dl:
    @click.group(
        short_help="Download, Decrypt, and Mux tracks for titles from a Service.",
        cls=Services,
        context_settings=dict(
            **context_settings,
            default_map=config.dl,
            token_normalize_func=Services.get_tag
        ))
    @click.option("-p", "--profile", type=str, default=None,
                  help="Profile to use for Credentials and Cookies (if available). Overrides profile set by config.")
    @click.option("-q", "--quality", type=QUALITY, default=None,
                  help="Download Resolution, defaults to best available.")
    @click.option("-v", "--vcodec", type=click.Choice(Video.Codec, case_sensitive=False),
                  default=Video.Codec.AVC,
                  help="Video Codec to download, defaults to H.264.")
    @click.option("-a", "--acodec", type=click.Choice(Audio.Codec, case_sensitive=False),
                  default=None,
                  help="Audio Codec to download, defaults to any codec.")
    @click.option("-r", "--range", "range_", type=click.Choice(Video.Range, case_sensitive=False),
                  default=Video.Range.SDR,
                  help="Video Color Range, defaults to SDR.")
    @click.option("-w", "--wanted", type=SEASON_RANGE, default=None,
                  help="Wanted episodes, e.g. `S01-S05,S07`, `S01E01-S02E03`, `S02-S02E03`, e.t.c, defaults to all.")
    @click.option("-l", "--lang", type=LANGUAGE_RANGE, default="en",
                  help="Language wanted for Video and Audio.")
    @click.option("-vl", "--v-lang", type=LANGUAGE_RANGE, default=[],
                  help="Language wanted for Video, you would use this if the video language doesn't match the audio.")
    @click.option("-sl", "--s-lang", type=LANGUAGE_RANGE, default=["all"],
                  help="Language wanted for Subtitles.")
    @click.option("--proxy", type=str, default=None,
                  help="Proxy URI to use. If a 2-letter country is provided, it will try get a proxy from the config.")
    @click.option("--group", type=str, default=None,
                  help="Set the Group Tag to be used, overriding the one in config if any.")
    @click.option("-A", "--audio-only", is_flag=True, default=False,
                  help="Only download audio tracks.")
    @click.option("-S", "--subs-only", is_flag=True, default=False,
                  help="Only download subtitle tracks.")
    @click.option("-C", "--chapters-only", is_flag=True, default=False,
                  help="Only download chapters.")
    @click.option("--slow", is_flag=True, default=False,
                  help="Add a 60-120 second delay between each Title download to act more like a real device. "
                       "This is recommended if you are downloading high-risk titles or streams.")
    @click.option("--list", "list_", is_flag=True, default=False,
                  help="Skip downloading and list available tracks and what tracks would have been downloaded.")
    @click.option("--list-titles", is_flag=True, default=False,
                  help="Skip downloading, only list available titles that would have been downloaded.")
    @click.option("--skip-dl", is_flag=True, default=False,
                  help="Skip downloading while still retrieving the decryption keys.")
    @click.option("--export", type=Path,
                  help="Export Decryption Keys as you obtain them to a JSON file.")
    @click.option("--cdm-only/--vaults-only", is_flag=True, default=None,
                  help="Only use CDM, or only use Key Vaults for retrieval of Decryption Keys.")
    @click.option("--no-proxy", is_flag=True, default=False,
                  help="Force disable all proxy use.")
    @click.option("--no-folder", is_flag=True, default=False,
                  help="Disable folder creation for TV Shows.")
    @click.option("--no-source", is_flag=True, default=False,
                  help="Disable the source tag from the output file name and path.")
    @click.option("--workers", type=int, default=1,
                  help="Max concurrent workers to use throughout the code, particularly downloads.")
    @click.option("--log", "log_path", type=Path, default=config.directories.logs / config.filenames.log,
                  help="Log path (or filename). Path can contain the following f-string args: {name} {time}.")
    @click.pass_context
    def cli(ctx: click.Context, **kwargs: Any) -> dl:
        """Download, Decrypt, and Mux tracks for titles from a Service."""
        # All option handling is deferred to the dl constructor; the click
        # context's invoked sub-command identifies the Service to use.
        return dl(ctx, **kwargs)
+
+    DL_POOL_STOP = Event()
+
+    def __init__(
+        self,
+        ctx: click.Context,
+        log_path: Path,
+        profile: Optional[str] = None,
+        proxy: Optional[str] = None,
+        group: Optional[str] = None,
+        *_: Any,
+        **__: Any
+    ):
+        if not ctx.invoked_subcommand:
+            raise ValueError("A subcommand to invoke was not specified, the main code cannot continue.")
+
+        self.log = logging.getLogger("download")
+        if log_path:
+            new_log_path = self.rotate_log_file(log_path)
+            fh = logging.FileHandler(new_log_path, encoding="utf8")
+            fh.setFormatter(LOG_FORMATTER)
+            self.log.addHandler(fh)
+
+        self.service = Services.get_tag(ctx.invoked_subcommand)
+
+        self.log.info(f"Loading Profile Data for {self.service}")
+        if profile:
+            self.profile = profile
+            self.log.info(f" + Profile: {self.profile} (explicit)")
+        else:
+            self.profile = self.get_profile(self.service)
+            self.log.info(f" + Profile: {self.profile} (from config)")
+
+        self.log.info("Initializing Widevine CDM")
+        try:
+            self.cdm = self.get_cdm(self.service, self.profile)
+        except ValueError as e:
+            self.log.error(f" - {e}")
+            sys.exit(1)
+        self.log.info(
+            f" + {self.cdm.__class__.__name__}: {self.cdm.system_id} (L{self.cdm.security_level})"
+        )
+
+        self.log.info("Loading Vaults")
+        self.vaults = Vaults(self.service)
+        for vault in config.key_vaults:
+            vault_type = vault["type"]
+            del vault["type"]
+            self.vaults.load(vault_type, **vault)
+        self.log.info(f" + {len(self.vaults)} Vaults")
+
+        self.log.info("Getting Service Config")
+        service_config_path = Services.get_path(self.service) / config.filenames.config
+        if service_config_path.is_file():
+            self.service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
+            self.log.info(" + Got Service Config")
+        else:
+            self.service_config = {}
+            self.log.info(" - No Service Config")
+        merge_dict(config.services.get(self.service), self.service_config)
+
+        self.log.info("Loading Proxy Providers")
+        self.proxy_providers = []
+        if config.proxy_providers.get("basic"):
+            self.proxy_providers.append(Basic(**config.proxy_providers["basic"]))
+        if config.proxy_providers.get("nordvpn"):
+            self.proxy_providers.append(NordVPN(**config.proxy_providers["nordvpn"]))
+        if get_binary_path("hola-proxy"):
+            self.proxy_providers.append(Hola())
+        for proxy_provider in self.proxy_providers:
+            self.log.info(f" + {proxy_provider.__class__.__name__}: {repr(proxy_provider)}")
+
+        if proxy:
+            requested_provider = None
+            if re.match(rf"^[a-z]+:.+$", proxy, re.IGNORECASE):
+                # requesting proxy from a specific proxy provider
+                requested_provider, proxy = proxy.split(":", maxsplit=1)
+            if re.match(r"^[a-z]{2}(?:\d+)?$", proxy, re.IGNORECASE):
+                proxy = proxy.lower()
+                self.log.info(f"Getting a Proxy to '{proxy}'")
+                if requested_provider:
+                    proxy_provider = next((
+                        x
+                        for x in self.proxy_providers
+                        if x.__class__.__name__.lower() == requested_provider
+                    ), None)
+                    if not proxy_provider:
+                        self.log.error(f"The proxy provider '{requested_provider}' was not recognised.")
+                        sys.exit(1)
+                    proxy_uri = proxy_provider.get_proxy(proxy)
+                    if not proxy_uri:
+                        self.log.error(f"The proxy provider {requested_provider} had no proxy for {proxy}")
+                        sys.exit(1)
+                    proxy = ctx.params["proxy"] = proxy_uri
+                    self.log.info(f" + {proxy} (from {proxy_provider.__class__.__name__})")
+                else:
+                    for proxy_provider in self.proxy_providers:
+                        proxy_uri = proxy_provider.get_proxy(proxy)
+                        if proxy_uri:
+                            proxy = ctx.params["proxy"] = proxy_uri
+                            self.log.info(f" + {proxy} (from {proxy_provider.__class__.__name__})")
+                            break
+            else:
+                self.log.info(f"Proxy: {proxy} (from args)")
+
+        ctx.obj = ContextData(
+            config=self.service_config,
+            cdm=self.cdm,
+            proxy_providers=self.proxy_providers,
+            profile=self.profile
+        )
+
+        if group:
+            config.tag = group
+
+        # needs to be added this way instead of @cli.result_callback to be
+        # able to keep `self` as the first positional
+        self.cli._result_callback = self.result
+
+    def result(
+        self, service: Service, quality: Optional[int], vcodec: Video.Codec,
+        acodec: Optional[Audio.Codec], range_: Video.Range, wanted: list[str], lang: list[str], v_lang: list[str],
+        s_lang: list[str], audio_only: bool, subs_only: bool, chapters_only: bool, slow: bool, list_: bool,
+        list_titles: bool, skip_dl: bool, export: Optional[Path], cdm_only: Optional[bool], no_folder: bool,
+        no_source: bool, workers: int, *_: Any, **__: Any
+    ) -> None:
+        """
+        CLI result callback: the main download pipeline.
+
+        For the chosen service it authenticates (if a profile is set), fetches
+        titles, then per title: gathers tracks, filters/sorts them by the CLI
+        options, resolves Widevine keys (vaults and/or CDM), downloads tracks
+        in a thread pool, muxes, and finally persists refreshed cookies.
+        Extra positional/keyword args from click are accepted and ignored.
+        """
+        # cdm_only=True -> never use vaults; cdm_only=False -> vaults only;
+        # cdm_only=None -> use both sources (vaults_only stays None too)
+        if cdm_only is None:
+            vaults_only = None
+        else:
+            vaults_only = not cdm_only
+
+        if self.profile:
+            cookies = self.get_cookie_jar(self.service, self.profile)
+            credential = self.get_credentials(self.service, self.profile)
+            if not cookies and not credential:
+                self.log.error(f"The Profile '{self.profile}' has no Cookies or Credentials. Check for typos.")
+                sys.exit(1)
+
+            self.log.info(f"Authenticating with Profile '{self.profile}'")
+            service.authenticate(cookies, credential)
+            self.log.info(" + Authenticated")
+
+        self.log.info("Retrieving Titles")
+        titles = service.get_titles()
+        if not titles:
+            self.log.error(" - No titles returned!")
+            sys.exit(1)
+
+        for line in str(titles).splitlines(keepends=False):
+            self.log.info(line)
+
+        # --list-titles: print every title and stop before any track work
+        if list_titles:
+            for title in titles:
+                self.log.info(title)
+            return
+
+        for i, title in enumerate(titles):
+            # -w/--wanted filters episodes by "SxE" style keys, e.g. "1x3"
+            if isinstance(title, Episode) and wanted and f"{title.season}x{title.number}" not in wanted:
+                continue
+
+            self.log.info(f"Getting tracks for {title}")
+            if slow and i != 0:
+                delay = random.randint(60, 120)
+                self.log.info(f" - Delaying by {delay} seconds due to --slow ...")
+                time.sleep(delay)
+
+            title.tracks.add(service.get_tracks(title), warn_only=True)
+            title.tracks.add(service.get_chapters(title))
+
+            # strip SDH subs to non-SDH if no equivalent same-lang non-SDH is available
+            # uses a loose check, e.g. won't strip an en-US SDH sub if a non-SDH en-GB is available
+            for subtitle in title.tracks.subtitles:
+                if subtitle.sdh and not any(
+                    is_close_match(subtitle.language, [x.language])
+                    for x in title.tracks.subtitles
+                    if not x.sdh and not x.forced
+                ):
+                    # clone the SDH track and strip it after download, keeping the original
+                    non_sdh_sub = deepcopy(subtitle)
+                    non_sdh_sub.id += "_stripped"
+                    non_sdh_sub.sdh = False
+                    non_sdh_sub.OnDownloaded = lambda x: x.strip_hearing_impaired()
+                    title.tracks.add(non_sdh_sub)
+
+            title.tracks.sort_videos(by_language=v_lang or lang)
+            title.tracks.sort_audio(by_language=lang)
+            title.tracks.sort_subtitles(by_language=s_lang)
+            title.tracks.sort_chapters()
+
+            self.log.info("> All Tracks:")
+            title.tracks.print()
+
+            self.log.info("> Selected Tracks:")  # log early so errors logs make sense
+
+            if isinstance(title, (Movie, Episode)):
+                # filter video tracks
+                title.tracks.select_video(lambda x: x.codec == vcodec)
+                title.tracks.select_video(lambda x: x.range == range_)
+                if quality:
+                    title.tracks.with_resolution(quality)
+                if not title.tracks.videos:
+                    self.log.error(f"There's no {quality}p {vcodec.name} ({range_.name}) Video Track...")
+                    sys.exit(1)
+
+                video_language = v_lang or lang
+                if video_language and "all" not in video_language:
+                    title.tracks.videos = title.tracks.select_per_language(title.tracks.videos, video_language)
+                    if not title.tracks.videos:
+                        self.log.error(f"There's no {video_language} Video Track...")
+                        sys.exit(1)
+
+                # filter subtitle tracks
+                if s_lang and "all" not in s_lang:
+                    title.tracks.select_subtitles(lambda x: is_close_match(x.language, s_lang))
+                    if not title.tracks.subtitles:
+                        self.log.error(f"There's no {s_lang} Subtitle Track...")
+                        sys.exit(1)
+
+                # forced subs are only kept when they match the wanted audio languages
+                title.tracks.select_subtitles(lambda x: not x.forced or is_close_match(x.language, lang))
+
+            # filter audio tracks
+            title.tracks.select_audio(lambda x: not x.descriptive)  # exclude descriptive audio
+            if acodec:
+                title.tracks.select_audio(lambda x: x.codec == acodec)
+                if not title.tracks.audio:
+                    self.log.error(f"There's no {acodec.name} Audio Tracks...")
+                    sys.exit(1)
+
+            if lang and "all" not in lang:
+                title.tracks.audio = title.tracks.select_per_language(title.tracks.audio, lang)
+                if not title.tracks.audio:
+                    # HLS invariant playlists can carry audio inside the video stream,
+                    # so a missing separate audio track is only a warning there
+                    if all(x.descriptor == Video.Descriptor.M3U for x in title.tracks.videos):
+                        self.log.warning(f"There's no {lang} Audio Tracks, "
+                                         f"likely part of an invariant playlist, continuing...")
+                    else:
+                        self.log.error(f"There's no {lang} Audio Track, cannot continue...")
+                        sys.exit(1)
+
+            # --audio-only/--subs-only/--chapters-only: drop everything not asked for
+            # (flags may be combined, e.g. audio_only + subs_only keeps both kinds)
+            if audio_only or subs_only or chapters_only:
+                title.tracks.videos.clear()
+                if audio_only:
+                    if not subs_only:
+                        title.tracks.subtitles.clear()
+                    if not chapters_only:
+                        title.tracks.chapters.clear()
+                elif subs_only:
+                    if not audio_only:
+                        title.tracks.audio.clear()
+                    if not chapters_only:
+                        title.tracks.chapters.clear()
+                elif chapters_only:
+                    if not audio_only:
+                        title.tracks.audio.clear()
+                    if not subs_only:
+                        title.tracks.subtitles.clear()
+
+            title.tracks.print()
+
+            if list_:
+                continue  # only wanted to see what tracks were available and chosen
+
+            # Prepare Track DRM (if any)
+            for track in title.tracks:
+                if not track.drm and isinstance(track, (Video, Audio)):
+                    # service might not list DRM in manifest, get from stream data
+                    try:
+                        track.drm = [Widevine.from_track(track, service.session)]
+                    except Widevine.Exceptions.PSSHNotFound:
+                        # it might not have Widevine DRM, or might not have found the PSSH
+                        self.log.warning("No Widevine PSSH was found for this track, is it DRM free?")
+                if track.drm:
+                    # choose first-available DRM in order of Enum value
+                    track.drm = next(iter(sorted(track.drm, key=lambda x: DRM_SORT_MAP.index(x.__class__.__name__))))
+                    if isinstance(track.drm, Widevine):
+                        # Get Widevine Content Keys now, this must be done in main thread due to SQLite objects
+                        self.log.info(f"Getting {track.drm.__class__.__name__} Keys for: {track}")
+                        self.prepare_drm(
+                            drm=track.drm,
+                            licence=partial(
+                                service.get_widevine_license,
+                                title=title,
+                                track=track
+                            ),
+                            certificate=partial(
+                                service.get_widevine_service_certificate,
+                                title=title,
+                                track=track
+                            ),
+                            cdm_only=cdm_only,
+                            vaults_only=vaults_only
+                        )
+
+                        # --export: append this track's KID:KEY pairs to a JSON file,
+                        # merging with any keys already exported for other titles/tracks
+                        if export:
+                            keys = {}
+                            if export.is_file():
+                                keys = jsonpickle.loads(export.read_text(encoding="utf8"))
+                            if str(title) not in keys:
+                                keys[str(title)] = {}
+                            keys[str(title)][str(track)] = {
+                                kid: key
+                                for kid, key in track.drm.content_keys.items()
+                                if kid in track.drm.kids
+                            }
+                            export.write_text(jsonpickle.dumps(keys, indent=4), encoding="utf8")
+
+            if skip_dl:
+                self.log.info("Skipping Download...")
+            else:
+                # download every track concurrently; DL_POOL_STOP tells in-flight
+                # workers to bail out once any worker fails or the user interrupts
+                with tqdm(total=len(title.tracks)) as pbar:
+                    with ThreadPoolExecutor(workers) as pool:
+                        try:
+                            for download in futures.as_completed((
+                                pool.submit(
+                                    self.download_track,
+                                    service=service,
+                                    track=track,
+                                    title=title
+                                )
+                                for track in title.tracks
+                            )):
+                                if download.cancelled():
+                                    continue
+                                e = download.exception()
+                                if e:
+                                    self.DL_POOL_STOP.set()
+                                    pool.shutdown(wait=False, cancel_futures=True)
+                                    self.log.error(f"Download worker threw an unhandled exception: {e!r}")
+                                    return
+                                else:
+                                    pbar.update(1)
+                        except KeyboardInterrupt:
+                            self.DL_POOL_STOP.set()
+                            pool.shutdown(wait=False, cancel_futures=True)
+                            self.log.info("Received Keyboard Interrupt, stopping...")
+                            return
+
+            if not skip_dl:
+                self.mux_tracks(title, not no_folder, not no_source)
+
+            # update cookies
+            # NOTE(review): when no profile is set this resolves to "None.txt";
+            # the exists() guard makes that a no-op — confirm that's intended
+            cookie_file = config.directories.cookies / service.__class__.__name__ / f"{self.profile}.txt"
+            if cookie_file.exists():
+                cookie_jar = MozillaCookieJar(cookie_file)
+                cookie_jar.load()
+                for cookie in service.session.cookies:
+                    cookie_jar.set_cookie(cookie)
+                cookie_jar.save(ignore_discard=True)
+
+        self.log.info("Processed all titles!")
+
+    def download_track(
+        self,
+        service: Service,
+        track: AnyTrack,
+        title: Title_T
+    ):
+        time.sleep(1)
+        if self.DL_POOL_STOP.is_set():
+            return
+
+        if track.needs_proxy:
+            proxy = next(iter(service.session.proxies.values()), None)
+        else:
+            proxy = None
+
+        self.log.info(f"Downloading: {track}")
+        track.download(config.directories.temp, headers=service.session.headers, proxy=proxy)
+        if callable(track.OnDownloaded):
+            track.OnDownloaded(track)
+
+        if track.drm:
+            self.log.info(f"Decrypting file with {track.drm.__class__.__name__} DRM...")
+            track.drm.decrypt(track)
+            self.log.info(" + Decrypted")
+            if callable(track.OnDecrypted):
+                track.OnDecrypted(track)
+
+        if track.needs_repack:
+            self.log.info("Repackaging stream with FFMPEG (fix malformed streams)")
+            track.repackage()
+            self.log.info(" + Repackaged")
+            if callable(track.OnRepacked):
+                track.OnRepacked(track)
+
+        if (
+            isinstance(track, Video) and
+            not title.tracks.subtitles and
+            any(
+                x.get("codec_name", "").startswith("eia_")
+                for x in ffprobe(track.path).get("streams", [])
+            )
+        ):
+            self.log.info("Checking for EIA-CC Captions")
+            try:
+                # TODO: Figure out the real language, it might be different
+                #       EIA-CC tracks sadly don't carry language information :(
+                # TODO: Figure out if the CC language is original lang or not.
+                #       Will need to figure out above first to do so.
+                track_id = f"ccextractor-{track.id}"
+                cc_lang = track.language
+                cc = track.ccextractor(
+                    track_id=track_id,
+                    out_path=config.directories.temp / config.filenames.subtitle.format(
+                        id=track_id,
+                        language=cc_lang
+                    ),
+                    language=cc_lang,
+                    original=False
+                )
+                if cc:
+                    title.tracks.add(cc)
+                    self.log.info(" + Found & Extracted an EIA-CC Caption")
+            except EnvironmentError:
+                self.log.error(" - Track needs to have CC extracted, but ccextractor wasn't found")
+                sys.exit(1)
+            self.log.info(" + No EIA-CC Captions...")
+
+    def prepare_drm(
+        self,
+        drm: DRM_T,
+        certificate: Callable,
+        licence: Callable,
+        cdm_only: bool = False,
+        vaults_only: bool = False
+    ) -> None:
+        """
+        Prepare the DRM by getting decryption data like KIDs, Keys, and such.
+        The DRM object should be ready for decryption once this function ends.
+
+        For Widevine, each KID is resolved from the Key Vaults first (unless
+        cdm_only), then by a CDM licence request (unless vaults_only). The
+        `certificate` and `licence` callables perform the service's privacy
+        certificate and licence requests respectively. Exits the process if a
+        required key cannot be obtained.
+        """
+        if not drm:
+            return
+
+        if isinstance(drm, Widevine):
+            self.log.info(f"PSSH: {drm.pssh.dumps()}")
+            self.log.info("KIDs:")
+            for kid in drm.kids:
+                self.log.info(f" + {kid.hex}")
+
+            for kid in drm.kids:
+                # a licence request for an earlier KID may already have
+                # populated content_keys for this one
+                if kid in drm.content_keys:
+                    continue
+
+                if not cdm_only:
+                    # try the Key Vaults first
+                    content_key, vault_used = self.vaults.get_key(kid)
+                    if content_key:
+                        drm.content_keys[kid] = content_key
+                        self.log.info(f"Content Key: {kid.hex}:{content_key} ({vault_used})")
+                        # replicate the key to the other vaults (minus the one it came from)
+                        add_count = self.vaults.add_key(kid, content_key, excluding=vault_used)
+                        self.log.info(f" + Cached to {add_count}/{len(self.vaults) - 1} Vaults")
+                    elif vaults_only:
+                        self.log.error(f" - No Content Key found in vaults for {kid.hex}")
+                        sys.exit(1)
+
+                if kid not in drm.content_keys and not vaults_only:
+                    # fall back to a real CDM licence request
+                    from_vaults = drm.content_keys.copy()
+
+                    try:
+                        drm.get_content_keys(
+                            cdm=self.cdm,
+                            licence=licence,
+                            certificate=certificate
+                        )
+                    except ValueError as e:
+                        self.log.error(str(e))
+                        sys.exit(1)
+
+                    self.log.info("Content Keys:")
+                    for kid_, key in drm.content_keys.items():
+                        msg = f" + {kid_.hex}:{key}"
+                        if kid_ == kid:
+                            msg += " *"  # the KID we were actually after
+                        if key == "0" * 32:
+                            msg += " [Unusable!]"
+                        self.log.info(msg)
+
+                    # drop empty/all-zero keys — they cannot decrypt anything
+                    drm.content_keys = {
+                        kid_: key
+                        for kid_, key in drm.content_keys.items()
+                        if key and key.count("0") != len(key)
+                    }
+
+                    # The CDM keys may have returned blank content keys for KIDs we got from vaults.
+                    # So we re-add the keys from vaults earlier overwriting blanks or removed KIDs data.
+                    drm.content_keys.update(from_vaults)
+
+                    cached_keys = self.vaults.add_keys(drm.content_keys)
+                    self.log.info(f" + Newly added to {cached_keys}/{len(drm.content_keys)} Vaults")
+
+                    if kid not in drm.content_keys:
+                        self.log.error(f" - No Content Key with the KID ({kid.hex}) was returned...")
+                        sys.exit(1)
+
+    def mux_tracks(self, title: Title_T, season_folder: bool = True, add_source: bool = True) -> None:
+        """Mux Tracks, Delete Pre-Mux files, and move to the final location."""
+        self.log.info("Muxing Tracks into a Matroska Container")
+
+        if isinstance(title, (Movie, Episode)):
+            muxed_path, return_code = title.tracks.mux(str(title))
+            if return_code == 1:
+                self.log.warning("mkvmerge had at least one warning, will continue anyway...")
+            elif return_code >= 2:
+                self.log.error(" - Failed to Mux video to Matroska file")
+                sys.exit(1)
+            self.log.info(f" + Muxed to {muxed_path}")
+        else:
+            # dont mux
+            muxed_path = title.tracks.audio[0].path
+
+        media_info = MediaInfo.parse(muxed_path)
+        final_dir = config.directories.downloads
+        final_filename = title.get_filename(media_info, show_service=add_source)
+
+        if season_folder and isinstance(title, (Episode, Song)):
+            final_dir /= title.get_filename(media_info, show_service=add_source, folder=True)
+
+        final_dir.mkdir(parents=True, exist_ok=True)
+        final_path = final_dir / f"{final_filename}{muxed_path.suffix}"
+
+        muxed_path.rename(final_path)
+        self.log.info(f" + Moved to {final_path}")
+
+    @staticmethod
+    def rotate_log_file(log_path: Path, keep: int = 20) -> Path:
+        """
+        Update Log Filename and delete old log files.
+        It keeps only the 20 newest logs by default.
+        """
+        if not log_path:
+            raise ValueError("A log path must be provided")
+
+        try:
+            log_path.relative_to(Path(""))  # file name only
+        except ValueError:
+            pass
+        else:
+            log_path = config.directories.logs / log_path
+
+        log_path = log_path.parent / log_path.name.format_map(defaultdict(
+            str,
+            name="root",
+            time=datetime.now().strftime("%Y%m%d-%H%M%S")
+        ))
+
+        if log_path.parent.exists():
+            log_files = [x for x in log_path.parent.iterdir() if x.suffix == log_path.suffix]
+            for log_file in log_files[::-1][keep-1:]:
+                # keep n newest files and delete the rest
+                log_file.unlink()
+
+        log_path.parent.mkdir(parents=True, exist_ok=True)
+        return log_path
+
+    @staticmethod
+    def get_profile(service: str) -> Optional[str]:
+        """Get profile for Service from config."""
+        profile = config.profiles.get(service)
+        if profile is False:
+            return None  # auth-less service if `false` in config
+        if not profile:
+            profile = config.profiles.get("default")
+        if not profile:
+            raise ValueError(f"No profile has been defined for '{service}' in the config.")
+        return profile
+
+    @staticmethod
+    def get_cookie_jar(service: str, profile: str) -> Optional[MozillaCookieJar]:
+        """Get Profile's Cookies as Mozilla Cookie Jar if available."""
+        cookie_file = config.directories.cookies / service / f"{profile}.txt"
+        if cookie_file.is_file():
+            cookie_jar = MozillaCookieJar(cookie_file)
+            cookie_data = html.unescape(cookie_file.read_text("utf8")).splitlines(keepends=False)
+            for i, line in enumerate(cookie_data):
+                if line and not line.startswith("#"):
+                    line_data = line.lstrip().split("\t")
+                    # Disable client-side expiry checks completely across everywhere
+                    # Even though the cookies are loaded under ignore_expires=True, stuff
+                    # like python-requests may not use them if they are expired
+                    line_data[4] = ""
+                    cookie_data[i] = "\t".join(line_data)
+            cookie_data = "\n".join(cookie_data)
+            cookie_file.write_text(cookie_data, "utf8")
+            cookie_jar.load(ignore_discard=True, ignore_expires=True)
+            return cookie_jar
+        return None
+
+    @staticmethod
+    def get_credentials(service: str, profile: str) -> Optional[Credential]:
+        """Get Profile's Credential if available."""
+        cred = config.credentials.get(service, {}).get(profile)
+        if cred:
+            if isinstance(cred, list):
+                return Credential(*cred)
+            return Credential.loads(cred)
+        return None
+
+    @staticmethod
+    def get_cdm(service: str, profile: Optional[str] = None) -> WidevineCdm:
+        """
+        Get CDM for a specified service (either Local or Remote CDM).
+        Raises a ValueError if there's a problem getting a CDM.
+        """
+        cdm_name = config.cdm.get(service) or config.cdm.get("default")
+        if not cdm_name:
+            raise ValueError("A CDM to use wasn't listed in the config")
+
+        if isinstance(cdm_name, dict):
+            if not profile:
+                raise ValueError("CDM config is mapped for profiles, but no profile was chosen")
+            cdm_name = cdm_name.get(profile) or config.cdm.get("default")
+            if not cdm_name:
+                raise ValueError(f"A CDM to use was not mapped for the profile {profile}")
+
+        cdm_api = next(iter(x for x in config.remote_cdm if x["name"] == cdm_name), None)
+        if cdm_api:
+            del cdm_api["name"]
+            return RemoteCdm(**cdm_api)
+
+        cdm_path = config.directories.wvds / f"{cdm_name}.wvd"
+        if not cdm_path.is_file():
+            raise ValueError(f"{cdm_name} does not exist or is not a file")
+        device = Device.load(cdm_path)
+        return WidevineCdm.from_device(device)
diff --git a/devine/commands/env.py b/devine/commands/env.py
new file mode 100644
index 0000000..9dddb76
--- /dev/null
+++ b/devine/commands/env.py
@@ -0,0 +1,64 @@
+import logging
+import shutil
+from typing import Optional
+
+import click
+
+from devine.core.config import config
+from devine.core.constants import context_settings
+from devine.core.services import Services
+
+
+@click.group(short_help="Manage and configure the project environment.", context_settings=context_settings)
+def env() -> None:
+    """Manage and configure the project environment."""
+    # group body is intentionally empty; sub-commands attach via @env.command()/@env.group()
+
+
+@env.command()
+def info() -> None:
+    """Displays information about the current environment."""
+    log = logging.getLogger("env")
+    log.info(f"[Root Config]     : {config.directories.user_configs / config.filenames.root_config}")
+    log.info(f"[Cookies]         : {config.directories.cookies}")
+    log.info(f"[WVDs]            : {config.directories.wvds}")
+    log.info(f"[Cache]           : {config.directories.cache}")
+    log.info(f"[Logs]            : {config.directories.logs}")
+    log.info(f"[Temp Files]      : {config.directories.temp}")
+    log.info(f"[Downloads]       : {config.directories.downloads}")
+
+
+@env.group(name="clear", short_help="Clear an environment directory.", context_settings=context_settings)
+def clear() -> None:
+    """Clear an environment directory."""
+    # group body is intentionally empty; sub-commands (cache, temp) attach via @clear.command()
+
+
+@clear.command()
+@click.argument("service", type=str, required=False)
+def cache(service: Optional[str]) -> None:
+    """Clear the environment cache directory."""
+    log = logging.getLogger("env")
+    cache_dir = config.directories.cache
+    if service:
+        cache_dir = cache_dir / Services.get_tag(service)
+    log.info(f"Clearing cache directory: {cache_dir}")
+    files_count = len(list(cache_dir.glob("**/*")))
+    if not files_count:
+        log.info("No files to delete")
+    else:
+        log.info(f"Deleting {files_count} files...")
+        shutil.rmtree(cache_dir)
+        log.info("Cleared")
+
+
+@clear.command()
+def temp() -> None:
+    """Clear the environment temp directory."""
+    log = logging.getLogger("env")
+    log.info(f"Clearing temp directory: {config.directories.temp}")
+    files_count = len(list(config.directories.temp.glob("**/*")))
+    if not files_count:
+        log.info("No files to delete")
+    else:
+        log.info(f"Deleting {files_count} files...")
+        shutil.rmtree(config.directories.temp)
+        log.info("Cleared")
diff --git a/devine/commands/kv.py b/devine/commands/kv.py
new file mode 100644
index 0000000..fc16779
--- /dev/null
+++ b/devine/commands/kv.py
@@ -0,0 +1,212 @@
+from __future__ import annotations
+
+import logging
+import re
+from pathlib import Path
+from typing import Optional
+
+import click
+
+from devine.core.vault import Vault
+from devine.core.config import config
+from devine.core.constants import context_settings
+from devine.core.services import Services
+from devine.core.vaults import Vaults
+
+
+@click.group(short_help="Manage and configure Key Vaults.", context_settings=context_settings)
+def kv() -> None:
+    """Manage and configure Key Vaults."""
+    # group body is intentionally empty; sub-commands attach via @kv.command()
+
+
+@kv.command()
+@click.argument("to_vault", type=str)
+@click.argument("from_vaults", nargs=-1, type=click.UNPROCESSED)
+@click.option("-s", "--service", type=str, default=None,
+              help="Only copy data to and from a specific service.")
+def copy(to_vault: str, from_vaults: list[str], service: Optional[str] = None) -> None:
+    """
+    Copy data from multiple Key Vaults into a single Key Vault.
+    Rows with matching KIDs are skipped unless there's no KEY set.
+    Existing data is not deleted or altered.
+
+    The `to_vault` argument is the key vault you wish to copy data to.
+    It should be the name of a Key Vault defined in the config.
+
+    The `from_vaults` argument is the key vault(s) you wish to take
+    data from. You may supply multiple key vaults.
+    """
+    if not from_vaults:
+        raise click.ClickException("No Vaults were specified to copy data from.")
+
+    log = logging.getLogger("kv")
+
+    # load the destination vault first, then the sources, so the index
+    # positions below are meaningful
+    vaults = Vaults()
+    for vault_name in [to_vault] + list(from_vaults):
+        vault = next((x for x in config.key_vaults if x["name"] == vault_name), None)
+        if not vault:
+            raise click.ClickException(f"Vault ({vault_name}) is not defined in the config.")
+        vault_type = vault["type"]
+        vault_args = vault.copy()
+        del vault_args["type"]
+        vaults.load(vault_type, **vault_args)
+
+    # rebind the parameters from name strings to loaded Vault objects;
+    # presumably Vaults.load appends in call order — confirm before refactoring
+    to_vault: Vault = vaults.vaults[0]
+    from_vaults: list[Vault] = vaults.vaults[1:]
+
+    log.info(f"Copying data from {', '.join([x.name for x in from_vaults])}, into {to_vault.name}")
+    if service:
+        service = Services.get_tag(service)
+        log.info(f"Only copying data for service {service}")
+
+    total_added = 0
+    for from_vault in from_vaults:
+        if service:
+            services = [service]
+        else:
+            services = from_vault.get_services()
+
+        for service_ in services:
+            log.info(f"Getting data from {from_vault} for {service_}")
+            content_keys = list(from_vault.get_keys(service_))  # important as it's a generator we iterate twice
+
+            # empty or all-zero keys are unusable and must not be copied
+            bad_keys = {
+                kid: key
+                for kid, key in content_keys
+                if not key or key.count("0") == len(key)
+            }
+
+            for kid, key in bad_keys.items():
+                log.warning(f"Cannot add a NULL Content Key to a Vault, skipping: {kid}:{key}")
+
+            content_keys = {
+                kid: key
+                for kid, key in content_keys
+                if kid not in bad_keys
+            }
+
+            total_count = len(content_keys)
+            log.info(f"Adding {total_count} Content Keys to {to_vault} for {service_}")
+
+            try:
+                added = to_vault.add_keys(service_, content_keys, commit=True)
+            except PermissionError:
+                log.warning(f" - No permission to create table ({service_}) in {to_vault}, skipping...")
+                continue
+
+            total_added += added
+            existed = total_count - added
+
+            log.info(f"{to_vault} ({service_}): {added} newly added, {existed} already existed (skipped)")
+
+    log.info(f"{to_vault}: {total_added} total newly added")
+
+
+@kv.command()
+@click.argument("vaults", nargs=-1, type=click.UNPROCESSED)
+@click.option("-s", "--service", type=str, default=None,
+              help="Only sync data to and from a specific service.")
+@click.pass_context
+def sync(ctx: click.Context, vaults: list[str], service: Optional[str] = None) -> None:
+    """
+    Ensure multiple Key Vaults copies of all keys as each other.
+    It's essentially just a bi-way copy between each vault.
+    To see the precise details of what it's doing between each
+    provided vault, see the documentation for the `copy` command.
+    """
+    if not len(vaults) > 1:
+        raise click.ClickException("You must provide more than one Vault to sync.")
+
+    ctx.invoke(copy, to_vault=vaults[0], from_vaults=vaults[1:], service=service)
+    for i in range(1, len(vaults)):
+        ctx.invoke(copy, to_vault=vaults[i], from_vaults=[vaults[i-1]], service=service)
+
+
+@kv.command()
+@click.argument("file", type=Path)
+@click.argument("service", type=str)
+@click.argument("vaults", nargs=-1, type=click.UNPROCESSED)
+def add(file: Path, service: str, vaults: list[str]) -> None:
+    """
+    Add new Content Keys to Key Vault(s) by service.
+
+    File should contain one key per line in the format KID:KEY (HEX:HEX).
+    Each line should have nothing else within it except for the KID:KEY.
+    Encoding is presumed to be UTF8.
+    """
+    if not file.exists():
+        raise click.ClickException(f"File provided ({file}) does not exist.")
+    if not file.is_file():
+        raise click.ClickException(f"File provided ({file}) is not a file.")
+    if not service or not isinstance(service, str):
+        raise click.ClickException(f"Service provided ({service}) is invalid.")
+    if len(vaults) < 1:
+        raise click.ClickException("You must provide at least one Vault.")
+
+    log = logging.getLogger("kv")
+    service = Services.get_tag(service)
+
+    vaults_ = Vaults()
+    for vault_name in vaults:
+        vault = next((x for x in config.key_vaults if x["name"] == vault_name), None)
+        if not vault:
+            raise click.ClickException(f"Vault ({vault_name}) is not defined in the config.")
+        vault_type = vault["type"]
+        vault_args = vault.copy()
+        del vault_args["type"]
+        vaults_.load(vault_type, **vault_args)
+
+    data = file.read_text(encoding="utf8")
+    kid_keys: dict[str, str] = {}
+    for line in data.splitlines(keepends=False):
+        line = line.strip()
+        match = re.search(r"^(?P<kid>[0-9a-fA-F]{32}):(?P<key>[0-9a-fA-F]{32})$", line)
+        if not match:
+            continue
+        kid = match.group("kid").lower()
+        key = match.group("key").lower()
+        kid_keys[kid] = key
+
+    total_count = len(kid_keys)
+
+    for vault in vaults_:
+        log.info(f"Adding {total_count} Content Keys to {vault}")
+        added_count = vault.add_keys(service, kid_keys, commit=True)
+        existed_count = total_count - added_count
+        log.info(f"{vault}: {added_count} newly added, {existed_count} already existed (skipped)")
+
+    log.info("Done!")
+
+
+@kv.command()
+@click.argument("vaults", nargs=-1, type=click.UNPROCESSED)
+def prepare(vaults: list[str]) -> None:
+    """Create Service Tables on Vaults if not yet created."""
+    log = logging.getLogger("kv")
+
+    vaults_ = Vaults()
+    for vault_name in vaults:
+        vault = next((x for x in config.key_vaults if x["name"] == vault_name), None)
+        if not vault:
+            raise click.ClickException(f"Vault ({vault_name}) is not defined in the config.")
+        vault_type = vault["type"]
+        vault_args = vault.copy()
+        del vault_args["type"]
+        vaults_.load(vault_type, **vault_args)
+
+    for vault in vaults_:
+        if hasattr(vault, "has_table") and hasattr(vault, "create_table"):
+            for service_tag in Services.get_tags():
+                if vault.has_table(service_tag):
+                    log.info(f"{vault} already has a {service_tag} Table")
+                else:
+                    try:
+                        vault.create_table(service_tag, commit=True)
+                        log.info(f"{vault}: Created {service_tag} Table")
+                    except PermissionError:
+                        log.error(f"{vault} user has no create table permission, skipping...")
+                        continue
+        else:
+            log.info(f"{vault} does not use tables, skipping...")
+
+    log.info("Done!")
diff --git a/devine/commands/serve.py b/devine/commands/serve.py
new file mode 100644
index 0000000..b025fa1
--- /dev/null
+++ b/devine/commands/serve.py
@@ -0,0 +1,50 @@
+import subprocess
+
+import click
+
+from devine.core.config import config
+from devine.core.constants import context_settings
+from devine.core.utilities import get_binary_path
+
+
@click.command(
    short_help="Serve your Local Widevine Devices for Remote Access.",
    context_settings=context_settings)
@click.option("-h", "--host", type=str, default="0.0.0.0", help="Host to serve from.")
@click.option("-p", "--port", type=int, default=8786, help="Port to serve from.")
@click.option("--caddy", is_flag=True, default=False, help="Also serve with Caddy.")
def serve(host: str, port: int, caddy: bool) -> None:
    """
    Serve your Local Widevine Devices for Remote Access.

    \b
    Host as 127.0.0.1 may block remote access even if port-forwarded.
    Instead, use 0.0.0.0 and ensure the TCP port you choose is forwarded.

    \b
    You may serve with Caddy at the same time with --caddy. You can use Caddy
    as a reverse-proxy to serve with HTTPS. The config used will be the Caddyfile
    next to the devine config.
    """
    # deferred import; note this rebinds the local name `serve` to pywidevine's module
    from pywidevine import serve

    if caddy:
        executable = get_binary_path("caddy")
        if not executable:
            raise click.ClickException("Caddy executable \"caddy\" not found but is required for --caddy.")
        # run Caddy alongside the server, configured by the Caddyfile in the user config dir
        caddy_p = subprocess.Popen([
            executable,
            "run",
            "--config", str(config.directories.user_configs / "Caddyfile")
        ])
    else:
        caddy_p = None

    try:
        # expose every .wvd file from the WVDs directory through the serve config
        if not config.serve.get("devices"):
            config.serve["devices"] = []
        config.serve["devices"].extend(list(config.directories.wvds.glob("*.wvd")))
        serve.run(config.serve, host, port)  # blocks until the server stops
    finally:
        # make sure the Caddy child process never outlives the server
        if caddy_p:
            caddy_p.kill()
diff --git a/devine/commands/util.py b/devine/commands/util.py
new file mode 100644
index 0000000..69e08bf
--- /dev/null
+++ b/devine/commands/util.py
@@ -0,0 +1,104 @@
+import subprocess
+from pathlib import Path
+
+import click
+from pymediainfo import MediaInfo
+
+from devine.core.constants import context_settings
+from devine.core.utilities import get_binary_path
+
+
# Click group: sub-commands attach to this via @util.command() in this module.
@click.group(short_help="Various helper scripts and programs.", context_settings=context_settings)
def util() -> None:
    """Various helper scripts and programs."""
+
+
@util.command()
@click.argument("path", type=Path)
@click.argument("aspect", type=str)
@click.option("--letter/--pillar", default=True,
              help="Specify which direction to crop. Top and Bottom would be --letter, Sides would be --pillar.")
@click.option("-o", "--offset", type=int, default=0,
              help="Fine tune the computed crop area if not perfectly centered.")
@click.option("-p", "--preview", is_flag=True, default=False,
              help="Instantly preview the newly-set aspect crop in MPV (or ffplay if mpv is unavailable).")
def crop(path: Path, aspect: str, letter: bool, offset: int, preview: bool) -> None:
    """
    Losslessly crop H.264 and H.265 video files at the bit-stream level.
    You may provide a path to a file, or a folder of mkv and/or mp4 files.

    Note: If you notice that the values you put in are not quite working, try
    tune -o/--offset. This may be necessary on videos with sub-sampled chroma.

    Do note that you may not get an ideal lossless cropping result on some
    cases, again due to sub-sampled chroma.

    It's recommended that you try -o about 10 or so pixels and lower it until
    you get as close in as possible. Do make sure it's not over-cropping either
    as it may go from being 2px away from a perfect crop, to 20px over-cropping
    again due to sub-sampled chroma.
    """
    executable = get_binary_path("ffmpeg")
    if not executable:
        raise click.ClickException("FFmpeg executable \"ffmpeg\" not found but is required.")

    if path.is_dir():
        paths = list(path.glob("*.mkv")) + list(path.glob("*.mp4"))
    else:
        paths = [path]

    for video_path in paths:
        try:
            video_track = next(iter(MediaInfo.parse(video_path).video_tracks or []))
        except StopIteration:
            # chain suppressed: the StopIteration carries no useful context
            raise click.ClickException("There's no video tracks in the provided file.") from None

        # only H.264/H.265 support metadata-level cropping via ffmpeg bit-stream filters
        crop_filter = {
            "HEVC": "hevc_metadata",
            "AVC": "h264_metadata"
        }.get(video_track.commercial_name)
        if not crop_filter:
            raise click.ClickException(f"{video_track.commercial_name} Codec not supported.")

        aspect_w, aspect_h = map(float, aspect.split(":"))
        if letter:
            # letterbox: remove (height - width / (w/h)) split evenly top and bottom.
            # BUGFIX: this previously divided by (aspect_w * aspect_h), which is only
            # correct for X:1 aspect ratios (e.g. 2.40:1) and wrong for e.g. 4:3.
            crop_value = (video_track.height - (video_track.width / (aspect_w / aspect_h))) / 2
            left, top, right, bottom = map(int, [0, crop_value + offset, 0, crop_value - offset])
        else:
            # pillarbox: remove (width - height * (w/h)) split evenly left and right
            crop_value = (video_track.width - (video_track.height * (aspect_w / aspect_h))) / 2
            left, top, right, bottom = map(int, [crop_value + offset, 0, crop_value - offset, 0])

        # validate before building the filter string; negative crops mean the target
        # aspect is wider/taller than the source in the chosen direction
        if min(left, top, right, bottom) < 0:
            raise click.ClickException("Cannot crop less than 0, are you cropping in the right direction?")

        crop_filter += f"=crop_left={left}:crop_top={top}:crop_right={right}:crop_bottom={bottom}"

        if preview:
            out_args = ["-f", "mpegts", "-"]  # pipe a transport stream to stdout for the previewer
        else:
            # name the output "<stem>.<language>.crop.<offset>" with a raw bitstream extension
            out_args = [str(video_path.with_stem(".".join(filter(bool, [
                video_path.stem,
                video_track.language,
                "crop",
                str(offset or "")
            ]))).with_suffix({
                # ffmpeg's MKV muxer does not yet support HDR
                "HEVC": ".h265",
                "AVC": ".h264"
            }.get(video_track.commercial_name, ".mp4")))]

        ffmpeg_call = subprocess.Popen([
            executable, "-y",
            "-i", str(video_path),
            "-map", "0:v:0",
            "-c", "copy",
            "-bsf:v", crop_filter
        ] + out_args, stdout=subprocess.PIPE)
        try:
            if preview:
                previewer = get_binary_path("mpv", "ffplay")
                if not previewer:
                    raise click.ClickException("MPV/FFplay executables weren't found but are required for previewing.")
                subprocess.Popen((previewer, "-"), stdin=ffmpeg_call.stdout)
        finally:
            # release our end of the pipe and wait for ffmpeg to exit
            if ffmpeg_call.stdout:
                ffmpeg_call.stdout.close()
            ffmpeg_call.wait()
diff --git a/devine/commands/wvd.py b/devine/commands/wvd.py
new file mode 100644
index 0000000..53c50cc
--- /dev/null
+++ b/devine/commands/wvd.py
@@ -0,0 +1,215 @@
+from __future__ import annotations
+
+import logging
+from pathlib import Path
+from typing import Optional
+
+import click
+import yaml
+from google.protobuf.json_format import MessageToDict
+from pywidevine.device import Device
+from pywidevine.license_protocol_pb2 import FileHashes
+from unidecode import UnidecodeError, unidecode
+
+from devine.core.config import config
+from devine.core.constants import context_settings
+
+
# Click group: sub-commands (parse, dump, new) attach via @wvd.command() in this module.
@click.group(
    short_help="Manage configuration and creation of WVD (Widevine Device) files.",
    context_settings=context_settings)
def wvd() -> None:
    """Manage configuration and creation of WVD (Widevine Device) files."""
+
+
@wvd.command()
@click.argument("path", type=Path)
def parse(path: Path) -> None:
    """
    Parse a .WVD Widevine Device file to check information.
    Relative paths are relative to the WVDs directory.
    """
    try:
        # a bare name (relative path, no file extension) refers to a WVD stored
        # in the WVDs directory; relative_to() raises ValueError for absolute paths
        named = not path.suffix and path.relative_to(Path(""))
    except ValueError:
        named = False
    if named:
        path = config.directories.wvds / f"{path.name}.wvd"

    log = logging.getLogger("wvd")

    device = Device.load(path)

    # high-level device summary
    log.info(f"System ID: {device.system_id}")
    log.info(f"Security Level: {device.security_level}")
    log.info(f"Type: {device.type}")
    log.info(f"Flags: {device.flags}")
    log.info(f"Private Key: {bool(device.private_key)}")
    log.info(f"Client ID: {bool(device.client_id)}")
    log.info(f"VMP: {bool(device.client_id.vmp_data)}")

    log.info("Client ID:")
    log.info(device.client_id)

    # VMP (Verified Media Path) data is a serialized FileHashes protobuf blob
    log.info("VMP:")
    if device.client_id.vmp_data:
        file_hashes = FileHashes()
        file_hashes.ParseFromString(device.client_id.vmp_data)
        log.info(str(file_hashes))
    else:
        log.info("None")
+
+
@wvd.command()
@click.argument("wvd_paths", type=Path, nargs=-1)
@click.argument("out_dir", type=Path, nargs=1)
def dump(wvd_paths: list[Path], out_dir: Path) -> None:
    """
    Extract data from a .WVD Widevine Device file to a folder structure.

    If the path is relative, with no file extension, it will dump the WVD in the WVDs
    directory.
    """
    # a single empty path argument means "dump everything in the WVDs directory"
    if wvd_paths == (Path(""),):
        wvd_paths = list(config.directories.wvds.iterdir())
    # pair each input WVD with an output folder named after its file stem
    for wvd_path, out_path in zip(wvd_paths, (out_dir / x.stem for x in wvd_paths)):
        try:
            # a bare name (relative, no extension) resolves against the WVDs directory
            named = not wvd_path.suffix and wvd_path.relative_to(Path(""))
        except ValueError:
            named = False
        if named:
            wvd_path = config.directories.wvds / f"{wvd_path.stem}.wvd"
        out_path.mkdir(parents=True, exist_ok=True)

        device = Device.load(wvd_path)

        log = logging.getLogger("wvd")
        log.info(f"Dumping: {wvd_path}")
        log.info(f"L{device.security_level} {device.system_id} {device.type.name}")
        log.info(f"Saving to: {out_path}")

        # flatten device properties + client info into a YAML-friendly dict
        device_meta = {
            "wvd": {
                "device_type": device.type.name,
                "security_level": device.security_level,
                **device.flags
            },
            "client_info": {},
            "capabilities": MessageToDict(device.client_id, preserving_proto_field_name=True)["client_capabilities"]
        }
        for client_info in device.client_id.client_info:
            device_meta["client_info"][client_info.name] = client_info.value

        device_meta_path = out_path / "metadata.yml"
        device_meta_path.write_text(yaml.dump(device_meta), encoding="utf8")
        log.info(" + Device Metadata")

        # export the RSA private key in both PEM (text) and DER (binary) form
        if device.private_key:
            private_key_path = out_path / "private_key.pem"
            private_key_path.write_text(
                data=device.private_key.export_key().decode(),
                encoding="utf8"
            )
            private_key_path.with_suffix(".der").write_bytes(
                device.private_key.export_key(format="DER")
            )
            log.info(" + Private Key")
        else:
            log.warning(" - No Private Key available")

        # raw serialized ClientIdentification protobuf
        if device.client_id:
            client_id_path = out_path / "client_id.bin"
            client_id_path.write_bytes(device.client_id.SerializeToString())
            log.info(" + Client ID")
        else:
            log.warning(" - No Client ID available")

        # VMP (Verified Media Path) blob, if the device carries one
        if device.client_id.vmp_data:
            vmp_path = out_path / "vmp.bin"
            vmp_path.write_bytes(device.client_id.vmp_data)
            log.info(" + VMP (File Hashes)")
        else:
            log.info(" - No VMP (File Hashes) available")
+
+
@wvd.command()
@click.argument("name", type=str)
@click.argument("private_key", type=Path)
@click.argument("client_id", type=Path)
@click.argument("file_hashes", type=Path, required=False)
@click.option("-t", "--type", "type_", type=click.Choice([x.name for x in Device.Types], case_sensitive=False),
              default="Android", help="Device Type")
@click.option("-l", "--level", type=click.IntRange(1, 3), default=1, help="Device Security Level")
@click.option("-o", "--output", type=Path, default=None, help="Output Directory")
@click.pass_context
def new(
    ctx: click.Context,
    name: str,
    private_key: Path,
    client_id: Path,
    file_hashes: Optional[Path],
    type_: str,
    level: int,
    output: Optional[Path]
) -> None:
    """
    Create a new .WVD Widevine provision file.

    name: The origin device name of the provided data. e.g. `Nexus 6P`. You do not need to
        specify the security level, that will be done automatically.
    private_key: A PEM file of a Device's private key.
    client_id: A binary blob file which follows the Widevine ClientIdentification protobuf
        schema.
    file_hashes: A binary blob file with follows the Widevine FileHashes protobuf schema.
        Also known as VMP as it's used for VMP (Verified Media Path) assurance.
    """
    try:
        # TODO: Remove need for name, create name based on Client IDs ClientInfo values
        name = unidecode(name.strip().lower().replace(" ", "_"))
    except UnidecodeError as e:
        raise click.UsageError(f"name: Failed to sanitize name, {e}", ctx) from e
    if not name:
        raise click.UsageError("name: Empty after sanitizing, please make sure the name is valid.", ctx)
    if not private_key.is_file():
        raise click.UsageError("private_key: Not a path to a file, or it doesn't exist.", ctx)
    if not client_id.is_file():
        raise click.UsageError("client_id: Not a path to a file, or it doesn't exist.", ctx)
    if file_hashes and not file_hashes.is_file():
        raise click.UsageError("file_hashes: Not a path to a file, or it doesn't exist.", ctx)

    device = Device(
        type_=Device.Types[type_.upper()],
        security_level=level,
        flags=None,
        private_key=private_key.read_bytes(),
        client_id=client_id.read_bytes()
    )

    # attach the VMP blob directly onto the Client ID if provided
    if file_hashes:
        device.client_id.vmp_data = file_hashes.read_bytes()

    out_path = (output or config.directories.wvds) / f"{name}_{device.system_id}_l{device.security_level}.wvd"
    device.dump(out_path)

    log = logging.getLogger("wvd")

    log.info(f"Created binary WVD file, {out_path.name}")
    log.info(f" + Saved to: {out_path.absolute()}")
    log.info(f" + System ID: {device.system_id}")
    log.info(f" + Security Level: {device.security_level}")
    log.info(f" + Type: {device.type}")
    log.info(f" + Flags: {device.flags}")
    log.info(f" + Private Key: {bool(device.private_key)}")
    log.info(f" + Client ID: {bool(device.client_id)}")
    log.info(f" + VMP: {bool(device.client_id.vmp_data)}")

    log.debug("Client ID:")
    log.debug(device.client_id)

    log.debug("VMP:")
    if device.client_id.vmp_data:
        # use a distinct local name: `file_hashes` is the Path parameter above
        vmp_hashes = FileHashes()
        vmp_hashes.ParseFromString(device.client_id.vmp_data)
        # keep the whole verbose dump at debug level (was inconsistently log.info)
        log.debug(str(vmp_hashes))
    else:
        log.debug("None")
diff --git a/devine/core/__init__.py b/devine/core/__init__.py
new file mode 100644
index 0000000..5becc17
--- /dev/null
+++ b/devine/core/__init__.py
@@ -0,0 +1 @@
+__version__ = "1.0.0"
diff --git a/devine/core/__main__.py b/devine/core/__main__.py
new file mode 100644
index 0000000..cef44da
--- /dev/null
+++ b/devine/core/__main__.py
@@ -0,0 +1,29 @@
+import logging
+from datetime import datetime
+
+import click
+import coloredlogs
+
+from devine.core import __version__
+from devine.core.commands import Commands
+from devine.core.constants import context_settings, LOG_FORMAT
+
+
@click.command(cls=Commands, invoke_without_command=True, context_settings=context_settings)
@click.option("-v", "--version", is_flag=True, default=False, help="Print version information.")
@click.option("-d", "--debug", is_flag=True, default=False, help="Enable DEBUG level logs.")
def main(version: bool, debug: bool) -> None:
    """Devine—Open-Source Movie, TV, and Music Downloading Solution."""
    logging.basicConfig(level=logging.DEBUG if debug else logging.INFO)
    log = logging.getLogger()
    # coloredlogs replaces the root handler, applying the '{'-style LOG_FORMAT
    coloredlogs.install(level=log.level, fmt=LOG_FORMAT, style="{")

    # the banner (which includes the version) is always printed, even without -v
    log.info(f"Devine version {__version__} Copyright (c) 2019-{datetime.now().year} rlaphoenix")
    log.info("Convenient Widevine-DRM Downloader and Decrypter.")
    log.info("https://github.com/devine/devine")
    if version:
        # -v only wants the banner; don't fall through to sub-command dispatch
        return


if __name__ == "__main__":
    main()
diff --git a/devine/core/cacher.py b/devine/core/cacher.py
new file mode 100644
index 0000000..a46455c
--- /dev/null
+++ b/devine/core/cacher.py
@@ -0,0 +1,161 @@
+from __future__ import annotations
+
+import zlib
+from datetime import datetime, timedelta
+from os import stat_result
+from pathlib import Path
+from typing import Optional, Any, Union
+
+import jsonpickle
+import jwt
+
+from devine.core.config import config
+
+
+EXP_T = Union[datetime, str, int, float]
+
+
class Cacher:
    """
    Cacher for Services to get and set arbitrary data with expiration dates.

    Data is stored as jsonpickle JSON on disk, under the cache directory in a
    per-service sub-folder, with a CRC32 checksum guarding against corruption.
    """

    def __init__(
        self,
        service_tag: str,
        key: Optional[str] = None,
        version: Optional[int] = 1,
        data: Optional[Any] = None,
        expiration: Optional[datetime] = None
    ) -> None:
        self.service_tag = service_tag
        self.key = key
        self.version = version
        self.data = data or {}
        self.expiration = expiration

        if self.expiration and self.expired:
            # if it's expired, remove the data for safety and delete the cache file.
            # missing_ok: don't crash if the file was already removed externally.
            self.data = None
            self.path.unlink(missing_ok=True)

    def __bool__(self) -> bool:
        return bool(self.data)

    @property
    def path(self) -> Path:
        """Get the path at which the cache will be read and written."""
        return (config.directories.cache / self.service_tag / self.key).with_suffix(".json")

    @property
    def expired(self) -> bool:
        # wrapped in bool() so this honors the annotated return type instead of
        # leaking None when no expiration is set.
        # NOTE(review): compares against naive utcnow(); a tz-aware expiration
        # would raise TypeError here — confirm callers always store naive UTC.
        return bool(self.expiration and self.expiration < datetime.utcnow())

    def get(self, key: str, version: int = 1) -> Cacher:
        """
        Get Cached data for the Service by Key.
        :param key: the filename to save the data to, should be url-safe.
        :param version: the config data version you expect to use.
        :returns: Cache object containing the cached data or None if the file does not exist.
        """
        cache = Cacher(self.service_tag, key, version)
        if cache.path.is_file():
            data = jsonpickle.loads(cache.path.read_text(encoding="utf8"))
            # verify the checksum over the payload without the crc32 field itself
            payload = data.copy()
            del payload["crc32"]
            checksum = data["crc32"]
            calculated = zlib.crc32(jsonpickle.dumps(payload).encode("utf8"))
            if calculated != checksum:
                raise ValueError(
                    f"The checksum of the Cache payload mismatched. "
                    f"Checksum: {checksum} !== Calculated: {calculated}"
                )
            cache.data = data["data"]
            cache.expiration = data["expiration"]
            cache.version = data["version"]
            if cache.version != version:
                raise ValueError(
                    f"The version of your {self.service_tag} {key} cache is outdated. "
                    f"Please delete: {cache.path}"
                )
        return cache

    def set(self, data: Any, expiration: Optional[EXP_T] = None) -> Any:
        """
        Set Cached data for the Service by Key.
        :param data: absolutely anything including None.
        :param expiration: when the data expires, optional. Can be ISO 8601, seconds
            til expiration, unix timestamp, or a datetime object.
        :returns: the data provided for quick wrapping of functions or vars.
        """
        self.data = data

        if not expiration:
            # if the data happens to be a JWT, use its exp claim as the expiration;
            # PyJWT raises DecodeError for non-token data, which we ignore
            try:
                expiration = jwt.decode(self.data, options={"verify_signature": False})["exp"]
            except jwt.DecodeError:
                pass

        self.expiration = self._resolve_datetime(expiration) if expiration else None

        payload = {
            "data": self.data,
            "expiration": self.expiration,
            "version": self.version
        }
        # checksum is computed over the payload and then stored alongside it
        payload["crc32"] = zlib.crc32(jsonpickle.dumps(payload).encode("utf8"))

        self.path.parent.mkdir(parents=True, exist_ok=True)
        self.path.write_text(jsonpickle.dumps(payload))

        return self.data

    def stat(self) -> stat_result:
        """
        Get Cache file OS Stat data like Creation Time, Modified Time, and such.
        :returns: an os.stat_result tuple
        """
        return self.path.stat()

    @staticmethod
    def _resolve_datetime(timestamp: EXP_T) -> datetime:
        """
        Resolve multiple formats of a Datetime or Timestamp to an absolute Datetime.

        Examples:
            >>> now = datetime.now()
            datetime.datetime(2022, 6, 27, 9, 49, 13, 657208)
            >>> iso8601 = now.isoformat()
            '2022-06-27T09:49:13.657208'
            >>> Cacher._resolve_datetime(iso8601)
            datetime.datetime(2022, 6, 27, 9, 49, 13, 657208)
            >>> Cacher._resolve_datetime(iso8601 + "Z")
            datetime.datetime(2022, 6, 27, 9, 49, 13, 657208)
            >>> Cacher._resolve_datetime(3600)
            datetime.datetime(2022, 6, 27, 10, 52, 50, 657208)
            >>> Cacher._resolve_datetime('3600')
            datetime.datetime(2022, 6, 27, 10, 52, 51, 657208)
            >>> Cacher._resolve_datetime(7800.113)
            datetime.datetime(2022, 6, 27, 11, 59, 13, 770208)

        In the int/float examples you may notice that it did not return now + 3600 seconds
        but rather something a bit more than that. This is because it did not resolve 3600
        seconds from the `now` variable but from right now as the function was called.
        """
        if isinstance(timestamp, datetime):
            return timestamp
        if isinstance(timestamp, str):
            if timestamp.endswith("Z"):
                # fromisoformat doesn't accept the final Z
                timestamp = timestamp.split("Z")[0]
            try:
                return datetime.fromisoformat(timestamp)
            except ValueError:
                # not ISO 8601; fall through and treat as a numeric timestamp
                timestamp = float(timestamp)
        try:
            timestamp = datetime.fromtimestamp(timestamp)
        except ValueError:
            raise ValueError(f"Unrecognized Timestamp value {timestamp!r}")
        if timestamp < datetime.now():
            # timestamp is likely an amount of seconds til expiration
            # or, it's an already expired timestamp which is unlikely
            timestamp = timestamp + timedelta(seconds=datetime.now().timestamp())
        return timestamp
diff --git a/devine/core/commands.py b/devine/core/commands.py
new file mode 100644
index 0000000..f775a93
--- /dev/null
+++ b/devine/core/commands.py
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+from typing import Optional
+
+import click
+
+from devine.core.config import config
+from devine.core.utilities import import_module_by_path
+
# every command script in the commands directory (excluding the package init),
# sorted by filename stem for a stable command listing
_COMMANDS = sorted(
    (
        path
        for path in config.directories.commands.glob("*.py")
        if path.stem.lower() != "__init__"
    ),
    key=lambda x: x.stem
)

# map of command name -> the object each module exposes under its own file stem
# (e.g. serve.py must define an attribute named `serve`)
_MODULES = {
    path.stem: getattr(import_module_by_path(path), path.stem)
    for path in _COMMANDS
}


class Commands(click.MultiCommand):
    """Lazy-loaded command group of project commands."""

    def list_commands(self, ctx: click.Context) -> list[str]:
        """Returns a list of command names from the command filenames."""
        return [x.stem for x in _COMMANDS]

    def get_command(self, ctx: click.Context, name: str) -> Optional[click.Command]:
        """Load the command code and return the main click command function."""
        module = _MODULES.get(name)
        if not module:
            raise click.ClickException(f"Unable to find command by the name '{name}'")

        # a module may expose a `cli` entry point wrapping its command
        if hasattr(module, "cli"):
            return module.cli

        return module


# Hide direct access to commands from quick import form, they shouldn't be accessed directly
__ALL__ = (Commands,)
diff --git a/devine/core/config.py b/devine/core/config.py
new file mode 100644
index 0000000..92069e9
--- /dev/null
+++ b/devine/core/config.py
@@ -0,0 +1,79 @@
+from __future__ import annotations
+
+import tempfile
+from pathlib import Path
+from typing import Any
+
+import yaml
+from appdirs import AppDirs
+
+
class Config:
    """
    Runtime configuration loaded from the user's devine.yaml.

    Unknown keys are ignored; missing keys fall back to empty defaults so
    attribute access is always safe for consumers.
    """

    class _Directories:
        # default directories, do not modify here, set via config
        app_dirs = AppDirs("devine", False)
        core_dir = Path(__file__).resolve().parent
        namespace_dir = core_dir.parent
        commands = namespace_dir / "commands"
        services = namespace_dir / "services"
        vaults = namespace_dir / "vaults"
        user_configs = Path(app_dirs.user_config_dir)
        data = Path(app_dirs.user_data_dir)
        downloads = Path.home() / "Downloads" / "devine"
        temp = Path(tempfile.gettempdir()) / "devine"
        cache = Path(app_dirs.user_cache_dir)
        cookies = data / "Cookies"
        logs = Path(app_dirs.user_log_dir)
        wvds = data / "WVDs"
        dcsl = data / "DCSL"

    class _Filenames:
        # default filenames, do not modify here, set via config
        log = "devine_{name}_{time}.log"  # Directories.logs
        config = "config.yaml"  # Directories.services / tag
        root_config = "devine.yaml"  # Directories.user_configs
        chapters = "Chapters_{title}_{random}.txt"  # Directories.temp
        subtitle = "Subtitle_{id}_{language}.srt"  # Directories.temp

    def __init__(self, **kwargs: Any):
        self.dl: dict = kwargs.get("dl") or {}
        self.aria2c: dict = kwargs.get("aria2c") or {}
        self.cdm: dict = kwargs.get("cdm") or {}
        self.remote_cdm: list[dict] = kwargs.get("remote_cdm") or []
        self.credentials: dict = kwargs.get("credentials") or {}

        self.directories = self._Directories()
        for name, path in (kwargs.get("directories") or {}).items():
            if name.lower() in ("app_dirs", "core_dir", "namespace_dir", "user_configs", "data"):
                # these must not be modified by the user
                continue
            setattr(self.directories, name, Path(path).expanduser())

        self.filenames = self._Filenames()
        for name, filename in (kwargs.get("filenames") or {}).items():
            setattr(self.filenames, name, filename)

        self.headers: dict = kwargs.get("headers") or {}
        # default to an empty list (was bare .get(), which could leave this as
        # None and break consumers that iterate config.key_vaults)
        self.key_vaults: list[dict[str, Any]] = kwargs.get("key_vaults") or []
        self.muxing: dict = kwargs.get("muxing") or {}
        self.nordvpn: dict = kwargs.get("nordvpn") or {}
        self.profiles: dict = kwargs.get("profiles") or {}
        self.proxies: dict = kwargs.get("proxies") or {}
        self.proxy_providers: dict = kwargs.get("proxy_providers") or {}
        self.serve: dict = kwargs.get("serve") or {}
        self.services: dict = kwargs.get("services") or {}
        self.tag: str = kwargs.get("tag") or ""

    @classmethod
    def from_yaml(cls, path: Path) -> Config:
        """Load a Config from a YAML file path, raising if it does not exist."""
        if not path.exists():
            raise FileNotFoundError(f"Config file path ({path}) was not found")
        if not path.is_file():
            raise FileNotFoundError(f"Config file path ({path}) is not to a file.")
        return cls(**yaml.safe_load(path.read_text(encoding="utf8")))
+
+
# module-level singleton: loads the root config (devine.yaml) from the user's
# config directory at import time; raises FileNotFoundError if it's missing
# noinspection PyProtectedMember
config = Config.from_yaml(Config._Directories.user_configs / Config._Filenames.root_config)

__ALL__ = (config,)
diff --git a/devine/core/constants.py b/devine/core/constants.py
new file mode 100644
index 0000000..374722c
--- /dev/null
+++ b/devine/core/constants.py
@@ -0,0 +1,51 @@
+import logging
+from typing import TypeVar, Union
+
+
LOG_FORMAT = "{asctime} [{levelname[0]}] {name} : {message}"  # must be '{}' style
LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
LOG_FORMATTER = logging.Formatter(LOG_FORMAT, LOG_DATE_FORMAT, "{")
# preference order when sorting DRM systems
DRM_SORT_MAP = ["ClearKey", "Widevine"]
LANGUAGE_MUX_MAP = {
    # List of language tags that cannot be used by mkvmerge and need replacements.
    # Try get the replacement to be as specific locale-wise as possible.
    # A bcp47 as the replacement is recommended.
    "cmn": "zh",
    "cmn-Hant": "zh-Hant",
    "cmn-Hans": "zh-Hans",
    "none": "und",
    "yue": "zh-yue",
    "yue-Hant": "zh-yue-Hant",
    "yue-Hans": "zh-yue-Hans"
}
# territory display names normalized for filenames/output
TERRITORY_MAP = {
    "Hong Kong SAR China": "Hong Kong"
}
LANGUAGE_MAX_DISTANCE = 5  # this is max to be considered "same", e.g., en, en-US, en-AU
# MediaInfo commercial codec names -> common display names
VIDEO_CODEC_MAP = {
    "AVC": "H.264",
    "HEVC": "H.265"
}
DYNAMIC_RANGE_MAP = {
    "HDR10": "HDR",
    "HDR10+": "HDR",
    "Dolby Vision": "DV"
}
AUDIO_CODEC_MAP = {
    "E-AC-3": "DDP",
    "AC-3": "DD"
}

context_settings = dict(
    help_option_names=["-?", "-h", "--help"],  # default only has --help
    max_content_width=116,  # max PEP8 line-width, -4 to adjust for initial indent
)

# For use in signatures of functions which take one specific type of track at a time
# (it can't be a list that contains e.g. both Video and Audio objects)
TrackT = TypeVar("TrackT", bound="Track")  # noqa: F821

# For general use in lists that can contain mixed types of tracks.
# list[Track] won't work because list is invariant.
# TODO: Add Chapter?
AnyTrack = Union["Video", "Audio", "Subtitle"]  # noqa: F821
diff --git a/devine/core/credential.py b/devine/core/credential.py
new file mode 100644
index 0000000..36aad38
--- /dev/null
+++ b/devine/core/credential.py
@@ -0,0 +1,90 @@
+from __future__ import annotations
+
+import base64
+import hashlib
+import re
+from pathlib import Path
+from typing import Optional, Union
+
+
class Credential:
    """Username (or Email) and Password Credential."""

    def __init__(self, username: str, password: str, extra: Optional[str] = None):
        self.username = username
        self.password = password
        self.extra = extra
        # stable fingerprint of the full credential string
        self.sha1 = hashlib.sha1(self.dumps().encode()).hexdigest()

    def __bool__(self) -> bool:
        # truthy only when both the username and password are non-empty
        return all((self.username, self.password))

    def __str__(self) -> str:
        return self.dumps()

    def __repr__(self) -> str:
        fields = ", ".join(f"{key}={value!r}" for key, value in self.__dict__.items())
        return f"{self.__class__.__name__}({fields})"

    def dumps(self) -> str:
        """Serialize as `username:password`, appending `:extra` when set."""
        parts = [self.username, self.password]
        if self.extra:
            parts.append(self.extra)
        return ":".join(parts)

    def dump(self, path: Union[Path, str]) -> int:
        """Write the credential string to a file, returning characters written."""
        return Path(path).write_text(self.dumps(), encoding="utf8")

    def as_base64(self, with_extra: bool = False, encode_password: bool = False, encode_extra: bool = False) -> str:
        """
        Dump Credential as a Base64-encoded string in Basic Authorization style.
        encode_password and encode_extra will also Base64-encode the password and extra respectively.
        """
        password = base64.b64encode(self.password.encode()).decode() if encode_password else self.password
        value = f"{self.username}:{password}"
        if with_extra and self.extra:
            extra = base64.b64encode(self.extra.encode()).decode() if encode_extra else self.extra
            value += f":{extra}"
        return base64.b64encode(value.encode()).decode()

    @classmethod
    def loads(cls, text: str) -> Credential:
        """
        Parse a single Credential out of a text string.

        Expected format: `{username}:{password}` with an optional `:{extra}` tail.
        Leading/trailing whitespace on each line is stripped and the lines are
        joined, so the credential may span multiple lines; whitespace within a
        line is preserved. Exactly one credential must be present.

        Example:
            `\tJohnd\noe@gm\n\rail.com\n:Pass1\n23\n\r  \t  \t`
            >>>Credential(username='Johndoe@gmail.com', password='Pass123')
        """
        stripped = "".join(line.strip() for line in text.splitlines()).strip()
        match = re.fullmatch(r"([^:]+?):([^:]+?)(?::(.+))?", stripped)
        if not match:
            raise ValueError("No credentials found in text string. Expecting the format `username:password`")
        return cls(*match.groups())

    @classmethod
    def load(cls, path: Path) -> Credential:
        """
        Load a Credential from a file path.
        See Credential.loads() for the format and rules applied to the file's
        text contents.
        """
        return cls.loads(path.read_text(encoding="utf8"))
diff --git a/devine/core/downloaders/__init__.py b/devine/core/downloaders/__init__.py
new file mode 100644
index 0000000..8edba3a
--- /dev/null
+++ b/devine/core/downloaders/__init__.py
@@ -0,0 +1,2 @@
+from .aria2c import aria2c
+from .saldl import saldl
diff --git a/devine/core/downloaders/aria2c.py b/devine/core/downloaders/aria2c.py
new file mode 100644
index 0000000..f8adf7e
--- /dev/null
+++ b/devine/core/downloaders/aria2c.py
@@ -0,0 +1,88 @@
+import asyncio
+import subprocess
+from pathlib import Path
+from typing import Union, Optional
+
+from devine.core.config import config
+from devine.core.utilities import get_binary_path, start_pproxy
+
+
+async def aria2c(
+    uri: Union[str, list[str]],
+    out: Path,
+    headers: Optional[dict] = None,
+    proxy: Optional[str] = None
+) -> int:
+    """
+    Download files using Aria2(c).
+    https://aria2.github.io
+
+    If multiple URLs are provided they will be downloaded in the provided order
+    to the output directory. They will not be merged together.
+
+    Parameters:
+        uri: Source URL, or a list of segment URLs to download in order.
+        out: Output file path (single URL), or output directory (list of URLs).
+        headers: Optional HTTP request headers. Accept-Encoding is skipped.
+        proxy: Optional proxy URI. Non-HTTP proxies are relayed through pproxy.
+
+    Returns:
+        The aria2c process return code (always 0, as non-zero raises).
+
+    Raises:
+        ValueError: If multiple segments were provided but `out` is a file path.
+        EnvironmentError: If no aria2(c) executable could be found.
+        subprocess.CalledProcessError: If aria2c exited with a non-zero code.
+    """
+    segmented = False
+    if isinstance(uri, list) and len(uri) == 1:
+        # a single-entry list is just a normal single-file download
+        uri = uri[0]
+    if isinstance(uri, list):
+        segmented = True
+        # build an aria2c input-file document: each URL followed by
+        # tab-indented per-download options (dir/out); fed via `-i -` below
+        uri = "\n".join([
+            f"{url}\n"
+            f"\tdir={out}\n"
+            f"\tout={i:08}.mp4"
+            for i, url in enumerate(uri)
+        ])
+        if out.is_file():
+            raise ValueError("Provided multiple segments to download, expecting directory path")
+    elif "\t" not in uri:
+        # single URL with no per-download options yet; add dir/out options
+        uri = f"{uri}\n" \
+              f"\tdir={out.parent}\n" \
+              f"\tout={out.name}"
+
+    executable = get_binary_path("aria2c", "aria2")
+    if not executable:
+        raise EnvironmentError("Aria2c executable not found...")
+
+    arguments = [
+        "-c",  # Continue downloading a partially downloaded file
+        "--remote-time",  # Retrieve timestamp of the remote file and apply it if available
+        "-x", "16",  # The maximum number of connections to one server for each download
+        "-j", "16",  # The maximum number of parallel downloads for every static (HTTP/FTP) URL
+        "-s", ("1" if segmented else "16"),  # Download a file using N connections
+        "--min-split-size", ("1024M" if segmented else "20M"),  # effectively disable split if segmented
+        "--allow-overwrite=true",
+        "--auto-file-renaming=false",
+        "--retry-wait", "2",  # Set the seconds to wait between retries.
+        "--max-tries", "5",
+        "--max-file-not-found", "5",
+        "--summary-interval", "0",
+        "--file-allocation", config.aria2c.get("file_allocation", "falloc"),
+        "--console-log-level", "warn",
+        "--download-result", "hide",
+        "-i", "-"  # read the download list from stdin (written below)
+    ]
+
+    for header, value in (headers or {}).items():
+        if header.lower() == "accept-encoding":
+            # we cannot set an allowed encoding, or it will return compressed
+            # and the code is not set up to uncompress the data
+            continue
+        arguments.extend(["--header", f"{header}: {value}"])
+
+    if proxy and proxy.lower().split(":")[0] != "http":
+        # HTTPS proxies not supported by Aria2c.
+        # Proxy the proxy via pproxy to access it as a HTTP proxy.
+        async with start_pproxy(proxy) as pproxy_:
+            # re-enter with the local HTTP relay acting as the proxy
+            return await aria2c(uri, out, headers, pproxy_)
+
+    if proxy:
+        arguments += ["--all-proxy", proxy]
+
+    p = await asyncio.create_subprocess_exec(executable, *arguments, stdin=subprocess.PIPE)
+    await p.communicate(uri.encode())
+    if p.returncode != 0:
+        raise subprocess.CalledProcessError(p.returncode, arguments)
+
+    return p.returncode
+
+
+# NOTE(review): Python's export-list convention is lowercase `__all__` with a
+# list of name strings; uppercase `__ALL__` has no special meaning — confirm intent.
+__ALL__ = (aria2c,)
diff --git a/devine/core/downloaders/saldl.py b/devine/core/downloaders/saldl.py
new file mode 100644
index 0000000..5e38d4f
--- /dev/null
+++ b/devine/core/downloaders/saldl.py
@@ -0,0 +1,51 @@
+import subprocess
+from pathlib import Path
+from typing import Union, Optional
+
+from devine.core.utilities import get_binary_path
+
+
+async def saldl(
+    uri: Union[str, list[str]],
+    out: Union[Path, str],
+    headers: Optional[dict] = None,
+    proxy: Optional[str] = None
+) -> int:
+    """
+    Download a file using saldl.
+
+    Parameters:
+        uri: Source URL. Lists (segmented downloads) are not yet supported.
+        out: Output file path.
+        headers: Optional HTTP request headers.
+        proxy: Optional proxy URI.
+
+    Raises:
+        EnvironmentError: If no saldl executable could be found.
+        ValueError: If a list of URIs (segmented download) was provided.
+        subprocess.CalledProcessError: If saldl exited with a non-zero code.
+    """
+    out = Path(out)
+
+    if headers:
+        # NOTE(review): this update() is a no-op — the filtered comprehension is
+        # merged back into the same dict, so "accept-encoding" is never removed.
+        # Presumably a dict replacement/pop was intended; confirm.
+        headers.update({k: v for k, v in headers.items() if k.lower() != "accept-encoding"})
+
+    executable = get_binary_path("saldl", "saldl-win64", "saldl-win32")
+    if not executable:
+        raise EnvironmentError("Saldl executable not found...")
+
+    arguments = [
+        executable,
+        # "--no-status",
+        "--skip-TLS-verification",
+        "--resume",
+        "--merge-in-order",
+        "-c8",  # presumably 8 concurrent connections — confirm against saldl docs
+        "--auto-size", "1",
+        "-D", str(out.parent),
+        "-o", out.name
+    ]
+
+    if headers:
+        arguments.extend([
+            "--custom-headers",
+            "\r\n".join([f"{k}: {v}" for k, v in headers.items()])
+        ])
+
+    if proxy:
+        arguments.extend(["--proxy", proxy])
+
+    if isinstance(uri, list):
+        raise ValueError("Saldl code does not yet support multiple uri (e.g. segmented) downloads.")
+    arguments.append(uri)
+
+    # blocking call despite the async signature; raises CalledProcessError on failure
+    return subprocess.check_call(arguments)
+
+
+# NOTE(review): should likely be lowercase `__all__` with name strings — confirm intent.
+__ALL__ = (saldl,)
diff --git a/devine/core/drm/__init__.py b/devine/core/drm/__init__.py
new file mode 100644
index 0000000..8798f8e
--- /dev/null
+++ b/devine/core/drm/__init__.py
@@ -0,0 +1,6 @@
+from typing import Union
+
+from devine.core.drm.clearkey import ClearKey
+from devine.core.drm.widevine import Widevine
+
+# Type alias covering every DRM system implementation exported by this package.
+DRM_T = Union[ClearKey, Widevine]
diff --git a/devine/core/drm/clearkey.py b/devine/core/drm/clearkey.py
new file mode 100644
index 0000000..5803bde
--- /dev/null
+++ b/devine/core/drm/clearkey.py
@@ -0,0 +1,82 @@
+from __future__ import annotations
+
+from typing import Optional, Union
+from urllib.parse import urljoin
+
+import requests
+from Cryptodome.Cipher import AES
+from m3u8.model import Key
+
+from devine.core.constants import TrackT
+
+
+class ClearKey:
+    """AES Clear Key DRM System."""
+    def __init__(self, key: Union[bytes, str], iv: Optional[Union[bytes, str]] = None):
+        """
+        Generally IV should be provided where possible. If not provided, it will be
+        set to \x00 of the same bit-size of the key.
+
+        Hex strings (optionally `0x`-prefixed) are accepted for both key and IV.
+
+        Raises:
+            ValueError: If the key or IV is not bytes (or hex str) after conversion.
+        """
+        if isinstance(key, str):
+            key = bytes.fromhex(key.replace("0x", ""))
+        if not isinstance(key, bytes):
+            raise ValueError(f"Expected AES Key to be bytes, not {key!r}")
+        if not iv:
+            iv = b"\x00"
+        if isinstance(iv, str):
+            iv = bytes.fromhex(iv.replace("0x", ""))
+        if not isinstance(iv, bytes):
+            raise ValueError(f"Expected IV to be bytes, not {iv!r}")
+
+        if len(iv) < len(key):
+            # NOTE(review): repeating the IV this way can overshoot the key length
+            # (e.g. an 8-byte IV with a 16-byte key yields 72 bytes), and CBC
+            # requires a 16-byte IV regardless of key size. Padding/truncating to
+            # the block size may have been intended — confirm.
+            iv = iv * (len(key) - len(iv) + 1)
+
+        self.key: bytes = key
+        self.iv: bytes = iv
+
+    def decrypt(self, track: TrackT) -> None:
+        """
+        Decrypt a Track with AES Clear Key DRM.
+
+        The decrypted data is written to a sibling file with a `.decrypted`
+        suffix inserted, which is then swapped in as the track's file.
+
+        Raises:
+            ValueError: If the track has not yet been downloaded.
+        """
+        if not track.path or not track.path.exists():
+            raise ValueError("Tried to decrypt a track that has not yet been downloaded.")
+
+        # whole-file CBC decrypt; the entire file is read into memory at once
+        decrypted = AES. \
+            new(self.key, AES.MODE_CBC, self.iv). \
+            decrypt(track.path.read_bytes())
+
+        decrypted_path = track.path.with_suffix(f".decrypted{track.path.suffix}")
+        decrypted_path.write_bytes(decrypted)
+
+        track.swap(decrypted_path)
+        track.drm = None  # the track is no longer encrypted
+
+    @classmethod
+    def from_m3u_key(cls, m3u_key: Key, proxy: Optional[str] = None) -> ClearKey:
+        """
+        Build a ClearKey by fetching the key bytes from an m3u8 Key's URI.
+
+        Parameters:
+            m3u_key: Parsed m3u8 Key with an AES method and a key URI.
+            proxy: Optional proxy URI used for the key request.
+
+        Raises:
+            ValueError: If the Key is not an AES key or has no URI.
+            EOFError: If the key response is empty or shorter than 16 bytes.
+        """
+        if not isinstance(m3u_key, Key):
+            raise ValueError(f"Provided M3U Key is in an unexpected type {m3u_key!r}")
+        if not m3u_key.method.startswith("AES"):
+            raise ValueError(f"Provided M3U Key is not an AES Clear Key, {m3u_key.method}")
+        if not m3u_key.uri:
+            raise ValueError("No URI in M3U Key, unable to get Key.")
+
+        res = requests.get(
+            url=urljoin(m3u_key.base_uri, m3u_key.uri),
+            headers={
+                # NOTE(review): ExoPlayer-style UA, presumably required by some
+                # key servers — confirm before changing.
+                "User-Agent": "smartexoplayer/1.1.0 (Linux;Android 8.0.0) ExoPlayerLib/2.13.3"
+            },
+            proxies={"all": proxy} if proxy else None
+        )
+        res.raise_for_status()
+        if not res.content:
+            raise EOFError("Unexpected Empty Response by M3U Key URI.")
+        if len(res.content) < 16:
+            raise EOFError(f"Unexpected Length of Key ({len(res.content)} bytes) in M3U Key.")
+
+        key = res.content
+        iv = None
+        if m3u_key.iv:
+            # IV is given as a hex string, optionally `0x`-prefixed
+            iv = bytes.fromhex(m3u_key.iv.replace("0x", ""))
+
+        return cls(key=key, iv=iv)
+
+
+# NOTE(review): should likely be lowercase `__all__` with name strings — confirm intent.
+__ALL__ = (ClearKey,)
diff --git a/devine/core/drm/widevine.py b/devine/core/drm/widevine.py
new file mode 100644
index 0000000..841078f
--- /dev/null
+++ b/devine/core/drm/widevine.py
@@ -0,0 +1,222 @@
+from __future__ import annotations
+
+import base64
+import subprocess
+import sys
+from typing import Any, Optional, Union, Callable
+from uuid import UUID
+
+import m3u8
+from construct import Container
+from pymp4.parser import Box
+from pywidevine.cdm import Cdm as WidevineCdm
+from pywidevine.pssh import PSSH
+from requests import Session
+
+from devine.core.config import config
+from devine.core.constants import AnyTrack, TrackT
+from devine.core.utilities import get_binary_path, get_boxes
+from devine.core.utils.subprocess import ffprobe
+
+
+class Widevine:
+    """Widevine DRM System."""
+    def __init__(self, pssh: PSSH, kid: Union[UUID, str, bytes, None] = None, **kwargs: Any):
+        """
+        Parameters:
+            pssh: Protection System Specific Header box. PlayReady PSSH boxes
+                are converted to Widevine format.
+            kid: Optional Key ID (UUID, hex str, or bytes) to set on the PSSH.
+            **kwargs: Arbitrary extra data, stored on `self.data`.
+
+        Raises:
+            ValueError: If the PSSH is empty, or the kid is an unexpected type.
+            TypeError: If pssh is not a pywidevine PSSH object.
+            Widevine.Exceptions.KIDNotFound: If no Key ID is available at all.
+        """
+        if not pssh:
+            raise ValueError("Provided PSSH is empty.")
+        if not isinstance(pssh, PSSH):
+            raise TypeError(f"Expected pssh to be a {PSSH}, not {pssh!r}")
+
+        # normalize PlayReady PSSH data into Widevine format
+        if pssh.system_id == PSSH.SystemId.PlayReady:
+            pssh.to_widevine()
+
+        if kid:
+            if isinstance(kid, str):
+                kid = UUID(hex=kid)
+            elif isinstance(kid, bytes):
+                kid = UUID(bytes=kid)
+            if not isinstance(kid, UUID):
+                raise ValueError(f"Expected kid to be a {UUID}, str, or bytes, not {kid!r}")
+            pssh.set_key_ids([kid])
+
+        self._pssh = pssh
+
+        if not self.kids:
+            raise Widevine.Exceptions.KIDNotFound("No Key ID was found within PSSH and none were provided.")
+
+        # KID -> hex content key, filled in by get_content_keys()
+        self.content_keys: dict[UUID, str] = {}
+        self.data: dict = kwargs or {}
+
+    @classmethod
+    def from_track(cls, track: AnyTrack, session: Optional[Session] = None) -> Widevine:
+        """
+        Get PSSH and KID from within the Initiation Segment of the Track Data.
+        It also tries to get PSSH and KID from other track data like M3U8 data
+        as well as through ffprobe.
+
+        Create a Widevine DRM System object from a track's information.
+        This should only be used if a PSSH could not be provided directly.
+        It is *rare* to need to use this.
+
+        You may provide your own requests session to be able to use custom
+        headers and more.
+
+        Raises:
+            PSSHNotFound - If the PSSH was not found within the data.
+            KIDNotFound - If the KID was not found within the data or PSSH.
+        """
+        if not session:
+            session = Session()
+            session.headers.update(config.headers)
+
+        kid: Optional[UUID] = None
+        pssh_boxes: list[Container] = []
+        tenc_boxes: list[Container] = []
+
+        if track.descriptor == track.Descriptor.M3U:
+            m3u_url = track.url
+            if isinstance(m3u_url, list):
+                # TODO: Find out why exactly the track url could be a list in this
+                #       scenario, as if its a list of segments, they would be files
+                #       not m3u documents
+                m3u_url = m3u_url[0]
+            master = m3u8.loads(session.get(m3u_url).text, uri=m3u_url)
+            # collect Widevine PSSH boxes embedded as base64 data URIs in the keys
+            pssh_boxes.extend(
+                Box.parse(base64.b64decode(x.uri.split(",")[-1]))
+                for x in (master.session_keys or master.keys)
+                if x and x.keyformat and x.keyformat.lower() == WidevineCdm.urn
+            )
+
+        init_data = track.get_init_segment(session)
+        if init_data:
+            # try get via ffprobe, needed for non mp4 data e.g. WEBM from Google Play
+            probe = ffprobe(init_data)
+            if probe:
+                for stream in probe.get("streams") or []:
+                    enc_key_id = stream.get("tags", {}).get("enc_key_id")
+                    if enc_key_id:
+                        kid = UUID(bytes=base64.b64decode(enc_key_id))
+            pssh_boxes.extend(list(get_boxes(init_data, b"pssh")))
+            tenc_boxes.extend(list(get_boxes(init_data, b"tenc")))
+
+        # prefer Widevine PSSH boxes over PlayReady when both are present
+        pssh_boxes.sort(key=lambda b: {
+            PSSH.SystemId.Widevine: 0,
+            PSSH.SystemId.PlayReady: 1
+        }[b.system_ID])
+
+        pssh = next(iter(pssh_boxes), None)
+        if not pssh:
+            raise Widevine.Exceptions.PSSHNotFound("PSSH was not found in track data.")
+
+        # a tenc box's all-zero key_ID means "no KID"; ignore it in that case
+        tenc = next(iter(tenc_boxes), None)
+        if not kid and tenc and tenc.key_ID.int != 0:
+            kid = tenc.key_ID
+
+        return cls(pssh=PSSH(pssh), kid=kid)
+
+    @property
+    def pssh(self) -> PSSH:
+        """Get Protection System Specific Header Box."""
+        return self._pssh
+
+    @property
+    def kid(self) -> Optional[UUID]:
+        """Get first Key ID, if any."""
+        return next(iter(self.kids), None)
+
+    @property
+    def kids(self) -> list[UUID]:
+        """Get all Key IDs."""
+        return self._pssh.key_ids
+
+    def get_content_keys(self, cdm: WidevineCdm, certificate: Callable, licence: Callable) -> None:
+        """
+        Create a CDM Session and obtain Content Keys for this DRM Instance.
+        The certificate and license params are expected to be a function and will
+        be provided with the challenge and session ID.
+
+        Raises:
+            ValueError: If the license returned no content keys, or is missing a
+                key for one of this instance's KIDs.
+        """
+        for kid in self.kids:
+            # keys already obtained in a previous iteration/call are reused
+            if kid in self.content_keys:
+                continue
+
+            session_id = cdm.open()
+
+            try:
+                cdm.set_service_certificate(
+                    session_id,
+                    certificate(
+                        challenge=cdm.service_certificate_challenge
+                    )
+                )
+
+                cdm.parse_license(
+                    session_id,
+                    licence(
+                        challenge=cdm.get_license_challenge(session_id, self.pssh)
+                    )
+                )
+
+                self.content_keys = {
+                    key.kid: key.key.hex()
+                    for key in cdm.get_keys(session_id, "CONTENT")
+                }
+                if not self.content_keys:
+                    raise ValueError("No Content Keys were returned by the License")
+
+                if kid not in self.content_keys:
+                    raise ValueError(f"No Content Key with the KID ({kid.hex}) was returned")
+            finally:
+                # always release the CDM session, even when the license fails
+                cdm.close(session_id)
+
+    def decrypt(self, track: TrackT) -> None:
+        """
+        Decrypt a Track with Widevine DRM.
+        Raises:
+            EnvironmentError if the Shaka Packager executable could not be found.
+            ValueError if the track has not yet been downloaded.
+            SubprocessError if Shaka Packager returned a non-zero exit code.
+        """
+        if not self.content_keys:
+            raise ValueError("Cannot decrypt a Track without any Content Keys...")
+
+        # map sys.platform to the suffix used in packager release binary names
+        platform = {"win32": "win", "darwin": "osx"}.get(sys.platform, sys.platform)
+        executable = get_binary_path("shaka-packager", f"packager-{platform}", f"packager-{platform}-x64")
+        if not executable:
+            raise EnvironmentError("Shaka Packager executable not found but is required.")
+        if not track.path or not track.path.exists():
+            raise ValueError("Tried to decrypt a track that has not yet been downloaded.")
+
+        decrypted_path = track.path.with_suffix(f".decrypted{track.path.suffix}")
+        config.directories.temp.mkdir(parents=True, exist_ok=True)
+        try:
+            subprocess.check_call([
+                executable,
+                f"input={track.path},stream=0,output={decrypted_path}",
+                "--enable_raw_key_decryption", "--keys",
+                ",".join([
+                    *[
+                        "label={}:key_id={}:key={}".format(i, kid.hex, key.lower())
+                        for i, (kid, key) in enumerate(self.content_keys.items())
+                    ],
+                    *[
+                        # Apple TV+ needs this as their files do not use the KID supplied in its manifest
+                        "label={}:key_id={}:key={}".format(i, "00" * 16, key.lower())
+                        for i, (kid, key) in enumerate(self.content_keys.items(), len(self.content_keys))
+                    ]
+                ]),
+                "--temp_dir", config.directories.temp
+            ])
+        except subprocess.CalledProcessError as e:
+            raise subprocess.SubprocessError(f"Failed to Decrypt! Shaka Packager Error: {e}")
+        track.swap(decrypted_path)
+        track.drm = None  # the track is no longer encrypted
+
+    class Exceptions:
+        class PSSHNotFound(Exception):
+            """PSSH (Protection System Specific Header) was not found."""
+
+        class KIDNotFound(Exception):
+            """KID (Encryption Key ID) was not found."""
+
+
+# NOTE(review): should likely be lowercase `__all__` with name strings — confirm intent.
+__ALL__ = (Widevine,)
diff --git a/devine/core/manifests/__init__.py b/devine/core/manifests/__init__.py
new file mode 100644
index 0000000..fd7ad85
--- /dev/null
+++ b/devine/core/manifests/__init__.py
@@ -0,0 +1,2 @@
+from .dash import DASH
+from .hls import HLS
diff --git a/devine/core/manifests/dash.py b/devine/core/manifests/dash.py
new file mode 100644
index 0000000..0ffa3a2
--- /dev/null
+++ b/devine/core/manifests/dash.py
@@ -0,0 +1,432 @@
+from __future__ import annotations
+
+import base64
+from hashlib import md5
+
+import math
+import re
+from copy import copy
+from typing import Any, Optional, Union, Callable
+from urllib.parse import urljoin, urlparse
+from uuid import UUID
+
+import requests
+from langcodes import Language, tag_is_valid
+from pywidevine.cdm import Cdm as WidevineCdm
+from pywidevine.pssh import PSSH
+from requests import Session
+
+from devine.core.drm import Widevine
+from devine.core.tracks import Tracks, Video, Audio, Subtitle
+from devine.core.utilities import is_close_match, FPS
+from devine.core.utils.xml import load_xml
+
+
+class DASH:
+    def __init__(self, manifest, url: str):
+        """
+        Parameters:
+            manifest: Parsed MPD XML document; its root tag must be 'MPD'.
+            url: The manifest's own URL, used to resolve relative paths.
+
+        Raises:
+            ValueError: If no manifest was provided.
+            TypeError: If the document is not an MPD, or url is not a str.
+            requests.URLRequired: If no URL was provided.
+        """
+        if manifest is None:
+            raise ValueError("DASH manifest must be provided.")
+        if manifest.tag != "MPD":
+            raise TypeError(f"Expected 'MPD' document, but received a '{manifest.tag}' document instead.")
+
+        if not url:
+            raise requests.URLRequired("DASH manifest URL must be provided for relative path computations.")
+        if not isinstance(url, str):
+            raise TypeError(f"Expected url to be a {str}, not {url!r}")
+
+        self.manifest = manifest
+        self.url = url
+
+    @classmethod
+    def from_url(cls, url: str, session: Optional[Session] = None, **args: Any) -> DASH:
+        """
+        Fetch and parse an MPD document from a URL.
+
+        Parameters:
+            url: URL of the MPD document.
+            session: Optional requests Session (e.g. for custom headers/proxies).
+            **args: Extra keyword arguments passed through to session.get().
+
+        Raises:
+            requests.URLRequired: If no URL was provided.
+            TypeError: If url or session are of unexpected types.
+            requests.ConnectionError: If the request returned a non-OK status.
+        """
+        if not url:
+            raise requests.URLRequired("DASH manifest URL must be provided for relative path computations.")
+        if not isinstance(url, str):
+            raise TypeError(f"Expected url to be a {str}, not {url!r}")
+
+        if not session:
+            session = Session()
+        elif not isinstance(session, Session):
+            raise TypeError(f"Expected session to be a {Session}, not {session!r}")
+
+        res = session.get(url, **args)
+        if not res.ok:
+            raise requests.ConnectionError(
+                "Failed to request the MPD document.",
+                response=res
+            )
+
+        return DASH.from_text(res.text, url)
+
+    @classmethod
+    def from_text(cls, text: str, url: str) -> DASH:
+        """
+        Parse an MPD document from its text content.
+
+        Parameters:
+            text: The raw MPD XML text.
+            url: The manifest's URL, used to resolve relative paths.
+
+        Raises:
+            ValueError: If no text was provided.
+            TypeError: If text or url are not str.
+            requests.URLRequired: If no URL was provided.
+        """
+        if not text:
+            raise ValueError("DASH manifest Text must be provided.")
+        if not isinstance(text, str):
+            raise TypeError(f"Expected text to be a {str}, not {text!r}")
+
+        if not url:
+            raise requests.URLRequired("DASH manifest URL must be provided for relative path computations.")
+        if not isinstance(url, str):
+            raise TypeError(f"Expected url to be a {str}, not {url!r}")
+
+        manifest = load_xml(text)
+
+        return cls(manifest, url)
+
+    def to_tracks(self, language: Union[str, Language], period_filter: Optional[Callable] = None) -> Tracks:
+        """
+        Convert an MPEG-DASH MPD (Media Presentation Description) document to Video, Audio and Subtitle Track objects.
+
+        Parameters:
+            language: Language you expect the Primary Track to be in.
+            period_filter: Filter out period's within the manifest. Called per
+                Period; a truthy return skips that Period.
+
+        All Track URLs will be a list of segment URLs.
+        Only the first non-filtered Period is processed (see the break below).
+
+        Raises:
+            ValueError: On missing content type, unsupported codecs, or missing
+                language information.
+        """
+        tracks = Tracks()
+
+        for period in self.manifest.findall("Period"):
+            if callable(period_filter) and period_filter(period):
+                continue
+
+            # resolve the Period/MPD BaseURL against the manifest URL if relative
+            period_base_url = period.findtext("BaseURL") or self.manifest.findtext("BaseURL")
+            if not period_base_url or not re.match("^https?://", period_base_url, re.IGNORECASE):
+                period_base_url = urljoin(self.url, period_base_url)
+
+            for adaptation_set in period.findall("AdaptationSet"):
+                # flags derived from the AdaptationSet's descriptors
+                trick_mode = any(
+                    x.get("schemeIdUri") == "http://dashif.org/guidelines/trickmode"
+                    for x in (
+                            adaptation_set.findall("EssentialProperty") +
+                            adaptation_set.findall("SupplementalProperty")
+                    )
+                )
+                descriptive = any(
+                    (x.get("schemeIdUri"), x.get("value")) == ("urn:mpeg:dash:role:2011", "descriptive")
+                    for x in adaptation_set.findall("Accessibility")
+                ) or any(
+                    (x.get("schemeIdUri"), x.get("value")) == ("urn:tva:metadata:cs:AudioPurposeCS:2007", "1")
+                    for x in adaptation_set.findall("Accessibility")
+                )
+                forced = any(
+                    (x.get("schemeIdUri"), x.get("value")) == ("urn:mpeg:dash:role:2011", "forced-subtitle")
+                    for x in adaptation_set.findall("Role")
+                )
+                cc = any(
+                    (x.get("schemeIdUri"), x.get("value")) == ("urn:mpeg:dash:role:2011", "caption")
+                    for x in adaptation_set.findall("Role")
+                )
+
+                if trick_mode:
+                    # we don't want trick mode streams (they are only used for fast-forward/rewind)
+                    continue
+
+                for rep in adaptation_set.findall("Representation"):
+                    supplements = rep.findall("SupplementalProperty") + adaptation_set.findall("SupplementalProperty")
+
+                    content_type = adaptation_set.get("contentType") or \
+                        adaptation_set.get("mimeType") or \
+                        rep.get("contentType") or \
+                        rep.get("mimeType")
+                    if not content_type:
+                        raise ValueError("No content type value could be found")
+                    content_type = content_type.split("/")[0]
+
+                    codecs = rep.get("codecs") or adaptation_set.get("codecs")
+
+                    if content_type.startswith("image"):
+                        # we don't want what's likely thumbnails for the seekbar
+                        continue
+                    if content_type == "application":
+                        # possibly application/mp4 which could be mp4-boxed subtitles
+                        try:
+                            Subtitle.Codec.from_mime(codecs)
+                            content_type = "text"
+                        except ValueError:
+                            raise ValueError(f"Unsupported content type '{content_type}' with codecs of '{codecs}'")
+
+                    if content_type == "text":
+                        mime = adaptation_set.get("mimeType")
+                        if mime and not mime.endswith("/mp4"):
+                            # use the mime subtype (e.g. "vtt") as the codec name
+                            codecs = mime.split("/")[1]
+
+                    # Dolby EC3 JOC (Atmos) object count, if declared
+                    joc = next((
+                        x.get("value")
+                        for x in supplements
+                        if x.get("schemeIdUri") == "tag:dolby.com,2018:dash:EC3_ExtensionComplexityIndex:2018"
+                    ), None)
+
+                    track_lang = DASH.get_language(rep.get("lang"), adaptation_set.get("lang"), language)
+                    if not track_lang:
+                        raise ValueError(
+                            "One or more Tracks had no Language information. "
+                            "The provided fallback language is not valid or is `None` or `und`."
+                        )
+
+                    drm = DASH.get_drm(rep.findall("ContentProtection") + adaptation_set.findall("ContentProtection"))
+
+                    # from here we need to calculate the Segment Template and compute a final list of URLs
+
+                    segment_urls = DASH.get_segment_urls(
+                        representation=rep,
+                        period_duration=period.get("duration") or self.manifest.get("mediaPresentationDuration"),
+                        fallback_segment_template=adaptation_set.find("SegmentTemplate"),
+                        fallback_base_url=period_base_url,
+                        fallback_query=urlparse(self.url).query
+                    )
+
+                    # for some reason it's incredibly common for services to not provide
+                    # a good and actually unique track ID, sometimes because of the lang
+                    # dialect not being represented in the id, or the bitrate, or such.
+                    # this combines all of them as one and hashes it to keep it small(ish).
+                    track_id = md5("{codec}-{lang}-{bitrate}-{base_url}-{extra}".format(
+                        codec=codecs,
+                        lang=track_lang,
+                        bitrate=rep.get("bandwidth") or 0,  # subs may not state bandwidth
+                        base_url=(rep.findtext("BaseURL") or "").split("?")[0],
+                        extra=(adaptation_set.get("audioTrackId") or "") + (rep.get("id") or "") +
+                              (period.get("id") or "")
+                    ).encode()).hexdigest()
+
+                    if content_type == "video":
+                        track_type = Video
+                        track_codec = Video.Codec.from_codecs(codecs)
+                    elif content_type == "audio":
+                        track_type = Audio
+                        track_codec = Audio.Codec.from_codecs(codecs)
+                    elif content_type == "text":
+                        track_type = Subtitle
+                        track_codec = Subtitle.Codec.from_codecs(codecs or "vtt")
+                    else:
+                        raise ValueError(f"Unknown Track Type '{content_type}'")
+
+                    tracks.add(track_type(
+                        id_=track_id,
+                        url=segment_urls,
+                        codec=track_codec,
+                        language=track_lang,
+                        is_original_lang=not track_lang or not language or is_close_match(track_lang, [language]),
+                        descriptor=Video.Descriptor.MPD,
+                        extra=(rep, adaptation_set),
+                        # video track args
+                        **(dict(
+                            range_=(
+                                # Dolby Vision codec prefixes take priority over CICP values
+                                Video.Range.DV
+                                if codecs.startswith(("dva1", "dvav", "dvhe", "dvh1")) else
+                                Video.Range.from_cicp(
+                                    primaries=next((
+                                        int(x.get("value"))
+                                        for x in (
+                                            adaptation_set.findall("SupplementalProperty")
+                                            + adaptation_set.findall("EssentialProperty")
+                                        )
+                                        if x.get("schemeIdUri") == "urn:mpeg:mpegB:cicp:ColourPrimaries"
+                                    ), 0),
+                                    transfer=next((
+                                        int(x.get("value"))
+                                        for x in (
+                                            adaptation_set.findall("SupplementalProperty")
+                                            + adaptation_set.findall("EssentialProperty")
+                                        )
+                                        if x.get("schemeIdUri") == "urn:mpeg:mpegB:cicp:TransferCharacteristics"
+                                    ), 0),
+                                    matrix=next((
+                                        int(x.get("value"))
+                                        for x in (
+                                            adaptation_set.findall("SupplementalProperty")
+                                            + adaptation_set.findall("EssentialProperty")
+                                        )
+                                        if x.get("schemeIdUri") == "urn:mpeg:mpegB:cicp:MatrixCoefficients"
+                                    ), 0)
+                                )
+                            ),
+                            bitrate=rep.get("bandwidth"),
+                            # NOTE(review): the fallback returns the AdaptationSet's raw
+                            # attribute (a str, not int) — confirm downstream handles both
+                            width=int(rep.get("width") or 0) or adaptation_set.get("width"),
+                            height=int(rep.get("height") or 0) or adaptation_set.get("height"),
+                            fps=(
+                                rep.get("frameRate") or
+                                adaptation_set.get("frameRate") or
+                                # NOTE(review): raises AttributeError if no SegmentBase
+                                # element exists on the Representation — confirm intended
+                                FPS.parse(rep.find("SegmentBase").get("timescale"))
+                            ),
+                            drm=drm
+                        ) if track_type is Video else dict(
+                            bitrate=rep.get("bandwidth"),
+                            channels=next(iter(
+                                rep.xpath("AudioChannelConfiguration/@value")
+                                or adaptation_set.xpath("AudioChannelConfiguration/@value")
+                            ), None),
+                            joc=joc,
+                            descriptive=descriptive,
+                            drm=drm
+                        ) if track_type is Audio else dict(
+                            forced=forced,
+                            cc=cc
+                        ) if track_type is Subtitle else {})
+                    ))
+
+            # only get tracks from the first main-content period
+            break
+
+        return tracks
+
+    @staticmethod
+    def get_language(*options: Any) -> Optional[Language]:
+        """
+        Return the first valid, non-'und' language tag among options, if any.
+        Returns None when no option qualifies.
+        """
+        for option in options:
+            # NOTE(review): str(None) is "None", not "" — the `or ""` fallback
+            # never triggers; presumably "None" then fails tag_is_valid. Confirm.
+            option = (str(option) or "").strip()
+            if not tag_is_valid(option) or option.startswith("und"):
+                continue
+            return Language.get(option)
+
+    @staticmethod
+    def get_drm(protections) -> Optional[list[Widevine]]:
+        """
+        Build Widevine DRM objects from ContentProtection elements.
+
+        Parameters:
+            protections: List of ContentProtection XML elements.
+
+        Returns:
+            A list of Widevine objects, or None if no Widevine protection with
+            a pssh element was found.
+        """
+        drm = []
+        for protection in protections:
+            # TODO: Add checks for PlayReady, FairPlay, maybe more
+            urn = (protection.get("schemeIdUri") or "").lower()
+            if urn != WidevineCdm.urn:
+                continue
+
+            pssh = protection.findtext("pssh")
+            if not pssh:
+                continue
+            pssh = PSSH(pssh)
+
+            # "kid" attribute is base64-encoded raw bytes
+            kid = protection.get("kid")
+            if kid:
+                kid = UUID(bytes=base64.b64decode(kid))
+
+            # "default_KID" (CENC) is a UUID string and takes precedence
+            default_kid = protection.get("default_KID")
+            if default_kid:
+                kid = UUID(default_kid)
+
+            if not pssh.key_ids and not kid:
+                # weird manifest, look across all protections for a default_KID
+                kid = next((
+                    UUID(protection.get("default_KID"))
+                    for protection in protections
+                    if protection.get("default_KID")
+                ), None)
+
+            drm.append(Widevine(
+                pssh=pssh,
+                kid=kid
+            ))
+
+        if not drm:
+            drm = None
+
+        return drm
+
+    @staticmethod
+    def pt_to_sec(d: Union[str, float]) -> float:
+        if isinstance(d, float):
+            return d
+        has_ymd = d[0:8] == "P0Y0M0DT"
+        if d[0:2] != "PT" and not has_ymd:
+            raise ValueError("Input data is not a valid time string.")
+        if has_ymd:
+            d = d[6:].upper()  # skip `P0Y0M0DT`
+        else:
+            d = d[2:].upper()  # skip `PT`
+        m = re.findall(r"([\d.]+.)", d)
+        return sum(
+            float(x[0:-1]) * {"H": 60 * 60, "M": 60, "S": 1}[x[-1].upper()]
+            for x in m
+        )
+
+    @staticmethod
+    def replace_fields(url: str, **kwargs: Any) -> str:
+        for field, value in kwargs.items():
+            url = url.replace(f"${field}$", str(value))
+            m = re.search(fr"\${re.escape(field)}%([a-z0-9]+)\$", url, flags=re.I)
+            if m:
+                url = url.replace(m.group(), f"{value:{m.group(1)}}")
+        return url
+
+    @staticmethod
+    def get_segment_urls(
+        representation,
+        period_duration: str,
+        fallback_segment_template,
+        fallback_base_url: Optional[str] = None,
+        fallback_query: Optional[str] = None
+    ) -> list[str]:
+        segment_urls: list[str] = []
+        segment_template = representation.find("SegmentTemplate") or fallback_segment_template
+        base_url = representation.findtext("BaseURL") or fallback_base_url
+
+        if segment_template is None:
+            # We could implement SegmentBase, but it's basically a list of Byte Range's to download
+            # So just return the Base URL as a segment, why give the downloader extra effort
+            return [urljoin(fallback_base_url, base_url)]
+
+        segment_template = copy(segment_template)
+        start_number = int(segment_template.get("startNumber") or 1)
+        segment_timeline = segment_template.find("SegmentTimeline")
+
+        for item in ("initialization", "media"):
+            value = segment_template.get(item)
+            if not value:
+                continue
+            if not re.match("^https?://", value, re.IGNORECASE):
+                if not base_url:
+                    raise ValueError("Resolved Segment URL is not absolute, and no Base URL is available.")
+                value = urljoin(base_url, value)
+            if not urlparse(value).query and fallback_query:
+                value += f"?{fallback_query}"
+            segment_template.set(item, value)
+
+        initialization = segment_template.get("initialization")
+        if initialization:
+            segment_urls.append(DASH.replace_fields(
+                initialization,
+                Bandwidth=representation.get("bandwidth"),
+                RepresentationID=representation.get("id")
+            ))
+
+        if segment_timeline is not None:
+            seg_time_list = []
+            current_time = 0
+            for s in segment_timeline.findall("S"):
+                if s.get("t"):
+                    current_time = int(s.get("t"))
+                for _ in range(1 + (int(s.get("r") or 0))):
+                    seg_time_list.append(current_time)
+                    current_time += int(s.get("d"))
+            seg_num_list = list(range(start_number, len(seg_time_list) + start_number))
+            segment_urls += [
+                DASH.replace_fields(
+                    segment_template.get("media"),
+                    Bandwidth=representation.get("bandwidth"),
+                    Number=n,
+                    RepresentationID=representation.get("id"),
+                    Time=t
+                )
+                for t, n in zip(seg_time_list, seg_num_list)
+            ]
+        else:
+            if not period_duration:
+                raise ValueError("Duration of the Period was unable to be determined.")
+            period_duration = DASH.pt_to_sec(period_duration)
+
+            segment_duration = (
+                float(segment_template.get("duration")) / float(segment_template.get("timescale") or 1)
+            )
+            total_segments = math.ceil(period_duration / segment_duration)
+            segment_urls += [
+                DASH.replace_fields(
+                    segment_template.get("media"),
+                    Bandwidth=representation.get("bandwidth"),
+                    Number=s,
+                    RepresentationID=representation.get("id"),
+                    Time=s
+                )
+                for s in range(start_number, start_number + total_segments)
+            ]
+
+        return segment_urls
+
+
+__ALL__ = (DASH,)
diff --git a/devine/core/manifests/hls.py b/devine/core/manifests/hls.py
new file mode 100644
index 0000000..1787102
--- /dev/null
+++ b/devine/core/manifests/hls.py
@@ -0,0 +1,217 @@
+from __future__ import annotations
+
+import re
+from hashlib import md5
+from typing import Union, Any, Optional
+
+import m3u8
+import requests
+from langcodes import Language
+from m3u8 import M3U8
+from pywidevine.cdm import Cdm as WidevineCdm
+from pywidevine.pssh import PSSH
+from requests import Session
+
+from devine.core.drm import ClearKey, Widevine, DRM_T
+from devine.core.tracks import Tracks, Video, Audio, Subtitle
+from devine.core.utilities import is_close_match
+
+
+class HLS:
+    """
+    Parser for HLS (M3U8) Variant Playlist manifests.
+
+    Wraps an m3u8.M3U8 Variant Playlist and converts it into devine
+    Video/Audio/Subtitle Track objects via to_tracks().
+    """
+
+    def __init__(self, manifest: M3U8, session: Optional[Session] = None):
+        if not manifest:
+            raise ValueError("HLS manifest must be provided.")
+        if not isinstance(manifest, M3U8):
+            raise TypeError(f"Expected manifest to be a {M3U8}, not {manifest!r}")
+        if not manifest.is_variant:
+            raise ValueError("Expected the M3U(8) manifest to be a Variant Playlist.")
+
+        self.manifest = manifest
+        # used for follow-up requests, e.g. fetching invariant playlists for keys
+        self.session = session or Session()
+
+    @classmethod
+    def from_url(cls, url: str, session: Optional[Session] = None, **args: Any) -> HLS:
+        """Fetch and parse an HLS Variant Playlist from a URL."""
+        if not url:
+            raise requests.URLRequired("HLS manifest URL must be provided.")
+        if not isinstance(url, str):
+            raise TypeError(f"Expected url to be a {str}, not {url!r}")
+
+        if not session:
+            session = Session()
+        elif not isinstance(session, Session):
+            raise TypeError(f"Expected session to be a {Session}, not {session!r}")
+
+        res = session.get(url, **args)
+        if not res.ok:
+            raise requests.ConnectionError(
+                "Failed to request the M3U(8) document.",
+                response=res
+            )
+
+        # keep the uri so relative playlist/media URIs can be resolved later
+        master = m3u8.loads(res.text, uri=url)
+
+        return cls(master, session)
+
+    @classmethod
+    def from_text(cls, text: str, url: str) -> HLS:
+        """Parse an HLS Variant Playlist from already-fetched document text."""
+        if not text:
+            raise ValueError("HLS manifest Text must be provided.")
+        if not isinstance(text, str):
+            raise TypeError(f"Expected text to be a {str}, not {text!r}")
+
+        if not url:
+            raise requests.URLRequired("HLS manifest URL must be provided for relative path computations.")
+        if not isinstance(url, str):
+            raise TypeError(f"Expected url to be a {str}, not {url!r}")
+
+        master = m3u8.loads(text, uri=url)
+
+        return cls(master)
+
+    def to_tracks(self, language: Union[str, Language], **args: Any) -> Tracks:
+        """
+        Convert a Variant Playlist M3U(8) document to Video, Audio and Subtitle Track objects.
+
+        Parameters:
+            language: Language you expect the Primary Track to be in.
+            args: You may pass any arbitrary named header to be passed to all requests made within
+                this method.
+
+        All Track objects' URL will be to another M3U(8) document. However, these documents
+        will be Invariant Playlists and contain the list of segments URIs among other metadata.
+        """
+        # keys declared at session level apply to every playlist/media entry
+        session_drm = HLS.get_drm(self.manifest.session_keys)
+
+        # remembers the audio codec per EXT-X-MEDIA group, taken from the
+        # variant streams that reference the group
+        audio_codecs_by_group_id: dict[str, Audio.Codec] = {}
+        tracks = Tracks()
+
+        # variant streams: the "primary" (video or audio-only) tracks
+        for playlist in self.manifest.playlists:
+            url = playlist.uri
+            if not re.match("^https?://", url):
+                url = playlist.base_uri + url
+
+            audio_group = playlist.stream_info.audio
+            if audio_group:
+                audio_codec = Audio.Codec.from_codecs(playlist.stream_info.codecs)
+                audio_codecs_by_group_id[audio_group] = audio_codec
+
+            if session_drm:
+                drm = session_drm
+            else:
+                # keys may be in the invariant playlist instead, annoying...
+                res = self.session.get(url, **args)
+                if not res.ok:
+                    raise requests.ConnectionError(
+                        "Failed to request an invariant M3U(8) document.",
+                        response=res
+                    )
+
+                invariant_playlist = m3u8.loads(res.text, url)
+                drm = HLS.get_drm(invariant_playlist.keys)
+
+            try:
+                # TODO: Any better way to figure out the primary track type?
+                Video.Codec.from_codecs(playlist.stream_info.codecs)
+            except ValueError:
+                primary_track_type = Audio
+            else:
+                primary_track_type = Video
+
+            tracks.add(primary_track_type(
+                id_=md5(str(playlist).encode()).hexdigest()[0:7],  # 7 chars only for filename length
+                url=url,
+                codec=primary_track_type.Codec.from_codecs(playlist.stream_info.codecs),
+                language=language,  # HLS manifests do not seem to have language info
+                is_original_lang=True,  # TODO: All we can do is assume Yes
+                bitrate=playlist.stream_info.average_bandwidth or playlist.stream_info.bandwidth,
+                descriptor=Video.Descriptor.M3U,
+                drm=drm,
+                extra=playlist,
+                # video track args
+                **(dict(
+                    # Dolby Vision codec fourccs imply a DV range regardless
+                    # of the VIDEO-RANGE tag
+                    range_=Video.Range.DV if any(
+                        codec.split(".")[0] in ("dva1", "dvav", "dvhe", "dvh1")
+                        for codec in playlist.stream_info.codecs.lower().split(",")
+                    ) else Video.Range.from_m3u_range_tag(playlist.stream_info.video_range),
+                    width=playlist.stream_info.resolution[0],
+                    height=playlist.stream_info.resolution[1],
+                    fps=playlist.stream_info.frame_rate
+                ) if primary_track_type is Video else {})
+            ))
+
+        # EXT-X-MEDIA entries: alternate audio renditions and subtitles
+        for media in self.manifest.media:
+            url = media.uri
+            if not url:
+                continue
+
+            if not re.match("^https?://", url):
+                url = media.base_uri + url
+
+            if media.type == "AUDIO":
+                if session_drm:
+                    drm = session_drm
+                else:
+                    # keys may be in the invariant playlist instead, annoying...
+                    res = self.session.get(url, **args)
+                    if not res.ok:
+                        raise requests.ConnectionError(
+                            "Failed to request an invariant M3U(8) document.",
+                            response=res
+                        )
+
+                    invariant_playlist = m3u8.loads(res.text, url)
+                    drm = HLS.get_drm(invariant_playlist.keys)
+            else:
+                # subtitles are not expected to be encrypted
+                drm = None
+
+            if media.type == "AUDIO":
+                track_type = Audio
+                # codec comes from the variant stream that referenced this group
+                codec = audio_codecs_by_group_id.get(media.group_id)
+            else:
+                track_type = Subtitle
+                codec = Subtitle.Codec.WebVTT  # assuming WebVTT, codec info isn't shown
+
+            tracks.add(track_type(
+                id_=md5(str(media).encode()).hexdigest()[0:6],  # 6 chars only for filename length
+                url=url,
+                codec=codec,
+                language=media.language or language,  # HLS media may not have language info, fallback if needed
+                is_original_lang=language and is_close_match(media.language, [language]),
+                descriptor=Audio.Descriptor.M3U,
+                drm=drm,
+                extra=media,
+                # audio track args
+                **(dict(
+                    bitrate=0,  # TODO: M3U doesn't seem to state bitrate?
+                    channels=media.channels,
+                    descriptive="public.accessibility.describes-video" in (media.characteristics or ""),
+                ) if track_type is Audio else dict(
+                    forced=media.forced == "YES",
+                    sdh="public.accessibility.describes-music-and-sound" in (media.characteristics or ""),
+                ) if track_type is Subtitle else {})
+            ))
+
+        return tracks
+
+    @staticmethod
+    def get_drm(keys: list[Union[m3u8.model.SessionKey, m3u8.model.Key]]) -> list[DRM_T]:
+        """
+        Build DRM objects from M3U(8) EXT-X-KEY / EXT-X-SESSION-KEY entries.
+
+        Handles AES (ClearKey) and Widevine (by CENC scheme or keyformat urn).
+        Unrecognized key methods are skipped; may return an empty list.
+        """
+        drm = []
+
+        for key in keys:
+            if not key:
+                continue
+            # TODO: Add checks for Merlin, FairPlay, PlayReady, maybe more.
+            if key.method.startswith("AES"):
+                drm.append(ClearKey.from_m3u_key(key))
+            elif key.method == "ISO-23001-7":
+                # CENC: the key URI carries the KID after the last comma
+                drm.append(Widevine(PSSH.new(key_ids=[key.uri.split(",")[-1]], system_id=PSSH.SystemId.Widevine)))
+            elif key.keyformat and key.keyformat.lower() == WidevineCdm.urn:
+                # Widevine keyformat: the key URI carries a base64 PSSH box
+                drm.append(Widevine(
+                    pssh=PSSH(key.uri.split(",")[-1]),
+                    **key._extra_params  # noqa
+                ))
+
+        return drm
+
+
+__ALL__ = (HLS,)
diff --git a/devine/core/proxies/__init__.py b/devine/core/proxies/__init__.py
new file mode 100644
index 0000000..818a7d7
--- /dev/null
+++ b/devine/core/proxies/__init__.py
@@ -0,0 +1,3 @@
+from .basic import Basic
+from .hola import Hola
+from .nordvpn import NordVPN
diff --git a/devine/core/proxies/basic.py b/devine/core/proxies/basic.py
new file mode 100644
index 0000000..fb2a7ce
--- /dev/null
+++ b/devine/core/proxies/basic.py
@@ -0,0 +1,30 @@
+import random
+from typing import Optional
+
+from devine.core.proxies.proxy import Proxy
+
+
+class Basic(Proxy):
+    def __init__(self, **countries):
+        """Basic Proxy Service using Proxies specified in the config."""
+        self.countries = countries
+
+    def __repr__(self) -> str:
+        countries = len(self.countries)
+        servers = len(self.countries.values())
+
+        return f"{countries} Countr{['ies', 'y'][countries == 1]} ({servers} Server{['s', ''][servers == 1]})"
+
+    def get_proxy(self, query: str) -> Optional[str]:
+        """Get a proxy URI from the config."""
+        servers = self.countries.get(query)
+        if not servers:
+            return
+
+        proxy = random.choice(servers)
+
+        if "://" not in proxy:
+            # TODO: Improve the test for a valid URI
+            raise ValueError(f"The proxy '{proxy}' is not a valid proxy URI supported by Python-Requests.")
+
+        return proxy
diff --git a/devine/core/proxies/hola.py b/devine/core/proxies/hola.py
new file mode 100644
index 0000000..1be75cf
--- /dev/null
+++ b/devine/core/proxies/hola.py
@@ -0,0 +1,69 @@
+import random
+import re
+import subprocess
+from typing import Optional
+
+from devine.core.proxies.proxy import Proxy
+from devine.core.utilities import get_binary_path
+
+
+class Hola(Proxy):
+    def __init__(self):
+        """
+        Proxy Service using Hola's direct connections via the hola-proxy project.
+        https://github.com/Snawoot/hola-proxy
+        """
+        self.binary = get_binary_path("hola-proxy")
+        if not self.binary:
+            raise EnvironmentError("hola-proxy executable not found but is required for the Hola proxy provider.")
+
+        self.countries = self.get_countries()
+
+    def __repr__(self) -> str:
+        """Summarize the number of available countries."""
+        countries = len(self.countries)
+
+        return f"{countries} Countr{['ies', 'y'][countries == 1]}"
+
+    def get_proxy(self, query: str) -> Optional[str]:
+        """
+        Get an HTTP proxy URI for a Datacenter ('direct') or Residential ('lum') Hola server.
+
+        TODO: - Add ability to select 'lum' proxies (residential proxies).
+              - Return and use Proxy Authorization
+        """
+        query = query.lower()
+
+        # parsing below relies on hola-proxy's stdout format; a format change
+        # upstream would break the regexes
+        p = subprocess.check_output([
+            self.binary,
+            "-country", query,
+            "-list-proxies"
+        ], stderr=subprocess.STDOUT).decode()
+
+        if "Transaction error: temporary ban detected." in p:
+            raise ConnectionError("Hola banned your IP temporarily from it's services. Try change your IP.")
+
+        # proxy_authorization is currently unused (see TODO above)
+        username, password, proxy_authorization = re.search(
+            r"Login: (.*)\nPassword: (.*)\nProxy-Authorization: (.*)", p
+        ).groups()
+
+        servers = re.findall(r"(zagent.*)", p)
+        proxies = []
+        for server in servers:
+            # only ip_address and the peer port are used; the remaining
+            # unpacked fields exist to document the CSV column layout
+            host, ip_address, direct, peer, hola, trial, trial_peer, vendor = server.split(",")
+            proxies.append(f"http://{username}:{password}@{ip_address}:{peer}")
+
+        proxy = random.choice(proxies)
+        return proxy
+
+    def get_countries(self) -> list[dict[str, str]]:
+        """Get a list of available Countries."""
+        p = subprocess.check_output([
+            self.binary,
+            "-list-countries"
+        ]).decode("utf8")
+
+        # each line looks like "us - United States"
+        return [
+            {code: name}
+            for country in p.splitlines()
+            for (code, name) in [country.split(" - ", maxsplit=1)]
+        ]
diff --git a/devine/core/proxies/nordvpn.py b/devine/core/proxies/nordvpn.py
new file mode 100644
index 0000000..12b6046
--- /dev/null
+++ b/devine/core/proxies/nordvpn.py
@@ -0,0 +1,138 @@
+import json
+import re
+from typing import Optional
+
+import requests
+
+from devine.core.proxies.proxy import Proxy
+
+
+class NordVPN(Proxy):
+    def __init__(self, username: str, password: str, server_map: Optional[dict[str, int]] = None):
+        """
+        Proxy Service using NordVPN Service Credentials.
+
+        A username and password must be provided. These are Service Credentials, not your Login Credentials.
+        The Service Credentials can be found here: https://my.nordaccount.com/dashboard/nordvpn/
+        """
+        if not username:
+            raise ValueError("No Username was provided to the NordVPN Proxy Service.")
+        if not password:
+            raise ValueError("No Password was provided to the NordVPN Proxy Service.")
+        if not re.match(r"^[a-z0-9]{48}$", username + password, re.IGNORECASE) or "@" in username:
+            raise ValueError(
+                "The Username and Password must be NordVPN Service Credentials, not your Login Credentials. "
+                "The Service Credentials can be found here: https://my.nordaccount.com/dashboard/nordvpn/"
+            )
+
+        if server_map is not None and not isinstance(server_map, dict):
+            raise TypeError(f"Expected server_map to be a dict mapping a region to a server ID, not '{server_map!r}'.")
+
+        self.username = username
+        self.password = password
+        self.server_map = server_map or {}
+
+        self.countries = self.get_countries()
+
+    def __repr__(self) -> str:
+        countries = len(self.countries)
+        servers = sum(x["servers_count"] for x in self.countries)
+
+        return f"{countries} Countr{['ies', 'y'][countries == 1]} ({servers} Server{['s', ''][servers == 1]})"
+
+    def get_proxy(self, query: str) -> Optional[str]:
+        """
+        Get an HTTP(SSL) proxy URI for a NordVPN server.
+
+        HTTP proxies under port 80 were disabled on the 15th of Feb, 2021:
+        https://nordvpn.com/blog/removing-http-proxies
+        """
+        query = query.lower()
+        if re.match(r"^[a-z]{2}\d+$", query):
+            # country and nordvpn server id, e.g., us1, fr1234
+            hostname = f"{query}.nordvpn.com"
+        else:
+            if query.isdigit():
+                # country id
+                country = self.get_country(by_id=int(query))
+            elif re.match(r"^[a-z]+$", query):
+                # country code
+                country = self.get_country(by_code=query)
+            else:
+                raise ValueError(f"The query provided is unsupported and unrecognized: {query}")
+            if not country:
+                # NordVPN doesnt have servers in this region
+                return
+
+            server_mapping = self.server_map.get(country["code"].lower())
+            if server_mapping:
+                # country was set to a specific server ID in config
+                hostname = f"{country['code'].lower()}{server_mapping}.nordvpn.com"
+            else:
+                # get the recommended server ID
+                recommended_servers = self.get_recommended_servers(country["id"])
+                if not recommended_servers:
+                    raise ValueError(
+                        f"The NordVPN Country {query} currently has no recommended servers. "
+                        "Try again later. If the issue persists, double-check the query."
+                    )
+                hostname = recommended_servers[0]["hostname"]
+
+        if hostname.startswith("gb"):
+            # NordVPN uses the alpha2 of 'GB' in API responses, but 'UK' in the hostname
+            hostname = f"gb{hostname[2:]}"
+
+        return f"https://{self.username}:{self.password}@{hostname}:89"
+
+    def get_country(
+        self,
+        by_id: Optional[int] = None,
+        by_code: Optional[str] = None
+    ) -> Optional[dict]:
+        """Search for a Country and it's metadata."""
+        if all(x is None for x in (by_id, by_code)):
+            raise ValueError("At least one search query must be made.")
+
+        for country in self.countries:
+            if all([
+                by_id is None or country["id"] == int(by_id),
+                by_code is None or country["code"] == by_code.upper()
+            ]):
+                return country
+
+    @staticmethod
+    def get_recommended_servers(country_id: int) -> list[dict]:
+        """
+        Get the list of recommended Servers for a Country.
+
+        Note: There may not always be more than one recommended server.
+        """
+        res = requests.get(
+            url="https://nordvpn.com/wp-admin/admin-ajax.php",
+            params={
+                "action": "servers_recommendations",
+                "filters": json.dumps({"country_id": country_id})
+            }
+        )
+        if not res.ok:
+            raise ValueError(f"Failed to get a list of NordVPN countries [{res.status_code}]")
+
+        try:
+            return res.json()
+        except json.JSONDecodeError:
+            raise ValueError("Could not decode list of NordVPN countries, not JSON data.")
+
+    @staticmethod
+    def get_countries() -> list[dict]:
+        """Get a list of available Countries and their metadata."""
+        res = requests.get(
+            url="https://nordvpn.com/wp-admin/admin-ajax.php",
+            params={"action": "servers_countries"}
+        )
+        if not res.ok:
+            raise ValueError(f"Failed to get a list of NordVPN countries [{res.status_code}]")
+
+        try:
+            return res.json()
+        except json.JSONDecodeError:
+            raise ValueError("Could not decode list of NordVPN countries, not JSON data.")
diff --git a/devine/core/proxies/proxy.py b/devine/core/proxies/proxy.py
new file mode 100644
index 0000000..10e044a
--- /dev/null
+++ b/devine/core/proxies/proxy.py
@@ -0,0 +1,31 @@
+from abc import ABCMeta, abstractmethod
+from typing import Optional
+
+
+class Proxy:
+    @abstractmethod
+    def __init__(self, **kwargs):
+        """
+        The constructor initializes the Service using passed configuration data.
+
+        Any authorization or pre-fetching of data should be done here.
+        """
+
+    @abstractmethod
+    def __repr__(self) -> str:
+        """Return a string denoting a list of Countries and Servers (if possible)."""
+        countries = ...
+        servers = ...
+        return f"{countries} Countr{['ies', 'y'][countries == 1]} ({servers} Server{['s', ''][servers == 1]})"
+
+    @abstractmethod
+    def get_proxy(self, query: str) -> Optional[str]:
+        """
+        Get a Proxy URI from the Proxy Service.
+
+        Only return None if the query was accepted, but no proxy could be returned.
+        Otherwise, please use exceptions to denote any errors with the call or query.
+
+        The returned Proxy URI must be a string supported by Python-Requests:
+        '{scheme}://[{user}:{pass}@]{host}:{port}'
+        """
diff --git a/devine/core/service.py b/devine/core/service.py
new file mode 100644
index 0000000..71861e1
--- /dev/null
+++ b/devine/core/service.py
@@ -0,0 +1,209 @@
+from __future__ import annotations
+
+import base64
+import logging
+from abc import ABCMeta, abstractmethod
+from http.cookiejar import MozillaCookieJar, CookieJar
+from typing import Optional, Union
+from urllib.parse import urlparse
+
+import click
+import requests
+from requests.adapters import Retry, HTTPAdapter
+
+from devine.core.config import config
+from devine.core.constants import AnyTrack
+from devine.core.titles import Titles_T, Title_T
+from devine.core.tracks import Chapter, Tracks
+from devine.core.utilities import get_ip_info
+from devine.core.cacher import Cacher
+from devine.core.credential import Credential
+
+
+class Service(metaclass=ABCMeta):
+    """The Service Base Class."""
+
+    # Abstract class variables
+    ALIASES: tuple[str, ...] = ()  # list of aliases for the service; alternatives to the service tag.
+    GEOFENCE: tuple[str, ...] = ()  # list of ip regions required to use the service. empty list == no specific region.
+
+    def __init__(self, ctx: click.Context):
+        # Service config as loaded by the CLI (per-service YAML/TOML section)
+        self.config = ctx.obj.config
+
+        # the Service must be invoked as a nested click sub-command;
+        # parent contexts are expected to exist
+        assert ctx.parent is not None
+        assert ctx.parent.parent is not None
+
+        self.log = logging.getLogger(self.__class__.__name__)
+        self.session = self.get_session()
+        self.cache = Cacher(self.__class__.__name__)
+
+        self.proxy = ctx.parent.params["proxy"]
+        if not self.proxy and self.GEOFENCE:
+            # no explicit proxy, let's get one to GEOFENCE if needed
+            current_region = get_ip_info(self.session)["country"].lower()
+            if not any([x.lower() == current_region for x in self.GEOFENCE]):
+                requested_proxy = self.GEOFENCE[0]  # first is likely main region
+                self.log.info(f"Current IP region is blocked by the service, getting Proxy to {requested_proxy}")
+                # current region is not in any of the service's supported regions
+                for proxy_provider in ctx.obj.proxy_providers:
+                    self.proxy = proxy_provider.get_proxy(requested_proxy)
+                    if self.proxy:
+                        self.log.info(f" + {self.proxy} (from {proxy_provider.__class__.__name__})")
+                        break
+        if self.proxy:
+            # route all schemes through the proxy, and pre-emptively send
+            # Proxy-Authorization if credentials are embedded in the URI
+            self.session.proxies.update({"all": self.proxy})
+            proxy_parse = urlparse(self.proxy)
+            if proxy_parse.username and proxy_parse.password:
+                self.session.headers.update({
+                    "Proxy-Authorization": base64.b64encode(
+                        f"{proxy_parse.username}:{proxy_parse.password}".encode("utf8")
+                    ).decode()
+                })
+
+    # Optional Abstract functions
+    # The following functions may be implemented by the Service.
+    # Otherwise, the base service code (if any) of the function will be executed on call.
+    # The functions will be executed in shown order.
+
+    def get_session(self) -> requests.Session:
+        """
+        Creates a Python-requests Session, adds common headers
+        from config, cookies, retry handler, and a proxy if available.
+        :returns: Prepared Python-requests Session
+        """
+        session = requests.Session()
+        session.headers.update(config.headers)
+        session.mount("https://", HTTPAdapter(
+            max_retries=Retry(
+                total=15,
+                backoff_factor=0.2,
+                status_forcelist=[429, 500, 502, 503, 504]
+            )
+        ))
+        session.mount("http://", session.adapters["https://"])
+        return session
+
+    def authenticate(self, cookies: Optional[MozillaCookieJar] = None, credential: Optional[Credential] = None) -> None:
+        """
+        Authenticate the Service with Cookies and/or Credentials (Email/Username and Password).
+
+        This is effectively a login() function. Any API calls or object initializations
+        needing to be made, should be made here. This will be run before any of the
+        following abstract functions.
+
+        You should avoid storing or using the Credential outside this function.
+        Make any calls you need for any Cookies, Tokens, or such, then use those.
+
+        The Cookie jar should also not be stored outside this function. However, you may load
+        the Cookie jar into the service session.
+        """
+        if cookies is not None:
+            if not isinstance(cookies, CookieJar):
+                raise TypeError(f"Expected cookies to be a {MozillaCookieJar}, not {cookies!r}.")
+            self.session.cookies.update(cookies)
+
+    def get_widevine_service_certificate(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Union[bytes, str]:
+        """
+        Get the Widevine Service Certificate used for Privacy Mode.
+
+        The base implementation is a no-op (implicitly returns None);
+        override it if the Service supports or requires Privacy Mode.
+
+        :param challenge: The service challenge, providing this to a License endpoint should return the
+            privacy certificate that the service uses.
+        :param title: The current `Title` from get_titles that is being executed. This is provided in
+            case it has data needed to be used, e.g. for a HTTP request.
+        :param track: The current `Track` needing decryption. Provided for same reason as `title`.
+        :return: The Service Privacy Certificate as Bytes or a Base64 string. Don't Base64 Encode or
+            Decode the data, return as is to reduce unnecessary computations.
+        """
+
+    def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[Union[bytes, str]]:
+        """
+        Get a Widevine License message by sending a License Request (challenge).
+
+        This License message contains the encrypted Content Decryption Keys and will be
+        read by the Cdm and decrypted.
+
+        This is a very important request to get correct. A bad, unexpected, or missing
+        value in the request can cause your key to be detected and promptly banned,
+        revoked, disabled, or downgraded.
+
+        The base implementation is a no-op (implicitly returns None);
+        override it for Services with Widevine-protected content.
+
+        :param challenge: The license challenge from the Widevine CDM.
+        :param title: The current `Title` from get_titles that is being executed. This is provided in
+            case it has data needed to be used, e.g. for a HTTP request.
+        :param track: The current `Track` needing decryption. Provided for same reason as `title`.
+        :return: The License response as Bytes or a Base64 string. Don't Base64 Encode or
+            Decode the data, return as is to reduce unnecessary computations.
+        """
+
+    # Required Abstract functions
+    # The following functions *must* be implemented by the Service.
+    # The functions will be executed in shown order.
+
    @abstractmethod
    def get_titles(self) -> Titles_T:
        """
        Get Titles for the provided title ID.

        Return a Movies, Series, or Album object containing Movie, Episode, or Song title
        objects respectively.
        The returned data must be for the given title ID, or a spawn of the title ID.

        At least one object is expected to be returned, or it will presume an invalid Title ID was
        provided.

        You can use the `data` dictionary class instance attribute of each Title to store data you may need later on.
        This can be useful to store information on each title that will be required like any sub-asset IDs, or such.
        """
+
    @abstractmethod
    def get_tracks(self, title: Title_T) -> Tracks:
        """
        Get Track objects of the Title.

        Return a Tracks object, which itself can contain Video, Audio, Subtitle or even Chapters.
        Tracks.videos, Tracks.audio, Tracks.subtitles, and Tracks.chapters should be a List of Track objects.

        Each Track in the Tracks should represent a Video/Audio Stream/Representation/Adaptation or
        a Subtitle file.

        While one Track should only hold information for one stream/downloadable, try to get as many
        unique Track objects per stream type so Stream selection by the root code can give you more
        options in terms of Resolution, Bitrate, Codecs, Language, etc.

        No decision making or filtering of which Tracks get returned should happen here. It can be
        considered an error to filter for e.g. resolution, codec, and such. All filtering based on
        arguments will be done by the root code automatically when needed.

        Make sure you correctly mark which Tracks are encrypted or not, and by which DRM System
        via its `drm` property.

        If you are able to obtain the Track's KID (Key ID) as a 32 char (16 byte) hex string, provide
        it to the Track's `kid` variable as it will speed up the decryption process later on. It may
        or may not be needed, that depends on the service. Generally if you can provide it, without
        downloading any of the Track's stream data, then do.

        :param title: The current `Title` from get_titles that is being executed.
        :return: Tracks object containing Video, Audio, Subtitles, and Chapters, if available.
        """
+
    @abstractmethod
    def get_chapters(self, title: Title_T) -> list[Chapter]:
        """
        Get Chapter objects of the Title.

        Return a list of Chapter objects. This will be run after get_tracks. If there's anything
        from the get_tracks that may be needed, e.g. "device_id" or a-like, store it in the class
        via `self` and re-use the value in get_chapters.

        How it's used is generally the same as get_titles. These are only separated as to reduce
        function complexity and keep them focused on simple tasks.

        You do not need to sort or order the chapters in any way. However, you do need to filter
        and alter them as needed by the service. No modification is made after get_chapters is
        run. So that means ensure that the Chapter objects returned have consistent Chapter Titles
        and Chapter Numbers.

        :param title: The current `Title` from get_titles that is being executed.
        :return: List of Chapter objects, if available, empty list otherwise.
        """
+
+
+__ALL__ = (Service,)
diff --git a/devine/core/services.py b/devine/core/services.py
new file mode 100644
index 0000000..d15ece9
--- /dev/null
+++ b/devine/core/services.py
@@ -0,0 +1,89 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+import click
+
+from devine.core.config import config
+from devine.core.service import Service
+from devine.core.utilities import import_module_by_path
+
# Every Service package (a directory holding an __init__.py) under the configured
# services directory, sorted by Service tag (the directory name).
_SERVICES = sorted(
    config.directories.services.glob("*/__init__.py"),
    key=lambda x: x.parent.stem
)

# Service tag -> the object exposed on the package under the same name as the
# package directory (the Service class, by convention).
_MODULES = {
    path.parent.stem: getattr(import_module_by_path(path), path.parent.stem)
    for path in _SERVICES
}

# Service tag -> the ALIASES sequence declared on that Service.
_ALIASES = {
    tag: module.ALIASES
    for tag, module in _MODULES.items()
}
+
+
class Services(click.MultiCommand):
    """Lazy-loaded command group of project services."""

    # Click-specific methods

    def list_commands(self, ctx: click.Context) -> list[str]:
        """Returns a list of all available Services as command names for Click."""
        return Services.get_tags()

    def get_command(self, ctx: click.Context, name: str) -> click.Command:
        """
        Load the Service and return the Click CLI method.

        :raises click.ClickException: if the Service has no `cli` attribute.
        """
        tag = Services.get_tag(name)
        service = Services.load(tag)

        if hasattr(service, "cli"):
            return service.cli

        raise click.ClickException(f"Service '{tag}' has no 'cli' method configured.")

    # Methods intended to be used anywhere

    @staticmethod
    def get_tags() -> list[str]:
        """Returns a list of service tags from all available Services."""
        return [x.parent.stem for x in _SERVICES]

    @staticmethod
    def get_path(name: str) -> Path:
        """
        Get the directory path of a Service by tag or alias.

        :raises click.ClickException: if no Service matches `name`.
        """
        tag = Services.get_tag(name)
        for service in _SERVICES:
            if service.parent.stem == tag:
                return service.parent
        raise click.ClickException(f"Unable to find service by the name '{name}'")

    @staticmethod
    def get_tag(value: str) -> str:
        """
        Get the Service Tag (e.g. DSNP, not DisneyPlus/Disney+, etc.) by an Alias.
        Input value can be of any case-sensitivity.
        Original input value is returned if it did not match a service tag.
        """
        original_value = value
        value = value.lower()
        for path in _SERVICES:
            tag = path.parent.stem
            # NOTE(review): `value` is lower-cased but ALIASES entries are compared
            # as declared, so aliases only match if declared in lowercase — confirm
            # that is the convention for Service.ALIASES.
            if value in (tag.lower(), *_ALIASES.get(tag, [])):
                return tag
        return original_value

    @staticmethod
    def load(tag: str) -> type[Service]:
        """
        Load a Service class by Service tag.

        Note that _MODULES stores the Service class itself (not a module object),
        so this returns the class ready to be instantiated or used by Click.

        :raises click.ClickException: if no Service matches `tag`.
        """
        module = _MODULES.get(tag)
        if not module:
            raise click.ClickException(f"Unable to find Service by the tag '{tag}'")
        return module
+
+
+__ALL__ = (Services,)
diff --git a/devine/core/titles/__init__.py b/devine/core/titles/__init__.py
new file mode 100644
index 0000000..3b0e89e
--- /dev/null
+++ b/devine/core/titles/__init__.py
@@ -0,0 +1,9 @@
+from typing import Union
+
+from .episode import Episode, Series
+from .movie import Movie, Movies
+from .song import Song, Album
+
+
# Type alias for any single concrete title a Service may produce.
Title_T = Union[Movie, Episode, Song]
# Type alias for the matching sorted collection of those titles.
Titles_T = Union[Movies, Series, Album]
diff --git a/devine/core/titles/episode.py b/devine/core/titles/episode.py
new file mode 100644
index 0000000..6e39b58
--- /dev/null
+++ b/devine/core/titles/episode.py
@@ -0,0 +1,195 @@
+import re
+from abc import ABC
+from collections import Counter
+from typing import Any, Optional, Union, Iterable
+
+from langcodes import Language
+from pymediainfo import MediaInfo
+from sortedcontainers import SortedKeyList
+
+from devine.core.config import config
+from devine.core.constants import AUDIO_CODEC_MAP, DYNAMIC_RANGE_MAP, VIDEO_CODEC_MAP
+from devine.core.titles.title import Title
+from devine.core.utilities import sanitize_filename
+
+
class Episode(Title):
    """A single Episode of a Series from a Service."""

    def __init__(
        self,
        id_: Any,
        service: type,
        title: str,
        season: Union[int, str],
        number: Union[int, str],
        name: Optional[str] = None,
        year: Optional[Union[int, str]] = None,
        language: Optional[Union[str, Language]] = None,
        data: Optional[Any] = None,
    ) -> None:
        """
        Create an Episode.

        :param id_: Unique identifier for this episode (handled by Title).
        :param service: Service class the episode came from (handled by Title).
        :param title: The Series title this episode belongs to.
        :param season: Season number; digit-strings are converted to int. Season 0 is accepted.
        :param number: Episode number; digit-strings are converted to int. Number 0 is accepted.
        :param name: Episode name; generic names like "Episode #4" or names that merely
            repeat the series title are discarded.
        :param year: Release year; digit-strings are converted to int.
        :param language: Original language of the episode (handled by Title).
        :param data: Arbitrary service-specific storage (handled by Title).
        :raises ValueError: if a required value is missing or year is not positive.
        :raises TypeError: if a value has an unexpected type.
        """
        super().__init__(id_, service, language, data)

        if not title:
            raise ValueError("Episode title must be provided")
        if not isinstance(title, str):
            raise TypeError(f"Expected title to be a str, not {title!r}")

        # `season != 0 and not season` rejects None/"" but still allows season 0
        if season != 0 and not season:
            raise ValueError("Episode season must be provided")
        if isinstance(season, str) and season.isdigit():
            season = int(season)
        elif not isinstance(season, int):
            raise TypeError(f"Expected season to be an int, not {season!r}")

        if number != 0 and not number:
            raise ValueError("Episode number must be provided")
        if isinstance(number, str) and number.isdigit():
            number = int(number)
        elif not isinstance(number, int):
            raise TypeError(f"Expected number to be an int, not {number!r}")

        if name is not None and not isinstance(name, str):
            raise TypeError(f"Expected name to be a str, not {name!r}")

        if year is not None:
            if isinstance(year, str) and year.isdigit():
                year = int(year)
            elif not isinstance(year, int):
                raise TypeError(f"Expected year to be an int, not {year!r}")

        title = title.strip()

        if name is not None:
            name = name.strip()
            # ignore episode names that are the episode number or title name
            if re.match(r"Episode ?#?\d+", name, re.IGNORECASE):
                name = None
            elif name.lower() == title.lower():
                name = None

        if year is not None and year <= 0:
            raise ValueError(f"Episode year cannot be {year}")

        self.title = title
        self.season = season
        self.number = number
        self.name = name
        self.year = year

    def __str__(self) -> str:
        # "Title SxxExx Name"; trailing space from a missing name is stripped
        return "{title} S{season:02}E{number:02} {name}".format(
            title=self.title,
            season=self.season,
            number=self.number,
            name=self.name or ""
        ).strip()

    def get_filename(self, media_info: MediaInfo, folder: bool = False, show_service: bool = True) -> str:
        """
        Build a scene-style filename for this episode from its MediaInfo.

        :param media_info: MediaInfo object of the file this name will be used for.
        :param folder: Build a folder name ("Title Sxx") instead of a full episode filename.
        :param show_service: Include the Service class name tag in the filename.
        :return: Sanitized filename string.
        """
        primary_video_track = next(iter(media_info.video_tracks), None)
        primary_audio_track = next(iter(media_info.audio_tracks), None)
        # count unique primary-language subtags (e.g. "en" from "en-US")
        unique_audio_languages = len({
            x.language.split("-")[0]
            for x in media_info.audio_tracks
            if x.language
        })

        # Title SXXEXX Name (or Title SXX if folder)
        if folder:
            name = f"{self.title} S{self.season:02}"
        else:
            name = "{title} S{season:02}E{number:02} {name}".format(
                title=self.title.replace("$", "S"),  # e.g., Arli$$
                season=self.season,
                number=self.number,
                name=self.name or ""
            ).strip()

        # MULTi
        if unique_audio_languages > 1:
            name += " MULTi"

        # Resolution
        if primary_video_track:
            resolution = primary_video_track.height
            aspect_ratio = [
                int(float(plane))
                for plane in primary_video_track.other_display_aspect_ratio[0].split(":")
            ]
            if len(aspect_ratio) == 1:
                # e.g., aspect ratio of 2 (2.00:1) would end up as `(2.0,)`, add 1
                aspect_ratio.append(1)
            if aspect_ratio[0] / aspect_ratio[1] not in (16 / 9, 4 / 3):
                # We want the resolution represented in a 4:3 or 16:9 canvas.
                # If it's not 4:3 or 16:9, calculate as if it's inside a 16:9 canvas,
                # otherwise the track's height value is fine.
                # We are assuming this title is some weird aspect ratio so most
                # likely a movie or HD source, so it's most likely widescreen so
                # 16:9 canvas makes the most sense.
                resolution = int(primary_video_track.width * (9 / 16))
            name += f" {resolution}p"

        # Service
        if show_service:
            name += f" {self.service.__name__}"

        # 'WEB-DL'
        name += " WEB-DL"

        # Audio Codec + Channels (+ feature)
        if primary_audio_track:
            codec = primary_audio_track.format
            channel_layout = primary_audio_track.channel_layout or primary_audio_track.channellayout_original
            # LFE positions count as 0.1, every other position as 1 (e.g. "L R C LFE" -> 3.1)
            channels = float(sum(
                {"LFE": 0.1}.get(position.upper(), 1)
                for position in channel_layout.split(" ")
            ))
            features = primary_audio_track.format_additionalfeatures or ""
            name += f" {AUDIO_CODEC_MAP.get(codec, codec)}{channels:.1f}"
            # "JOC" in the additional features marks E-AC-3 Joint Object Coding (Atmos)
            if "JOC" in features:
                name += " Atmos"

        # Video (dynamic range + hfr +) Codec
        if primary_video_track:
            codec = primary_video_track.format
            hdr_format = primary_video_track.hdr_format_commercial
            trc = primary_video_track.transfer_characteristics or primary_video_track.transfer_characteristics_original
            frame_rate = float(primary_video_track.frame_rate)
            if hdr_format:
                name += f" {DYNAMIC_RANGE_MAP.get(hdr_format)} "
            elif trc and "HLG" in trc:
                name += " HLG"
            if frame_rate > 30:
                name += " HFR"
            name += f" {VIDEO_CODEC_MAP.get(codec, codec)}"

        # Release-group tag suffix, if configured
        if config.tag:
            name += f"-{config.tag}"

        return sanitize_filename(name)
+
+
class Series(SortedKeyList, ABC):
    """A sorted collection of Episode objects, ordered by season, number, then year."""

    def __init__(self, iterable: Optional[Iterable] = None):
        # A missing year sorts as 0, i.e. before any real year.
        super().__init__(iterable, key=lambda ep: (ep.season, ep.number, ep.year or 0))

    def __str__(self) -> str:
        """Render a small tree summary of the series with per-season episode counts."""
        if not self:
            return super().__str__()

        first = self[0]
        per_season = Counter(episode.season for episode in self)
        branches = [
            f"├─ S{season:02}: {count} episodes"
            for season, count in per_season.items()
        ]
        branches[-1] = branches[-1].replace("├", "└")

        return "\n".join([
            f"Series: {first.title} ({first.year or '?'})",
            f"Episodes: ({len(self)})",
            *branches
        ])
+
+
+__ALL__ = (Episode, Series)
diff --git a/devine/core/titles/movie.py b/devine/core/titles/movie.py
new file mode 100644
index 0000000..e744b38
--- /dev/null
+++ b/devine/core/titles/movie.py
@@ -0,0 +1,155 @@
+from abc import ABC
+from typing import Any, Optional, Union, Iterable
+
+from langcodes import Language
+from pymediainfo import MediaInfo
+from sortedcontainers import SortedKeyList
+
+from devine.core.config import config
+from devine.core.constants import AUDIO_CODEC_MAP, DYNAMIC_RANGE_MAP, VIDEO_CODEC_MAP
+from devine.core.titles.title import Title
+from devine.core.utilities import sanitize_filename
+
+
class Movie(Title):
    """A single Movie title from a Service."""

    def __init__(
        self,
        id_: Any,
        service: type,
        name: str,
        year: Optional[Union[int, str]] = None,
        language: Optional[Union[str, Language]] = None,
        data: Optional[Any] = None,
    ) -> None:
        """
        Create a Movie.

        :param id_: Unique identifier for this movie (handled by Title).
        :param service: Service class the movie came from (handled by Title).
        :param name: Movie name.
        :param year: Release year; digit-strings are converted to int.
        :param language: Original language of the movie (handled by Title).
        :param data: Arbitrary service-specific storage (handled by Title).
        :raises ValueError: if the name is missing or year is not positive.
        :raises TypeError: if a value has an unexpected type.
        """
        super().__init__(id_, service, language, data)

        if not name:
            raise ValueError("Movie name must be provided")
        if not isinstance(name, str):
            raise TypeError(f"Expected name to be a str, not {name!r}")

        if year is not None:
            if isinstance(year, str) and year.isdigit():
                year = int(year)
            elif not isinstance(year, int):
                raise TypeError(f"Expected year to be an int, not {year!r}")

        name = name.strip()

        if year is not None and year <= 0:
            raise ValueError(f"Movie year cannot be {year}")

        self.name = name
        self.year = year

    def __str__(self) -> str:
        # "Name (Year)" or just "Name" when the year is unknown
        if self.year:
            return f"{self.name} ({self.year})"
        return self.name

    def get_filename(self, media_info: MediaInfo, folder: bool = False, show_service: bool = True) -> str:
        """
        Build a scene-style filename for this movie from its MediaInfo.

        :param media_info: MediaInfo object of the file this name will be used for.
        :param folder: Accepted for interface parity; the movie name is built the
            same way either way.
        :param show_service: Include the Service class name tag in the filename.
        :return: Sanitized filename string.
        """
        primary_video_track = next(iter(media_info.video_tracks), None)
        primary_audio_track = next(iter(media_info.audio_tracks), None)
        # count unique primary-language subtags (e.g. "en" from "en-US")
        unique_audio_languages = len({
            x.language.split("-")[0]
            for x in media_info.audio_tracks
            if x.language
        })

        # Name (Year)
        name = str(self).replace("$", "S")  # e.g., Arli$$

        # MULTi
        if unique_audio_languages > 1:
            name += " MULTi"

        # Resolution
        if primary_video_track:
            resolution = primary_video_track.height
            aspect_ratio = [
                int(float(plane))
                for plane in primary_video_track.other_display_aspect_ratio[0].split(":")
            ]
            if len(aspect_ratio) == 1:
                # e.g., aspect ratio of 2 (2.00:1) would end up as `(2.0,)`, add 1
                aspect_ratio.append(1)
            if aspect_ratio[0] / aspect_ratio[1] not in (16 / 9, 4 / 3):
                # We want the resolution represented in a 4:3 or 16:9 canvas.
                # If it's not 4:3 or 16:9, calculate as if it's inside a 16:9 canvas,
                # otherwise the track's height value is fine.
                # We are assuming this title is some weird aspect ratio so most
                # likely a movie or HD source, so it's most likely widescreen so
                # 16:9 canvas makes the most sense.
                resolution = int(primary_video_track.width * (9 / 16))
            name += f" {resolution}p"

        # Service
        if show_service:
            name += f" {self.service.__name__}"

        # 'WEB-DL'
        name += " WEB-DL"

        # Audio Codec + Channels (+ feature)
        if primary_audio_track:
            codec = primary_audio_track.format
            channel_layout = primary_audio_track.channel_layout or primary_audio_track.channellayout_original
            # LFE positions count as 0.1, every other position as 1 (e.g. "L R C LFE" -> 3.1)
            channels = float(sum(
                {"LFE": 0.1}.get(position.upper(), 1)
                for position in channel_layout.split(" ")
            ))
            features = primary_audio_track.format_additionalfeatures or ""
            name += f" {AUDIO_CODEC_MAP.get(codec, codec)}{channels:.1f}"
            # "JOC" in the additional features marks E-AC-3 Joint Object Coding (Atmos)
            if "JOC" in features:
                name += " Atmos"

        # Video (dynamic range + hfr +) Codec
        if primary_video_track:
            codec = primary_video_track.format
            hdr_format = primary_video_track.hdr_format_commercial
            trc = primary_video_track.transfer_characteristics or primary_video_track.transfer_characteristics_original
            frame_rate = float(primary_video_track.frame_rate)
            if hdr_format:
                name += f" {DYNAMIC_RANGE_MAP.get(hdr_format)} "
            elif trc and "HLG" in trc:
                name += " HLG"
            if frame_rate > 30:
                name += " HFR"
            name += f" {VIDEO_CODEC_MAP.get(codec, codec)}"

        # Release-group tag suffix, if configured
        if config.tag:
            name += f"-{config.tag}"

        return sanitize_filename(name)
+
+
class Movies(SortedKeyList, ABC):
    """A sorted collection of Movie objects, ordered by release year."""

    def __init__(self, iterable: Optional[Iterable] = None):
        # A missing year sorts as 0, i.e. before any real year.
        super().__init__(iterable, key=lambda movie: movie.year or 0)

    def __str__(self) -> str:
        """Render a one-line summary, or a small tree when holding multiple movies."""
        if not self:
            return super().__str__()

        if len(self) == 1:
            only = self[0]
            return f"Movie: {only.name} ({only.year or '?'})"

        lines = [f"Movies: ({len(self)})"]
        lines += [f"├─ {movie.name} ({movie.year or '?'})" for movie in self]
        lines[-1] = lines[-1].replace("├", "└")
        return "\n".join(lines)
+
+
+__ALL__ = (Movie, Movies)
diff --git a/devine/core/titles/song.py b/devine/core/titles/song.py
new file mode 100644
index 0000000..b881eb2
--- /dev/null
+++ b/devine/core/titles/song.py
@@ -0,0 +1,148 @@
+from abc import ABC
+from typing import Any, Optional, Union, Iterable
+
+from langcodes import Language
+from pymediainfo import MediaInfo
+from sortedcontainers import SortedKeyList
+
+from devine.core.config import config
+from devine.core.constants import AUDIO_CODEC_MAP
+from devine.core.titles.title import Title
+from devine.core.utilities import sanitize_filename
+
+
class Song(Title):
    """A single Song of an Album from a Service."""

    def __init__(
        self,
        id_: Any,
        service: type,
        name: str,
        artist: str,
        album: str,
        track: int,
        disc: int,
        year: int,
        language: Optional[Union[str, Language]] = None,
        data: Optional[Any] = None,
    ) -> None:
        """
        Create a Song.

        :param id_: Unique identifier for this song (handled by Title).
        :param service: Service class the song came from (handled by Title).
        :param name: Song name.
        :param artist: Artist name.
        :param album: Album name.
        :param track: Track number on the disc; must be positive.
        :param disc: Disc number; must be positive.
        :param year: Release year; must be positive.
        :param language: Original language (handled by Title).
        :param data: Arbitrary service-specific storage (handled by Title).
        :raises ValueError: if a required value is missing or not positive.
        :raises TypeError: if a value has an unexpected type.
        """
        super().__init__(id_, service, language, data)

        if not name:
            raise ValueError("Song name must be provided")
        if not isinstance(name, str):
            raise TypeError(f"Expected name to be a str, not {name!r}")

        if not artist:
            raise ValueError("Song artist must be provided")
        if not isinstance(artist, str):
            raise TypeError(f"Expected artist to be a str, not {artist!r}")

        if not album:
            raise ValueError("Song album must be provided")
        if not isinstance(album, str):
            # fix: the message previously formatted `name` instead of `album`
            raise TypeError(f"Expected album to be a str, not {album!r}")

        if not track:
            raise ValueError("Song track must be provided")
        if not isinstance(track, int):
            raise TypeError(f"Expected track to be an int, not {track!r}")

        if not disc:
            raise ValueError("Song disc must be provided")
        if not isinstance(disc, int):
            raise TypeError(f"Expected disc to be an int, not {disc!r}")

        if not year:
            raise ValueError("Song year must be provided")
        if not isinstance(year, int):
            raise TypeError(f"Expected year to be an int, not {year!r}")

        name = name.strip()
        artist = artist.strip()
        album = album.strip()

        if track <= 0:
            raise ValueError(f"Song track cannot be {track}")
        if disc <= 0:
            raise ValueError(f"Song disc cannot be {disc}")
        if year <= 0:
            raise ValueError(f"Song year cannot be {year}")

        self.name = name
        self.artist = artist
        self.album = album
        self.track = track
        self.disc = disc
        self.year = year

    def __str__(self) -> str:
        # "Artist - Album (Year) / NN. Name" — get_filename splits on " / "
        return "{artist} - {album} ({year}) / {track:02}. {name}".format(
            artist=self.artist,
            album=self.album,
            year=self.year,
            track=self.track,
            name=self.name
        ).strip()

    def get_filename(self, media_info: MediaInfo, folder: bool = False, show_service: bool = True) -> str:
        """
        Build a filename for this song from its MediaInfo.

        :param media_info: MediaInfo object of the file this name will be used for.
        :param folder: Build the album folder name ("Artist - Album (Year)") instead
            of the track filename ("NN. Name ...").
        :param show_service: Include the Service class name tag in the filename.
        :return: Sanitized filename string.
        """
        # NOTE(review): assumes at least one audio track exists; with none, the
        # attribute accesses below raise AttributeError — confirm callers always
        # pass audio media here.
        audio_track = next(iter(media_info.audio_tracks), None)
        codec = audio_track.format
        channel_layout = audio_track.channel_layout or audio_track.channellayout_original
        # LFE positions count as 0.1, every other position as 1 (e.g. "L R C LFE" -> 3.1)
        channels = float(sum(
            {"LFE": 0.1}.get(position.upper(), 1)
            for position in channel_layout.split(" ")
        ))
        features = audio_track.format_additionalfeatures or ""

        if folder:
            # Artist - Album (Year)
            name = str(self).split(" / ")[0]
        else:
            # NN. Song Name
            name = str(self).split(" / ")[1]

        # Service
        if show_service:
            name += f" {self.service.__name__}"

        # 'WEB-DL'
        name += " WEB-DL"

        # Audio Codec + Channels (+ feature)
        name += f" {AUDIO_CODEC_MAP.get(codec, codec)}{channels:.1f}"
        # "JOC" in the additional features marks E-AC-3 Joint Object Coding (Atmos)
        if "JOC" in features:
            name += " Atmos"

        # Release-group tag suffix, if configured
        if config.tag:
            name += f"-{config.tag}"

        return sanitize_filename(name, " ")
+
+
class Album(SortedKeyList, ABC):
    """A sorted collection of Song objects, ordered by album, disc, track, then year."""

    def __init__(self, iterable: Optional[Iterable] = None):
        # A missing year sorts as 0, i.e. before any real year.
        super().__init__(iterable, key=lambda song: (song.album, song.disc, song.track, song.year or 0))

    def __str__(self) -> str:
        """Render a small tree summary of the album and its tracks."""
        if not self:
            return super().__str__()

        first = self[0]
        entries = [f"├─ {song.track:02}. {song.name}" for song in self]
        entries[-1] = entries[-1].replace("├", "└")

        return "\n".join([
            f"Album: {first.album} ({first.year or '?'})",
            f"Artist: {first.artist}",
            f"Tracks: ({len(self)})",
            *entries
        ])
+
+
+__ALL__ = (Song, Album)
diff --git a/devine/core/titles/title.py b/devine/core/titles/title.py
new file mode 100644
index 0000000..998af7e
--- /dev/null
+++ b/devine/core/titles/title.py
@@ -0,0 +1,72 @@
+from __future__ import annotations
+
+from abc import abstractmethod
+from typing import Optional, Union, Any
+
+from langcodes import Language
+from pymediainfo import MediaInfo
+
+from devine.core.tracks import Tracks
+
+
class Title:
    def __init__(
        self,
        id_: Any,
        service: type,
        language: Optional[Union[str, Language]] = None,
        data: Optional[Any] = None
    ) -> None:
        """
        Media Title from a Service.

        Parameters:
            id_: An identifier for this specific title. It must be unique. Can be of any
                value.
            service: Service class that this title is from.
            language: The original recorded language for the title. If that information
                is not available, this should not be set to anything.
            data: Arbitrary storage for the title. Often used to store extra metadata
                information, IDs, URIs, and so on.

        Raises:
            ValueError: if the ID is falsy or too short, or no service is given.
            TypeError: if the service is not a class, or the language has a bad type.
        """
        if not id_:  # includes 0, false, and similar values, this is intended
            raise ValueError("A unique ID must be provided")
        if hasattr(id_, "__len__") and len(id_) < 4:
            raise ValueError("The unique ID is not large enough, clash likely.")

        if not service:
            raise ValueError("Service class must be provided")
        if not isinstance(service, type):
            raise TypeError(f"Expected service to be a Class (type), not {service!r}")

        if language is not None:
            if isinstance(language, str):
                language = Language.get(language)
            elif not isinstance(language, Language):
                raise TypeError(f"Expected language to be a {Language} or str, not {language!r}")

        self.id = id_
        self.service = service
        self.language = language
        self.data = data

        self.tracks = Tracks()

    def __eq__(self, other: object) -> bool:
        # Titles are equal when their unique IDs are equal. Return NotImplemented
        # (not raise) for non-Title comparands so `title == None` and friends
        # fall back to Python's default instead of an AttributeError.
        if isinstance(other, Title):
            return self.id == other.id
        return NotImplemented

    def __hash__(self) -> int:
        # Defining __eq__ alone would set __hash__ to None (unhashable); hash on
        # the same unique ID that __eq__ compares to keep the eq/hash contract.
        return hash(self.id)

    @abstractmethod
    def get_filename(self, media_info: MediaInfo, folder: bool = False, show_service: bool = True) -> str:
        """
        Get a Filename for this Title with the provided Media Info.
        All filenames should be sanitized with the sanitize_filename() utility function.

        Parameters:
            media_info: MediaInfo object of the file this name will be used for.
            folder: This filename will be used as a folder name. Some changes may want to
                be made if this is the case.
            show_service: Show the service tag (e.g., iT, NF) in the filename.
        """
+
+
+__ALL__ = (Title,)
diff --git a/devine/core/tracks/__init__.py b/devine/core/tracks/__init__.py
new file mode 100644
index 0000000..82e61bd
--- /dev/null
+++ b/devine/core/tracks/__init__.py
@@ -0,0 +1,6 @@
+from .audio import Audio
+from .track import Track
+from .chapter import Chapter
+from .subtitle import Subtitle
+from .tracks import Tracks
+from .video import Video
diff --git a/devine/core/tracks/audio.py b/devine/core/tracks/audio.py
new file mode 100644
index 0000000..3b8feec
--- /dev/null
+++ b/devine/core/tracks/audio.py
@@ -0,0 +1,121 @@
+from __future__ import annotations
+
+import math
+from enum import Enum
+from typing import Any, Optional, Union
+
+from devine.core.tracks.track import Track
+
+
class Audio(Track):
    """An Audio Track, holding audio-specific metadata on top of Track."""

    class Codec(str, Enum):
        """Supported audio codecs, valued by their release-naming tag."""
        AAC = "AAC"    # https://wikipedia.org/wiki/Advanced_Audio_Coding
        AC3 = "DD"     # https://wikipedia.org/wiki/Dolby_Digital
        EC3 = "DD+"    # https://wikipedia.org/wiki/Dolby_Digital_Plus
        OPUS = "OPUS"  # https://wikipedia.org/wiki/Opus_(audio_format)
        OGG = "VORB"  # https://wikipedia.org/wiki/Vorbis
        DTS = "DTS"  # https://en.wikipedia.org/wiki/DTS_(company)#DTS_Digital_Surround
        ALAC = "ALAC"  # https://en.wikipedia.org/wiki/Apple_Lossless_Audio_Codec

        @property
        def extension(self) -> str:
            """File extension for this codec: the enum member name, lowercased."""
            return self.name.lower()

        @staticmethod
        def from_mime(mime: str) -> Audio.Codec:
            """
            Map a MIME codec string (e.g. "mp4a.40.2") to an Audio.Codec.

            :raises ValueError: if the MIME type is not a supported audio codec.
            """
            mime = mime.lower().strip().split(".")[0]
            if mime == "mp4a":
                return Audio.Codec.AAC
            if mime == "ac-3":
                return Audio.Codec.AC3
            if mime == "ec-3":
                return Audio.Codec.EC3
            if mime == "opus":
                return Audio.Codec.OPUS
            if mime == "dtsc":
                return Audio.Codec.DTS
            if mime == "alac":
                return Audio.Codec.ALAC
            raise ValueError(f"The MIME '{mime}' is not a supported Audio Codec")

        @staticmethod
        def from_codecs(codecs: str) -> Audio.Codec:
            """
            Map a comma-separated codecs string to the first supported Audio.Codec.

            :raises ValueError: if no entry matches a supported audio codec.
            """
            for codec in codecs.lower().split(","):
                mime = codec.strip().split(".")[0]
                try:
                    return Audio.Codec.from_mime(mime)
                except ValueError:
                    pass
            raise ValueError(f"No MIME types matched any supported Audio Codecs in '{codecs}'")

        @staticmethod
        def from_netflix_profile(profile: str) -> Audio.Codec:
            """
            Map a Netflix content profile name (e.g. "ddplus-5.1") to an Audio.Codec.

            :raises ValueError: if the profile is not a supported audio codec.
            """
            profile = profile.lower().strip()
            if profile.startswith("heaac"):
                return Audio.Codec.AAC
            if profile.startswith("dd-"):
                return Audio.Codec.AC3
            if profile.startswith("ddplus"):
                return Audio.Codec.EC3
            if profile.startswith("playready-oggvorbis"):
                return Audio.Codec.OGG
            raise ValueError(f"The Content Profile '{profile}' is not a supported Audio Codec")

    def __init__(self, *args: Any, codec: Audio.Codec, bitrate: Optional[Union[str, int, float]],
                 channels: Optional[Union[str, int, float]] = None, joc: int = 0, descriptive: bool = False,
                 **kwargs: Any):
        """
        Create an Audio track.

        :param codec: Audio codec of this track.
        :param bitrate: Bitrate in bits per second; rounded up to an int, or None if falsy.
        :param channels: Channel count or layout code; normalized via parse_channels().
        :param joc: Dolby Digital Plus JOC (Atmos) object count, 0 when not applicable.
        :param descriptive: Whether this is a Descriptive Audio track.
        """
        super().__init__(*args, **kwargs)
        # required
        self.codec = codec
        self.bitrate = int(math.ceil(float(bitrate))) if bitrate else None
        self.channels = self.parse_channels(channels) if channels else None
        # optional
        self.joc = joc
        self.descriptive = bool(descriptive)

    @staticmethod
    def parse_channels(channels: Union[str, int, float]) -> str:
        """
        Normalize an audio channel value to a string.

        E.g. "A000" -> "2.0", "F801" -> "5.1", "2.1" -> "2.1", ".1" -> "0.1",
        and bare integers like "6" or 6 -> "6ch".

        It does not handle values that are incorrect/out of bounds or e.g. 6.0->5.1,
        as that isn't what this is intended for.
        """
        # TODO: Support all possible DASH channel configurations (https://datatracker.ietf.org/doc/html/rfc8216)
        # fix: coerce to str up-front so int/float inputs (allowed by __init__'s
        # annotation) no longer crash on the .upper() call below
        channels = str(channels)
        if channels.upper() == "A000":
            return "2.0"
        if channels.upper() == "F801":
            return "5.1"

        if channels.isdigit():
            # This is to avoid incorrectly transforming channels=6 to 6.0, for example
            return f"{channels}ch"

        try:
            return str(float(channels))
        except ValueError:
            return channels

    def get_track_name(self) -> Optional[str]:
        """Return the base Track Name, with a "(Descriptive)" suffix when applicable."""
        track_name = super().get_track_name() or ""
        flag = self.descriptive and "Descriptive"
        if flag:
            if track_name:
                flag = f" ({flag})"
            track_name += flag
        return track_name or None

    def __str__(self) -> str:
        # One-line summary, e.g.: AUD | [DD+] | 5.1 (JOC 16) | 640 kb/s | en | ...
        return " | ".join(filter(bool, [
            "AUD",
            f"[{self.codec.value}]",
            (self.channels or "2.0?") + (f" (JOC {self.joc})" if self.joc else ""),
            f"{self.bitrate // 1000 if self.bitrate else '?'} kb/s",
            str(self.language),
            self.get_track_name(),
            self.edition
        ]))
+
+
+__ALL__ = (Audio,)
diff --git a/devine/core/tracks/chapter.py b/devine/core/tracks/chapter.py
new file mode 100644
index 0000000..1f33ab1
--- /dev/null
+++ b/devine/core/tracks/chapter.py
@@ -0,0 +1,95 @@
+from __future__ import annotations
+
+import re
+from pathlib import Path
+from typing import Optional, Union
+
+
class Chapter:
    """
    A single chapter marker in the OGM-based Simple Chapter Format.

    More Information:
    https://mkvtoolnix.download/doc/mkvmerge.html#mkvmerge.chapters.simple
    """

    # "CHAPTER01=00:00:00.000" - number and start timecode (H:M:S[.ms] parts).
    # The previous pattern ([\d\\.]+) could not match the ":" separators, so no
    # real timecode ever parsed.
    line_1 = re.compile(r"^CHAPTER(?P<number>\d+)=(?P<timecode>[\d:.]+)$")
    # "CHAPTER01NAME=Intro" - number and title (any text, possibly empty).
    # The previous pattern ([\d\\.]+) rejected titles containing letters/spaces.
    line_2 = re.compile(r"^CHAPTER(?P<number>\d+)NAME=(?P<title>.*)$")

    def __init__(self, number: int, timecode: str, title: Optional[str] = None):
        """
        Parameters:
            number: Chapter number, e.g. 1.
            timecode: Start position, e.g. "00:00:00.000". A milliseconds part
                (".000") is appended if missing.
            title: Optional chapter name.
        """
        self.id = f"chapter-{number}"
        self.number = number
        self.timecode = timecode
        self.title = title

        if "." not in self.timecode:
            # normalize to always carry a milliseconds part
            self.timecode += ".000"

    def __bool__(self) -> bool:
        # wrap in bool(); `x and y` otherwise returns one of its operands
        # (an int or str), violating the declared return type
        return bool(self.number and self.number >= 0 and self.timecode)

    def __repr__(self) -> str:
        """
        OGM-based Simple Chapter Format intended for use with MKVToolNix.

        This format is not officially part of the Matroska spec. This was a format
        designed for OGM tools that MKVToolNix has since re-used. More Information:
        https://mkvtoolnix.download/doc/mkvmerge.html#mkvmerge.chapters.simple
        """
        return "CHAPTER{num}={time}\nCHAPTER{num}NAME={name}".format(
            num=f"{self.number:02}",
            time=self.timecode,
            name=self.title or ""
        )

    def __str__(self) -> str:
        return " | ".join(filter(bool, [
            "CHP",
            f"[{self.number:02}]",
            self.timecode,
            self.title
        ]))

    @property
    def named(self) -> bool:
        """Check if Chapter is named."""
        return bool(self.title)

    @classmethod
    def loads(cls, data: str) -> Chapter:
        """
        Load chapter data from a string of two Simple Format lines.

        Raises:
            SyntaxError: if the data is not exactly one CHAPTERnn= line plus one
                CHAPTERnnNAME= line, or the two chapter numbers differ.
        """
        lines = [x.strip() for x in data.strip().splitlines(keepends=False) if x.strip()]
        if len(lines) != 2:
            # previously this re-joined the same lines and recursed on itself,
            # looping forever for any input with more than two lines
            raise SyntaxError(f"Expected exactly 2 chapter lines, got {len(lines)}.")
        one, two = lines

        one_m = cls.line_1.match(one)
        two_m = cls.line_2.match(two)
        if not one_m or not two_m:
            raise SyntaxError(f"An unexpected syntax error near:\n{one}\n{two}")

        one_str, timecode = one_m.groups()
        two_str, title = two_m.groups()
        # int() already tolerates leading zeros; lstrip("0") would crash on "00"
        one_num, two_num = int(one_str), int(two_str)

        if one_num != two_num:
            raise SyntaxError(f"The chapter numbers ({one_num},{two_num}) does not match.")
        if not timecode:
            raise SyntaxError("The timecode is missing.")
        if not title:
            title = None

        return cls(number=one_num, timecode=timecode, title=title)

    @classmethod
    def load(cls, path: Union[Path, str]) -> Chapter:
        """Load chapter data from a file."""
        if isinstance(path, str):
            path = Path(path)
        return cls.loads(path.read_text(encoding="utf8"))

    def dumps(self) -> str:
        """Return chapter data as a string."""
        return repr(self)

    def dump(self, path: Union[Path, str]) -> int:
        """Write chapter data to a file, returning the character count written."""
        if isinstance(path, str):
            path = Path(path)
        return path.write_text(self.dumps(), encoding="utf8")
+
+
+__ALL__ = (Chapter,)
diff --git a/devine/core/tracks/subtitle.py b/devine/core/tracks/subtitle.py
new file mode 100644
index 0000000..3b6e520
--- /dev/null
+++ b/devine/core/tracks/subtitle.py
@@ -0,0 +1,399 @@
+from __future__ import annotations
+
+import subprocess
+from collections import defaultdict
+from enum import Enum
+from io import BytesIO
+from pathlib import Path
+from typing import Any, Iterable, Optional
+
+import pycaption
+from construct import Container
+from pycaption import Caption, CaptionList, CaptionNode, WebVTTReader
+from pycaption.geometry import Layout
+from pymp4.parser import MP4
+from subtitle_filter import Subtitles
+
+from devine.core.tracks.track import Track
+from devine.core.utilities import get_binary_path
+
+
+class Subtitle(Track):
+    class Codec(str, Enum):
+        SubRip = "SRT"                # https://wikipedia.org/wiki/SubRip
+        SubStationAlpha = "SSA"       # https://wikipedia.org/wiki/SubStation_Alpha
+        SubStationAlphav4 = "ASS"     # https://wikipedia.org/wiki/SubStation_Alpha#Advanced_SubStation_Alpha=
+        TimedTextMarkupLang = "TTML"  # https://wikipedia.org/wiki/Timed_Text_Markup_Language
+        WebVTT = "VTT"                # https://wikipedia.org/wiki/WebVTT
+        # MPEG-DASH box-encapsulated subtitle formats
+        fTTML = "STPP"  # https://www.w3.org/TR/2018/REC-ttml-imsc1.0.1-20180424
+        fVTT = "WVTT"   # https://www.w3.org/TR/webvtt1
+
+        @property
+        def extension(self) -> str:
+            return self.value.lower()
+
+        @staticmethod
+        def from_mime(mime: str) -> Subtitle.Codec:
+            mime = mime.lower().strip().split(".")[0]
+            if mime == "srt":
+                return Subtitle.Codec.SubRip
+            elif mime == "ssa":
+                return Subtitle.Codec.SubStationAlpha
+            elif mime == "ass":
+                return Subtitle.Codec.SubStationAlphav4
+            elif mime == "ttml":
+                return Subtitle.Codec.TimedTextMarkupLang
+            elif mime == "vtt":
+                return Subtitle.Codec.WebVTT
+            elif mime == "stpp":
+                return Subtitle.Codec.fTTML
+            elif mime == "wvtt":
+                return Subtitle.Codec.fVTT
+            raise ValueError(f"The MIME '{mime}' is not a supported Subtitle Codec")
+
+        @staticmethod
+        def from_codecs(codecs: str) -> Subtitle.Codec:
+            for codec in codecs.lower().split(","):
+                mime = codec.strip().split(".")[0]
+                try:
+                    return Subtitle.Codec.from_mime(mime)
+                except ValueError:
+                    pass
+            raise ValueError(f"No MIME types matched any supported Subtitle Codecs in '{codecs}'")
+
+        @staticmethod
+        def from_netflix_profile(profile: str) -> Subtitle.Codec:
+            profile = profile.lower().strip()
+            if profile.startswith("webvtt"):
+                return Subtitle.Codec.WebVTT
+            if profile.startswith("dfxp"):
+                return Subtitle.Codec.TimedTextMarkupLang
+            raise ValueError(f"The Content Profile '{profile}' is not a supported Subtitle Codec")
+
    def __init__(self, *args: Any, codec: Subtitle.Codec, cc: bool = False, sdh: bool = False, forced: bool = False,
                 **kwargs: Any):
        """
        Information on Subtitle Types:
            https://bit.ly/2Oe4fLC (3PlayMedia Blog on SUB vs CC vs SDH).
            However, I wouldn't pay much attention to the claims about SDH needing to
            be in the original source language. It's logically not true.

            CC == Closed Captions. Source: Basically every site.
            SDH = Subtitles for the Deaf or Hard-of-Hearing. Source: Basically every site.
            HOH = Exact same as SDH. Is a term used in the UK. Source: https://bit.ly/2PGJatz (ICO UK)

            More in-depth information, examples, and stuff to look for can be found in the Parameter
            explanation list below.

        Parameters:
            codec: The Subtitle codec/format of the downloaded subtitle data.
            cc: Closed Caption.
                - Intended as if you couldn't hear the audio at all.
                - Can have Sound as well as Dialogue, but doesn't have to.
                - Original source would be from an EIA-CC encoded stream. Typically all
                  upper-case characters.
                Indicators of it being CC without knowing original source:
                  - Extracted with CCExtractor, or
                  - >>> (or similar) being used at the start of some or all lines, or
                  - All text is uppercase or at least the majority, or
                  - Subtitles are Scrolling-text style (one line appears, oldest line
                    then disappears).
                Just because you downloaded it as a SRT or VTT or such, doesn't mean it
                 isn't from an EIA-CC stream. And I wouldn't take the streaming services
                 (CC) as gospel either as they tend to get it wrong too.
            sdh: Deaf or Hard-of-Hearing. Also known as HOH in the UK (EU?).
                 - Intended as if you couldn't hear the audio at all.
                 - MUST have Sound as well as Dialogue to be considered SDH.
                 - It has no "syntax" or "format" but is not transmitted using archaic
                   forms like EIA-CC streams, would be intended for transmission via
                   SubRip (SRT), WebVTT (VTT), TTML, etc.
                 If you can see important audio/sound transcriptions and not just dialogue
                  and it doesn't have the indicators of CC, then it's most likely SDH.
                 If it doesn't have important audio/sounds transcriptions it might just be
                  regular subtitling (you wouldn't mark as CC or SDH). This would be the
                  case for most translation subtitles. Like Anime for example.
            forced: Typically used if there's important information at some point in time
                     like watching Dubbed content and an important Sign or Letter is shown
                     or someone talking in a different language.
                    Forced tracks are recommended by the Matroska Spec to be played if
                     the player's current playback audio language matches a subtitle
                     marked as "forced".
                    However, that doesn't mean every player works like this but there is
                     no other way to reliably work with Forced subtitles where multiple
                     forced subtitles may be in the output file. Just know what to expect
                     with "forced" subtitles.

        Raises:
            ValueError: if the track is flagged as both CC and SDH, or as both
                CC/SDH and Forced (these classifications are mutually exclusive).
        """
        super().__init__(*args, **kwargs)
        self.codec = codec
        self.cc = bool(cc)
        self.sdh = bool(sdh)
        # CC and SDH are distinct classifications; a single track cannot be both
        if self.cc and self.sdh:
            raise ValueError("A text track cannot be both CC and SDH.")
        self.forced = bool(forced)
        if (self.cc or self.sdh) and self.forced:
            raise ValueError("A text track cannot be CC/SDH as well as Forced.")
+
+    def get_track_name(self) -> Optional[str]:
+        """Return the base Track Name."""
+        track_name = super().get_track_name() or ""
+        flag = self.cc and "CC" or self.sdh and "SDH" or self.forced and "Forced"
+        if flag:
+            if track_name:
+                flag = f" ({flag})"
+            track_name += flag
+        return track_name or None
+
    @staticmethod
    def parse(data: bytes, codec: Subtitle.Codec) -> pycaption.CaptionSet:
        """
        Parse raw subtitle bytes into a pycaption CaptionSet.

        Parameters:
            data: The raw subtitle data. For fTTML/fVTT this is the MP4
                box-encapsulated (segmented) form.
            codec: The Subtitle codec the data is encoded in.

        Raises:
            ValueError: if data is not bytes, or the codec is unsupported.
            SyntaxError: if pycaption fails to read the subtitle data.
        """
        if not isinstance(data, bytes):
            raise ValueError(f"Subtitle data must be parsed as bytes data, not {type(data).__name__}")
        try:
            if codec == Subtitle.Codec.fTTML:
                # parse each mdat box as its own TTML document, then merge per-language
                captions: dict[str, pycaption.CaptionList] = defaultdict(pycaption.CaptionList)
                for segment in (
                    Subtitle.parse(box.data, Subtitle.Codec.TimedTextMarkupLang)
                    for box in MP4.parse_stream(BytesIO(data))
                    if box.type == b"mdat"
                ):
                    for lang in segment.get_languages():
                        captions[lang].extend(segment.get_captions(lang))
                captions: pycaption.CaptionSet = pycaption.CaptionSet(captions)
                return captions
            if codec == Subtitle.Codec.TimedTextMarkupLang:
                # strip "tt:" namespace prefixes before handing to the DFXP reader
                text = data.decode("utf8").replace("tt:", "")
                return pycaption.DFXPReader().read(text)
            if codec == Subtitle.Codec.fVTT:
                caption_lists: dict[str, pycaption.CaptionList] = defaultdict(pycaption.CaptionList)
                caption_list, language = Subtitle.merge_segmented_wvtt(data)
                caption_lists[language] = caption_list
                caption_set: pycaption.CaptionSet = pycaption.CaptionSet(caption_lists)
                return caption_set
            if codec == Subtitle.Codec.WebVTT:
                # normalize line endings and blank-line quirks that trip the WebVTT reader
                text = data.decode("utf8").replace("\r", "").replace("\n\n\n", "\n \n\n").replace("\n\n<", "\n<")
                captions: pycaption.CaptionSet = pycaption.WebVTTReader().read(text)
                return captions
        except pycaption.exceptions.CaptionReadSyntaxError:
            raise SyntaxError(f"A syntax error has occurred when reading the \"{codec}\" subtitle")
        except pycaption.exceptions.CaptionReadNoCaptions:
            # no cues at all; return an empty (English-keyed) caption set
            return pycaption.CaptionSet({"en": []})

        raise ValueError(f"Unknown Subtitle Format \"{codec}\"...")
+
    @staticmethod
    def merge_same_cues(caption_set: pycaption.CaptionSet):
        """
        Merge captions with the same timecodes and text as one in-place.

        Consecutive captions sharing identical (start, end) timecodes are
        collected into one group; exact text duplicates within a group are
        dropped, and each group is merged into a single caption.
        """
        for lang in caption_set.get_languages():
            captions = caption_set.get_captions(lang)
            last_caption = None
            concurrent_captions = pycaption.CaptionList()
            merged_captions = pycaption.CaptionList()
            for caption in captions:
                if last_caption:
                    if (caption.start, caption.end) == (last_caption.start, last_caption.end):
                        # same timing as the previous cue; keep it only if the text differs
                        if caption.get_text() != last_caption.get_text():
                            concurrent_captions.append(caption)
                        last_caption = caption
                        continue
                    else:
                        # timing changed; flush the previous group as one merged caption
                        merged_captions.append(pycaption.base.merge(concurrent_captions))
                # start a new group with this caption
                concurrent_captions = [caption]
                last_caption = caption

            if concurrent_captions:
                # flush the final group
                merged_captions.append(pycaption.base.merge(concurrent_captions))
            if merged_captions:
                caption_set.set_captions(lang, merged_captions)
+
    @staticmethod
    def merge_segmented_wvtt(data: bytes, period_start: float = 0.) -> tuple[CaptionList, Optional[str]]:
        """
        Convert Segmented DASH WebVTT cues into a pycaption Caption List.
        Also returns an ISO 639-2 alpha-3 language code if available.

        Parameters:
            data: Concatenated stream of WVTT MP4 boxes (init + media segments).
            period_start: Offset added to each segment's base decode time.

        Code ported originally by xhlove to Python from shaka-player.
        Has since been improved upon by rlaphoenix using pymp4 and
        pycaption functions.
        """
        captions = CaptionList()

        # init:
        saw_wvtt_box = False
        timescale = None
        language = None

        # media:
        # > tfhd
        default_duration = None
        # > tfdt
        saw_tfdt_box = False
        base_time = 0
        # > trun
        saw_trun_box = False
        samples = []

        def flatten_boxes(box: Container) -> Iterable[Container]:
            # depth-first walk yielding every box, flattening children/entries
            for child in box:
                if hasattr(child, "children"):
                    yield from flatten_boxes(child.children)
                    del child["children"]
                if hasattr(child, "entries"):
                    yield from flatten_boxes(child.entries)
                    del child["entries"]
                # some boxes (mainly within 'entries') uses format not type
                child["type"] = child.get("type") or child.get("format")
                yield child

        for box in flatten_boxes(MP4.parse_stream(BytesIO(data))):
            # init
            if box.type == b"mdhd":
                timescale = box.timescale
                language = box.language

            if box.type == b"wvtt":
                saw_wvtt_box = True

            # media
            if box.type == b"styp":
                # essentially the start of each segment
                # media var resets
                # > tfhd
                default_duration = None
                # > tfdt
                saw_tfdt_box = False
                base_time = 0
                # > trun
                saw_trun_box = False
                samples = []

            if box.type == b"tfhd":
                if box.flags.default_sample_duration_present:
                    default_duration = box.default_sample_duration

            if box.type == b"tfdt":
                saw_tfdt_box = True
                base_time = box.baseMediaDecodeTime

            if box.type == b"trun":
                saw_trun_box = True
                samples = box.sample_info

            if box.type == b"mdat":
                if not timescale:
                    raise ValueError("Timescale was not found in the Segmented WebVTT.")
                if not saw_wvtt_box:
                    raise ValueError("The WVTT box was not found in the Segmented WebVTT.")
                if not saw_tfdt_box:
                    raise ValueError("The TFDT box was not found in the Segmented WebVTT.")
                if not saw_trun_box:
                    raise ValueError("The TRUN box was not found in the Segmented WebVTT.")

                vttc_boxes = MP4.parse_stream(BytesIO(box.data))
                current_time = base_time + period_start

                for sample, vttc_box in zip(samples, vttc_boxes):
                    duration = sample.sample_duration or default_duration
                    if sample.sample_composition_time_offsets:
                        current_time += sample.sample_composition_time_offsets

                    start_time = current_time
                    end_time = current_time + (duration or 0)
                    current_time = end_time

                    if vttc_box.type == b"vtte":
                        # vtte is a vttc that's empty, skip
                        continue

                    layout: Optional[Layout] = None
                    nodes: list[CaptionNode] = []

                    for cue_box in MP4.parse_stream(BytesIO(vttc_box.data)):
                        if cue_box.type == b"vsid":
                            # this is a V(?) Source ID box, we don't care
                            continue
                        cue_data = cue_box.data.decode("utf8")
                        if cue_box.type == b"sttg":
                            layout = Layout(webvtt_positioning=cue_data)
                        elif cue_box.type == b"payl":
                            # one text node per line, separated by break nodes
                            nodes.extend([
                                node
                                for line in cue_data.split("\n")
                                for node in [
                                    CaptionNode.create_text(WebVTTReader()._decode(line)),
                                    CaptionNode.create_break()
                                ]
                            ])
                            # drop the trailing break after the last line
                            nodes.pop()

                    if nodes:
                        # NOTE(review): multiplying decode-time ticks by timescale to get
                        # microseconds assumes a specific timescale unit — confirm
                        caption = Caption(
                            start=start_time * timescale,  # as microseconds
                            end=end_time * timescale,
                            nodes=nodes,
                            layout_info=layout
                        )
                        p_caption = captions[-1] if captions else None
                        if p_caption and caption.start == p_caption.end and str(caption.nodes) == str(p_caption.nodes):
                            # it's a duplicate, but lets take its end time
                            p_caption.end = caption.end
                            continue
                        captions.append(caption)

        return captions, language
+
+    def strip_hearing_impaired(self) -> None:
+        """
+        Strip captions for hearing impaired (SDH).
+        It uses SubtitleEdit if available, otherwise filter-subs.
+        """
+        if not self.path or not self.path.exists():
+            raise ValueError("You must download the subtitle track first.")
+
+        executable = get_binary_path("SubtitleEdit")
+        if executable:
+            subprocess.run([
+                executable,
+                "/Convert", self.path, "srt",
+                "/overwrite",
+                "/RemoveTextForHI"
+            ], check=True)
+            # Remove UTF-8 Byte Order Marks
+            self.path.write_text(
+                self.path.read_text(encoding="utf-8-sig"),
+                encoding="utf8"
+            )
+        else:
+            sub = Subtitles(self.path)
+            sub.filter(
+                rm_fonts=True,
+                rm_ast=True,
+                rm_music=True,
+                rm_effects=True,
+                rm_names=True,
+                rm_author=True
+            )
+            sub.save()
+
+    def download(self, *args, **kwargs) -> Path:
+        save_path = super().download(*args, **kwargs)
+        if self.codec not in (Subtitle.Codec.SubRip, Subtitle.Codec.SubStationAlphav4):
+            caption_set = self.parse(save_path.read_bytes(), self.codec)
+            self.merge_same_cues(caption_set)
+            srt = pycaption.SRTWriter().write(caption_set)
+            # NowTV sometimes has this, when it isn't, causing mux problems
+            srt = srt.replace("MULTI-LANGUAGE SRT\n", "")
+            save_path.write_text(srt, encoding="utf8")
+            self.codec = Subtitle.Codec.SubRip
+            self.move(self.path.with_suffix(".srt"))
+        return save_path
+
+    def __str__(self) -> str:
+        return " | ".join(filter(bool, [
+            "SUB",
+            f"[{self.codec.value}]",
+            str(self.language),
+            self.get_track_name()
+        ]))
+
+
+__ALL__ = (Subtitle,)
diff --git a/devine/core/tracks/track.py b/devine/core/tracks/track.py
new file mode 100644
index 0000000..822b815
--- /dev/null
+++ b/devine/core/tracks/track.py
@@ -0,0 +1,335 @@
+import asyncio
+import logging
+import re
+import subprocess
+from enum import Enum
+from pathlib import Path
+from typing import Any, Callable, Iterable, Optional, Union
+from urllib.parse import urljoin
+
+import m3u8
+import requests
+from langcodes import Language
+
+from devine.core.constants import TERRITORY_MAP
+from devine.core.downloaders import aria2c
+from devine.core.drm import DRM_T
+from devine.core.utilities import get_binary_path
+
+
+class Track:
    class DRM(Enum):
        # NOTE(review): empty placeholder — presumably extended elsewhere or
        # reserved for future use; confirm before removing
        pass

    class Descriptor(Enum):
        # how self.url should be interpreted when downloading
        URL = 1  # Direct URL, nothing fancy
        M3U = 2  # https://en.wikipedia.org/wiki/M3U (and M3U8)
        MPD = 3  # https://en.wikipedia.org/wiki/Dynamic_Adaptive_Streaming_over_HTTP
+
    def __init__(
        self,
        id_: str,
        url: Union[str, list[str]],
        language: Union[Language, str],
        is_original_lang: bool = False,
        descriptor: Descriptor = Descriptor.URL,
        needs_proxy: bool = False,
        needs_repack: bool = False,
        drm: Optional[Iterable[DRM_T]] = None,
        edition: Optional[str] = None,
        extra: Optional[Any] = None
    ) -> None:
        """
        Base media Track.

        Parameters:
            id_: Unique identifier for the track.
            url: A single URL, or a list of URLs (e.g. segments), to the track data.
            language: Track language as a langcodes Language or a language tag string.
            is_original_lang: Whether this track is in the title's original language.
            descriptor: How `url` should be interpreted (URL, M3U, or MPD).
            needs_proxy: Whether downloads of this track must use the given proxy.
            needs_repack: Whether the downloaded file needs a repack/remux pass.
            drm: DRM objects protecting this track, if any.
            edition: Optional edition name for the track.
            extra: Arbitrary service-specific data to carry along with the track.
        """
        self.id = id_
        self.url = url
        # required basic metadata
        self.language = Language.get(language)
        self.is_original_lang = bool(is_original_lang)
        # optional io metadata
        self.descriptor = descriptor
        self.needs_proxy = bool(needs_proxy)
        self.needs_repack = bool(needs_repack)
        # drm
        self.drm = drm
        # extra data
        self.edition: Optional[str] = edition
        self.extra: Any = extra or {}  # allow anything for extra, but default to a dict

        # event hooks; callers may assign callables to customize behavior
        self.OnSegmentFilter: Optional[Callable] = None
        self.OnDownloaded: Optional[Callable] = None
        self.OnDecrypted: Optional[Callable] = None
        self.OnRepacked: Optional[Callable] = None

        # should only be set internally
        self.path: Optional[Path] = None
+
+    def __repr__(self) -> str:
+        return "{name}({items})".format(
+            name=self.__class__.__name__,
+            items=", ".join([f"{k}={repr(v)}" for k, v in self.__dict__.items()])
+        )
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, Track) and self.id == other.id
+
    def get_track_name(self) -> Optional[str]:
        """
        Return the base Track Name. This may be enhanced in sub-classes.

        Builds the name from the language's script and territory names, if any.

        NOTE(review): this mutates self.language in place by clearing redundant
        territory values — confirm langcodes.Language tolerates attribute assignment.
        """
        if (self.language.language or "").lower() == (self.language.territory or "").lower():
            self.language.territory = None  # e.g. en-en, de-DE
        if self.language.territory == "US":
            # US territory is dropped from display names
            self.language.territory = None
        reduced = self.language.simplify_script()
        extra_parts = []
        if reduced.script is not None:
            extra_parts.append(reduced.script_name(max_distance=25))
        if reduced.territory is not None:
            territory = reduced.territory_name(max_distance=25)
            # allow the project's TERRITORY_MAP to override display names
            extra_parts.append(TERRITORY_MAP.get(territory, territory))
        return ", ".join(extra_parts) or None
+
    def get_init_segment(self, session: Optional[requests.Session] = None) -> bytes:
        """
        Get the Track's Initial Segment Data Stream.
        If the Track URL is not detected to be an init segment, it will download
        up to the first 20,000 (20KB) bytes only.

        Parameters:
            session: Optional requests Session to reuse (cookies, headers, etc.).

        NOTE(review): if the streamed response yields no chunks at all, the final
        loop falls through and this implicitly returns None — confirm callers
        handle that case.
        """
        if not session:
            session = requests.Session()

        url = None
        is_init_stream = False

        if self.descriptor == self.Descriptor.M3U:
            # look for an explicit init section (EXT-X-MAP) in the playlist
            master = m3u8.loads(session.get(self.url).text, uri=self.url)
            for segment in master.segments:
                if not segment.init_section:
                    continue
                # skip any segment that would be skipped from the download
                # as we cant consider these a true initial segment
                if callable(self.OnSegmentFilter) and self.OnSegmentFilter(segment):
                    continue
                # prefix with the base uri only when the init uri is relative
                url = ("" if re.match("^https?://", segment.init_section.uri) else segment.init_section.base_uri)
                url += segment.init_section.uri
                is_init_stream = True
                break

        if not url:
            url = self.url

        if isinstance(url, list):
            # segmented track; treat the first segment as the init segment
            url = url[0]
            is_init_stream = True

        if is_init_stream:
            return session.get(url).content

        # likely a full single-file download, get first 20k bytes
        with session.get(url, stream=True) as s:
            # assuming enough to contain the pssh/kid
            for chunk in s.iter_content(20000):
                # we only want the first chunk
                return chunk
+
    def download(self, out: Path, name_template: str = "{type}_{id}", headers: Optional[dict] = None,
                 proxy: Optional[str] = None) -> Path:
        """
        Download the Track and apply any necessary post-edits like Subtitle conversion.

        Parameters:
            out: Output Directory Path for the downloaded track.
            name_template: Override the default filename template.
                Must contain both `{type}` and `{id}` variables.
            headers: Headers to use when downloading.
            proxy: Proxy to use when downloading.

        Returns:
            Where the file was saved, as a Path object.

        Raises:
            ValueError: if `out` is an existing file, or an M3U playlist has no segments.
            IOError: if the downloaded file is effectively empty.
        """
        if out.is_file():
            raise ValueError("Path must be to a directory and not a file")

        log = logging.getLogger("download")

        out.mkdir(parents=True, exist_ok=True)

        file_name = name_template.format(
            type=self.__class__.__name__,
            id=self.id
        )

        # we must use .mp4 on tracks:
        # - as shaka-packager expects mp4 input and mp4 output
        # - and mkvtoolnix would try to parse the file in raw-bitstream
        save_path = (out / file_name).with_suffix(".mp4")
        if self.__class__.__name__ == "Subtitle":
            # NOTE(review): relies on the Subtitle sub-class defining `self.codec`
            # with an `extension` property — confirm against tracks/subtitle.py
            save_path = save_path.with_suffix(f".{self.codec.extension}")

        # these would be files like .decrypted, .repack and such.
        # we cannot trust that these files were not interrupted while writing to disc
        # lets just delete them before re-attempting a download
        for existing_file in save_path.parent.glob(f"{save_path.stem}.*{save_path.suffix}"):
            existing_file.unlink()
        save_path.with_suffix(".srt").unlink(missing_ok=True)

        if self.descriptor == self.Descriptor.M3U:
            master = m3u8.loads(
                requests.get(
                    self.url,
                    headers=headers,
                    proxies={"all": proxy} if self.needs_proxy and proxy else None
                ).text,
                uri=self.url
            )

            if not master.segments:
                raise ValueError("Track URI (an M3U8) has no segments...")

            if all(segment.uri == master.segments[0].uri for segment in master.segments):
                # all segments use the same file, presumably an EXT-X-BYTERANGE M3U (FUNI)
                # TODO: This might be a risky way to deal with these kinds of Playlists
                #       What if there's an init section, or one segment is reusing a byte-range
                segment = master.segments[0]
                if not re.match("^https?://", segment.uri):
                    segment.uri = urljoin(segment.base_uri, segment.uri)
                self.url = segment.uri
                self.descriptor = self.Descriptor.URL
            else:
                has_init = False
                segments = []
                for segment in master.segments:
                    # merge base uri with uri where needed in both normal and init segments
                    if not re.match("^https?://", segment.uri):
                        segment.uri = segment.base_uri + segment.uri
                    if segment.init_section and not re.match("^https?://", segment.init_section.uri):
                        segment.init_section.uri = segment.init_section.base_uri + segment.init_section.uri

                    if segment.discontinuity:
                        # reset init tracking so the next init section gets re-added
                        has_init = False

                    # skip segments we don't want to download (e.g., bumpers, dub cards)
                    if callable(self.OnSegmentFilter) and self.OnSegmentFilter(segment):
                        continue

                    if segment.init_section and not has_init:
                        segments.append(segment.init_section.uri)
                        has_init = True
                    segments.append(segment.uri)
                # de-duplicate while preserving order
                self.url = list(dict.fromkeys(segments))

        is_segmented = isinstance(self.url, list) and len(self.url) > 1
        segments_dir = save_path.with_name(save_path.name + "_segments")

        # up to 3 download attempts before giving up
        attempts = 1
        while True:
            try:
                asyncio.run(aria2c(
                    self.url,
                    [save_path, segments_dir][is_segmented],
                    headers,
                    proxy if self.needs_proxy else None
                ))
                break
            except subprocess.CalledProcessError:
                log.info(f" - Download attempt {attempts} failed, {['retrying', 'stopping'][attempts == 3]}...")
                if attempts == 3:
                    raise
                attempts += 1

        if is_segmented:
            # merge the segments together
            with open(save_path, "wb") as f:
                for file in sorted(segments_dir.iterdir()):
                    data = file.read_bytes()
                    # Apple TV+ needs this done to fix audio decryption
                    data = re.sub(b"(tfhd\x00\x02\x00\x1a\x00\x00\x00\x01\x00\x00\x00)\x02", b"\\g<1>\x01", data)
                    f.write(data)
                    file.unlink()  # delete, we don't need it anymore
            segments_dir.rmdir()

        self.path = save_path

        if self.path.stat().st_size <= 3:  # Empty UTF-8 BOM == 3 bytes
            raise IOError(
                "Download failed, the downloaded file is empty. "
                f"This {'was' if self.needs_proxy else 'was not'} downloaded with a proxy." +
                (
                    " Perhaps you need to set `needs_proxy` as True to use the proxy for this track."
                    if not self.needs_proxy else ""
                )
            )

        return self.path
+
+    def delete(self) -> None:
+        if self.path:
+            self.path.unlink()
+            self.path = None
+
    def repackage(self) -> None:
        """
        Remux the downloaded file with FFmpeg (stream copy, no re-encode) into
        a clean, minimal-metadata container, then swap it in place.

        Raises:
            ValueError: if the track has not been downloaded yet.
            EnvironmentError: if no ffmpeg executable could be found.
            subprocess.CalledProcessError: if ffmpeg fails for a reason other
                than a malformed AAC bitstream (which gets one retry with a filter).
        """
        if not self.path or not self.path.exists():
            raise ValueError("Cannot repackage a Track that has not been downloaded.")

        executable = get_binary_path("ffmpeg")
        if not executable:
            raise EnvironmentError("FFmpeg executable \"ffmpeg\" was not found but is required for this call.")

        repacked_path = self.path.with_suffix(f".repack{self.path.suffix}")

        def _ffmpeg(extra_args: list[str] = None):
            # one remux pass writing to repacked_path; output captured for inspection
            subprocess.run(
                [
                    executable, "-hide_banner",
                    "-loglevel", "error",
                    "-i", self.path,
                    *(extra_args or []),
                    # Following are very important!
                    "-map_metadata", "-1",  # don't transfer metadata to output file
                    "-fflags", "bitexact",  # only have minimal tag data, reproducible mux
                    "-codec", "copy",
                    str(repacked_path)
                ],
                check=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE
            )

        try:
            _ffmpeg()
        except subprocess.CalledProcessError as e:
            if b"Malformed AAC bitstream detected" in e.stderr:
                # e.g., TruTV's dodgy encodes
                _ffmpeg(["-y", "-bsf:a", "aac_adtstoasc"])
            else:
                raise

        # replace the original file with the repacked one
        self.swap(repacked_path)
+
+    def move(self, target: Union[str, Path]) -> bool:
+        """
+        Move the Track's file from current location, to target location.
+        This will overwrite anything at the target path.
+        """
+        if not self.path:
+            return False
+        target = Path(target)
+        ok = self.path.rename(target).resolve() == target.resolve()
+        if ok:
+            self.path = target
+        return ok
+
    def swap(self, target: Union[str, Path]) -> bool:
        """
        Swaps the Track's file with the Target file. The current Track's file is deleted.
        Returns False if the Track is not yet downloaded, or the target path does not exist.
        """
        target = Path(target)
        if not target.exists() or not self.path:
            return False
        # delete the current file so the target can be renamed into its place
        self.path.unlink()
        ok = target.rename(self.path) == self.path
        if not ok:
            return False
        # NOTE(review): after renaming target onto self.path, this immediately
        # moves the file back to the target path (leaving self.path == target).
        # Confirm the double-move is intentional rather than a single rename.
        return self.move(target)
+
+
+__ALL__ = (Track,)
diff --git a/devine/core/tracks/tracks.py b/devine/core/tracks/tracks.py
new file mode 100644
index 0000000..e358c54
--- /dev/null
+++ b/devine/core/tracks/tracks.py
@@ -0,0 +1,354 @@
+from __future__ import annotations
+
+import logging
+import subprocess
+from pathlib import Path
+from typing import Callable, Iterator, Optional, Sequence, Union
+
+from Cryptodome.Random import get_random_bytes
+from langcodes import Language, closest_supported_match
+
+from devine.core.config import config
+from devine.core.constants import LANGUAGE_MAX_DISTANCE, LANGUAGE_MUX_MAP, AnyTrack, TrackT
+from devine.core.tracks.audio import Audio
+from devine.core.tracks.track import Track
+from devine.core.tracks.chapter import Chapter
+from devine.core.tracks.subtitle import Subtitle
+from devine.core.tracks.video import Video
+from devine.core.utilities import sanitize_filename, is_close_match
+from devine.core.utils.collections import as_list, flatten
+
+
class Tracks:
    """
    Video, Audio, Subtitle, and Chapter Track Store.
    It provides convenience functions for listing, sorting, and selecting tracks.
    """

    # sort/display priority per track type (lower sorts first)
    TRACK_ORDER_MAP = {
        Video: 0,
        Audio: 1,
        Subtitle: 2,
        Chapter: 3
    }

    def __init__(self, *args: Union[Tracks, list[Track], Track]):
        """Create a Track store, optionally pre-populated from Tracks objects, lists, or Tracks."""
        self.videos: list[Video] = []
        self.audio: list[Audio] = []
        self.subtitles: list[Subtitle] = []
        self.chapters: list[Chapter] = []

        if args:
            self.add(args)

    def __iter__(self) -> Iterator[AnyTrack]:
        """Iterate over all media tracks; note that chapters are intentionally excluded."""
        return iter(as_list(self.videos, self.audio, self.subtitles))

    def __len__(self) -> int:
        """Count of media tracks; matches __iter__ (chapters are not counted)."""
        return len(self.videos) + len(self.audio) + len(self.subtitles)

    def __repr__(self) -> str:
        return "{name}({items})".format(
            name=self.__class__.__name__,
            items=", ".join([f"{k}={repr(v)}" for k, v in self.__dict__.items()])
        )

    def __str__(self) -> str:
        """Render a tree-style, per-type summary of all stored tracks (chapters included)."""
        rep = {
            Video: [],
            Audio: [],
            Subtitle: [],
            Chapter: []
        }
        tracks = [*list(self), *self.chapters]

        for track in sorted(tracks, key=lambda t: self.TRACK_ORDER_MAP[type(t)]):
            if not rep[type(track)]:
                # first track of this type: emit the "N <Type> Tracks:" heading
                count = sum(type(x) is type(track) for x in tracks)
                rep[type(track)].append("{count} {type} Track{plural}{colon}".format(
                    count=count,
                    type=track.__class__.__name__,
                    plural="s" if count != 1 else "",
                    colon=":" if count > 0 else ""
                ))
            rep[type(track)].append(str(track))

        for type_ in list(rep):
            if not rep[type_]:
                del rep[type_]
                continue
            # join heading + tracks with box-drawing tree connectors
            rep[type_] = "\n".join(
                [rep[type_][0]] +
                [f"├─ {x}" for x in rep[type_][1:-1]] +
                [f"└─ {rep[type_][-1]}"]
            )
        rep = "\n".join(list(rep.values()))

        return rep

    def exists(self, by_id: Optional[str] = None, by_url: Optional[Union[str, list[str]]] = None) -> bool:
        """Check if a track already exists by various methods."""
        if by_id:  # recommended
            return any(x.id == by_id for x in self)
        if by_url:
            # NOTE(review): when by_url is a list this only matches a track whose
            # .url equals that exact list — confirm tracks store urls as lists here
            return any(x.url == by_url for x in self)
        return False

    def add(
        self,
        tracks: Union[Tracks, Sequence[Union[AnyTrack, Chapter]], Track, Chapter],
        warn_only: bool = False
    ) -> None:
        """
        Add a provided track to its appropriate array and ensuring it's not a duplicate.

        Duplicates (by track ID) raise ValueError unless warn_only is True,
        in which case they are counted, skipped, and logged as a warning.
        """
        if isinstance(tracks, Tracks):
            tracks = [*list(tracks), *tracks.chapters]

        duplicates = 0
        for track in flatten(tracks):
            if self.exists(by_id=track.id):
                if not warn_only:
                    raise ValueError(
                        "One or more of the provided Tracks is a duplicate. "
                        "Track IDs must be unique but accurate using static values. The "
                        "value should stay the same no matter when you request the same "
                        "content. Use a value that has relation to the track content "
                        "itself and is static or permanent and not random/RNG data that "
                        "wont change each refresh or conflict in edge cases."
                    )
                duplicates += 1
                continue

            # route the track to the list matching its type
            if isinstance(track, Video):
                self.videos.append(track)
            elif isinstance(track, Audio):
                self.audio.append(track)
            elif isinstance(track, Subtitle):
                self.subtitles.append(track)
            elif isinstance(track, Chapter):
                self.chapters.append(track)
            else:
                raise ValueError("Track type was not set or is invalid.")

        log = logging.getLogger("Tracks")

        if duplicates:
            log.warning(f" - Found and skipped {duplicates} duplicate tracks...")

    def print(self, level: int = logging.INFO) -> None:
        """Print the __str__ to log at a specified level."""
        log = logging.getLogger("Tracks")
        for line in str(self).splitlines(keepends=False):
            log.log(level, line)

    def sort_videos(self, by_language: Optional[Sequence[Union[str, Language]]] = None) -> None:
        """Sort video tracks by bitrate, and optionally language."""
        if not self.videos:
            return
        # bitrate
        self.videos.sort(
            key=lambda x: float(x.bitrate or 0.0),
            reverse=True
        )
        # language
        # iterate in reverse so the first-listed language ends up with the
        # highest priority after these stable sorts
        for language in reversed(by_language or []):
            if str(language) == "all":
                language = next((x.language for x in self.videos if x.is_original_lang), "")
            if not language:
                continue
            self.videos.sort(key=lambda x: str(x.language))
            self.videos.sort(key=lambda x: not is_close_match(language, [x.language]))

    def sort_audio(self, by_language: Optional[Sequence[Union[str, Language]]] = None) -> None:
        """Sort audio tracks by bitrate, descriptive, and optionally language."""
        if not self.audio:
            return
        # bitrate
        self.audio.sort(
            key=lambda x: float(x.bitrate or 0.0),
            reverse=True
        )
        # descriptive (descriptive tracks sort after non-descriptive ones)
        self.audio.sort(key=lambda x: str(x.language) if x.descriptive else "")
        # language
        # iterate in reverse so the first-listed language gets top priority
        for language in reversed(by_language or []):
            if str(language) == "all":
                language = next((x.language for x in self.audio if x.is_original_lang), "")
            if not language:
                continue
            self.audio.sort(key=lambda x: str(x.language))
            self.audio.sort(key=lambda x: not is_close_match(language, [x.language]))

    def sort_subtitles(self, by_language: Optional[Sequence[Union[str, Language]]] = None) -> None:
        """
        Sort subtitle tracks by various track attributes to a common P2P standard.
        You may optionally provide a sequence of languages to prioritize to the top.

        Section Order:
          - by_language groups prioritized to top, and ascending alphabetically
          - then rest ascending alphabetically after the prioritized groups
          (Each section ascending alphabetically, but separated)

        Language Group Order:
          - Forced
          - Normal
          - Hard of Hearing (SDH/CC)
          (Least to most captions expected in the subtitle)
        """
        if not self.subtitles:
            return
        # language groups
        self.subtitles.sort(key=lambda x: str(x.language))
        self.subtitles.sort(key=lambda x: x.sdh or x.cc)
        self.subtitles.sort(key=lambda x: x.forced, reverse=True)
        # sections
        # iterate in reverse so the first-listed language gets top priority
        for language in reversed(by_language or []):
            if str(language) == "all":
                language = next((x.language for x in self.subtitles if x.is_original_lang), "")
            if not language:
                continue
            self.subtitles.sort(key=lambda x: is_close_match(language, [x.language]), reverse=True)

    def sort_chapters(self) -> None:
        """Sort chapter tracks by chapter number."""
        if not self.chapters:
            return
        # number
        self.chapters.sort(key=lambda x: x.number)

    def select_video(self, x: Callable[[Video], bool]) -> None:
        """Filter video tracks in-place, keeping only those matching the predicate."""
        self.videos = list(filter(x, self.videos))

    def select_audio(self, x: Callable[[Audio], bool]) -> None:
        """Filter audio tracks in-place, keeping only those matching the predicate."""
        self.audio = list(filter(x, self.audio))

    def select_subtitles(self, x: Callable[[Subtitle], bool]) -> None:
        """Filter subtitle tracks in-place, keeping only those matching the predicate."""
        self.subtitles = list(filter(x, self.subtitles))

    def with_resolution(self, resolution: int) -> None:
        """Filter video tracks in-place to those matching the given vertical resolution."""
        if resolution:
            # Note: Do not merge these list comprehensions. They must be done separately so the results
            # from the 16:9 canvas check is only used if there's no exact height resolution match.
            videos_quality = [x for x in self.videos if x.height == resolution]
            if not videos_quality:
                # fall back to widths that map to this height on a 16:9 canvas
                videos_quality = [x for x in self.videos if int(x.width * (9 / 16)) == resolution]
            self.videos = videos_quality

    def export_chapters(self, to_file: Optional[Union[Path, str]] = None) -> str:
        """Export all chapters in order to a string or file."""
        self.sort_chapters()
        data = "\n".join(map(repr, self.chapters))
        if to_file:
            to_file = Path(to_file)
            to_file.parent.mkdir(parents=True, exist_ok=True)
            to_file.write_text(data, encoding="utf8")
        return data

    @staticmethod
    def select_per_language(tracks: list[TrackT], languages: list[str]) -> list[TrackT]:
        """
        Enumerates and return the first Track per language.
        You should sort the list so the wanted track is closer to the start of the list.
        """
        tracks_ = []
        for language in languages:
            match = closest_supported_match(language, [str(x.language) for x in tracks], LANGUAGE_MAX_DISTANCE)
            if match:
                tracks_.append(next(x for x in tracks if str(x.language) == match))
        return tracks_

    def mux(self, title: str, delete: bool = True) -> tuple[Path, int]:
        """
        Takes the Video, Audio and Subtitle Tracks, and muxes them into an MKV file.
        It will attempt to detect Forced/Default tracks, and will try to parse the language codes of the Tracks

        Returns the output file path and mkvmerge's process return code.
        Raises ValueError if any selected track has not been downloaded yet,
        or no tracks were provided at all.
        """
        # base mkvmerge command; per-track arguments are appended below
        cl = [
            "mkvmerge",
            "--no-date",  # remove dates from the output for security
        ]

        if config.muxing.get("set_title", True):
            cl.extend(["--title", title])

        for i, vt in enumerate(self.videos):
            if not vt.path or not vt.path.exists():
                raise ValueError("Video Track must be downloaded before muxing...")
            cl.extend([
                "--language", "0:{}".format(LANGUAGE_MUX_MAP.get(
                    str(vt.language), str(vt.language)
                )),
                "--default-track", f"0:{i == 0}",  # only the first video is default
                "--original-flag", f"0:{vt.is_original_lang}",
                "--compression", "0:none",  # disable extra compression
                "(", str(vt.path), ")"
            ])

        for i, at in enumerate(self.audio):
            if not at.path or not at.path.exists():
                raise ValueError("Audio Track must be downloaded before muxing...")
            cl.extend([
                "--track-name", f"0:{at.get_track_name() or ''}",
                "--language", "0:{}".format(LANGUAGE_MUX_MAP.get(
                    str(at.language), str(at.language)
                )),
                "--default-track", f"0:{i == 0}",  # only the first audio is default
                "--visual-impaired-flag", f"0:{at.descriptive}",
                "--original-flag", f"0:{at.is_original_lang}",
                "--compression", "0:none",  # disable extra compression
                "(", str(at.path), ")"
            ])

        for st in self.subtitles:
            if not st.path or not st.path.exists():
                raise ValueError("Text Track must be downloaded before muxing...")
            # a subtitle is default only if it's forced and matches the default audio's language
            default = bool(self.audio and is_close_match(st.language, [self.audio[0].language]) and st.forced)
            cl.extend([
                "--track-name", f"0:{st.get_track_name() or ''}",
                "--language", "0:{}".format(LANGUAGE_MUX_MAP.get(
                    str(st.language), str(st.language)
                )),
                "--sub-charset", "0:UTF-8",
                "--forced-track", f"0:{st.forced}",
                "--default-track", f"0:{default}",
                "--hearing-impaired-flag", f"0:{st.sdh}",
                "--original-flag", f"0:{st.is_original_lang}",
                "--compression", "0:none",  # disable extra compression (probably zlib)
                "(", str(st.path), ")"
            ])

        if self.chapters:
            # write chapters to a temp file for mkvmerge to read; deleted in finally
            chapters_path = config.directories.temp / config.filenames.chapters.format(
                title=sanitize_filename(title),
                random=get_random_bytes(16).hex()
            )
            self.export_chapters(chapters_path)
            cl.extend(["--chapters", str(chapters_path)])
        else:
            chapters_path = None

        # derive the output path (and container flavour) from the first available track
        output_path = (
            self.videos[0].path.with_suffix(".muxed.mkv") if self.videos else
            self.audio[0].path.with_suffix(".muxed.mka") if self.audio else
            self.subtitles[0].path.with_suffix(".muxed.mks") if self.subtitles else
            chapters_path.with_suffix(".muxed.mkv") if self.chapters else
            None
        )
        if not output_path:
            raise ValueError("No tracks provided, at least one track must be provided.")

        # let potential failures go to caller, caller should handle
        try:
            p = subprocess.run([
                *cl,
                "--output", str(output_path)
            ])
            return output_path, p.returncode
        finally:
            if chapters_path:
                # regardless of delete param, we delete as it's a file we made during muxing
                chapters_path.unlink()
            if delete:
                for track in self:
                    track.delete()
+
+
+__ALL__ = (Tracks,)
diff --git a/devine/core/tracks/video.py b/devine/core/tracks/video.py
new file mode 100644
index 0000000..0400fe9
--- /dev/null
+++ b/devine/core/tracks/video.py
@@ -0,0 +1,333 @@
+from __future__ import annotations
+
+import logging
+import math
+import re
+import subprocess
+from enum import Enum
+from pathlib import Path
+from typing import Any, Optional, Union
+
+from langcodes import Language
+
+from devine.core.config import config
+from devine.core.tracks.track import Track
+from devine.core.tracks.subtitle import Subtitle
+from devine.core.utilities import get_binary_path, get_boxes, FPS
+
+
class Video(Track):
    """A Video Track, holding codec, dynamic range, resolution and FPS metadata."""

    class Codec(str, Enum):
        AVC = "H.264"
        HEVC = "H.265"
        VC1 = "VC-1"
        VP8 = "VP8"
        VP9 = "VP9"
        AV1 = "AV1"

        @property
        def extension(self) -> str:
            """Lower-cased codec name safe for use as a file extension, e.g. 'h264'."""
            return self.value.lower().replace(".", "").replace("-", "")

        @staticmethod
        def from_mime(mime: str) -> Video.Codec:
            """
            Map a MIME/sample-entry FourCC (e.g. 'avc1', 'hvc1') to a Codec.

            :raises ValueError: if the FourCC is not a supported video codec.
            """
            mime = mime.lower().strip().split(".")[0]
            if mime in (
                "avc1", "avc2", "avc3",
                "dva1", "dvav",  # Dolby Vision
            ):
                return Video.Codec.AVC
            if mime in (
                "hev1", "hev2", "hev3", "hvc1", "hvc2", "hvc3",
                "dvh1", "dvhe",  # Dolby Vision
                "lhv1", "lhe1",  # Layered
            ):
                return Video.Codec.HEVC
            if mime == "vc-1":
                return Video.Codec.VC1
            if mime in ("vp08", "vp8"):
                return Video.Codec.VP8
            if mime in ("vp09", "vp9"):
                return Video.Codec.VP9
            if mime == "av01":
                return Video.Codec.AV1
            raise ValueError(f"The MIME '{mime}' is not a supported Video Codec")

        @staticmethod
        def from_codecs(codecs: str) -> Video.Codec:
            """
            Return the first supported Codec found in a comma-separated codecs string.

            :raises ValueError: if no entry maps to a supported video codec.
            """
            for codec in codecs.lower().split(","):
                codec = codec.strip()
                mime = codec.split(".")[0]
                try:
                    return Video.Codec.from_mime(mime)
                except ValueError:
                    pass
            raise ValueError(f"No MIME types matched any supported Video Codecs in '{codecs}'")

        @staticmethod
        def from_netflix_profile(profile: str) -> Video.Codec:
            """
            Map a Netflix content profile name (by its prefix) to a Codec.

            :raises ValueError: if the profile is not a supported video codec.
            """
            profile = profile.lower().strip()
            if profile.startswith("playready-h264"):
                return Video.Codec.AVC
            if profile.startswith("hevc"):
                return Video.Codec.HEVC
            if profile.startswith("vp9"):
                return Video.Codec.VP9
            if profile.startswith("av1"):
                return Video.Codec.AV1
            raise ValueError(f"The Content Profile '{profile}' is not a supported Video Codec")

    class Range(str, Enum):
        SDR = "SDR"        # No Dynamic Range
        HLG = "HLG"        # https://en.wikipedia.org/wiki/Hybrid_log%E2%80%93gamma
        HDR10 = "HDR10"    # https://en.wikipedia.org/wiki/HDR10
        HDR10P = "HDR10+"  # https://en.wikipedia.org/wiki/HDR10%2B
        DV = "DV"          # https://en.wikipedia.org/wiki/Dolby_Vision

        @staticmethod
        def from_cicp(primaries: int, transfer: int, matrix: int) -> Video.Range:
            """
            ISO/IEC 23001-8 Coding-independent code points to Video Range.
            Sources for Code points:
            https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-H.Sup19-201903-S!!PDF-E&type=items

            :raises ValueError: if any code point is not one of the known values.
            """
            class Primaries(Enum):
                Unspecified = 0
                BT_709 = 1
                BT_601_625 = 5
                BT_601_525 = 6
                BT_2020 = 9  # BT.2100 shares the same CP

            class Transfer(Enum):
                Unspecified = 0
                SDR_BT_709 = 1
                SDR_BT_601_625 = 5
                SDR_BT_601_525 = 6
                SDR_BT_2020 = 14
                SDR_BT_2100 = 15
                PQ = 16
                HLG = 18

            class Matrix(Enum):
                RGB = 0
                YCbCr_BT_709 = 1
                YCbCr_BT_601_625 = 5
                YCbCr_BT_601_525 = 6
                YCbCr_BT_2020 = 9  # YCbCr BT.2100 shares the same CP

            # check the raw code points before wrapping them in Enums; comparing
            # Enum members against plain ints (as the old code did after
            # conversion) never matches, making the all-unspecified check dead
            if (primaries, transfer, matrix) == (0, 0, 0):
                return Video.Range.SDR

            # converting also validates each code point is one we recognise
            primaries = Primaries(primaries)
            transfer = Transfer(transfer)
            matrix = Matrix(matrix)

            # primaries and matrix do not strictly correlate to a range;
            # the transfer characteristics are what decide HDR10/HLG vs SDR

            if primaries in (Primaries.BT_601_525, Primaries.BT_601_625):
                return Video.Range.SDR

            if transfer == Transfer.PQ:
                return Video.Range.HDR10
            elif transfer == Transfer.HLG:
                return Video.Range.HLG
            else:
                return Video.Range.SDR

        @staticmethod
        def from_m3u_range_tag(tag: str) -> Video.Range:
            """
            Map an HLS VIDEO-RANGE attribute value to a Video Range.

            :raises ValueError: if the tag is not a recognised range value.
            """
            tag = (tag or "").upper().replace('"', '').strip()
            if not tag or tag == "SDR":
                return Video.Range.SDR
            elif tag == "PQ":
                return Video.Range.HDR10  # technically could be any PQ-transfer range
            elif tag == "HLG":
                return Video.Range.HLG
            # for some reason there's no Dolby Vision info tag
            raise ValueError(f"The M3U Range Tag '{tag}' is not a supported Video Range")

    def __init__(self, *args: Any, codec: Video.Codec, range_: Video.Range, bitrate: Union[str, int, float],
                 width: int, height: int, fps: Optional[Union[str, int, float]] = None, **kwargs: Any) -> None:
        """
        Create a new Video track.

        :param codec: the video codec of the bitstream.
        :param range_: the dynamic range; falls back to SDR when falsy.
        :param bitrate: bitrate; rounded up to a whole int if provided, else None.
        :param width: frame width in pixels.
        :param height: frame height in pixels.
        :param fps: frame rate; accepts e.g. "24", 23.976, or "30000/1001".
        """
        super().__init__(*args, **kwargs)
        # required
        self.codec = codec
        self.range = range_ or Video.Range.SDR
        self.bitrate = int(math.ceil(float(bitrate))) if bitrate else None
        self.width = int(width)
        self.height = int(height)
        # optional
        self.fps = FPS.parse(str(fps)) if fps else None

    def __str__(self) -> str:
        """One-line human-readable summary of the video track."""
        fps = f"{self.fps:.3f}" if self.fps else "Unknown"
        return " | ".join(filter(bool, [
            "VID",
            f"[{self.codec.value}, {self.range.name}]",
            str(self.language),
            f"{self.width}x{self.height} @ {self.bitrate // 1000 if self.bitrate else '?'} kb/s, {fps} FPS",
            self.edition
        ]))

    def change_color_range(self, range_: int) -> None:
        """Change the Video's Color Range to Limited (0) or Full (1)."""
        if not self.path or not self.path.exists():
            raise ValueError("Cannot repackage a Track that has not been downloaded.")

        executable = get_binary_path("ffmpeg")
        if not executable:
            raise EnvironmentError("FFmpeg executable \"ffmpeg\" was not found but is required for this call.")

        # only AVC and HEVC have FFmpeg metadata bitstream filters; any other
        # codec will raise a KeyError here
        filter_key = {
            Video.Codec.AVC: "h264_metadata",
            Video.Codec.HEVC: "hevc_metadata"
        }[self.codec]

        changed_path = self.path.with_suffix(f".range{range_}{self.path.suffix}")
        subprocess.run([
            executable, "-hide_banner",
            "-loglevel", "panic",
            "-i", self.path,
            "-codec", "copy",
            "-bsf:v", f"{filter_key}=video_full_range_flag={range_}",
            str(changed_path)
        ], check=True)

        self.swap(changed_path)

    def ccextractor(
        self, track_id: Any, out_path: Union[Path, str], language: Language, original: bool = False
    ) -> Optional[Subtitle]:
        """
        Return a TextTrack object representing CC track extracted by CCExtractor.

        Returns None if no captions were found or the extracted file is empty.
        """
        if not self.path:
            raise ValueError("You must download the track first.")

        executable = get_binary_path("ccextractor", "ccextractorwin", "ccextractorwinfull")
        if not executable:
            raise EnvironmentError("ccextractor executable was not found.")

        out_path = Path(out_path)

        try:
            subprocess.run([
                executable,
                "-trim", "-noru", "-ru1",
                self.path, "-o", out_path
            ], check=True)
        except subprocess.CalledProcessError as e:
            out_path.unlink(missing_ok=True)
            if e.returncode != 10:  # exit code 10 means no captions were found
                raise

        if out_path.exists():
            if out_path.stat().st_size <= 3:
                # An empty UTF-8 file with BOM is 3 bytes.
                # If the subtitle file is empty, mkvmerge will fail to mux.
                out_path.unlink()
                return None
            cc_track = Subtitle(
                id_=track_id,
                url="",  # doesn't need to be downloaded
                codec=Subtitle.Codec.SubRip,
                language=language,
                is_original_lang=original,
                cc=True
            )
            cc_track.path = out_path
            return cc_track

        return None

    def extract_c608(self) -> list[Subtitle]:
        """
        Extract Apple-Style c608 box (CEA-608) subtitle using ccextractor.

        This isn't much more than a wrapper to the track.ccextractor function.
        All this does, is actually check if a c608 box exists and only if so
        does it actually call ccextractor.

        Even though there is a possibility of more than one c608 box, only one
        can actually be extracted. Not only that but it's very possible this
        needs to be done before any decryption as the decryption may destroy
        some of the metadata.

        TODO: Need a test file with more than one c608 box to add support for
              more than one CEA-608 extraction.
        """
        if not self.path:
            raise ValueError("You must download the track first.")
        with self.path.open("rb") as f:
            # assuming 20KB is enough to contain the c608 box.
            # ffprobe will fail, so a c608 box check must be done.
            c608_count = len(list(get_boxes(f.read(20000), b"c608")))
        if c608_count > 0:
            # TODO: Figure out the real language, it might be different
            #       CEA-608 boxes don't seem to carry language information :(
            # TODO: Figure out if the CC language is original lang or not.
            #       Will need to figure out above first to do so.
            track_id = f"ccextractor-{self.id}"
            cc_lang = self.language
            cc_track = self.ccextractor(
                track_id=track_id,
                out_path=config.directories.temp / config.filenames.subtitle.format(
                    id=track_id,
                    language=cc_lang
                ),
                language=cc_lang,
                original=False
            )
            if not cc_track:
                return []
            return [cc_track]
        return []

    def remove_eia_cc(self) -> bool:
        """
        Remove EIA-CC data from Bitstream while keeping SEI data.

        This works by removing all NAL Unit's with the Type of 6 from the bistream
        and then re-adding SEI data (effectively a new NAL Unit with just the SEI data).
        Only bitstreams with x264 encoding information is currently supported due to the
        obscurity on the MDAT mp4 box structure. Therefore, we need to use hacky regex.

        Returns True if the cleanup ran, False if no x264 settings were found.
        """
        if not self.path or not self.path.exists():
            raise ValueError("Cannot clean a Track that has not been downloaded.")

        executable = get_binary_path("ffmpeg")
        if not executable:
            raise EnvironmentError("FFmpeg executable \"ffmpeg\" was not found but is required for this call.")

        log = logging.getLogger("x264-clean")
        log.info("Removing EIA-CC from Video Track with FFMPEG")

        # only the head of the file is needed to find the x264 SEI
        with open(self.path, "rb") as f:
            file = f.read(60000)

        # the 16 bytes preceding "x264" are the SEI user-data UUID
        x264 = re.search(br"(.{16})(x264)", file)
        if not x264:
            log.info(" - No x264 encode settings were found, unsupported...")
            return False

        uuid = x264.group(1).hex()
        i = file.index(b"x264")
        # grab the NUL-terminated settings string; escape ':' and ',' for ffmpeg's filter syntax
        encoding_settings = file[i: i + file[i:].index(b"\x00")].replace(b":", br"\\:").replace(b",", br"\,").decode()

        cleaned_path = self.path.with_suffix(f".cleaned{self.path.suffix}")
        subprocess.run([
            executable, "-hide_banner",
            "-loglevel", "panic",
            "-i", self.path,
            "-map_metadata", "-1",
            "-fflags", "bitexact",
            # drop all SEI NAL units (type 6), then re-add the x264 settings SEI
            "-bsf:v", f"filter_units=remove_types=6,h264_metadata=sei_user_data={uuid}+{encoding_settings}",
            "-codec", "copy",
            str(cleaned_path)
        ], check=True)

        log.info(" + Removed")

        self.swap(cleaned_path)

        return True
+
+
+__ALL__ = (Video,)
diff --git a/devine/core/utilities.py b/devine/core/utilities.py
new file mode 100644
index 0000000..c04131b
--- /dev/null
+++ b/devine/core/utilities.py
@@ -0,0 +1,205 @@
+import ast
+import contextlib
+import importlib.util
+import re
+import shutil
+import sys
+from urllib.parse import urlparse
+
+import pproxy
+import requests
+import unicodedata
+from pathlib import Path
+from types import ModuleType
+from typing import Optional, Union, Sequence, AsyncIterator
+
+from langcodes import Language, closest_match
+from pymp4.parser import Box
+from unidecode import unidecode
+
+from devine.core.config import config
+from devine.core.constants import LANGUAGE_MAX_DISTANCE
+
+
def import_module_by_path(path: Path) -> ModuleType:
    """
    Import a Python file by Path as a Module.

    :param path: Path to an existing Python source file.
    :raises ValueError: if no path was provided, or it does not exist.
    :raises TypeError: if the provided path is not a Path object.
    :raises ImportError: if an import spec could not be created for the path.
    """
    if not path:
        raise ValueError("Path must be provided")
    if not isinstance(path, Path):
        raise TypeError(f"Expected path to be a {Path}, not {path!r}")
    if not path.exists():
        raise ValueError("Path does not exist")

    # compute the dotted package name so relative imports inside the module work
    if path.is_relative_to(config.directories.core_dir):
        parts = []
        _path = path.parent
        while _path.stem != config.directories.core_dir.stem:
            parts.append(_path.stem)
            _path = _path.parent
        name = ".".join([config.directories.core_dir.stem] + parts[::-1])
    else:
        # is outside the src package; make its parent importable for siblings
        if str(path.parent.parent) not in sys.path:
            sys.path.insert(1, str(path.parent.parent))
        name = path.parent.stem

    spec = importlib.util.spec_from_file_location(name, path)
    if not spec or not spec.loader:
        # e.g. an unsupported file type; fail with a clear error instead of an
        # AttributeError on the None spec below
        raise ImportError(f"Could not create an import spec for {path}")
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)

    return module
+
+
def get_binary_path(*names: str) -> Optional[Path]:
    """Find the path of the first found binary name."""
    # resolve candidates lazily and stop at the first one present on PATH
    found = next(filter(None, map(shutil.which, names)), None)
    return Path(found) if found else None
+
+
def sanitize_filename(filename: str, spacer: str = ".") -> str:
    """
    Sanitize a string to be filename safe.

    The spacer is safer to be a '.' for older DDL and p2p sharing spaces.
    This includes web-served content via direct links and such.
    """
    # transliterate all non-ASCII characters to ASCII equivalents
    sanitized = unidecode(filename)

    # strip combining marks (hidden characters)
    sanitized = "".join(c for c in sanitized if unicodedata.category(c) != "Mn")
    # e.g. multi-episode filenames
    sanitized = sanitized.replace("/", " & ").replace(";", " & ")
    # structural chars to (spacer)
    sanitized = re.sub(r"[:; ]", spacer, sanitized)
    # not filename safe chars
    sanitized = re.sub(r"[\\*!?¿,'\"()<>|$#]", "", sanitized)
    # remove extra neighbouring (spacer)s
    sanitized = re.sub(rf"[{spacer}]{{2,}}", spacer, sanitized)

    return sanitized
+
+
def is_close_match(language: Union[str, Language], languages: Sequence[Union[str, Language, None]]) -> bool:
    """Check if a language is a close match to any of the provided languages."""
    # drop empty/None entries before matching
    candidates = [str(lang) for lang in languages if lang]
    if not candidates:
        return False
    distance = closest_match(language, candidates)[1]
    return distance <= LANGUAGE_MAX_DISTANCE
+
+
def get_boxes(data: bytes, box_type: bytes, as_bytes: bool = False) -> Box:
    """
    Scan a byte array for a wanted box, then parse and yield each find.

    :param data: raw byte stream to scan (e.g. the head of an MP4 file).
    :param box_type: 4-byte box FourCC to search for, e.g. b"c608".
    :param as_bytes: yield each box rebuilt back to bytes instead of parsed.
    """
    # using slicing to get to the wanted box is done because parsing the entire box and recursively
    # scanning through each box and its children often wouldn't scan far enough to reach the wanted box.
    # since it doesn't care what child box the wanted box is from, this works fine.
    if not isinstance(data, (bytes, bytearray)):
        raise ValueError("data must be bytes")
    while True:
        try:
            index = data.index(box_type)
        except ValueError:
            break
        if index < 0:
            # NOTE(review): unreachable — bytes.index() raises ValueError above
            # rather than returning a negative index
            break
        if index > 4:
            # size is before box type and is 4 bytes long
            # NOTE(review): when the type sits exactly at offset 4 this does not
            # rewind to the size field — confirm whether `>= 4` was intended
            index -= 4
        data = data[index:]
        try:
            box = Box.parse(data)
        except IOError:
            # TODO: Does this miss any data we may need?
            break
        if as_bytes:
            box = Box.build(box)
        yield box
+
+
+def ap_case(text: str, keep_spaces: bool = False, stop_words: tuple[str] = None) -> str:
+    """
+    Convert a string to title case using AP/APA style.
+    Based on https://github.com/words/ap-style-title-case
+
+    Parameters:
+        text: The text string to title case with AP/APA style.
+        keep_spaces: To keep the original whitespace, or to just use a normal space.
+            This would only be needed if you have special whitespace between words.
+        stop_words: Override the default stop words with your own ones.
+    """
+    if not text:
+        return ""
+
+    if not stop_words:
+        stop_words = ("a", "an", "and", "at", "but", "by", "for", "in", "nor",
+                      "of", "on", "or", "so", "the", "to", "up", "yet")
+
+    splitter = re.compile(r"(\s+|[-‑–—])")
+    words = splitter.split(text)
+
+    return "".join([
+        [" ", word][keep_spaces] if re.match(r"\s+", word) else
+        word if splitter.match(word) else
+        word.lower() if i != 0 and i != len(words) - 1 and word.lower() in stop_words else
+        word.capitalize()
+        for i, word in enumerate(words)
+    ])
+
+
def get_ip_info(session: Optional[requests.Session] = None) -> dict:
    """
    Use ipinfo.io to get IP location information.

    If you provide a Requests Session with a Proxy, that proxies IP information
    is what will be returned.
    """
    requester = session if session else requests.Session()
    response = requester.get("https://ipinfo.io/json")
    return response.json()
+
+
@contextlib.asynccontextmanager
async def start_pproxy(proxy: str) -> AsyncIterator[str]:
    """
    Tunnel an upstream proxy through a local pproxy HTTP relay.

    Yields a plain local `http://localhost:<port>` proxy URI that forwards all
    traffic through the given upstream proxy, then tears the relay down on exit.
    """
    proxy = urlparse(proxy)

    # translate the upstream scheme into pproxy's scheme naming
    scheme = {
        "https": "http+ssl",
        "socks5h": "socks"
    }.get(proxy.scheme, proxy.scheme)

    remote_server = f"{scheme}://{proxy.hostname}"
    if proxy.port:
        remote_server += f":{proxy.port}"
    # pproxy takes credentials after a '#' in its connection URI
    if proxy.username or proxy.password:
        remote_server += "#"
    if proxy.username:
        remote_server += proxy.username
    if proxy.password:
        remote_server += f":{proxy.password}"

    server = pproxy.Server("http://localhost:0")  # random port
    remote = pproxy.Connection(remote_server)
    handler = await server.start_server({"rserver": [remote]})

    try:
        # read back the OS-assigned port of the local relay
        port = handler.sockets[0].getsockname()[1]
        yield f"http://localhost:{port}"
    finally:
        handler.close()
        await handler.wait_closed()
+
+
class FPS(ast.NodeVisitor):
    """
    Safe evaluator for frame-rate expressions, e.g. "24", "24.0", or "30000/1001".

    Only numeric literals and division are accepted; anything else raises ValueError.
    """

    def visit_BinOp(self, node: ast.BinOp) -> float:
        """Evaluate a binary operation; only division is supported."""
        if isinstance(node.op, ast.Div):
            return self.visit(node.left) / self.visit(node.right)
        raise ValueError(f"Invalid operation: {node.op}")

    def visit_Constant(self, node: ast.Constant) -> complex:
        """
        Return the value of a numeric literal.

        Replaces the deprecated visit_Num: since Python 3.8 ast.parse() emits
        ast.Constant nodes, and relying on the ast.Num compatibility shim emits
        DeprecationWarning and is slated for removal.
        """
        if not isinstance(node.value, (int, float, complex)):
            raise ValueError(f"Invalid value type: {type(node.value).__name__}")
        return node.value

    def visit_Expr(self, node: ast.Expr) -> float:
        """Unwrap an expression statement and evaluate its value."""
        return self.visit(node.value)

    @classmethod
    def parse(cls, expr: str) -> float:
        """Parse and evaluate a frame-rate expression string."""
        return cls().visit(ast.parse(expr).body[0])
diff --git a/devine/core/utils/__init__.py b/devine/core/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/devine/core/utils/atomicsql.py b/devine/core/utils/atomicsql.py
new file mode 100644
index 0000000..dcee82d
--- /dev/null
+++ b/devine/core/utils/atomicsql.py
@@ -0,0 +1,105 @@
+"""
+AtomicSQL - Race-condition and Threading safe SQL Database Interface.
+Copyright (C) 2020-2023 rlaphoenix
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program.  If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import os
+import sqlite3
+import time
+from threading import Lock
+from typing import Any, Callable, Union
+
+import pymysql.cursors
+
# Union aliases for either supported backend's connection/cursor objects
Connections = Union[sqlite3.Connection, pymysql.connections.Connection]
Cursors = Union[sqlite3.Cursor, pymysql.cursors.Cursor]
+
+
class AtomicSQL:
    """
    Race-condition and Threading safe SQL Database Interface.

    A global lock plus a per-session lock serialize all database access so
    that multiple threads can safely share the same connections.
    """

    def __init__(self) -> None:
        self.master_lock = Lock()  # prevents race conditions across all sessions
        self.db: "dict[bytes, Connections]" = {}  # session_id -> database connection
        self.cursor: "dict[bytes, Cursors]" = {}  # session_id -> that connection's cursor
        self.session_lock: "dict[bytes, Lock]" = {}  # like master_lock, but per-session

    def load(self, connection: "Connections") -> bytes:
        """
        Store SQL Connection object and return a reference ticket.
        :param connection: SQLite3 or pymysql Connection object.
        :returns: Session ID in which the database connection is referenced with.
        """
        with self.master_lock:
            # obtain a unique cryptographically-random session id
            session_id = os.urandom(16)
            while session_id in self.db:
                session_id = os.urandom(16)
            self.db[session_id] = connection
            self.cursor[session_id] = connection.cursor()
            self.session_lock[session_id] = Lock()
            return session_id

    def safe_execute(self, session_id: bytes, action: Callable) -> Any:
        """
        Execute code on the Database Connection in a race-condition safe way.

        Retries up to 10 times on sqlite3.OperationalError (e.g. a locked
        database), backing off 3s, 6s, ... between attempts.

        :param session_id: Database Connection's Session ID.
        :param action: Function or lambda in which to execute, it's provided `db` and `cursor` arguments.
        :returns: The session's cursor, so results can be fetched from it.
        """
        if session_id not in self.db:
            raise ValueError(f"Session ID {session_id!r} is invalid.")
        with self.master_lock, self.session_lock[session_id]:
            max_attempts = 10
            for attempt in range(1, max_attempts + 1):
                try:
                    action(
                        db=self.db[session_id],
                        cursor=self.cursor[session_id]
                    )
                    break
                except sqlite3.OperationalError as e:
                    # give up immediately on the final attempt instead of
                    # sleeping one last time before aborting
                    if attempt == max_attempts:
                        raise ValueError("AtomicSQL.safe_execute failed too many times. Aborting.") from e
                    delay = 3 * attempt
                    print(f"AtomicSQL.safe_execute failed, {e}, retrying in {delay} seconds...")
                    time.sleep(delay)
            return self.cursor[session_id]

    def commit(self, session_id: bytes) -> bool:
        """
        Commit changes to the Database Connection immediately.
        This isn't necessary to be run every time you make changes, just ensure it's run
        at least before termination.
        :param session_id: Database Connection's Session ID.
        :returns: True if the commit call completed.
        """
        self.safe_execute(
            session_id,
            lambda db, cursor: db.commit()
        )
        return True  # TODO: actually check if db.commit worked
diff --git a/devine/core/utils/click_types.py b/devine/core/utils/click_types.py
new file mode 100644
index 0000000..82e1476
--- /dev/null
+++ b/devine/core/utils/click_types.py
@@ -0,0 +1,117 @@
+from __future__ import annotations
+
+import re
+from typing import Optional, Union
+
+import click
+from pywidevine.cdm import Cdm as WidevineCdm
+
+
class ContextData:
    """Plain data holder passed between click commands via the context object."""

    def __init__(self, config: dict, cdm: WidevineCdm, proxy_providers: list, profile: Optional[str] = None):
        """
        :param config: Configuration mapping.
        :param cdm: Widevine CDM instance.
        :param proxy_providers: Loaded proxy provider objects.
        :param profile: Optional profile name (presumably selects credentials/cookies — confirm at call sites).
        """
        self.config = config
        self.cdm = cdm
        self.proxy_providers = proxy_providers
        self.profile = profile
+
+
class SeasonRange(click.ParamType):
    """Click parameter type parsing season/episode ranges like "S01E01-S02E03"."""

    name = "ep_range"

    MIN_EPISODE = 0
    MAX_EPISODE = 999

    def parse_tokens(self, *tokens: str) -> list[str]:
        """
        Parse multiple tokens or ranged tokens as '{s}x{e}' strings.

        Supports exclusioning by putting a `-` before the token.

        Example:
            >>> sr = SeasonRange()
            >>> sr.parse_tokens("S01E01")
            ["1x1"]
            >>> sr.parse_tokens("S02E01", "S02E03-S02E05")
            ["2x1", "2x3", "2x4", "2x5"]
            >>> sr.parse_tokens("S01-S05", "-S03", "-S02E01")
            ["1x0", "1x1", ..., "2x0", (...), "2x2", (...), "4x0", ..., "5x0", ...]
        """
        if len(tokens) == 0:
            return []
        computed: list = []
        exclusions: list = []
        for token in tokens:
            exclude = token.startswith("-")
            if exclude:
                token = token[1:]
            parsed = [
                re.match(r"^S(?P<season>\d+)(E(?P<episode>\d+))?$", x, re.IGNORECASE)
                for x in re.split(r"[:-]", token)
            ]
            if len(parsed) > 2:
                self.fail(f"Invalid token, only a left and right range is acceptable: {token}")
            if len(parsed) == 1:
                # a single token is a range of itself, e.g. "S01E01-S01E01"
                parsed.append(parsed[0])
            if any(x is None for x in parsed):
                self.fail(f"Invalid token, syntax error occurred: {token}")
            from_season, from_episode = [
                int(v) if v is not None else self.MIN_EPISODE
                for k, v in parsed[0].groupdict().items() if parsed[0]  # type: ignore[union-attr]
            ]
            to_season, to_episode = [
                int(v) if v is not None else self.MAX_EPISODE
                for k, v in parsed[1].groupdict().items() if parsed[1]  # type: ignore[union-attr]
            ]
            if from_season > to_season:
                self.fail(f"Invalid range, left side season cannot be bigger than right side season: {token}")
            if from_season == to_season and from_episode > to_episode:
                self.fail(f"Invalid range, left side episode cannot be bigger than right side episode: {token}")
            for s in range(from_season, to_season + 1):
                for e in range(
                    from_episode if s == from_season else 0,
                    (self.MAX_EPISODE if s < to_season else to_episode) + 1
                ):
                    (exclusions if exclude else computed).append(f"{s}x{e}")
        # drop every occurrence of an excluded entry, then de-duplicate while
        # preserving order (the original used list(set(...)), which returned
        # an arbitrary order contradicting the documented examples, and only
        # removed one duplicate occurrence per exclusion entry)
        excluded = set(exclusions)
        return list(dict.fromkeys(x for x in computed if x not in excluded))

    def convert(
        self, value: str, param: Optional[click.Parameter] = None, ctx: Optional[click.Context] = None
    ) -> list[str]:
        """Split a comma/semicolon-separated value and parse each token."""
        return self.parse_tokens(*re.split(r"\s*[,;]\s*", value))
+
+
class LanguageRange(click.ParamType):
    """Click parameter type accepting a comma/semicolon-separated language list."""

    name = "lang_range"

    def convert(
        self, value: Union[str, list], param: Optional[click.Parameter] = None, ctx: Optional[click.Context] = None
    ) -> list[str]:
        """Return the value as a list of language tags, splitting on `,` or `;`."""
        if isinstance(value, list):
            # already parsed; pass it straight through
            return value
        return re.split(r"\s*[,;]\s*", value) if value else []
+
+
class Quality(click.ParamType):
    """Click parameter type parsing a video quality like "1080p" into an int."""

    name = "quality"

    def convert(self, value: str, param: Optional[click.Parameter] = None, ctx: Optional[click.Context] = None) -> int:
        """Convert e.g. "1080p", "1080P" or "1080" to the integer 1080."""
        try:
            return int(value.lower().rstrip("p"))
        except (AttributeError, TypeError):
            # non-str values (e.g. an int) raise AttributeError at .lower(),
            # which the original TypeError-only handler never caught
            self.fail(
                f"expected string for int() conversion, got {value!r} of type {type(value).__name__}",
                param,
                ctx
            )
        except ValueError:
            self.fail(f"{value!r} is not a valid integer", param, ctx)
+
+
# Shared singleton instances, intended for use as the `type=` of click options
SEASON_RANGE = SeasonRange()
LANGUAGE_RANGE = LanguageRange()
QUALITY = Quality()
diff --git a/devine/core/utils/collections.py b/devine/core/utils/collections.py
new file mode 100644
index 0000000..ae01bb2
--- /dev/null
+++ b/devine/core/utils/collections.py
@@ -0,0 +1,51 @@
+import itertools
+from typing import Any, Iterable, Iterator, Sequence, Tuple, Type, Union
+
+
def as_lists(*args: Any) -> Iterator[Any]:
    """Yield each input object wrapped in a list (existing lists pass through)."""
    for item in args:
        if isinstance(item, list):
            yield item
        else:
            yield [item]


def as_list(*args: Any) -> list:
    """
    Convert any input objects to a single merged list object.

    Example:
        >>> as_list('foo', ['buzz', 'bizz'], 'bazz', 'bozz', ['bar'], ['bur'])
        ['foo', 'buzz', 'bizz', 'bazz', 'bozz', 'bar', 'bur']
    """
    merged: list = []
    for sub_list in as_lists(*args):
        merged.extend(sub_list)
    return merged
+
+
def flatten(items: Any, ignore_types: Union[Type, Tuple[Type, ...]] = str) -> Iterator:
    """
    Flattens items recursively.

    Anything matching `ignore_types` (strings by default) is treated as an
    atom and yielded whole rather than iterated into.

    Example:
    >>> list(flatten(["foo", [["bar", ["buzz", [""]], "bee"]]]))
    ['foo', 'bar', 'buzz', '', 'bee']
    >>> list(flatten("foo"))
    ['foo']
    >>> list(flatten({1}, set))
    [{1}]
    """
    is_atom = not isinstance(items, (Iterable, Sequence)) or isinstance(items, ignore_types)
    if is_atom:
        yield items
    else:
        for child in items:
            yield from flatten(child, ignore_types)
+
+
+def merge_dict(source: dict, destination: dict) -> None:
+    """Recursively merge Source into Destination in-place."""
+    if not source:
+        return
+    for key, value in source.items():
+        if isinstance(value, dict):
+            # get node or create one
+            node = destination.setdefault(key, {})
+            merge_dict(value, node)
+        else:
+            destination[key] = value
diff --git a/devine/core/utils/sslciphers.py b/devine/core/utils/sslciphers.py
new file mode 100644
index 0000000..e3eb210
--- /dev/null
+++ b/devine/core/utils/sslciphers.py
@@ -0,0 +1,77 @@
+import ssl
+from typing import Optional
+
+from requests.adapters import HTTPAdapter
+
+
class SSLCiphers(HTTPAdapter):
    """
    Custom HTTP Adapter to change the TLS Cipher set and security requirements.

    Security Level may optionally be provided. A level above 0 must be used at
    all times. Usually 2 is used by default. Do not set the Security level via
    @SECLEVEL in the cipher list; use the security_level argument instead.

    OpenSSL Security Levels:
        0: Everything is permitted (compatibility with previous OpenSSL versions).
        1: >= 80 bits of security. RSA/DSA/DH keys < 1024 bits and ECC keys
           < 160 bits are prohibited, as are all export cipher suites, SSLv2,
           and any suite using MD5 for the MAC.
        2: >= 112 bits of security. RSA/DSA/DH keys < 2048 bits and ECC keys
           < 224 bits are prohibited, plus RC4 suites and SSLv3. Compression
           is disabled.
        3: >= 128 bits of security. RSA/DSA/DH keys < 3072 bits and ECC keys
           < 256 bits are prohibited, plus suites without forward secrecy and
           TLS versions below 1.1. Session tickets are disabled.
        4: >= 192 bits of security. RSA/DSA/DH keys < 7680 bits and ECC keys
           < 384 bits are prohibited, plus SHA1-MAC suites and TLS below 1.2.
        5: >= 256 bits of security. RSA/DSA/DH keys < 15360 bits and ECC keys
           < 512 bits are prohibited.
    """

    def __init__(self, cipher_list: Optional[str] = None, security_level: int = 0, *args, **kwargs):
        if cipher_list:
            if not isinstance(cipher_list, str):
                raise TypeError(f"Expected cipher_list to be a str, not {cipher_list!r}")
            if "@SECLEVEL" in cipher_list:
                raise ValueError("You must not specify the Security Level manually in the cipher list.")
        if not isinstance(security_level, int):
            raise TypeError(f"Expected security_level to be an int, not {security_level!r}")
        if security_level not in range(6):
            raise ValueError(f"The security_level must be a value between 0 and 5, not {security_level}")

        # cpython's default cipher list differs to Python-requests cipher list
        ciphers = cipher_list or "DEFAULT"
        ciphers = f"{ciphers}:@SECLEVEL={security_level}"

        context = ssl.create_default_context()
        context.check_hostname = False  # For some reason this is needed to avoid a verification error
        context.set_ciphers(ciphers)

        self._ssl_context = context
        super().__init__(*args, **kwargs)

    def init_poolmanager(self, *args, **kwargs):
        """Inject the custom SSLContext into urllib3's PoolManager."""
        kwargs["ssl_context"] = self._ssl_context
        return super().init_poolmanager(*args, **kwargs)

    def proxy_manager_for(self, *args, **kwargs):
        """Inject the custom SSLContext into urllib3's proxy pool manager."""
        kwargs["ssl_context"] = self._ssl_context
        return super().proxy_manager_for(*args, **kwargs)
diff --git a/devine/core/utils/subprocess.py b/devine/core/utils/subprocess.py
new file mode 100644
index 0000000..2f2561a
--- /dev/null
+++ b/devine/core/utils/subprocess.py
@@ -0,0 +1,31 @@
+import json
+import subprocess
+from pathlib import Path
+from typing import Union
+
+
def ffprobe(uri: Union[bytes, Path]) -> dict:
    """
    Use ffprobe on the provided data to get stream information.

    :param uri: Path to a media file (Path or str), or the raw media data as bytes.
    :returns: The parsed ffprobe JSON document, or an empty dict if ffprobe is
        not installed or failed to probe the input (best-effort contract).
    """
    args = [
        "ffprobe",
        "-v", "quiet",
        "-of", "json",
        "-show_streams"
    ]
    if isinstance(uri, (str, Path)):
        # the original only accepted Path here, so a plain str path fell
        # through both branches and left ffprobe with no input at all.
        # lavfi's movie filter is used so closed captions are exposed ([out+subcc]).
        args.extend([
            "-f", "lavfi",
            "-i", "movie={}[out+subcc]".format(str(uri).replace("\\", '/').replace(":", "\\\\:"))
        ])
    elif isinstance(uri, bytes):
        args.append("pipe:")
    try:
        ff = subprocess.run(
            args,
            input=uri if isinstance(uri, bytes) else None,
            check=True,
            capture_output=True
        )
    except (FileNotFoundError, subprocess.CalledProcessError):
        # ffprobe is not installed, or it could not probe the input
        return {}
    return json.loads(ff.stdout.decode("utf8"))
diff --git a/devine/core/utils/xml.py b/devine/core/utils/xml.py
new file mode 100644
index 0000000..30eb83d
--- /dev/null
+++ b/devine/core/utils/xml.py
@@ -0,0 +1,24 @@
+from typing import Union
+
+from lxml import etree
+from lxml.etree import ElementTree
+
+
def load_xml(xml: Union[str, bytes]) -> ElementTree:
    """
    Safely parse XML data to an ElementTree, without namespaces in tags.

    Both element tags and attribute names are reduced to their local
    (namespace-free) names, and now-unused namespace declarations are removed.
    """
    if not isinstance(xml, bytes):
        xml = xml.encode("utf8")
    root = etree.fromstring(xml)
    for elem in root.iter():  # getiterator() is deprecated in lxml; iter() is the replacement
        if not hasattr(elem.tag, "find"):
            # e.g. comment elements
            continue
        elem.tag = etree.QName(elem).localname
        # snapshot the items so the attrib mapping can be safely mutated below
        for name, value in list(elem.attrib.items()):
            local_name = etree.QName(name).localname
            if local_name == name:
                continue
            del elem.attrib[name]
            elem.attrib[local_name] = value
    etree.cleanup_namespaces(root)
    return root
diff --git a/devine/core/vault.py b/devine/core/vault.py
new file mode 100644
index 0000000..01a7d71
--- /dev/null
+++ b/devine/core/vault.py
@@ -0,0 +1,50 @@
+from __future__ import annotations
+
+from abc import ABCMeta, abstractmethod
+from typing import Iterator, Optional, Union
+from uuid import UUID
+
+
class Vault(metaclass=ABCMeta):
    """Abstract base class for Key Vaults that store Content Keys by Key ID."""

    def __init__(self, name: str):
        # human-friendly vault name, used by __str__
        self.name = name

    def __str__(self) -> str:
        return f"{self.name} {type(self).__name__}"

    @abstractmethod
    def get_key(self, kid: Union[UUID, str], service: str) -> Optional[str]:
        """
        Get Key from Vault by KID (Key ID) and Service.

        It does not get Key by PSSH as the PSSH can be different depending on its implementation,
        or even how it was crafted. Some PSSH values may also actually be a CENC Header rather
        than a PSSH MP4 Box too, which makes the value even more confusingly different.

        However, the KID never changes unless the video file itself has changed too, meaning the
        key for the presumed-matching KID wouldn't work, further proving matching by KID is
        superior.

        Returns None if no key is stored for the KID.
        """

    @abstractmethod
    def get_keys(self, service: str) -> Iterator[tuple[str, str]]:
        """Get All Keys from Vault by Service, as (kid, key) pairs."""

    @abstractmethod
    def add_key(self, service: str, kid: Union[UUID, str], key: str, commit: bool = False) -> bool:
        """Add KID:KEY to the Vault. Returns True if the key is stored."""

    @abstractmethod
    def add_keys(self, service: str, kid_keys: dict[Union[UUID, str], str], commit: bool = False) -> int:
        """
        Add Multiple Content Keys with Key IDs for Service to the Vault.
        Pre-existing Content Keys are ignored/skipped.
        Raises PermissionError if the user has no permission to create the table.
        Returns the number of keys added.
        """

    @abstractmethod
    def get_services(self) -> Iterator[str]:
        """Get a list of Service Tags from Vault."""
+
+
+__ALL__ = (Vault,)
diff --git a/devine/core/vaults.py b/devine/core/vaults.py
new file mode 100644
index 0000000..e210e10
--- /dev/null
+++ b/devine/core/vaults.py
@@ -0,0 +1,79 @@
+from __future__ import annotations
+
+from typing import Iterator, Optional, Union, Any
+from uuid import UUID
+
+from devine.core.vault import Vault
+from devine.core.config import config
+from devine.core.utilities import import_module_by_path
+
# Discover vault implementation files (e.g. SQLite.py, MySQL.py) from the
# configured vaults directory, sorted by module name.
_VAULTS = sorted(
    (
        path
        for path in config.directories.vaults.glob("*.py")
        if path.stem.lower() != "__init__"
    ),
    key=lambda x: x.stem
)

# Map each vault type name to its class; each module is expected to expose a
# class named after its file stem (e.g. SQLite.py defines `class SQLite`).
_MODULES = {
    path.stem: getattr(import_module_by_path(path), path.stem)
    for path in _VAULTS
}
+
+
class Vaults:
    """Keeps hold of Key Vaults with convenience functions, e.g. searching all vaults."""

    def __init__(self, service: Optional[str] = None):
        # service tag used for every vault lookup; empty string when unset
        self.service = service or ""
        self.vaults: list[Vault] = []

    def __iter__(self) -> Iterator[Vault]:
        yield from self.vaults

    def __len__(self) -> int:
        return len(self.vaults)

    def load(self, type_: str, **kwargs: Any) -> None:
        """Load a Vault into the vaults list."""
        module = _MODULES.get(type_)
        if not module:
            raise ValueError(f"Unable to find vault command by the name '{type_}'.")
        self.vaults.append(module(**kwargs))

    def get_key(self, kid: Union[UUID, str]) -> tuple[Optional[str], Optional[Vault]]:
        """Get Key from the first Vault it can by KID (Key ID) and Service."""
        for vault in self.vaults:
            key = vault.get_key(kid, self.service)
            # skip missing results and all-zero (NULL) keys
            if key and key.count("0") != len(key):
                return key, vault
        return None, None

    def add_key(self, kid: Union[UUID, str], key: str, excluding: Optional[Vault] = None) -> int:
        """Add a KID:KEY to all Vaults, optionally with an exclusion."""
        success = 0
        for vault in self.vaults:
            if vault == excluding:
                continue
            try:
                success += vault.add_key(self.service, kid, key, commit=True)
            except (PermissionError, NotImplementedError):
                # the vault cannot store this key (e.g. read-only); skip it
                pass
        return success

    def add_keys(self, kid_keys: dict[Union[UUID, str], str]) -> int:
        """
        Add multiple KID:KEYs to all Vaults. Duplicate Content Keys are skipped.
        PermissionErrors when the user cannot create Tables are absorbed and ignored.
        """
        success = 0
        for vault in self.vaults:
            try:
                success += bool(vault.add_keys(self.service, kid_keys, commit=True))
            except (PermissionError, NotImplementedError):
                pass
        return success
+
+
+__ALL__ = (Vaults,)
diff --git a/devine/vaults/MySQL.py b/devine/vaults/MySQL.py
new file mode 100644
index 0000000..221d3f6
--- /dev/null
+++ b/devine/vaults/MySQL.py
@@ -0,0 +1,225 @@
+from __future__ import annotations
+
+from typing import Iterator, Optional, Union
+from uuid import UUID
+
+import pymysql
+from pymysql.cursors import DictCursor
+
+from devine.core.services import Services
+from devine.core.utils.atomicsql import AtomicSQL
+from devine.core.vault import Vault
+
+
class MySQL(Vault):
    """Key Vault using a remotely-accessed mysql database connection."""

    def __init__(self, name: str, host: str, database: str, username: str, **kwargs):
        """
        All extra arguments provided via **kwargs will be sent to pymysql.connect.
        This can be used to provide more specific connection information.

        Raises PermissionError if the connection has no SELECT permission.
        """
        super().__init__(name)
        self.slug = f"{host}:{database}:{username}"
        self.con = pymysql.connect(
            host=host,
            db=database,
            user=username,
            cursorclass=DictCursor,
            **kwargs
        )
        self.adb = AtomicSQL()
        self.ticket = self.adb.load(self.con)

        self.permissions = self.get_permissions()
        if not self.has_permission("SELECT"):
            raise PermissionError(f"MySQL vault {self.slug} has no SELECT permission.")

    def get_key(self, kid: Union[UUID, str], service: str) -> Optional[str]:
        """Get a non-NULL Content Key by its KID for a Service, or None if absent."""
        if not self.has_table(service):
            # no table, no key, simple
            return None

        if isinstance(kid, UUID):
            kid = kid.hex

        c = self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                # TODO: SQL injection risk
                f"SELECT `id`, `key_` FROM `{service}` WHERE `kid`=%s AND `key_`!=%s",
                [kid, "0" * 32]
            )
        ).fetchone()
        if not c:
            return None

        return c["key_"]

    def get_keys(self, service: str) -> Iterator[tuple[str, str]]:
        """Yield every non-NULL (kid, key) pair stored for a Service."""
        if not self.has_table(service):
            # no table, no keys, simple
            return

        c = self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                # TODO: SQL injection risk
                f"SELECT `kid`, `key_` FROM `{service}` WHERE `key_`!=%s",
                ["0" * 32]
            )
        )

        for row in c.fetchall():
            yield row["kid"], row["key_"]

    def add_key(self, service: str, kid: Union[UUID, str], key: str, commit: bool = False) -> bool:
        """
        Add a KID:KEY pair for a Service. Returns True if it is stored,
        including when the exact pair was already present.
        """
        if not key or key.count("0") == len(key):
            raise ValueError("You cannot add a NULL Content Key to a Vault.")

        if not self.has_permission("INSERT", table=service):
            raise PermissionError(f"MySQL vault {self.slug} has no INSERT permission.")

        if not self.has_table(service):
            try:
                self.create_table(service, commit)
            except PermissionError:
                return False

        if isinstance(kid, UUID):
            kid = kid.hex

        if self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                # TODO: SQL injection risk
                f"SELECT `id` FROM `{service}` WHERE `kid`=%s AND `key_`=%s",
                [kid, key]
            )
        ).fetchone():
            # table already has this exact KID:KEY stored
            return True

        self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                # TODO: SQL injection risk
                f"INSERT INTO `{service}` (kid, key_) VALUES (%s, %s)",
                (kid, key)
            )
        )

        if commit:
            self.commit()

        return True

    def add_keys(self, service: str, kid_keys: dict[Union[UUID, str], str], commit: bool = False) -> int:
        """
        Add Multiple Content Keys with Key IDs for Service to the Vault.
        Pre-existing Content Keys are ignored/skipped.
        Raises PermissionError if the user has no INSERT/CREATE permission.
        Returns the number of rows actually inserted.
        """
        # validate the mapping before using it; the original iterated
        # kid_keys.items() first, so a non-dict argument failed with an
        # unrelated AttributeError instead of this explicit error
        if not isinstance(kid_keys, dict):
            raise ValueError(f"The kid_keys provided is not a dictionary, {kid_keys!r}")
        if not all(isinstance(kid, (str, UUID)) and isinstance(key_, str) for kid, key_ in kid_keys.items()):
            raise ValueError("Expecting dict with Key of str/UUID and value of str.")
        for key in kid_keys.values():
            if not key or key.count("0") == len(key):
                raise ValueError("You cannot add a NULL Content Key to a Vault.")

        if not self.has_permission("INSERT", table=service):
            raise PermissionError(f"MySQL vault {self.slug} has no INSERT permission.")

        if not self.has_table(service):
            try:
                self.create_table(service, commit)
            except PermissionError:
                return 0

        # normalize all KIDs to hex strings
        kid_keys = {
            kid.hex if isinstance(kid, UUID) else kid: key_
            for kid, key_ in kid_keys.items()
        }

        c = self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.executemany(
                # TODO: SQL injection risk
                f"INSERT IGNORE INTO `{service}` (kid, key_) VALUES (%s, %s)",
                kid_keys.items()
            )
        )

        if commit:
            self.commit()

        return c.rowcount

    def get_services(self) -> Iterator[str]:
        """Yield the Service Tag of every table in the database."""
        c = self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute("SHOW TABLES")
        )
        for table in c.fetchall():
            # each entry has a key named `Tables_in_<db name>`
            yield Services.get_tag(list(table.values())[0])

    def has_table(self, name: str) -> bool:
        """Check if the Vault has a Table with the specified name."""
        return list(self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                "SELECT count(TABLE_NAME) FROM information_schema.TABLES WHERE TABLE_SCHEMA=%s AND TABLE_NAME=%s",
                [self.con.db, name]
            )
        ).fetchone().values())[0] == 1

    def create_table(self, name: str, commit: bool = False):
        """
        Create a Table with the specified name if not yet created.
        Raises PermissionError if the user has no CREATE permission.
        """
        if self.has_table(name):
            return

        if not self.has_permission("CREATE"):
            raise PermissionError(f"MySQL vault {self.slug} has no CREATE permission.")

        self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                # TODO: SQL injection risk
                f"""
                CREATE TABLE IF NOT EXISTS {name} (
                  id          int AUTO_INCREMENT PRIMARY KEY,
                  kid         VARCHAR(255) NOT NULL,
                  key_        VARCHAR(255) NOT NULL,
                  UNIQUE(kid, key_)
                );
                """
            )
        )

        if commit:
            self.commit()

    def get_permissions(self) -> list:
        """Get and parse Grants to a more easily usable list tuple array."""
        with self.con.cursor() as c:
            c.execute("SHOW GRANTS")
            grants = c.fetchall()
            grants = [next(iter(x.values())) for x in grants]
        # each grant reads like "GRANT <perms> ON <db>.<table> TO <user>";
        # strip the leading "GRANT " and split into (perms, location)
        grants = [tuple(x[6:].split(" TO ")[0].split(" ON ")) for x in grants]
        grants = [(
            list(map(str.strip, perms.replace("ALL PRIVILEGES", "*").split(","))),
            location.replace("`", "").split(".")
        ) for perms, location in grants]
        return grants

    def has_permission(self, operation: str, database: Optional[str] = None, table: Optional[str] = None) -> bool:
        """Check if the current connection has a specific permission."""
        grants = [x for x in self.permissions if x[0] == ["*"] or operation.upper() in x[0]]
        if grants and database:
            grants = [x for x in grants if x[1][0] in (database, "*")]
        if grants and table:
            grants = [x for x in grants if x[1][1] in (table, "*")]
        return bool(grants)

    def commit(self):
        """Commit any changes made that have not yet been written to the db."""
        self.adb.commit(self.ticket)
diff --git a/devine/vaults/SQLite.py b/devine/vaults/SQLite.py
new file mode 100644
index 0000000..02c5307
--- /dev/null
+++ b/devine/vaults/SQLite.py
@@ -0,0 +1,173 @@
from __future__ import annotations

import re
import sqlite3
from pathlib import Path
from typing import Iterator, Optional, Union
from uuid import UUID

from devine.core.services import Services
from devine.core.utils.atomicsql import AtomicSQL
from devine.core.vault import Vault
+
+
class SQLite(Vault):
    """Key Vault using a locally-accessed sqlite DB file."""

    # Identifiers (table names) cannot be bound as SQL parameters, so any name
    # interpolated into a statement must first match this strict character set.
    _IDENTIFIER = re.compile(r"^[A-Za-z0-9_]+$")

    def __init__(self, name: str, path: Union[str, Path]):
        super().__init__(name)
        self.path = Path(path).expanduser()
        # TODO: Use a DictCursor or such to get fetches as dict?
        self.con = sqlite3.connect(self.path)
        self.adb = AtomicSQL()
        self.ticket = self.adb.load(self.con)

    @classmethod
    def _check_identifier(cls, name: str) -> str:
        """
        Validate a SQL identifier (e.g., a Service table name) before it is
        interpolated into a statement. Values are always parameterized, but
        identifiers cannot be, so this guards against SQL injection through
        service names.

        Raises ValueError if the name contains any unsafe character.
        """
        if not cls._IDENTIFIER.match(name):
            raise ValueError(f"Unsafe SQL identifier: {name!r}")
        return name

    def get_key(self, kid: Union[UUID, str], service: str) -> Optional[str]:
        """
        Get the Content Key stored for a KID under a Service's table.
        Returns None if the Service has no table, or no non-NULL key is stored.
        """
        if not self.has_table(service):
            # no table, no key, simple
            return None

        if isinstance(kid, UUID):
            kid = kid.hex

        table = self._check_identifier(service)
        row = self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                # identifier validated above; values parameterized
                f"SELECT `id`, `key_` FROM `{table}` WHERE `kid`=? AND `key_`!=?",
                [kid, "0" * 32]
            )
        ).fetchone()
        if not row:
            return None

        return row[1]  # `key_`

    def get_keys(self, service: str) -> Iterator[tuple[str, str]]:
        """Yield every (kid, key) pair stored for a Service, skipping NULL keys."""
        if not self.has_table(service):
            # no table, no keys, simple
            return

        table = self._check_identifier(service)
        c = self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                # identifier validated above; values parameterized
                f"SELECT `kid`, `key_` FROM `{table}` WHERE `key_`!=?",
                ["0" * 32]
            )
        )
        yield from c.fetchall()

    def add_key(self, service: str, kid: Union[UUID, str], key: str, commit: bool = False) -> bool:
        """
        Store a single KID:KEY pair under a Service's table, creating the table
        if needed. Returns True whether the pair was inserted or already stored.

        Raises ValueError for an empty or all-zero (NULL) Content Key.
        """
        if not key or key.count("0") == len(key):
            raise ValueError("You cannot add a NULL Content Key to a Vault.")

        if not self.has_table(service):
            self.create_table(service, commit)

        if isinstance(kid, UUID):
            kid = kid.hex

        table = self._check_identifier(service)
        if self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                # identifier validated above; values parameterized
                f"SELECT `id` FROM `{table}` WHERE `kid`=? AND `key_`=?",
                [kid, key]
            )
        ).fetchone():
            # table already has this exact KID:KEY stored
            return True

        self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                # identifier validated above; values parameterized
                f"INSERT INTO `{table}` (kid, key_) VALUES (?, ?)",
                (kid, key)
            )
        )

        if commit:
            self.commit()

        return True

    def add_keys(self, service: str, kid_keys: dict[Union[UUID, str], str], commit: bool = False) -> int:
        """
        Bulk-store KID:KEY pairs under a Service's table, creating the table if
        needed. Existing pairs are silently skipped (INSERT OR IGNORE).
        Returns the number of rows actually inserted.

        Raises ValueError for a non-dict argument, wrongly-typed entries, or an
        empty/all-zero (NULL) Content Key.
        """
        # Validate the argument fully before touching it or the database, so a
        # bad call raises ValueError instead of AttributeError (and does not
        # create a table as a side effect).
        if not isinstance(kid_keys, dict):
            raise ValueError(f"The kid_keys provided is not a dictionary, {kid_keys!r}")
        if not all(isinstance(kid, (str, UUID)) and isinstance(key_, str) for kid, key_ in kid_keys.items()):
            raise ValueError("Expecting dict with Key of str/UUID and value of str.")
        for key_ in kid_keys.values():
            if not key_ or key_.count("0") == len(key_):
                raise ValueError("You cannot add a NULL Content Key to a Vault.")

        if not self.has_table(service):
            self.create_table(service, commit)

        # normalize any UUID KIDs to their hex form
        kid_keys = {
            kid.hex if isinstance(kid, UUID) else kid: key_
            for kid, key_ in kid_keys.items()
        }

        table = self._check_identifier(service)
        c = self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.executemany(
                # identifier validated above; values parameterized
                f"INSERT OR IGNORE INTO `{table}` (kid, key_) VALUES (?, ?)",
                kid_keys.items()
            )
        )

        if commit:
            self.commit()

        return c.rowcount

    def get_services(self) -> Iterator[str]:
        """Yield the Service tag for every key table in the vault."""
        c = self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
        )
        for (name,) in c.fetchall():
            # sqlite_sequence is SQLite's internal AUTOINCREMENT bookkeeping table
            if name != "sqlite_sequence":
                yield Services.get_tag(name)

    def has_table(self, name: str) -> bool:
        """Check if the Vault has a Table with the specified name."""
        return self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                "SELECT count(name) FROM sqlite_master WHERE type='table' AND name=?",
                [name]
            )
        ).fetchone()[0] == 1

    def create_table(self, name: str, commit: bool = False):
        """Create a Table with the specified name if not yet created."""
        if self.has_table(name):
            return

        table = self._check_identifier(name)
        self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                # identifier validated above
                f"""
                CREATE TABLE IF NOT EXISTS {table} (
                  "id"        INTEGER NOT NULL UNIQUE,
                  "kid"       TEXT NOT NULL COLLATE NOCASE,
                  "key_"      TEXT NOT NULL COLLATE NOCASE,
                  PRIMARY KEY("id" AUTOINCREMENT),
                  UNIQUE("kid", "key_")
                );
                """
            )
        )

        if commit:
            self.commit()

    def commit(self):
        """Commit any changes made that has not been written to db."""
        self.adb.commit(self.ticket)
diff --git a/devine/vaults/__init__.py b/devine/vaults/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..3eb08fd
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,1726 @@
+# This file is automatically @generated by Poetry and should not be changed by hand.
+
+[[package]]
+name = "aiohttp"
+version = "3.8.3"
+description = "Async http client/server framework (asyncio)"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "aiohttp-3.8.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ba71c9b4dcbb16212f334126cc3d8beb6af377f6703d9dc2d9fb3874fd667ee9"},
+    {file = "aiohttp-3.8.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d24b8bb40d5c61ef2d9b6a8f4528c2f17f1c5d2d31fed62ec860f6006142e83e"},
+    {file = "aiohttp-3.8.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f88df3a83cf9df566f171adba39d5bd52814ac0b94778d2448652fc77f9eb491"},
+    {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97decbb3372d4b69e4d4c8117f44632551c692bb1361b356a02b97b69e18a62"},
+    {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:309aa21c1d54b8ef0723181d430347d7452daaff93e8e2363db8e75c72c2fb2d"},
+    {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad5383a67514e8e76906a06741febd9126fc7c7ff0f599d6fcce3e82b80d026f"},
+    {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20acae4f268317bb975671e375493dbdbc67cddb5f6c71eebdb85b34444ac46b"},
+    {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05a3c31c6d7cd08c149e50dc7aa2568317f5844acd745621983380597f027a18"},
+    {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6f76310355e9fae637c3162936e9504b4767d5c52ca268331e2756e54fd4ca5"},
+    {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:256deb4b29fe5e47893fa32e1de2d73c3afe7407738bd3c63829874661d4822d"},
+    {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5c59fcd80b9049b49acd29bd3598cada4afc8d8d69bd4160cd613246912535d7"},
+    {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:059a91e88f2c00fe40aed9031b3606c3f311414f86a90d696dd982e7aec48142"},
+    {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2feebbb6074cdbd1ac276dbd737b40e890a1361b3cc30b74ac2f5e24aab41f7b"},
+    {file = "aiohttp-3.8.3-cp310-cp310-win32.whl", hash = "sha256:5bf651afd22d5f0c4be16cf39d0482ea494f5c88f03e75e5fef3a85177fecdeb"},
+    {file = "aiohttp-3.8.3-cp310-cp310-win_amd64.whl", hash = "sha256:653acc3880459f82a65e27bd6526e47ddf19e643457d36a2250b85b41a564715"},
+    {file = "aiohttp-3.8.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:86fc24e58ecb32aee09f864cb11bb91bc4c1086615001647dbfc4dc8c32f4008"},
+    {file = "aiohttp-3.8.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75e14eac916f024305db517e00a9252714fce0abcb10ad327fb6dcdc0d060f1d"},
+    {file = "aiohttp-3.8.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d1fde0f44029e02d02d3993ad55ce93ead9bb9b15c6b7ccd580f90bd7e3de476"},
+    {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab94426ddb1ecc6a0b601d832d5d9d421820989b8caa929114811369673235c"},
+    {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89d2e02167fa95172c017732ed7725bc8523c598757f08d13c5acca308e1a061"},
+    {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02f9a2c72fc95d59b881cf38a4b2be9381b9527f9d328771e90f72ac76f31ad8"},
+    {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7149272fb5834fc186328e2c1fa01dda3e1fa940ce18fded6d412e8f2cf76d"},
+    {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:512bd5ab136b8dc0ffe3fdf2dfb0c4b4f49c8577f6cae55dca862cd37a4564e2"},
+    {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7018ecc5fe97027214556afbc7c502fbd718d0740e87eb1217b17efd05b3d276"},
+    {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88c70ed9da9963d5496d38320160e8eb7e5f1886f9290475a881db12f351ab5d"},
+    {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:da22885266bbfb3f78218dc40205fed2671909fbd0720aedba39b4515c038091"},
+    {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:e65bc19919c910127c06759a63747ebe14f386cda573d95bcc62b427ca1afc73"},
+    {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:08c78317e950e0762c2983f4dd58dc5e6c9ff75c8a0efeae299d363d439c8e34"},
+    {file = "aiohttp-3.8.3-cp311-cp311-win32.whl", hash = "sha256:45d88b016c849d74ebc6f2b6e8bc17cabf26e7e40c0661ddd8fae4c00f015697"},
+    {file = "aiohttp-3.8.3-cp311-cp311-win_amd64.whl", hash = "sha256:96372fc29471646b9b106ee918c8eeb4cca423fcbf9a34daa1b93767a88a2290"},
+    {file = "aiohttp-3.8.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c971bf3786b5fad82ce5ad570dc6ee420f5b12527157929e830f51c55dc8af77"},
+    {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff25f48fc8e623d95eca0670b8cc1469a83783c924a602e0fbd47363bb54aaca"},
+    {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e381581b37db1db7597b62a2e6b8b57c3deec95d93b6d6407c5b61ddc98aca6d"},
+    {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db19d60d846283ee275d0416e2a23493f4e6b6028825b51290ac05afc87a6f97"},
+    {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25892c92bee6d9449ffac82c2fe257f3a6f297792cdb18ad784737d61e7a9a85"},
+    {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:398701865e7a9565d49189f6c90868efaca21be65c725fc87fc305906be915da"},
+    {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4a4fbc769ea9b6bd97f4ad0b430a6807f92f0e5eb020f1e42ece59f3ecfc4585"},
+    {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:b29bfd650ed8e148f9c515474a6ef0ba1090b7a8faeee26b74a8ff3b33617502"},
+    {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:1e56b9cafcd6531bab5d9b2e890bb4937f4165109fe98e2b98ef0dcfcb06ee9d"},
+    {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ec40170327d4a404b0d91855d41bfe1fe4b699222b2b93e3d833a27330a87a6d"},
+    {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2df5f139233060578d8c2c975128fb231a89ca0a462b35d4b5fcf7c501ebdbe1"},
+    {file = "aiohttp-3.8.3-cp36-cp36m-win32.whl", hash = "sha256:f973157ffeab5459eefe7b97a804987876dd0a55570b8fa56b4e1954bf11329b"},
+    {file = "aiohttp-3.8.3-cp36-cp36m-win_amd64.whl", hash = "sha256:437399385f2abcd634865705bdc180c8314124b98299d54fe1d4c8990f2f9494"},
+    {file = "aiohttp-3.8.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:09e28f572b21642128ef31f4e8372adb6888846f32fecb288c8b0457597ba61a"},
+    {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f3553510abdbec67c043ca85727396ceed1272eef029b050677046d3387be8d"},
+    {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e168a7560b7c61342ae0412997b069753f27ac4862ec7867eff74f0fe4ea2ad9"},
+    {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db4c979b0b3e0fa7e9e69ecd11b2b3174c6963cebadeecfb7ad24532ffcdd11a"},
+    {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e164e0a98e92d06da343d17d4e9c4da4654f4a4588a20d6c73548a29f176abe2"},
+    {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8a78079d9a39ca9ca99a8b0ac2fdc0c4d25fc80c8a8a82e5c8211509c523363"},
+    {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:21b30885a63c3f4ff5b77a5d6caf008b037cb521a5f33eab445dc566f6d092cc"},
+    {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4b0f30372cef3fdc262f33d06e7b411cd59058ce9174ef159ad938c4a34a89da"},
+    {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:8135fa153a20d82ffb64f70a1b5c2738684afa197839b34cc3e3c72fa88d302c"},
+    {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:ad61a9639792fd790523ba072c0555cd6be5a0baf03a49a5dd8cfcf20d56df48"},
+    {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978b046ca728073070e9abc074b6299ebf3501e8dee5e26efacb13cec2b2dea0"},
+    {file = "aiohttp-3.8.3-cp37-cp37m-win32.whl", hash = "sha256:0d2c6d8c6872df4a6ec37d2ede71eff62395b9e337b4e18efd2177de883a5033"},
+    {file = "aiohttp-3.8.3-cp37-cp37m-win_amd64.whl", hash = "sha256:21d69797eb951f155026651f7e9362877334508d39c2fc37bd04ff55b2007091"},
+    {file = "aiohttp-3.8.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ca9af5f8f5812d475c5259393f52d712f6d5f0d7fdad9acdb1107dd9e3cb7eb"},
+    {file = "aiohttp-3.8.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d90043c1882067f1bd26196d5d2db9aa6d268def3293ed5fb317e13c9413ea4"},
+    {file = "aiohttp-3.8.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d737fc67b9a970f3234754974531dc9afeea11c70791dcb7db53b0cf81b79784"},
+    {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebf909ea0a3fc9596e40d55d8000702a85e27fd578ff41a5500f68f20fd32e6c"},
+    {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5835f258ca9f7c455493a57ee707b76d2d9634d84d5d7f62e77be984ea80b849"},
+    {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da37dcfbf4b7f45d80ee386a5f81122501ec75672f475da34784196690762f4b"},
+    {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87f44875f2804bc0511a69ce44a9595d5944837a62caecc8490bbdb0e18b1342"},
+    {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:527b3b87b24844ea7865284aabfab08eb0faf599b385b03c2aa91fc6edd6e4b6"},
+    {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d5ba88df9aa5e2f806650fcbeedbe4f6e8736e92fc0e73b0400538fd25a4dd96"},
+    {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e7b8813be97cab8cb52b1375f41f8e6804f6507fe4660152e8ca5c48f0436017"},
+    {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2dea10edfa1a54098703cb7acaa665c07b4e7568472a47f4e64e6319d3821ccf"},
+    {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:713d22cd9643ba9025d33c4af43943c7a1eb8547729228de18d3e02e278472b6"},
+    {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2d252771fc85e0cf8da0b823157962d70639e63cb9b578b1dec9868dd1f4f937"},
+    {file = "aiohttp-3.8.3-cp38-cp38-win32.whl", hash = "sha256:66bd5f950344fb2b3dbdd421aaa4e84f4411a1a13fca3aeb2bcbe667f80c9f76"},
+    {file = "aiohttp-3.8.3-cp38-cp38-win_amd64.whl", hash = "sha256:84b14f36e85295fe69c6b9789b51a0903b774046d5f7df538176516c3e422446"},
+    {file = "aiohttp-3.8.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16c121ba0b1ec2b44b73e3a8a171c4f999b33929cd2397124a8c7fcfc8cd9e06"},
+    {file = "aiohttp-3.8.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d6aaa4e7155afaf994d7924eb290abbe81a6905b303d8cb61310a2aba1c68ba"},
+    {file = "aiohttp-3.8.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43046a319664a04b146f81b40e1545d4c8ac7b7dd04c47e40bf09f65f2437346"},
+    {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599418aaaf88a6d02a8c515e656f6faf3d10618d3dd95866eb4436520096c84b"},
+    {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92a2964319d359f494f16011e23434f6f8ef0434acd3cf154a6b7bec511e2fb7"},
+    {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73a4131962e6d91109bca6536416aa067cf6c4efb871975df734f8d2fd821b37"},
+    {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598adde339d2cf7d67beaccda3f2ce7c57b3b412702f29c946708f69cf8222aa"},
+    {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75880ed07be39beff1881d81e4a907cafb802f306efd6d2d15f2b3c69935f6fb"},
+    {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0239da9fbafd9ff82fd67c16704a7d1bccf0d107a300e790587ad05547681c8"},
+    {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4e3a23ec214e95c9fe85a58470b660efe6534b83e6cbe38b3ed52b053d7cb6ad"},
+    {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:47841407cc89a4b80b0c52276f3cc8138bbbfba4b179ee3acbd7d77ae33f7ac4"},
+    {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:54d107c89a3ebcd13228278d68f1436d3f33f2dd2af5415e3feaeb1156e1a62c"},
+    {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c37c5cce780349d4d51739ae682dec63573847a2a8dcb44381b174c3d9c8d403"},
+    {file = "aiohttp-3.8.3-cp39-cp39-win32.whl", hash = "sha256:f178d2aadf0166be4df834c4953da2d7eef24719e8aec9a65289483eeea9d618"},
+    {file = "aiohttp-3.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:88e5be56c231981428f4f506c68b6a46fa25c4123a2e86d156c58a8369d31ab7"},
+    {file = "aiohttp-3.8.3.tar.gz", hash = "sha256:3828fb41b7203176b82fe5d699e0d845435f2374750a44b480ea6b930f6be269"},
+]
+
+[package.dependencies]
+aiosignal = ">=1.1.2"
+async-timeout = ">=4.0.0a3,<5.0"
+attrs = ">=17.3.0"
+charset-normalizer = ">=2.0,<3.0"
+frozenlist = ">=1.1.1"
+multidict = ">=4.5,<7.0"
+yarl = ">=1.0,<2.0"
+
+[package.extras]
+speedups = ["Brotli", "aiodns", "cchardet"]
+
+[[package]]
+name = "aiosignal"
+version = "1.3.1"
+description = "aiosignal: a list of registered asynchronous callbacks"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
+    {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
+]
+
+[package.dependencies]
+frozenlist = ">=1.1.0"
+
+[[package]]
+name = "appdirs"
+version = "1.4.4"
+description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
+    {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
+]
+
+[[package]]
+name = "async-timeout"
+version = "4.0.2"
+description = "Timeout context manager for asyncio programs"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
+    {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
+]
+
+[[package]]
+name = "attrs"
+version = "22.2.0"
+description = "Classes Without Boilerplate"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"},
+    {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"},
+]
+
+[package.extras]
+cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"]
+dev = ["attrs[docs,tests]"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"]
+tests = ["attrs[tests-no-zope]", "zope.interface"]
+tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"]
+
+[[package]]
+name = "beautifulsoup4"
+version = "4.11.2"
+description = "Screen-scraping library"
+category = "main"
+optional = false
+python-versions = ">=3.6.0"
+files = [
+    {file = "beautifulsoup4-4.11.2-py3-none-any.whl", hash = "sha256:0e79446b10b3ecb499c1556f7e228a53e64a2bfcebd455f370d8927cb5b59e39"},
+    {file = "beautifulsoup4-4.11.2.tar.gz", hash = "sha256:bc4bdda6717de5a2987436fb8d72f45dc90dd856bdfd512a1314ce90349a0106"},
+]
+
+[package.dependencies]
+soupsieve = ">1.2"
+
+[package.extras]
+html5lib = ["html5lib"]
+lxml = ["lxml"]
+
+[[package]]
+name = "brotli"
+version = "1.0.9"
+description = "Python bindings for the Brotli compression library"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "Brotli-1.0.9-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:268fe94547ba25b58ebc724680609c8ee3e5a843202e9a381f6f9c5e8bdb5c70"},
+    {file = "Brotli-1.0.9-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:c2415d9d082152460f2bd4e382a1e85aed233abc92db5a3880da2257dc7daf7b"},
+    {file = "Brotli-1.0.9-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5913a1177fc36e30fcf6dc868ce23b0453952c78c04c266d3149b3d39e1410d6"},
+    {file = "Brotli-1.0.9-cp27-cp27m-win32.whl", hash = "sha256:afde17ae04d90fbe53afb628f7f2d4ca022797aa093e809de5c3cf276f61bbfa"},
+    {file = "Brotli-1.0.9-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7cb81373984cc0e4682f31bc3d6be9026006d96eecd07ea49aafb06897746452"},
+    {file = "Brotli-1.0.9-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:db844eb158a87ccab83e868a762ea8024ae27337fc7ddcbfcddd157f841fdfe7"},
+    {file = "Brotli-1.0.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9744a863b489c79a73aba014df554b0e7a0fc44ef3f8a0ef2a52919c7d155031"},
+    {file = "Brotli-1.0.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a72661af47119a80d82fa583b554095308d6a4c356b2a554fdc2799bc19f2a43"},
+    {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ee83d3e3a024a9618e5be64648d6d11c37047ac48adff25f12fa4226cf23d1c"},
+    {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:19598ecddd8a212aedb1ffa15763dd52a388518c4550e615aed88dc3753c0f0c"},
+    {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:44bb8ff420c1d19d91d79d8c3574b8954288bdff0273bf788954064d260d7ab0"},
+    {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e23281b9a08ec338469268f98f194658abfb13658ee98e2b7f85ee9dd06caa91"},
+    {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3496fc835370da351d37cada4cf744039616a6db7d13c430035e901443a34daa"},
+    {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83bb06a0192cccf1eb8d0a28672a1b79c74c3a8a5f2619625aeb6f28b3a82bb"},
+    {file = "Brotli-1.0.9-cp310-cp310-win32.whl", hash = "sha256:26d168aac4aaec9a4394221240e8a5436b5634adc3cd1cdf637f6645cecbf181"},
+    {file = "Brotli-1.0.9-cp310-cp310-win_amd64.whl", hash = "sha256:622a231b08899c864eb87e85f81c75e7b9ce05b001e59bbfbf43d4a71f5f32b2"},
+    {file = "Brotli-1.0.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cc0283a406774f465fb45ec7efb66857c09ffefbe49ec20b7882eff6d3c86d3a"},
+    {file = "Brotli-1.0.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:11d3283d89af7033236fa4e73ec2cbe743d4f6a81d41bd234f24bf63dde979df"},
+    {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c1306004d49b84bd0c4f90457c6f57ad109f5cc6067a9664e12b7b79a9948ad"},
+    {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1375b5d17d6145c798661b67e4ae9d5496920d9265e2f00f1c2c0b5ae91fbde"},
+    {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cab1b5964b39607a66adbba01f1c12df2e55ac36c81ec6ed44f2fca44178bf1a"},
+    {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ed6a5b3d23ecc00ea02e1ed8e0ff9a08f4fc87a1f58a2530e71c0f48adf882f"},
+    {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cb02ed34557afde2d2da68194d12f5719ee96cfb2eacc886352cb73e3808fc5d"},
+    {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b3523f51818e8f16599613edddb1ff924eeb4b53ab7e7197f85cbc321cdca32f"},
+    {file = "Brotli-1.0.9-cp311-cp311-win32.whl", hash = "sha256:ba72d37e2a924717990f4d7482e8ac88e2ef43fb95491eb6e0d124d77d2a150d"},
+    {file = "Brotli-1.0.9-cp311-cp311-win_amd64.whl", hash = "sha256:3ffaadcaeafe9d30a7e4e1e97ad727e4f5610b9fa2f7551998471e3736738679"},
+    {file = "Brotli-1.0.9-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4"},
+    {file = "Brotli-1.0.9-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:6b2ae9f5f67f89aade1fab0f7fd8f2832501311c363a21579d02defa844d9296"},
+    {file = "Brotli-1.0.9-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:68715970f16b6e92c574c30747c95cf8cf62804569647386ff032195dc89a430"},
+    {file = "Brotli-1.0.9-cp35-cp35m-win32.whl", hash = "sha256:defed7ea5f218a9f2336301e6fd379f55c655bea65ba2476346340a0ce6f74a1"},
+    {file = "Brotli-1.0.9-cp35-cp35m-win_amd64.whl", hash = "sha256:88c63a1b55f352b02c6ffd24b15ead9fc0e8bf781dbe070213039324922a2eea"},
+    {file = "Brotli-1.0.9-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:503fa6af7da9f4b5780bb7e4cbe0c639b010f12be85d02c99452825dd0feef3f"},
+    {file = "Brotli-1.0.9-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:40d15c79f42e0a2c72892bf407979febd9cf91f36f495ffb333d1d04cebb34e4"},
+    {file = "Brotli-1.0.9-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:93130612b837103e15ac3f9cbacb4613f9e348b58b3aad53721d92e57f96d46a"},
+    {file = "Brotli-1.0.9-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87fdccbb6bb589095f413b1e05734ba492c962b4a45a13ff3408fa44ffe6479b"},
+    {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:6d847b14f7ea89f6ad3c9e3901d1bc4835f6b390a9c71df999b0162d9bb1e20f"},
+    {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:495ba7e49c2db22b046a53b469bbecea802efce200dffb69b93dd47397edc9b6"},
+    {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:4688c1e42968ba52e57d8670ad2306fe92e0169c6f3af0089be75bbac0c64a3b"},
+    {file = "Brotli-1.0.9-cp36-cp36m-win32.whl", hash = "sha256:61a7ee1f13ab913897dac7da44a73c6d44d48a4adff42a5701e3239791c96e14"},
+    {file = "Brotli-1.0.9-cp36-cp36m-win_amd64.whl", hash = "sha256:1c48472a6ba3b113452355b9af0a60da5c2ae60477f8feda8346f8fd48e3e87c"},
+    {file = "Brotli-1.0.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b78a24b5fd13c03ee2b7b86290ed20efdc95da75a3557cc06811764d5ad1126"},
+    {file = "Brotli-1.0.9-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:9d12cf2851759b8de8ca5fde36a59c08210a97ffca0eb94c532ce7b17c6a3d1d"},
+    {file = "Brotli-1.0.9-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6c772d6c0a79ac0f414a9f8947cc407e119b8598de7621f39cacadae3cf57d12"},
+    {file = "Brotli-1.0.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29d1d350178e5225397e28ea1b7aca3648fcbab546d20e7475805437bfb0a130"},
+    {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7bbff90b63328013e1e8cb50650ae0b9bac54ffb4be6104378490193cd60f85a"},
+    {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ec1947eabbaf8e0531e8e899fc1d9876c179fc518989461f5d24e2223395a9e3"},
+    {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12effe280b8ebfd389022aa65114e30407540ccb89b177d3fbc9a4f177c4bd5d"},
+    {file = "Brotli-1.0.9-cp37-cp37m-win32.whl", hash = "sha256:f909bbbc433048b499cb9db9e713b5d8d949e8c109a2a548502fb9aa8630f0b1"},
+    {file = "Brotli-1.0.9-cp37-cp37m-win_amd64.whl", hash = "sha256:97f715cf371b16ac88b8c19da00029804e20e25f30d80203417255d239f228b5"},
+    {file = "Brotli-1.0.9-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e16eb9541f3dd1a3e92b89005e37b1257b157b7256df0e36bd7b33b50be73bcb"},
+    {file = "Brotli-1.0.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:160c78292e98d21e73a4cc7f76a234390e516afcd982fa17e1422f7c6a9ce9c8"},
+    {file = "Brotli-1.0.9-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b663f1e02de5d0573610756398e44c130add0eb9a3fc912a09665332942a2efb"},
+    {file = "Brotli-1.0.9-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5b6ef7d9f9c38292df3690fe3e302b5b530999fa90014853dcd0d6902fb59f26"},
+    {file = "Brotli-1.0.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a674ac10e0a87b683f4fa2b6fa41090edfd686a6524bd8dedbd6138b309175c"},
+    {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e2d9e1cbc1b25e22000328702b014227737756f4b5bf5c485ac1d8091ada078b"},
+    {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b336c5e9cf03c7be40c47b5fd694c43c9f1358a80ba384a21969e0b4e66a9b17"},
+    {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:85f7912459c67eaab2fb854ed2bc1cc25772b300545fe7ed2dc03954da638649"},
+    {file = "Brotli-1.0.9-cp38-cp38-win32.whl", hash = "sha256:35a3edbe18e876e596553c4007a087f8bcfd538f19bc116917b3c7522fca0429"},
+    {file = "Brotli-1.0.9-cp38-cp38-win_amd64.whl", hash = "sha256:269a5743a393c65db46a7bb982644c67ecba4b8d91b392403ad8a861ba6f495f"},
+    {file = "Brotli-1.0.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2aad0e0baa04517741c9bb5b07586c642302e5fb3e75319cb62087bd0995ab19"},
+    {file = "Brotli-1.0.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5cb1e18167792d7d21e21365d7650b72d5081ed476123ff7b8cac7f45189c0c7"},
+    {file = "Brotli-1.0.9-cp39-cp39-manylinux1_i686.whl", hash = "sha256:16d528a45c2e1909c2798f27f7bf0a3feec1dc9e50948e738b961618e38b6a7b"},
+    {file = "Brotli-1.0.9-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389"},
+    {file = "Brotli-1.0.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bf919756d25e4114ace16a8ce91eb340eb57a08e2c6950c3cebcbe3dff2a5e7"},
+    {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e4c4e92c14a57c9bd4cb4be678c25369bf7a092d55fd0866f759e425b9660806"},
+    {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e48f4234f2469ed012a98f4b7874e7f7e173c167bed4934912a29e03167cf6b1"},
+    {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9ed4c92a0665002ff8ea852353aeb60d9141eb04109e88928026d3c8a9e5433c"},
+    {file = "Brotli-1.0.9-cp39-cp39-win32.whl", hash = "sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3"},
+    {file = "Brotli-1.0.9-cp39-cp39-win_amd64.whl", hash = "sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761"},
+    {file = "Brotli-1.0.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9749a124280a0ada4187a6cfd1ffd35c350fb3af79c706589d98e088c5044267"},
+    {file = "Brotli-1.0.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:73fd30d4ce0ea48010564ccee1a26bfe39323fde05cb34b5863455629db61dc7"},
+    {file = "Brotli-1.0.9-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02177603aaca36e1fd21b091cb742bb3b305a569e2402f1ca38af471777fb019"},
+    {file = "Brotli-1.0.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:76ffebb907bec09ff511bb3acc077695e2c32bc2142819491579a695f77ffd4d"},
+    {file = "Brotli-1.0.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b43775532a5904bc938f9c15b77c613cb6ad6fb30990f3b0afaea82797a402d8"},
+    {file = "Brotli-1.0.9-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5bf37a08493232fbb0f8229f1824b366c2fc1d02d64e7e918af40acd15f3e337"},
+    {file = "Brotli-1.0.9-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:330e3f10cd01da535c70d09c4283ba2df5fb78e915bea0a28becad6e2ac010be"},
+    {file = "Brotli-1.0.9-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e1abbeef02962596548382e393f56e4c94acd286bd0c5afba756cffc33670e8a"},
+    {file = "Brotli-1.0.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3148362937217b7072cf80a2dcc007f09bb5ecb96dae4617316638194113d5be"},
+    {file = "Brotli-1.0.9-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:336b40348269f9b91268378de5ff44dc6fbaa2268194f85177b53463d313842a"},
+    {file = "Brotli-1.0.9-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b09a16a1950b9ef495a0f8b9d0a87599a9d1f179e2d4ac014b2ec831f87e7"},
+    {file = "Brotli-1.0.9-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c8e521a0ce7cf690ca84b8cc2272ddaf9d8a50294fd086da67e517439614c755"},
+    {file = "Brotli-1.0.9.zip", hash = "sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438"},
+]
+
+[[package]]
+name = "certifi"
+version = "2022.12.7"
+description = "Python package for providing Mozilla's CA Bundle."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
+    {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
+]
+
+[[package]]
+name = "cfgv"
+version = "3.3.1"
+description = "Validate configuration and produce human readable error messages."
+category = "dev"
+optional = false
+python-versions = ">=3.6.1"
+files = [
+    {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
+    {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "2.1.1"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+category = "main"
+optional = false
+python-versions = ">=3.6.0"
+files = [
+    {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
+    {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
+]
+
+[package.extras]
+unicode-backport = ["unicodedata2"]
+
+[[package]]
+name = "click"
+version = "8.1.3"
+description = "Composable command line interface toolkit"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
+    {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+category = "main"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "coloredlogs"
+version = "15.0.1"
+description = "Colored terminal output for Python's logging module"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+    {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"},
+    {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"},
+]
+
+[package.dependencies]
+humanfriendly = ">=9.1"
+
+[package.extras]
+cron = ["capturer (>=2.4)"]
+
+[[package]]
+name = "construct"
+version = "2.8.8"
+description = "A powerful declarative parser/builder for binary data"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "construct-2.8.8.tar.gz", hash = "sha256:1b84b8147f6fd15bcf64b737c3e8ac5100811ad80c830cb4b2545140511c4157"},
+]
+
+[[package]]
+name = "crccheck"
+version = "1.3.0"
+description = "Calculation library for CRCs and checksums"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "crccheck-1.3.0-py3-none-any.whl", hash = "sha256:278ec53d6f417f197f7e0e29b485093d4879b0bc7a2d29b657ef8242e633b48d"},
+    {file = "crccheck-1.3.0.tar.gz", hash = "sha256:5384f437de610ade5c3d8689efc80ccd1267b8c452ade83411fd8500a1024f3e"},
+]
+
+[[package]]
+name = "cssutils"
+version = "2.6.0"
+description = "A CSS Cascading Style Sheets library for Python"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "cssutils-2.6.0-py3-none-any.whl", hash = "sha256:30c72f3a5c5951a11151640600aae7b3bf10e4c0d5c87f5bc505c2cd4a26e0c2"},
+    {file = "cssutils-2.6.0.tar.gz", hash = "sha256:f7dcd23c1cec909fdf3630de346e1413b7b2555936dec14ba2ebb9913bf0818e"},
+]
+
+[package.extras]
+docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"]
+testing = ["cssselect", "flake8 (<5)", "importlib-resources", "jaraco.test (>=5.1)", "lxml", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+
+[[package]]
+name = "distlib"
+version = "0.3.6"
+description = "Distribution utilities"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+    {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
+    {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"},
+]
+
+[[package]]
+name = "filelock"
+version = "3.9.0"
+description = "A platform independent file lock."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"},
+    {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"},
+]
+
+[package.extras]
+docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
+testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
+
+[[package]]
+name = "frozenlist"
+version = "1.3.3"
+description = "A list-like structure which implements collections.abc.MutableSequence"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"},
+    {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"},
+    {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"},
+    {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"},
+    {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"},
+    {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"},
+    {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"},
+    {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"},
+    {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"},
+    {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"},
+    {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"},
+    {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"},
+    {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"},
+    {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"},
+    {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"},
+    {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"},
+    {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"},
+    {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"},
+    {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"},
+    {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"},
+    {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"},
+    {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"},
+    {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"},
+    {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"},
+    {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"},
+    {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"},
+    {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"},
+    {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"},
+    {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"},
+    {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"},
+    {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"},
+    {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"},
+    {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"},
+    {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"},
+    {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"},
+    {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"},
+    {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"},
+    {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"},
+    {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"},
+    {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"},
+    {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"},
+    {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"},
+    {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"},
+    {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"},
+    {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"},
+    {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"},
+    {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"},
+    {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"},
+    {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"},
+    {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"},
+    {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"},
+    {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"},
+    {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"},
+    {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"},
+    {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"},
+    {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"},
+    {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"},
+    {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"},
+    {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"},
+    {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"},
+    {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"},
+    {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"},
+    {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"},
+    {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"},
+    {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"},
+    {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"},
+    {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"},
+    {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"},
+    {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"},
+    {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"},
+    {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"},
+    {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"},
+    {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"},
+    {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"},
+]
+
+[[package]]
+name = "humanfriendly"
+version = "10.0"
+description = "Human friendly output for text interfaces using Python"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+    {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"},
+    {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"},
+]
+
+[package.dependencies]
+pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""}
+
+[[package]]
+name = "identify"
+version = "2.5.17"
+description = "File identification library for Python"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "identify-2.5.17-py2.py3-none-any.whl", hash = "sha256:7d526dd1283555aafcc91539acc061d8f6f59adb0a7bba462735b0a318bff7ed"},
+    {file = "identify-2.5.17.tar.gz", hash = "sha256:93cc61a861052de9d4c541a7acb7e3dcc9c11b398a2144f6e52ae5285f5f4f06"},
+]
+
+[package.extras]
+license = ["ukkonen"]
+
+[[package]]
+name = "idna"
+version = "3.4"
+description = "Internationalized Domain Names in Applications (IDNA)"
+category = "main"
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
+    {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
+]
+
+[[package]]
+name = "iso8601"
+version = "1.1.0"
+description = "Simple module to parse ISO 8601 dates"
+category = "main"
+optional = false
+python-versions = ">=3.6.2,<4.0"
+files = [
+    {file = "iso8601-1.1.0-py3-none-any.whl", hash = "sha256:8400e90141bf792bce2634df533dc57e3bee19ea120a87bebcd3da89a58ad73f"},
+    {file = "iso8601-1.1.0.tar.gz", hash = "sha256:32811e7b81deee2063ea6d2e94f8819a86d1f3811e49d23623a41fa832bef03f"},
+]
+
+[[package]]
+name = "isort"
+version = "5.12.0"
+description = "A Python utility / library to sort Python imports."
+category = "dev"
+optional = false
+python-versions = ">=3.8.0"
+files = [
+    {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"},
+    {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"},
+]
+
+[package.extras]
+colors = ["colorama (>=0.4.3)"]
+pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"]
+plugins = ["setuptools"]
+requirements-deprecated-finder = ["pip-api", "pipreqs"]
+
+[[package]]
+name = "jsonpickle"
+version = "3.0.1"
+description = "Python library for serializing any arbitrary object graph into JSON"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "jsonpickle-3.0.1-py2.py3-none-any.whl", hash = "sha256:130d8b293ea0add3845de311aaba55e6d706d0bb17bc123bd2c8baf8a39ac77c"},
+    {file = "jsonpickle-3.0.1.tar.gz", hash = "sha256:032538804795e73b94ead410800ac387fdb6de98f8882ac957fcd247e3a85200"},
+]
+
+[package.extras]
+docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"]
+testing = ["ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (>=1.1.1)", "scikit-learn", "sqlalchemy"]
+testing-libs = ["simplejson", "ujson"]
+
+[[package]]
+name = "langcodes"
+version = "3.3.0"
+description = "Tools for labeling human languages with IETF language tags"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "langcodes-3.3.0-py3-none-any.whl", hash = "sha256:4d89fc9acb6e9c8fdef70bcdf376113a3db09b67285d9e1d534de6d8818e7e69"},
+    {file = "langcodes-3.3.0.tar.gz", hash = "sha256:794d07d5a28781231ac335a1561b8442f8648ca07cd518310aeb45d6f0807ef6"},
+]
+
+[package.dependencies]
+language-data = {version = ">=1.1,<2.0", optional = true, markers = "extra == \"data\""}
+
+[package.extras]
+data = ["language-data (>=1.1,<2.0)"]
+
+[[package]]
+name = "language-data"
+version = "1.1"
+description = "Supplementary data about languages used by the langcodes module"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "language_data-1.1-py3-none-any.whl", hash = "sha256:f7ba86fafe099ef213ef597eda483d5227b12446604a61f617122d6c925847d5"},
+    {file = "language_data-1.1.tar.gz", hash = "sha256:c1f5283c46bba68befa37505857a3f672497aba0c522b37d99367e911232455b"},
+]
+
+[package.dependencies]
+marisa-trie = ">=0.7.7,<0.8.0"
+
+[[package]]
+name = "lxml"
+version = "4.9.2"
+description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*"
+files = [
+    {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"},
+    {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"},
+    {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"},
+    {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"},
+    {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"},
+    {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"},
+    {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"},
+    {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"},
+    {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"},
+    {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"},
+    {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"},
+    {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"},
+    {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"},
+    {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"},
+    {file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"},
+    {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"},
+    {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"},
+    {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"},
+    {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"},
+    {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"},
+    {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"},
+    {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"},
+    {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"},
+    {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"},
+    {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"},
+    {file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"},
+    {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"},
+    {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"},
+    {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"},
+    {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"},
+    {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"},
+    {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"},
+    {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"},
+    {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"},
+    {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"},
+    {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"},
+    {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"},
+    {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"},
+    {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"},
+    {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"},
+    {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"},
+    {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"},
+    {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"},
+    {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"},
+    {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"},
+    {file = "lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"},
+    {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"},
+    {file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"},
+    {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"},
+    {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"},
+    {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"},
+    {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"},
+    {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"},
+    {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"},
+    {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"},
+    {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"},
+    {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"},
+    {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"},
+    {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"},
+    {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"},
+    {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"},
+    {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"},
+    {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"},
+    {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"},
+    {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"},
+    {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"},
+    {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"},
+    {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"},
+    {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"},
+    {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"},
+    {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"},
+    {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"},
+    {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"},
+    {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"},
+    {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"},
+    {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"},
+    {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"},
+]
+
+[package.extras]
+cssselect = ["cssselect (>=0.7)"]
+html5 = ["html5lib"]
+htmlsoup = ["BeautifulSoup4"]
+source = ["Cython (>=0.29.7)"]
+
+[[package]]
+name = "m3u8"
+version = "3.4.0"
+description = "Python m3u8 parser"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "m3u8-3.4.0-py3-none-any.whl", hash = "sha256:5016060622786abf4924bd7f0e04923c261762c999edaab073690d9031c1a8db"},
+    {file = "m3u8-3.4.0.tar.gz", hash = "sha256:6dddfb57ce485f67bf307e9581990db9a55614eeb2df4b89d6ae2c0ca7e525cd"},
+]
+
+[package.dependencies]
+iso8601 = "*"
+
+[[package]]
+name = "marisa-trie"
+version = "0.7.8"
+description = "Static memory-efficient and fast Trie-like structures for Python."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+    {file = "marisa-trie-0.7.8.tar.gz", hash = "sha256:aee3de5f2836074cfd803f1caf16f68390f262ef09cd7dc7d0e8aee9b6878643"},
+    {file = "marisa_trie-0.7.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f1cf9d5ead4471b149fdb93a1c84eddaa941d23e67b0782091adc222d198a87"},
+    {file = "marisa_trie-0.7.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:73296b4d6d8ce2f6bc3898fe84348756beddb10cb56442391d050bff135e9c4c"},
+    {file = "marisa_trie-0.7.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:782c1515caa603656e15779bc61d5db3b079fa4270ad77f464908796e0d940aa"},
+    {file = "marisa_trie-0.7.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49131e51aad530e4d47c716cef1bbef15a4e5b8f75bddfcdd7903f5043ef2331"},
+    {file = "marisa_trie-0.7.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45b0a38e015d0149141f028b8892ab518946b828c7931685199549294f5893ca"},
+    {file = "marisa_trie-0.7.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a537e0efff1ec880bc212390e97f1d35832a44bd78c96807ddb685d538875096"},
+    {file = "marisa_trie-0.7.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5c2a33ede2655f1a6fb840729128cb4bc48829108711f79b7a645b6c0c54b5c2"},
+    {file = "marisa_trie-0.7.8-cp310-cp310-win32.whl", hash = "sha256:7200cde8e2040811e98661a60463b296b76a6b224411f8899aa0850085e6af40"},
+    {file = "marisa_trie-0.7.8-cp310-cp310-win_amd64.whl", hash = "sha256:a432607bae139183c7251da7eb22f761440bc07d92eacc9e9f7dc0d87f70c495"},
+    {file = "marisa_trie-0.7.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a891d2841da153b98c6c7fbe0a89ea8edbc164bdc96a001f360bdcdd54e2070d"},
+    {file = "marisa_trie-0.7.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c9ab632c5caef23a59cd43c76ab59e325f9eadd1e9c8b1c34005b9756ae716ee"},
+    {file = "marisa_trie-0.7.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68087942e95acb5801f2a5e9a874aa57af27a4afb52aca81fe1cbe22b2a2fd38"},
+    {file = "marisa_trie-0.7.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef2c4a5023bb6ddbaf1803187b7fb3108e9955aa9c60564504e5f622517c9e7"},
+    {file = "marisa_trie-0.7.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24e873619f61bef6a87c669ae459b79d98822270e8a10b21fc52dddf2acc9a46"},
+    {file = "marisa_trie-0.7.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:34189c321f30cefb76a6b20c7f055b3f6cd0bc8378c16ba8b7283fd898bf4ac2"},
+    {file = "marisa_trie-0.7.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:396555d5f52dc86c65717052573fa2875e10f9e5dd014f825677beadcaec8248"},
+    {file = "marisa_trie-0.7.8-cp311-cp311-win32.whl", hash = "sha256:bfe649b02b6318bac572b86d9ddd8276c594411311f8e5ef2edc4bcd7285a06f"},
+    {file = "marisa_trie-0.7.8-cp311-cp311-win_amd64.whl", hash = "sha256:84991b52a187d09b269c4caefc8b857a81156c44997eec7eac0e2862d108cc20"},
+    {file = "marisa_trie-0.7.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0555104fe9f414abb12e967322a13df778b21958d1727470f4c8dedfde76a8f2"},
+    {file = "marisa_trie-0.7.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f96531013252bca14f7665f67aa642be113b6c348ada5e167ebf8db27b1551b5"},
+    {file = "marisa_trie-0.7.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed76391b132c6261cfb402c1a08679e635d09a0a142dae2c1744d816f103c7f"},
+    {file = "marisa_trie-0.7.8-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6232506b4d66da932f70cf359a4c5ba9e086228ccd97b602159e90c6ea53dab"},
+    {file = "marisa_trie-0.7.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34f927f2738d0b402b76821895254e6a164d5020042559f7d910f6632829cdfa"},
+    {file = "marisa_trie-0.7.8-cp36-cp36m-win32.whl", hash = "sha256:645908879ae8fcadfb51650fc176902b9e68eee9a8c4d4d8c682cf99ce3ff029"},
+    {file = "marisa_trie-0.7.8-cp36-cp36m-win_amd64.whl", hash = "sha256:a5bf2912810e135ce1e60a9b56a179ed62258306103bf5dd3186307f5c51b28f"},
+    {file = "marisa_trie-0.7.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bd86212d5037973deda057fc29d60e83dca05e68fa1e7ceaf014c513975c7a0d"},
+    {file = "marisa_trie-0.7.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f280f059be417cff81ac030db6a002f8a93093c7ca4555e570d43a24ed45514"},
+    {file = "marisa_trie-0.7.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ae35c696f3c5b57c5fe4f73725102f3fe884bc658b854d484dfe6d7e72c86f5"},
+    {file = "marisa_trie-0.7.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:524c02f398d361aaf85d8f7709b5ac6de68d020c588fb6c087fb171137643c13"},
+    {file = "marisa_trie-0.7.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:353113e811ccfa176fbb611b83671f0b3b40f46b3896b096c10e43f65d35916d"},
+    {file = "marisa_trie-0.7.8-cp37-cp37m-win32.whl", hash = "sha256:93172a7314d4d5993970dbafb746f23140d3abfa0d93cc174e766a302d125f7d"},
+    {file = "marisa_trie-0.7.8-cp37-cp37m-win_amd64.whl", hash = "sha256:579d69981b18f427bd8e540199c4de400a2bd4ae98e96c814a12cbf766e7029b"},
+    {file = "marisa_trie-0.7.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:08858920d0e09ca07d239252884fd72db2abb56c35ff463145ffc9c1277a4f34"},
+    {file = "marisa_trie-0.7.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a1b4d07158a3f9b4e84ee709a1fa86b9e11f3dd3b1e6fc45493195105a029545"},
+    {file = "marisa_trie-0.7.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f0359f392679774d1ff014f12efdf48da5d661e6241531ff55a3ae5a72a1137e"},
+    {file = "marisa_trie-0.7.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c1daaa8c38423fbd119db6654f92740d5ee40d1185a2bbc47afae6712b9ebfc"},
+    {file = "marisa_trie-0.7.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:266bf4b6e00b4cff2b8618533919d38b883127f4e5c0af0e0bd78a042093dd99"},
+    {file = "marisa_trie-0.7.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fd7e71d8d85d04d2a5d23611663b2d322b60c98c2edab7e9ef9a2019f7435c5b"},
+    {file = "marisa_trie-0.7.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:66b13382be3c277f32143e6c814344118721c7954b2bfb57f5cfe93d17e63c9e"},
+    {file = "marisa_trie-0.7.8-cp38-cp38-win32.whl", hash = "sha256:d75b5d642b3d1e47a0ab649fb5eb6bf3681a5e1d3793c8ea7546586ab72731fd"},
+    {file = "marisa_trie-0.7.8-cp38-cp38-win_amd64.whl", hash = "sha256:07c14c88fde8a0ac55139f9fe763dc0deabc4b7950047719ae986ca62135e1fb"},
+    {file = "marisa_trie-0.7.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c8df5238c7b29498f4ee24fd3ee25e0129b3c56beaed1dd1628bce0ebac8ec8c"},
+    {file = "marisa_trie-0.7.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db2bdc480d83a1a566b3a64027f9fb34eae98bfe45788c41a45e99d430cbf48a"},
+    {file = "marisa_trie-0.7.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:80b22bdbebc3e6677e83db1352e4f6d478364107874c031a34a961437ead4e93"},
+    {file = "marisa_trie-0.7.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6412c816be723a0f11dd41225a30a08182cf2b3b7b3c882c44335003bde47003"},
+    {file = "marisa_trie-0.7.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fcdb7f802db43857df3825c4c11acd14bb380deb961ff91e260950886531400"},
+    {file = "marisa_trie-0.7.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5cf04156f38dc46f0f14423f98559c5def7d83f3a30f8a580c27ad3b0311ce76"},
+    {file = "marisa_trie-0.7.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c53b1d02f4974ecb52c6e8c6f4f1dbf3a15e79bc3861f4ad48b14e4e77c82342"},
+    {file = "marisa_trie-0.7.8-cp39-cp39-win32.whl", hash = "sha256:75317347f20bf05ab2ce5537a90989b1439b5e1752f558aad7b5d6b43194429b"},
+    {file = "marisa_trie-0.7.8-cp39-cp39-win_amd64.whl", hash = "sha256:82ba3caed5acfdff6a23d6881cc1927776b7320415261b6b24f48d0a190ab890"},
+    {file = "marisa_trie-0.7.8-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:43abd082a21295b04859705b088d15acac8956587557680850e3149a79e36789"},
+    {file = "marisa_trie-0.7.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0d891f0138e5aecc9c5afb7b0a57c758e22c5b5c7c0edb0a1f21ae933259815"},
+    {file = "marisa_trie-0.7.8-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9031184fe2215b591a6cdefe5d6d4901806fd7359e813c485a7ff25ea69d603c"},
+    {file = "marisa_trie-0.7.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8ccb3ba8a2a589b8a7aed693d564f20a6d3bbbb552975f904ba311cea6b85706"},
+    {file = "marisa_trie-0.7.8-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f49a2cba047e643e5cd295d75de59f1df710c5e919cd376ac06ead513439881b"},
+    {file = "marisa_trie-0.7.8-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d37ea556bb99d9b0dfbe8fd6bdb17e91b91d04531be9e3b8b1b7b7f76ea55637"},
+    {file = "marisa_trie-0.7.8-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55a5aea422a4c0c9ef143d3703323f2a43b4a5315fc90bbb6e9ff18544b8d931"},
+    {file = "marisa_trie-0.7.8-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d19f363b981fe9b4a302060a8088fd1f00906bc315db24f5d6726b5c309cc47e"},
+    {file = "marisa_trie-0.7.8-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e0d51c31fb41b6bc76c1abb7cf2d63a6e0ba7feffc96ea3d92b4d5084d71721a"},
+    {file = "marisa_trie-0.7.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71ed6286e9d593dac035b8516e7ec35a1b54a7d9c6451a9319e918a8ef722714"},
+    {file = "marisa_trie-0.7.8-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc1c1dca06c0fdcca5bb261a09eca2b3bcf41eaeb467caf600ac68e77d3ed2c0"},
+    {file = "marisa_trie-0.7.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:891be5569cd6e3a059c2de53d63251aaaef513d68e8d2181f71378f9cb69e1ab"},
+]
+
+[package.dependencies]
+setuptools = "*"
+
+[package.extras]
+test = ["hypothesis", "pytest", "readme-renderer"]
+
+[[package]]
+name = "multidict"
+version = "6.0.4"
+description = "multidict implementation"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"},
+    {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"},
+    {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"},
+    {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"},
+    {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"},
+    {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"},
+    {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"},
+    {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"},
+    {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"},
+    {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"},
+    {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"},
+    {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"},
+    {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"},
+    {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"},
+    {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"},
+    {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"},
+    {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"},
+    {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"},
+    {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"},
+    {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"},
+    {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"},
+    {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"},
+    {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"},
+    {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"},
+    {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"},
+    {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"},
+    {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"},
+    {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"},
+    {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"},
+    {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"},
+    {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"},
+    {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"},
+    {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"},
+    {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"},
+    {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"},
+    {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"},
+    {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"},
+    {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"},
+    {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"},
+    {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"},
+    {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"},
+    {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"},
+    {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"},
+    {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"},
+    {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"},
+    {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"},
+    {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"},
+    {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"},
+    {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"},
+    {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"},
+    {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"},
+    {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"},
+    {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"},
+    {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"},
+    {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"},
+    {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"},
+    {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"},
+    {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"},
+    {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"},
+    {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"},
+    {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"},
+    {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"},
+    {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"},
+    {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"},
+    {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"},
+    {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"},
+    {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"},
+    {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"},
+    {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"},
+    {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"},
+    {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"},
+    {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"},
+    {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"},
+    {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"},
+]
+
+[[package]]
+name = "mypy"
+version = "0.991"
+description = "Optional static typing for Python"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"},
+    {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"},
+    {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"},
+    {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"},
+    {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"},
+    {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"},
+    {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"},
+    {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"},
+    {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"},
+    {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"},
+    {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"},
+    {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"},
+    {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"},
+    {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"},
+    {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"},
+    {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"},
+    {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"},
+    {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"},
+    {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"},
+    {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"},
+    {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"},
+    {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"},
+    {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"},
+    {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"},
+    {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"},
+    {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"},
+    {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"},
+    {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"},
+    {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"},
+    {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"},
+]
+
+[package.dependencies]
+mypy-extensions = ">=0.4.3"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = ">=3.10"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+install-types = ["pip"]
+python2 = ["typed-ast (>=1.4.0,<2)"]
+reports = ["lxml"]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+category = "dev"
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
+[[package]]
+name = "mypy-protobuf"
+version = "3.3.0"
+description = "Generate mypy stub files from protobuf specs"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "mypy-protobuf-3.3.0.tar.gz", hash = "sha256:24f3b0aecb06656e983f58e07c732a90577b9d7af3e1066fc2b663bbf0370248"},
+    {file = "mypy_protobuf-3.3.0-py3-none-any.whl", hash = "sha256:15604f6943b16c05db646903261e3b3e775cf7f7990b7c37b03d043a907b650d"},
+]
+
+[package.dependencies]
+protobuf = ">=3.19.4"
+types-protobuf = ">=3.19.12"
+
+[[package]]
+name = "nodeenv"
+version = "1.7.0"
+description = "Node.js virtual environment builder"
+category = "dev"
+optional = false
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
+files = [
+    {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"},
+    {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"},
+]
+
+[package.dependencies]
+setuptools = "*"
+
+[[package]]
+name = "platformdirs"
+version = "2.6.2"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"},
+    {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"},
+]
+
+[package.extras]
+docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
+
+[[package]]
+name = "pproxy"
+version = "2.7.8"
+description = "Proxy server that can tunnel among remote servers by regex rules."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "pproxy-2.7.8-py3-none-any.whl", hash = "sha256:9f300bae5288c7c7f56be70d6275571efd2b4862f306d25bdace3c3537fb53a7"},
+    {file = "pproxy-2.7.8.tar.gz", hash = "sha256:fab73cc13b2bb10c9fc4d9c1a8ec8011a354c9bcbffa446d91229e13c5d996c8"},
+]
+
+[package.extras]
+accelerated = ["pycryptodome (>=3.7.2)", "uvloop (>=0.13.0)"]
+daemon = ["python-daemon (>=2.2.3)"]
+quic = ["aioquic (>=0.9.7)"]
+sshtunnel = ["asyncssh (>=2.5.0)"]
+
+[[package]]
+name = "pre-commit"
+version = "3.0.4"
+description = "A framework for managing and maintaining multi-language pre-commit hooks."
+category = "dev"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pre_commit-3.0.4-py2.py3-none-any.whl", hash = "sha256:9e3255edb0c9e7fe9b4f328cb3dc86069f8fdc38026f1bf521018a05eaf4d67b"},
+    {file = "pre_commit-3.0.4.tar.gz", hash = "sha256:bc4687478d55578c4ac37272fe96df66f73d9b5cf81be6f28627d4e712e752d5"},
+]
+
+[package.dependencies]
+cfgv = ">=2.0.0"
+identify = ">=1.0.0"
+nodeenv = ">=0.11.1"
+pyyaml = ">=5.1"
+virtualenv = ">=20.10.0"
+
+[[package]]
+name = "protobuf"
+version = "4.21.6"
+description = ""
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "protobuf-4.21.6-cp310-abi3-win32.whl", hash = "sha256:49f88d56a9180dbb7f6199c920f5bb5c1dd0172f672983bb281298d57c2ac8eb"},
+    {file = "protobuf-4.21.6-cp310-abi3-win_amd64.whl", hash = "sha256:7a6cc8842257265bdfd6b74d088b829e44bcac3cca234c5fdd6052730017b9ea"},
+    {file = "protobuf-4.21.6-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ba596b9ffb85c909fcfe1b1a23136224ed678af3faf9912d3fa483d5f9813c4e"},
+    {file = "protobuf-4.21.6-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4143513c766db85b9d7c18dbf8339673c8a290131b2a0fe73855ab20770f72b0"},
+    {file = "protobuf-4.21.6-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:b6cea204865595a92a7b240e4b65bcaaca3ad5d2ce25d9db3756eba06041138e"},
+    {file = "protobuf-4.21.6-cp37-cp37m-win32.whl", hash = "sha256:9666da97129138585b26afcb63ad4887f602e169cafe754a8258541c553b8b5d"},
+    {file = "protobuf-4.21.6-cp37-cp37m-win_amd64.whl", hash = "sha256:308173d3e5a3528787bb8c93abea81d5a950bdce62840d9760effc84127fb39c"},
+    {file = "protobuf-4.21.6-cp38-cp38-win32.whl", hash = "sha256:aa29113ec901281f29d9d27b01193407a98aa9658b8a777b0325e6d97149f5ce"},
+    {file = "protobuf-4.21.6-cp38-cp38-win_amd64.whl", hash = "sha256:8f9e60f7d44592c66e7b332b6a7b4b6e8d8b889393c79dbc3a91f815118f8eac"},
+    {file = "protobuf-4.21.6-cp39-cp39-win32.whl", hash = "sha256:80e6540381080715fddac12690ee42d087d0d17395f8d0078dfd6f1181e7be4c"},
+    {file = "protobuf-4.21.6-cp39-cp39-win_amd64.whl", hash = "sha256:77b355c8604fe285536155286b28b0c4cbc57cf81b08d8357bf34829ea982860"},
+    {file = "protobuf-4.21.6-py2.py3-none-any.whl", hash = "sha256:07a0bb9cc6114f16a39c866dc28b6e3d96fa4ffb9cc1033057412547e6e75cb9"},
+    {file = "protobuf-4.21.6-py3-none-any.whl", hash = "sha256:c7c864148a237f058c739ae7a05a2b403c0dfa4ce7d1f3e5213f352ad52d57c6"},
+    {file = "protobuf-4.21.6.tar.gz", hash = "sha256:6b1040a5661cd5f6e610cbca9cfaa2a17d60e2bb545309bc1b278bb05be44bdd"},
+]
+
+[[package]]
+name = "pycaption"
+version = "2.1.1"
+description = "Closed caption converter"
+category = "main"
+optional = false
+python-versions = ">=3.6,<4.0"
+files = [
+    {file = "pycaption-2.1.1-py3-none-any.whl", hash = "sha256:c1c1c0e6968366f78b612763c429120efa88f3fa09e85833b739db92ec94d6ee"},
+    {file = "pycaption-2.1.1.tar.gz", hash = "sha256:07eb8887c5933cf78d554df23fc14419cf0e8a763432925bf5d1cb0be9e18b82"},
+]
+
+[package.dependencies]
+beautifulsoup4 = ">=4.8.1"
+cssutils = ">=2.0.0"
+lxml = ">=4.9.1"
+
+[package.extras]
+dev = ["pytest", "pytest-lazy-fixture"]
+transcript = ["nltk"]
+
+[[package]]
+name = "pycryptodome"
+version = "3.17"
+description = "Cryptographic library for Python"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+    {file = "pycryptodome-3.17-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:2c5631204ebcc7ae33d11c43037b2dafe25e2ab9c1de6448eb6502ac69c19a56"},
+    {file = "pycryptodome-3.17-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:04779cc588ad8f13c80a060b0b1c9d1c203d051d8a43879117fe6b8aaf1cd3fa"},
+    {file = "pycryptodome-3.17-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:f812d58c5af06d939b2baccdda614a3ffd80531a26e5faca2c9f8b1770b2b7af"},
+    {file = "pycryptodome-3.17-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:9453b4e21e752df8737fdffac619e93c9f0ec55ead9a45df782055eb95ef37d9"},
+    {file = "pycryptodome-3.17-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:121d61663267f73692e8bde5ec0d23c9146465a0d75cad75c34f75c752527b01"},
+    {file = "pycryptodome-3.17-cp27-cp27m-win32.whl", hash = "sha256:ba2d4fcb844c6ba5df4bbfee9352ad5352c5ae939ac450e06cdceff653280450"},
+    {file = "pycryptodome-3.17-cp27-cp27m-win_amd64.whl", hash = "sha256:87e2ca3aa557781447428c4b6c8c937f10ff215202ab40ece5c13a82555c10d6"},
+    {file = "pycryptodome-3.17-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f44c0d28716d950135ff21505f2c764498eda9d8806b7c78764165848aa419bc"},
+    {file = "pycryptodome-3.17-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:5a790bc045003d89d42e3b9cb3cc938c8561a57a88aaa5691512e8540d1ae79c"},
+    {file = "pycryptodome-3.17-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:d086d46774e27b280e4cece8ab3d87299cf0d39063f00f1e9290d096adc5662a"},
+    {file = "pycryptodome-3.17-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:5587803d5b66dfd99e7caa31ed91fba0fdee3661c5d93684028ad6653fce725f"},
+    {file = "pycryptodome-3.17-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:e7debd9c439e7b84f53be3cf4ba8b75b3d0b6e6015212355d6daf44ac672e210"},
+    {file = "pycryptodome-3.17-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ca1ceb6303be1282148f04ac21cebeebdb4152590842159877778f9cf1634f09"},
+    {file = "pycryptodome-3.17-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:dc22cc00f804485a3c2a7e2010d9f14a705555f67020eb083e833cabd5bd82e4"},
+    {file = "pycryptodome-3.17-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80ea8333b6a5f2d9e856ff2293dba2e3e661197f90bf0f4d5a82a0a6bc83a626"},
+    {file = "pycryptodome-3.17-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c133f6721fba313722a018392a91e3c69d3706ae723484841752559e71d69dc6"},
+    {file = "pycryptodome-3.17-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:333306eaea01fde50a73c4619e25631e56c4c61bd0fb0a2346479e67e3d3a820"},
+    {file = "pycryptodome-3.17-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:1a30f51b990994491cec2d7d237924e5b6bd0d445da9337d77de384ad7f254f9"},
+    {file = "pycryptodome-3.17-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:909e36a43fe4a8a3163e9c7fc103867825d14a2ecb852a63d3905250b308a4e5"},
+    {file = "pycryptodome-3.17-cp35-abi3-win32.whl", hash = "sha256:a3228728a3808bc9f18c1797ec1179a0efb5068c817b2ffcf6bcd012494dffb2"},
+    {file = "pycryptodome-3.17-cp35-abi3-win_amd64.whl", hash = "sha256:9ec565e89a6b400eca814f28d78a9ef3f15aea1df74d95b28b7720739b28f37f"},
+    {file = "pycryptodome-3.17-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:e1819b67bcf6ca48341e9b03c2e45b1c891fa8eb1a8458482d14c2805c9616f2"},
+    {file = "pycryptodome-3.17-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:f8e550caf52472ae9126953415e4fc554ab53049a5691c45b8816895c632e4d7"},
+    {file = "pycryptodome-3.17-pp27-pypy_73-win32.whl", hash = "sha256:afbcdb0eda20a0e1d44e3a1ad6d4ec3c959210f4b48cabc0e387a282f4c7deb8"},
+    {file = "pycryptodome-3.17-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a74f45aee8c5cc4d533e585e0e596e9f78521e1543a302870a27b0ae2106381e"},
+    {file = "pycryptodome-3.17-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38bbd6717eac084408b4094174c0805bdbaba1f57fc250fd0309ae5ec9ed7e09"},
+    {file = "pycryptodome-3.17-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f68d6c8ea2974a571cacb7014dbaada21063a0375318d88ac1f9300bc81e93c3"},
+    {file = "pycryptodome-3.17-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8198f2b04c39d817b206ebe0db25a6653bb5f463c2319d6f6d9a80d012ac1e37"},
+    {file = "pycryptodome-3.17-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3a232474cd89d3f51e4295abe248a8b95d0332d153bf46444e415409070aae1e"},
+    {file = "pycryptodome-3.17-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4992ec965606054e8326e83db1c8654f0549cdb26fce1898dc1a20bc7684ec1c"},
+    {file = "pycryptodome-3.17-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53068e33c74f3b93a8158dacaa5d0f82d254a81b1002e0cd342be89fcb3433eb"},
+    {file = "pycryptodome-3.17-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:74794a2e2896cd0cf56fdc9db61ef755fa812b4a4900fa46c49045663a92b8d0"},
+    {file = "pycryptodome-3.17.tar.gz", hash = "sha256:bce2e2d8e82fcf972005652371a3e8731956a0c1fbb719cc897943b3695ad91b"},
+]
+
+[[package]]
+name = "pycryptodomex"
+version = "3.17"
+description = "Cryptographic library for Python"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+    {file = "pycryptodomex-3.17-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:12056c38e49d972f9c553a3d598425f8a1c1d35b2e4330f89d5ff1ffb70de041"},
+    {file = "pycryptodomex-3.17-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab33c2d9f275e05e235dbca1063753b5346af4a5cac34a51fa0da0d4edfb21d7"},
+    {file = "pycryptodomex-3.17-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:caa937ff29d07a665dfcfd7a84f0d4207b2ebf483362fa9054041d67fdfacc20"},
+    {file = "pycryptodomex-3.17-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:db23d7341e21b273d2440ec6faf6c8b1ca95c8894da612e165be0b89a8688340"},
+    {file = "pycryptodomex-3.17-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:f854c8476512cebe6a8681cc4789e4fcff6019c17baa0fd72b459155dc605ab4"},
+    {file = "pycryptodomex-3.17-cp27-cp27m-win32.whl", hash = "sha256:a57e3257bacd719769110f1f70dd901c5b6955e9596ad403af11a3e6e7e3311c"},
+    {file = "pycryptodomex-3.17-cp27-cp27m-win_amd64.whl", hash = "sha256:d38ab9e53b1c09608ba2d9b8b888f1e75d6f66e2787e437adb1fecbffec6b112"},
+    {file = "pycryptodomex-3.17-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:3c2516b42437ae6c7a29ef3ddc73c8d4714e7b6df995b76be4695bbe4b3b5cd2"},
+    {file = "pycryptodomex-3.17-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:5c23482860302d0d9883404eaaa54b0615eefa5274f70529703e2c43cc571827"},
+    {file = "pycryptodomex-3.17-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:7a8dc3ee7a99aae202a4db52de5a08aa4d01831eb403c4d21da04ec2f79810db"},
+    {file = "pycryptodomex-3.17-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:7cc28dd33f1f3662d6da28ead4f9891035f63f49d30267d3b41194c8778997c8"},
+    {file = "pycryptodomex-3.17-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:2d4d395f109faba34067a08de36304e846c791808524614c731431ee048fe70a"},
+    {file = "pycryptodomex-3.17-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:55eed98b4150a744920597c81b3965b632038781bab8a08a12ea1d004213c600"},
+    {file = "pycryptodomex-3.17-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:7fa0b52df90343fafe319257b31d909be1d2e8852277fb0376ba89d26d2921db"},
+    {file = "pycryptodomex-3.17-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78f0ddd4adc64baa39b416f3637aaf99f45acb0bcdc16706f0cc7ebfc6f10109"},
+    {file = "pycryptodomex-3.17-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4fa037078e92c7cc49f6789a8bac3de06856740bb2038d05f2d9a2e4b165d59"},
+    {file = "pycryptodomex-3.17-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:88b0d5bb87eaf2a31e8a759302b89cf30c97f2f8ca7d83b8c9208abe8acb447a"},
+    {file = "pycryptodomex-3.17-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:6feedf4b0e36b395329b4186a805f60f900129cdf0170e120ecabbfcb763995d"},
+    {file = "pycryptodomex-3.17-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7a6651a07f67c28b6e978d63aa3a3fccea0feefed9a8453af3f7421a758461b7"},
+    {file = "pycryptodomex-3.17-cp35-abi3-win32.whl", hash = "sha256:32e764322e902bbfac49ca1446604d2839381bbbdd5a57920c9daaf2e0b778df"},
+    {file = "pycryptodomex-3.17-cp35-abi3-win_amd64.whl", hash = "sha256:4b51e826f0a04d832eda0790bbd0665d9bfe73e5a4d8ea93b6a9b38beeebe935"},
+    {file = "pycryptodomex-3.17-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:d4cf0128da167562c49b0e034f09e9cedd733997354f2314837c2fa461c87bb1"},
+    {file = "pycryptodomex-3.17-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:c92537b596bd5bffb82f8964cabb9fef1bca8a28a9e0a69ffd3ec92a4a7ad41b"},
+    {file = "pycryptodomex-3.17-pp27-pypy_73-win32.whl", hash = "sha256:599bb4ae4bbd614ca05f49bd4e672b7a250b80b13ae1238f05fd0f09d87ed80a"},
+    {file = "pycryptodomex-3.17-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4c4674f4b040321055c596aac926d12f7f6859dfe98cd12f4d9453b43ab6adc8"},
+    {file = "pycryptodomex-3.17-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67a3648025e4ddb72d43addab764336ba2e670c8377dba5dd752e42285440d31"},
+    {file = "pycryptodomex-3.17-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40e8a11f578bd0851b02719c862d55d3ee18d906c8b68a9c09f8c564d6bb5b92"},
+    {file = "pycryptodomex-3.17-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:23d83b610bd97704f0cd3acc48d99b76a15c8c1540d8665c94d514a49905bad7"},
+    {file = "pycryptodomex-3.17-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd29d35ac80755e5c0a99d96b44fb9abbd7e871849581ea6a4cb826d24267537"},
+    {file = "pycryptodomex-3.17-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64b876d57cb894b31056ad8dd6a6ae1099b117ae07a3d39707221133490e5715"},
+    {file = "pycryptodomex-3.17-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee8bf4fdcad7d66beb744957db8717afc12d176e3fd9c5d106835133881a049b"},
+    {file = "pycryptodomex-3.17-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c84689c73358dfc23f9fdcff2cb9e7856e65e2ce3b5ed8ff630d4c9bdeb1867b"},
+    {file = "pycryptodomex-3.17.tar.gz", hash = "sha256:0af93aad8d62e810247beedef0261c148790c52f3cd33643791cc6396dd217c1"},
+]
+
+[[package]]
+name = "pyjwt"
+version = "2.6.0"
+description = "JSON Web Token implementation in Python"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"},
+    {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"},
+]
+
+[package.extras]
+crypto = ["cryptography (>=3.4.0)"]
+dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
+docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
+tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
+
+[[package]]
+name = "pymediainfo"
+version = "6.0.1"
+description = "A Python wrapper for the mediainfo library."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "pymediainfo-6.0.1-py3-none-macosx_10_15_x86_64.whl", hash = "sha256:81165e895e1e362fa11c128ce2bc976cb8a74224f96f309a88ee047106041b0a"},
+    {file = "pymediainfo-6.0.1-py3-none-win32.whl", hash = "sha256:bb3a48ac9706626fd2fa7881f4271728459a1c9a082917deb0c7dd343d8a1be5"},
+    {file = "pymediainfo-6.0.1-py3-none-win_amd64.whl", hash = "sha256:c38e79d4d2062732ae555b564c3cac18a6de4f36e033066c617f386cf5e77564"},
+    {file = "pymediainfo-6.0.1.tar.gz", hash = "sha256:96e04bac0dfcb726bed70c314b1219121c4b9344c66a98f426ce27d7f9abffb0"},
+]
+
+[[package]]
+name = "pymp4"
+version = "1.2.0"
+description = "A Python parser for MP4 boxes"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "pymp4-1.2.0.tar.gz", hash = "sha256:4a3d2e0838cfe28cd3dc64f45379e16d91b0212192f87a3e28f3804372727456"},
+]
+
+[package.dependencies]
+construct = "2.8.8"
+
+[[package]]
+name = "pymysql"
+version = "1.0.2"
+description = "Pure Python MySQL Driver"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "PyMySQL-1.0.2-py3-none-any.whl", hash = "sha256:41fc3a0c5013d5f039639442321185532e3e2c8924687abe6537de157d403641"},
+    {file = "PyMySQL-1.0.2.tar.gz", hash = "sha256:816927a350f38d56072aeca5dfb10221fe1dc653745853d30a216637f5d7ad36"},
+]
+
+[package.extras]
+ed25519 = ["PyNaCl (>=1.4.0)"]
+rsa = ["cryptography"]
+
+[[package]]
+name = "pyreadline3"
+version = "3.4.1"
+description = "A python implementation of GNU readline."
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"},
+    {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"},
+]
+
+[[package]]
+name = "pysocks"
+version = "1.7.1"
+description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+    {file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"},
+    {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"},
+    {file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"},
+]
+
+[[package]]
+name = "pywidevine"
+version = "1.6.0"
+description = "Widevine CDM (Content Decryption Module) implementation in Python."
+category = "main"
+optional = false
+python-versions = ">=3.7,<3.12"
+files = [
+    {file = "pywidevine-1.6.0-py3-none-any.whl", hash = "sha256:54789f29453794424d6627c7fdda3bcf4caa57c8fcf070245332b8ecbd64cc61"},
+    {file = "pywidevine-1.6.0.tar.gz", hash = "sha256:7dd7922f6130bfabc77f0ad8761a2b26bb086029cb78f07abd9b23b6a3e0d6c3"},
+]
+
+[package.dependencies]
+aiohttp = {version = ">=3.8.1,<4.0.0", optional = true, markers = "extra == \"serve\""}
+click = ">=8.1.3,<9.0.0"
+lxml = ">=4.9.2"
+protobuf = "4.21.6"
+pycryptodome = ">=3.15.0,<4.0.0"
+pymp4 = ">=1.2.0,<2.0.0"
+PyYAML = {version = ">=6.0,<7.0", optional = true, markers = "extra == \"serve\""}
+requests = ">=2.28.1,<3.0.0"
+Unidecode = ">=1.3.4,<2.0.0"
+
+[package.extras]
+serve = ["PyYAML (>=6.0,<7.0)", "aiohttp (>=3.8.1,<4.0.0)"]
+
+[[package]]
+name = "pyyaml"
+version = "6.0"
+description = "YAML parser and emitter for Python"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
+    {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
+    {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
+    {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
+    {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
+    {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
+    {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
+    {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
+    {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
+    {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
+    {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
+    {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
+    {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
+    {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
+    {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
+    {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
+    {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
+    {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
+    {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
+    {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
+    {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
+    {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
+    {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
+    {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
+    {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
+    {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
+    {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
+    {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
+    {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
+    {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
+    {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
+    {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
+    {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
+    {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
+    {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
+    {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
+    {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
+    {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
+    {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
+    {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
+]
+
+[[package]]
+name = "requests"
+version = "2.28.2"
+description = "Python HTTP for Humans."
+category = "main"
+optional = false
+python-versions = ">=3.7, <4"
+files = [
+    {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"},
+    {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"},
+]
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset-normalizer = ">=2,<4"
+idna = ">=2.5,<4"
+PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7", optional = true, markers = "extra == \"socks\""}
+urllib3 = ">=1.21.1,<1.27"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+
+[[package]]
+name = "ruamel-yaml"
+version = "0.17.21"
+description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order"
+category = "main"
+optional = false
+python-versions = ">=3"
+files = [
+    {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"},
+    {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"},
+]
+
+[package.dependencies]
+"ruamel.yaml.clib" = {version = ">=0.2.6", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""}
+
+[package.extras]
+docs = ["ryd"]
+jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]
+
+[[package]]
+name = "ruamel-yaml-clib"
+version = "0.2.7"
+description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
+category = "main"
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71"},
+    {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7"},
+    {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80"},
+    {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab"},
+    {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win32.whl", hash = "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231"},
+    {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"},
+    {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"},
+    {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_12_6_arm64.whl", hash = "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5"},
+    {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"},
+    {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"},
+    {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"},
+    {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"},
+    {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763"},
+    {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win32.whl", hash = "sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e"},
+    {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win_amd64.whl", hash = "sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646"},
+    {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f"},
+    {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0"},
+    {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282"},
+    {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7"},
+    {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win32.whl", hash = "sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93"},
+    {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b"},
+    {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb"},
+    {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307"},
+    {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697"},
+    {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b"},
+    {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win32.whl", hash = "sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac"},
+    {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f"},
+    {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9"},
+    {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1"},
+    {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640"},
+    {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b"},
+    {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win32.whl", hash = "sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8"},
+    {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5"},
+    {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"},
+]
+
+[[package]]
+name = "setuptools"
+version = "67.1.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "setuptools-67.1.0-py3-none-any.whl", hash = "sha256:a7687c12b444eaac951ea87a9627c4f904ac757e7abdc5aac32833234af90378"},
+    {file = "setuptools-67.1.0.tar.gz", hash = "sha256:e261cdf010c11a41cb5cb5f1bf3338a7433832029f559a6a7614bd42a967c300"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+
+[[package]]
+name = "sortedcontainers"
+version = "2.4.0"
+description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
+    {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
+]
+
+[[package]]
+name = "soupsieve"
+version = "2.3.2.post1"
+description = "A modern CSS selector implementation for Beautiful Soup."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"},
+    {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"},
+]
+
+[[package]]
+name = "subtitle-filter"
+version = "1.4.4"
+description = "Filter SDH entries and more from .srt files"
+category = "main"
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "subtitle-filter-1.4.4.tar.gz", hash = "sha256:7f414532de7ffc54ea3129f9ee1528c642feabbc8c5d5836bdddb47a98165158"},
+    {file = "subtitle_filter-1.4.4-py3-none-any.whl", hash = "sha256:6d806a6dcd1989e2c97f4142281393f89a0e2e83b1d72192f28e239a4cc5ca8e"},
+]
+
+[[package]]
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+    {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
+
+[[package]]
+name = "tqdm"
+version = "4.64.1"
+description = "Fast, Extensible Progress Meter"
+category = "main"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
+files = [
+    {file = "tqdm-4.64.1-py2.py3-none-any.whl", hash = "sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1"},
+    {file = "tqdm-4.64.1.tar.gz", hash = "sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+dev = ["py-make (>=0.1.0)", "twine", "wheel"]
+notebook = ["ipywidgets (>=6)"]
+slack = ["slack-sdk"]
+telegram = ["requests"]
+
+[[package]]
+name = "types-protobuf"
+version = "3.20.4.6"
+description = "Typing stubs for protobuf"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+    {file = "types-protobuf-3.20.4.6.tar.gz", hash = "sha256:ba27443c592bbec1629dd69494a24c84461c63f0d3b7d648ce258aaae9680965"},
+    {file = "types_protobuf-3.20.4.6-py3-none-any.whl", hash = "sha256:ab2d315ba82246b83d28f8797c98dc0fe1dd5cfd187909e56faf87239aedaae3"},
+]
+
+[[package]]
+name = "types-pymysql"
+version = "1.0.19.2"
+description = "Typing stubs for PyMySQL"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+    {file = "types-PyMySQL-1.0.19.2.tar.gz", hash = "sha256:cf2926741d46b299d13b631aa0a178b5a4277706a0a1f2462b31c9c20bbf1cdf"},
+    {file = "types_PyMySQL-1.0.19.2-py3-none-any.whl", hash = "sha256:753fc786288d4d856490ba3d804d347a0de764cdd354d3ae716f159d696788d8"},
+]
+
+[[package]]
+name = "types-requests"
+version = "2.28.11.8"
+description = "Typing stubs for requests"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+    {file = "types-requests-2.28.11.8.tar.gz", hash = "sha256:e67424525f84adfbeab7268a159d3c633862dafae15c5b19547ce1b55954f0a3"},
+    {file = "types_requests-2.28.11.8-py3-none-any.whl", hash = "sha256:61960554baca0008ae7e2db2bd3b322ca9a144d3e80ce270f5fb640817e40994"},
+]
+
+[package.dependencies]
+types-urllib3 = "<1.27"
+
+[[package]]
+name = "types-urllib3"
+version = "1.26.25.4"
+description = "Typing stubs for urllib3"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+    {file = "types-urllib3-1.26.25.4.tar.gz", hash = "sha256:eec5556428eec862b1ac578fb69aab3877995a99ffec9e5a12cf7fbd0cc9daee"},
+    {file = "types_urllib3-1.26.25.4-py3-none-any.whl", hash = "sha256:ed6b9e8a8be488796f72306889a06a3fc3cb1aa99af02ab8afb50144d7317e49"},
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.4.0"
+description = "Backported and Experimental Type Hints for Python 3.7+"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
+    {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
+]
+
+[[package]]
+name = "unidecode"
+version = "1.3.6"
+description = "ASCII transliterations of Unicode text"
+category = "main"
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "Unidecode-1.3.6-py3-none-any.whl", hash = "sha256:547d7c479e4f377b430dd91ac1275d593308dce0fc464fb2ab7d41f82ec653be"},
+    {file = "Unidecode-1.3.6.tar.gz", hash = "sha256:fed09cf0be8cf415b391642c2a5addfc72194407caee4f98719e40ec2a72b830"},
+]
+
+[[package]]
+name = "urllib3"
+version = "1.26.14"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+files = [
+    {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"},
+    {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
+socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
+
+[[package]]
+name = "virtualenv"
+version = "20.17.1"
+description = "Virtual Python Environment builder"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "virtualenv-20.17.1-py3-none-any.whl", hash = "sha256:ce3b1684d6e1a20a3e5ed36795a97dfc6af29bc3970ca8dab93e11ac6094b3c4"},
+    {file = "virtualenv-20.17.1.tar.gz", hash = "sha256:f8b927684efc6f1cc206c9db297a570ab9ad0e51c16fa9e45487d36d1905c058"},
+]
+
+[package.dependencies]
+distlib = ">=0.3.6,<1"
+filelock = ">=3.4.1,<4"
+platformdirs = ">=2.4,<3"
+
+[package.extras]
+docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"]
+testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"]
+
+[[package]]
+name = "yarl"
+version = "1.8.2"
+description = "Yet another URL library"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5"},
+    {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863"},
+    {file = "yarl-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe"},
+    {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e"},
+    {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996"},
+    {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef"},
+    {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8"},
+    {file = "yarl-1.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6"},
+    {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146"},
+    {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f"},
+    {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826"},
+    {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3"},
+    {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80"},
+    {file = "yarl-1.8.2-cp310-cp310-win32.whl", hash = "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42"},
+    {file = "yarl-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574"},
+    {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634"},
+    {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd"},
+    {file = "yarl-1.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581"},
+    {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a"},
+    {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf"},
+    {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee"},
+    {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76"},
+    {file = "yarl-1.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b"},
+    {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a"},
+    {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6"},
+    {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4"},
+    {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c"},
+    {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2"},
+    {file = "yarl-1.8.2-cp311-cp311-win32.whl", hash = "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b"},
+    {file = "yarl-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c"},
+    {file = "yarl-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef"},
+    {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1"},
+    {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077"},
+    {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2"},
+    {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89"},
+    {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c"},
+    {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0"},
+    {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739"},
+    {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b"},
+    {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7"},
+    {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37"},
+    {file = "yarl-1.8.2-cp37-cp37m-win32.whl", hash = "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89"},
+    {file = "yarl-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5"},
+    {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1"},
+    {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918"},
+    {file = "yarl-1.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229"},
+    {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e"},
+    {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e"},
+    {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1"},
+    {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3"},
+    {file = "yarl-1.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901"},
+    {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0"},
+    {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05"},
+    {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d"},
+    {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c"},
+    {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946"},
+    {file = "yarl-1.8.2-cp38-cp38-win32.whl", hash = "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165"},
+    {file = "yarl-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f"},
+    {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8"},
+    {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf"},
+    {file = "yarl-1.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77"},
+    {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87"},
+    {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0"},
+    {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2"},
+    {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08"},
+    {file = "yarl-1.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d"},
+    {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951"},
+    {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220"},
+    {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06"},
+    {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516"},
+    {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588"},
+    {file = "yarl-1.8.2-cp39-cp39-win32.whl", hash = "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83"},
+    {file = "yarl-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778"},
+    {file = "yarl-1.8.2.tar.gz", hash = "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562"},
+]
+
+[package.dependencies]
+idna = ">=2.0"
+multidict = ">=4.0"
+
+[metadata]
+lock-version = "2.0"
+python-versions = ">=3.8.6,<3.12"
+content-hash = "a5ae30165f979359df7f0a2ff60822904261657d7c89acd5f5fba94643325e64"
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..f32ed74
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,78 @@
+[build-system]
+requires = ['poetry-core>=1.0.0']
+build-backend = 'poetry.core.masonry.api'
+
+[tool.poetry]
+name = 'devine'
+version = '1.0.0'
+description = 'Open-Source Movie, TV, and Music Downloading Solution'
+license = 'GPL-3.0-only'
+authors = ['rlaphoenix <rlaphoenix@pm.me>']
+readme = 'README.md'
+homepage = 'https://github.com/devine/devine'
+repository = 'https://github.com/devine/devine'
+keywords = ['widevine', 'drm', 'downloader']
+classifiers = [
+    'Development Status :: 4 - Beta',
+    'Environment :: Console',
+    'Intended Audience :: End Users/Desktop',
+    'Natural Language :: English',
+    'Operating System :: OS Independent',
+    'Topic :: Multimedia :: Video',
+    'Topic :: Security :: Cryptography',
+]
+
+[tool.poetry.dependencies]
+python = ">=3.8.6,<3.12"
+appdirs = "^1.4.4"
+Brotli = "^1.0.9"
+click = "^8.1.3"
+colorama = "^0.4.6"
+coloredlogs = "^15.0.1"
+construct = "^2.8.8"
+crccheck = "^1.3.0"
+jsonpickle = "^3.0.1"
+langcodes = { extras = ["data"], version = "^3.3.0" }
+lxml = "^4.9.2"
+m3u8 = "^3.4.0"
+pproxy = "^2.7.8"
+protobuf = "4.21.6"
+pycaption = "^2.1.1"
+pycryptodomex = "^3.17.0"
+pyjwt = "^2.6.0"
+pymediainfo = "^6.0.1"
+pymp4 = "^1.2.0"
+pymysql = "^1.0.2"
+pywidevine = { extras = ["serve"], version = "^1.6.0" }
+PyYAML = "^6.0"
+requests = { extras = ["socks"], version = "^2.28.2" }
+"ruamel.yaml" = "^0.17.21"
+sortedcontainers = "^2.4.0"
+subtitle-filter = "^1.4.4"
+tqdm = "^4.64.1"
+Unidecode = "^1.3.6"
+urllib3 = "^1.26.14"
+
+[tool.poetry.dev-dependencies]
+pre-commit = "^3.0.4"
+mypy = "^0.991"
+mypy-protobuf = "^3.3.0"
+types-protobuf = "^3.19.22"
+types-PyMySQL = "^1.0.19.2"
+types-requests = "^2.28.11.8"
+isort = "^5.12.0"
+
+[tool.poetry.scripts]
+devine = 'devine.core.__main__:main'
+
+[tool.isort]
+line_length = 120
+
+[tool.mypy]
+exclude = '_pb2\.pyi?$'
+check_untyped_defs = true
+disallow_incomplete_defs = true
+disallow_untyped_defs = true
+follow_imports = 'silent'
+ignore_missing_imports = true
+no_implicit_optional = true